author    GitLab Bot <gitlab-bot@gitlab.com>  2020-10-21 07:08:36 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-10-21 07:08:36 +0000
commit    48aff82709769b098321c738f3444b9bdaa694c6 (patch)
tree      e00c7c43e2d9b603a5a6af576b1685e400410dee /spec
parent    879f5329ee916a948223f8f43d77fba4da6cd028 (diff)
download  gitlab-ce-13.5.0-rc42.tar.gz

Add latest changes from gitlab-org/gitlab@13-5-stable-ee (v13.5.0-rc42)
Diffstat (limited to 'spec')
-rw-r--r--spec/bin/feature_flag_spec.rb13
-rw-r--r--spec/channels/application_cable/connection_spec.rb52
-rw-r--r--spec/config/object_store_settings_spec.rb15
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb38
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb5
-rw-r--r--spec/controllers/admin/hooks_controller_spec.rb8
-rw-r--r--spec/controllers/admin/instance_review_controller_spec.rb68
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb14
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb17
-rw-r--r--spec/controllers/admin/sessions_controller_spec.rb2
-rw-r--r--spec/controllers/admin/users_controller_spec.rb90
-rw-r--r--spec/controllers/application_controller_spec.rb12
-rw-r--r--spec/controllers/boards/lists_controller_spec.rb11
-rw-r--r--spec/controllers/concerns/controller_with_feature_category/config_spec.rb53
-rw-r--r--spec/controllers/concerns/controller_with_feature_category_spec.rb38
-rw-r--r--spec/controllers/concerns/issuable_collections_spec.rb34
-rw-r--r--spec/controllers/concerns/redis_tracking_spec.rb99
-rw-r--r--spec/controllers/dashboard/labels_controller_spec.rb23
-rw-r--r--spec/controllers/dashboard_controller_spec.rb10
-rw-r--r--spec/controllers/every_controller_spec.rb20
-rw-r--r--spec/controllers/graphql_controller_spec.rb10
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb5
-rw-r--r--spec/controllers/groups/group_links_controller_spec.rb54
-rw-r--r--spec/controllers/groups/group_members_controller_spec.rb38
-rw-r--r--spec/controllers/groups/labels_controller_spec.rb79
-rw-r--r--spec/controllers/groups/milestones_controller_spec.rb7
-rw-r--r--spec/controllers/groups/registry/repositories_controller_spec.rb2
-rw-r--r--spec/controllers/groups/settings/ci_cd_controller_spec.rb21
-rw-r--r--spec/controllers/groups_controller_spec.rb98
-rw-r--r--spec/controllers/help_controller_spec.rb63
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb179
-rw-r--r--spec/controllers/import/manifest_controller_spec.rb99
-rw-r--r--spec/controllers/invites_controller_spec.rb89
-rw-r--r--spec/controllers/jira_connect/events_controller_spec.rb37
-rw-r--r--spec/controllers/profiles_controller_spec.rb13
-rw-r--r--spec/controllers/projects/alert_management_controller_spec.rb2
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb4
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb6
-rw-r--r--spec/controllers/projects/feature_flags_clients_controller_spec.rb57
-rw-r--r--spec/controllers/projects/feature_flags_controller_spec.rb1604
-rw-r--r--spec/controllers/projects/feature_flags_user_lists_controller_spec.rb113
-rw-r--r--spec/controllers/projects/group_links_controller_spec.rb51
-rw-r--r--spec/controllers/projects/hooks_controller_spec.rb8
-rw-r--r--spec/controllers/projects/import/jira_controller_spec.rb2
-rw-r--r--spec/controllers/projects/incidents_controller_spec.rb103
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb18
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb94
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb95
-rw-r--r--spec/controllers/projects/merge_requests/conflicts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/milestones_controller_spec.rb4
-rw-r--r--spec/controllers/projects/pipelines/stages_controller_spec.rb11
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb80
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb52
-rw-r--r--spec/controllers/projects/registry/tags_controller_spec.rb2
-rw-r--r--spec/controllers/projects/releases/evidences_controller_spec.rb32
-rw-r--r--spec/controllers/projects/releases_controller_spec.rb8
-rw-r--r--spec/controllers/projects/runners_controller_spec.rb41
-rw-r--r--spec/controllers/projects/serverless/functions_controller_spec.rb16
-rw-r--r--spec/controllers/projects/settings/access_tokens_controller_spec.rb171
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb32
-rw-r--r--spec/controllers/projects/settings/operations_controller_spec.rb115
-rw-r--r--spec/controllers/projects/snippets_controller_spec.rb283
-rw-r--r--spec/controllers/projects/static_site_editor_controller_spec.rb92
-rw-r--r--spec/controllers/projects/tags_controller_spec.rb21
-rw-r--r--spec/controllers/projects/tracings_controller_spec.rb62
-rw-r--r--spec/controllers/projects/web_ide_terminals_controller_spec.rb23
-rw-r--r--spec/controllers/projects_controller_spec.rb4
-rw-r--r--spec/controllers/registrations/experience_levels_controller_spec.rb49
-rw-r--r--spec/controllers/registrations_controller_spec.rb282
-rw-r--r--spec/controllers/runner_setup_controller_spec.rb21
-rw-r--r--spec/controllers/search_controller_spec.rb2
-rw-r--r--spec/controllers/sessions_controller_spec.rb33
-rw-r--r--spec/controllers/snippets_controller_spec.rb338
-rw-r--r--spec/db/schema_spec.rb39
-rw-r--r--spec/factories/alert_management/alerts.rb22
-rw-r--r--spec/factories/alert_management/http_integrations.rb14
-rw-r--r--spec/factories/alerting/alert.rb25
-rw-r--r--spec/factories/authentication_event.rb11
-rw-r--r--spec/factories/bulk_import.rb8
-rw-r--r--spec/factories/bulk_import/entities.rb21
-rw-r--r--spec/factories/ci/bridge.rb13
-rw-r--r--spec/factories/ci/build_pending_states.rb2
-rw-r--r--spec/factories/ci/build_trace_chunks.rb13
-rw-r--r--spec/factories/ci/builds.rb3
-rw-r--r--spec/factories/ci/deleted_object.rb9
-rw-r--r--spec/factories/ci/pipelines.rb18
-rw-r--r--spec/factories/ci/test_case.rb2
-rw-r--r--spec/factories/design_management/designs.rb2
-rw-r--r--spec/factories/events.rb11
-rw-r--r--spec/factories/group_import_states.rb1
-rw-r--r--spec/factories/groups.rb4
-rw-r--r--spec/factories/instance_statistics/measurement.rb8
-rw-r--r--spec/factories/issue_email_participants.rb8
-rw-r--r--spec/factories/merge_request_diffs.rb8
-rw-r--r--spec/factories/merge_requests.rb6
-rw-r--r--spec/factories/namespaces.rb8
-rw-r--r--spec/factories/packages.rb164
-rw-r--r--spec/factories/packages/package_file.rb165
-rw-r--r--spec/factories/pages_deployments.rb9
-rw-r--r--spec/factories/project_repository_storage_moves.rb1
-rw-r--r--spec/factories/project_tracing_settings.rb8
-rw-r--r--spec/factories/projects.rb8
-rw-r--r--spec/factories/prometheus_alert.rb4
-rw-r--r--spec/factories/prometheus_metrics.rb1
-rw-r--r--spec/factories/resource_weight_events.rb8
-rw-r--r--spec/factories/services.rb6
-rw-r--r--spec/factories/terraform/state.rb10
-rw-r--r--spec/factories/terraform/state_version.rb8
-rw-r--r--spec/factories/todos.rb4
-rw-r--r--spec/factories/usage_data.rb23
-rw-r--r--spec/factories/users.rb8
-rw-r--r--spec/factories/wiki_pages.rb3
-rw-r--r--spec/factories/wikis.rb6
-rw-r--r--spec/factories_spec.rb30
-rw-r--r--spec/features/admin/admin_groups_spec.rb2
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb10
-rw-r--r--spec/features/admin/admin_settings_spec.rb37
-rw-r--r--spec/features/admin/admin_users_spec.rb97
-rw-r--r--spec/features/admin/admin_uses_repository_checks_spec.rb2
-rw-r--r--spec/features/admin/clusters/eks_spec.rb2
-rw-r--r--spec/features/admin/dashboard_spec.rb6
-rw-r--r--spec/features/alert_management/alert_details_spec.rb86
-rw-r--r--spec/features/alert_management/alert_management_list_spec.rb58
-rw-r--r--spec/features/alert_management/user_filters_alerts_by_status_spec.rb56
-rw-r--r--spec/features/alert_management/user_searches_alerts_spec.rb31
-rw-r--r--spec/features/alert_management/user_updates_alert_status_spec.rb36
-rw-r--r--spec/features/alert_management_spec.rb62
-rw-r--r--spec/features/boards/add_issues_modal_spec.rb12
-rw-r--r--spec/features/boards/boards_spec.rb40
-rw-r--r--spec/features/boards/sidebar_spec.rb2
-rw-r--r--spec/features/calendar_spec.rb2
-rw-r--r--spec/features/clusters/cluster_detail_page_spec.rb12
-rw-r--r--spec/features/commits_spec.rb1
-rw-r--r--spec/features/dashboard/issuables_counter_spec.rb4
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb6
-rw-r--r--spec/features/dashboard/todos/todos_filtering_spec.rb8
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb19
-rw-r--r--spec/features/discussion_comments/snippets_spec.rb1
-rw-r--r--spec/features/expand_collapse_diffs_spec.rb1
-rw-r--r--spec/features/file_uploads/maven_package_spec.rb26
-rw-r--r--spec/features/groups/clusters/eks_spec.rb2
-rw-r--r--spec/features/groups/clusters/user_spec.rb8
-rw-r--r--spec/features/groups/members/leave_group_spec.rb2
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb113
-rw-r--r--spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb75
-rw-r--r--spec/features/groups/navbar_spec.rb8
-rw-r--r--spec/features/groups/packages_spec.rb2
-rw-r--r--spec/features/groups/show_spec.rb13
-rw-r--r--spec/features/incidents/incident_details_spec.rb52
-rw-r--r--spec/features/incidents/incidents_list_spec.rb38
-rw-r--r--spec/features/incidents/user_creates_new_incident_spec.rb55
-rw-r--r--spec/features/incidents/user_filters_incidents_by_status_spec.rb59
-rw-r--r--spec/features/incidents/user_searches_incidents_spec.rb30
-rw-r--r--spec/features/invites_spec.rb48
-rw-r--r--spec/features/issuables/close_reopen_report_toggle_spec.rb12
-rw-r--r--spec/features/issuables/issuable_list_spec.rb8
-rw-r--r--spec/features/issuables/markdown_references/internal_references_spec.rb8
-rw-r--r--spec/features/issuables/merge_request_discussion_lock_spec.rb79
-rw-r--r--spec/features/issues/csv_spec.rb4
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb24
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb136
-rw-r--r--spec/features/issues/todo_spec.rb4
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb436
-rw-r--r--spec/features/issues/user_sees_live_update_spec.rb2
-rw-r--r--spec/features/issues/user_views_issue_spec.rb4
-rw-r--r--spec/features/labels_hierarchy_spec.rb1
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb15
-rw-r--r--spec/features/merge_request/maintainer_edits_fork_spec.rb7
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb9
-rw-r--r--spec/features/merge_request/user_edits_assignees_sidebar_spec.rb47
-rw-r--r--spec/features/merge_request/user_edits_mr_spec.rb18
-rw-r--r--spec/features/merge_request/user_expands_diff_spec.rb1
-rw-r--r--spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb29
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb2
-rw-r--r--spec/features/merge_request/user_reopens_merge_request_spec.rb6
-rw-r--r--spec/features/merge_request/user_resolves_wip_mr_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_diff_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_page_metadata_spec.rb17
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_suggest_pipeline_spec.rb35
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb7
-rw-r--r--spec/features/merge_request/user_views_open_merge_request_spec.rb19
-rw-r--r--spec/features/merge_requests/user_filters_by_approvals_spec.rb82
-rw-r--r--spec/features/merge_requests/user_filters_by_deployments_spec.rb93
-rw-r--r--spec/features/merge_requests/user_lists_merge_requests_spec.rb37
-rw-r--r--spec/features/milestone_spec.rb4
-rw-r--r--spec/features/milestones/user_views_milestone_spec.rb76
-rw-r--r--spec/features/milestones/user_views_milestones_spec.rb3
-rw-r--r--spec/features/operations_sidebar_link_spec.rb81
-rw-r--r--spec/features/profiles/keys_spec.rb36
-rw-r--r--spec/features/projects/activity/user_sees_design_comment_spec.rb2
-rw-r--r--spec/features/projects/badges/list_spec.rb8
-rw-r--r--spec/features/projects/blobs/edit_spec.rb16
-rw-r--r--spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb17
-rw-r--r--spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb2
-rw-r--r--spec/features/projects/branches/user_deletes_branch_spec.rb2
-rw-r--r--spec/features/projects/branches_spec.rb8
-rw-r--r--spec/features/projects/ci/lint_spec.rb137
-rw-r--r--spec/features/projects/clusters/eks_spec.rb2
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb12
-rw-r--r--spec/features/projects/clusters/user_spec.rb8
-rw-r--r--spec/features/projects/clusters_spec.rb16
-rw-r--r--spec/features/projects/commit/builds_spec.rb2
-rw-r--r--spec/features/projects/commit/user_comments_on_commit_spec.rb48
-rw-r--r--spec/features/projects/compare_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_spec.rb2
-rw-r--r--spec/features/projects/environments/environments_spec.rb4
-rw-r--r--spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb63
-rw-r--r--spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb21
-rw-r--r--spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb21
-rw-r--r--spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb200
-rw-r--r--spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb31
-rw-r--r--spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb147
-rw-r--r--spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb195
-rw-r--r--spec/features/projects/features_visibility_spec.rb2
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb41
-rw-r--r--spec/features/projects/files/user_browses_lfs_files_spec.rb24
-rw-r--r--spec/features/projects/files/user_creates_files_spec.rb6
-rw-r--r--spec/features/projects/files/user_edits_files_spec.rb173
-rw-r--r--spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb29
-rw-r--r--spec/features/projects/issues/viewing_relocated_issues_spec.rb40
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb4
-rw-r--r--spec/features/projects/members/groups_with_access_list_spec.rb43
-rw-r--r--spec/features/projects/members/list_spec.rb2
-rw-r--r--spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb75
-rw-r--r--spec/features/projects/navbar_spec.rb10
-rw-r--r--spec/features/projects/pages_spec.rb2
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb17
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb4
-rw-r--r--spec/features/projects/releases/user_creates_release_spec.rb31
-rw-r--r--spec/features/projects/releases/user_views_edit_release_spec.rb49
-rw-r--r--spec/features/projects/releases/user_views_release_spec.rb55
-rw-r--r--spec/features/projects/releases/user_views_releases_spec.rb68
-rw-r--r--spec/features/projects/settings/pipelines_settings_spec.rb4
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb70
-rw-r--r--spec/features/projects/show/user_manages_notifications_spec.rb8
-rw-r--r--spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb22
-rw-r--r--spec/features/projects/snippets/create_snippet_spec.rb134
-rw-r--r--spec/features/projects/snippets/show_spec.rb10
-rw-r--r--spec/features/projects/snippets/user_comments_on_snippet_spec.rb1
-rw-r--r--spec/features/projects/snippets/user_deletes_snippet_spec.rb11
-rw-r--r--spec/features/projects/snippets/user_updates_snippet_spec.rb70
-rw-r--r--spec/features/projects/tracings_spec.rb63
-rw-r--r--spec/features/projects/tree/tree_show_spec.rb2
-rw-r--r--spec/features/projects/user_sees_sidebar_spec.rb61
-rw-r--r--spec/features/projects/wiki/markdown_preview_spec.rb168
-rw-r--r--spec/features/projects/wiki/shortcuts_spec.rb20
-rw-r--r--spec/features/projects/wiki/user_creates_wiki_page_spec.rb360
-rw-r--r--spec/features/projects/wiki/user_deletes_wiki_page_spec.rb22
-rw-r--r--spec/features/projects/wiki/user_updates_wiki_page_spec.rb263
-rw-r--r--spec/features/projects/wiki/user_views_wiki_empty_spec.rb138
-rw-r--r--spec/features/projects/wikis_spec.rb20
-rw-r--r--spec/features/projects_spec.rb37
-rw-r--r--spec/features/protected_branches_spec.rb16
-rw-r--r--spec/features/reportable_note/snippets_spec.rb1
-rw-r--r--spec/features/runners_spec.rb4
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb1
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb26
-rw-r--r--spec/features/search/user_uses_search_filters_spec.rb38
-rw-r--r--spec/features/sentry_js_spec.rb2
-rw-r--r--spec/features/snippets/internal_snippet_spec.rb11
-rw-r--r--spec/features/snippets/notes_on_personal_snippets_spec.rb1
-rw-r--r--spec/features/snippets/private_snippets_spec.rb7
-rw-r--r--spec/features/snippets/public_snippets_spec.rb19
-rw-r--r--spec/features/snippets/show_spec.rb12
-rw-r--r--spec/features/snippets/spam_snippets_spec.rb25
-rw-r--r--spec/features/snippets/user_creates_snippet_spec.rb196
-rw-r--r--spec/features/snippets/user_deletes_snippet_spec.rb12
-rw-r--r--spec/features/snippets/user_edits_snippet_spec.rb128
-rw-r--r--spec/features/snippets_spec.rb12
-rw-r--r--spec/features/static_site_editor_spec.rb67
-rw-r--r--spec/features/tags/developer_deletes_tag_spec.rb27
-rw-r--r--spec/features/tags/developer_views_tags_spec.rb13
-rw-r--r--spec/features/task_lists_spec.rb115
-rw-r--r--spec/features/triggers_spec.rb186
-rw-r--r--spec/features/users/overview_spec.rb70
-rw-r--r--spec/features/users/show_spec.rb52
-rw-r--r--spec/features/users/signup_spec.rb214
-rw-r--r--spec/features/users/terms_spec.rb15
-rw-r--r--spec/finders/alert_management/alerts_finder_spec.rb115
-rw-r--r--spec/finders/ci/pipelines_for_merge_request_finder_spec.rb2
-rw-r--r--spec/finders/environment_names_finder_spec.rb63
-rw-r--r--spec/finders/group_labels_finder_spec.rb42
-rw-r--r--spec/finders/groups_finder_spec.rb56
-rw-r--r--spec/finders/issues_finder_spec.rb10
-rw-r--r--spec/finders/labels_finder_spec.rb36
-rw-r--r--spec/finders/merge_requests/by_approvals_finder_spec.rb82
-rw-r--r--spec/finders/merge_requests_finder_spec.rb87
-rw-r--r--spec/finders/packages/generic/package_finder_spec.rb31
-rw-r--r--spec/finders/projects_finder_spec.rb32
-rw-r--r--spec/finders/releases_finder_spec.rb28
-rw-r--r--spec/fixtures/api/schemas/entities/group_group_link.json13
-rw-r--r--spec/fixtures/api/schemas/entities/merge_request_basic.json1
-rw-r--r--spec/fixtures/api/schemas/entities/test_case.json1
-rw-r--r--spec/fixtures/api/schemas/entities/trigger.json39
-rw-r--r--spec/fixtures/api/schemas/environment.json7
-rw-r--r--spec/fixtures/api/schemas/feature_flag.json23
-rw-r--r--spec/fixtures/api/schemas/feature_flag_scope.json18
-rw-r--r--spec/fixtures/api/schemas/feature_flag_strategy.json13
-rw-r--r--spec/fixtures/api/schemas/feature_flags.json13
-rw-r--r--spec/fixtures/api/schemas/feature_flags_client_token.json10
-rw-r--r--spec/fixtures/api/schemas/group_member.json19
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flag.json15
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json22
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json17
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flag_scopes.json9
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flag_strategy.json13
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flags.json9
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/operations/scope.json9
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/operations/strategy.json14
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/package.json3
-rw-r--r--spec/fixtures/api/schemas/registry/repository.json3
-rw-r--r--spec/fixtures/api/schemas/unleash/unleash.json20
-rw-r--r--spec/fixtures/api/schemas/unleash/unleash_feature.json27
-rw-r--r--spec/fixtures/api/schemas/unleash/unleash_strategy.json24
-rw-r--r--spec/fixtures/invalid_manifest.xml4
-rw-r--r--spec/fixtures/lib/backup/design_repo.bundle bin 0 -> 490 bytes
-rw-r--r--spec/fixtures/lib/backup/personal_snippet_repo.bundle bin 0 -> 686 bytes
-rw-r--r--spec/fixtures/lib/backup/project_repo.bundle bin 0 -> 387 bytes
-rw-r--r--spec/fixtures/lib/backup/project_snippet_repo.bundle bin 0 -> 696 bytes
-rw-r--r--spec/fixtures/lib/backup/wiki_repo.bundle bin 0 -> 365 bytes
-rw-r--r--spec/fixtures/lib/gitlab/import_export/sample_data/tree/project.json1
-rw-r--r--spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/issues.ndjson10
-rw-r--r--spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/labels.ndjson2
-rw-r--r--spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/milestones.ndjson3
-rw-r--r--spec/fixtures/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb1
-rw-r--r--spec/fixtures/packages/generic/myfile.tar.gz bin 0 -> 1149 bytes
-rw-r--r--spec/frontend/alert_handler_spec.js65
-rw-r--r--spec/frontend/alert_management/components/alert_details_spec.js140
-rw-r--r--spec/frontend/alert_management/components/alert_management_empty_state_spec.js20
-rw-r--r--spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js50
-rw-r--r--spec/frontend/alert_management/components/alert_management_table_spec.js370
-rw-r--r--spec/frontend/alert_management/components/alert_status_spec.js151
-rw-r--r--spec/frontend/alert_management/components/alert_summary_row_spec.js40
-rw-r--r--spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js4
-rw-r--r--spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js17
-rw-r--r--spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js6
-rw-r--r--spec/frontend/alert_management/mocks/alerts_provide_config.json13
-rw-r--r--spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap48
-rw-r--r--spec/frontend/alert_settings/alert_settings_form_spec.js51
-rw-r--r--spec/frontend/alert_settings/alerts_integrations_list_spec.js89
-rw-r--r--spec/frontend/analytics/instance_statistics/apollo_mock_data.js30
-rw-r--r--spec/frontend/analytics/instance_statistics/components/__snapshots__/pipelines_chart_spec.js.snap161
-rw-r--r--spec/frontend/analytics/instance_statistics/components/app_spec.js34
-rw-r--r--spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js54
-rw-r--r--spec/frontend/analytics/instance_statistics/components/pipelines_chart_spec.js189
-rw-r--r--spec/frontend/analytics/instance_statistics/components/users_chart_spec.js200
-rw-r--r--spec/frontend/analytics/instance_statistics/mock_data.js42
-rw-r--r--spec/frontend/analytics/instance_statistics/utils_spec.js84
-rw-r--r--spec/frontend/analytics/shared/components/metric_card_spec.js129
-rw-r--r--spec/frontend/api_spec.js80
-rw-r--r--spec/frontend/awards_handler_spec.js24
-rw-r--r--spec/frontend/badges/components/badge_settings_spec.js120
-rw-r--r--spec/frontend/batch_comments/components/preview_item_spec.js16
-rw-r--r--spec/frontend/batch_comments/components/publish_button_spec.js11
-rw-r--r--spec/frontend/batch_comments/components/publish_dropdown_spec.js87
-rw-r--r--spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js90
-rw-r--r--spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js52
-rw-r--r--spec/frontend/behaviors/load_startup_css_spec.js44
-rw-r--r--spec/frontend/behaviors/shortcuts/keybindings_spec.js66
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap2
-rw-r--r--spec/frontend/blob/pipeline_tour_success_modal_spec.js5
-rw-r--r--spec/frontend/blob/sketch/index_spec.js7
-rw-r--r--spec/frontend/blob/suggest_web_ide_ci/web_ide_alert_spec.js67
-rw-r--r--spec/frontend/blob/viewer/index_spec.js55
-rw-r--r--spec/frontend/blob_edit/blob_bundle_spec.js15
-rw-r--r--spec/frontend/blob_edit/edit_blob_spec.js33
-rw-r--r--spec/frontend/boards/board_blank_state_spec.js95
-rw-r--r--spec/frontend/boards/board_list_new_spec.js234
-rw-r--r--spec/frontend/boards/board_list_spec.js3
-rw-r--r--spec/frontend/boards/boards_store_spec.js19
-rw-r--r--spec/frontend/boards/components/board_configuration_options_spec.js59
-rw-r--r--spec/frontend/boards/components/board_content_spec.js3
-rw-r--r--spec/frontend/boards/components/sidebar/board_editable_item_spec.js26
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js143
-rw-r--r--spec/frontend/boards/mock_data.js8
-rw-r--r--spec/frontend/boards/stores/actions_spec.js236
-rw-r--r--spec/frontend/boards/stores/getters_spec.js30
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js135
-rw-r--r--spec/frontend/ci_lint/components/ci_lint_results_spec.js114
-rw-r--r--spec/frontend/ci_lint/components/ci_lint_spec.js77
-rw-r--r--spec/frontend/ci_lint/components/ci_lint_warnings_spec.js54
-rw-r--r--spec/frontend/ci_lint/mock_data.js49
-rw-r--r--spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js102
-rw-r--r--spec/frontend/ci_settings_pipeline_triggers/mock_data.js30
-rw-r--r--spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_enviroments_dropdown_spec.js) 32
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js24
-rw-r--r--spec/frontend/ci_variable_list/store/getters_spec.js2
-rw-r--r--spec/frontend/ci_variable_list/store/mutations_spec.js2
-rw-r--r--spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap2
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap90
-rw-r--r--spec/frontend/clusters/components/fluentd_output_settings_spec.js4
-rw-r--r--spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js10
-rw-r--r--spec/frontend/clusters/components/knative_domain_editor_spec.js4
-rw-r--r--spec/frontend/clusters/services/crossplane_provider_stack_spec.js4
-rw-r--r--spec/frontend/clusters_list/components/clusters_spec.js20
-rw-r--r--spec/frontend/clusters_list/components/node_error_help_text_spec.js33
-rw-r--r--spec/frontend/clusters_list/mock_data.js10
-rw-r--r--spec/frontend/clusters_list/store/actions_spec.js2
-rw-r--r--spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap1
-rw-r--r--spec/frontend/collapsed_sidebar_todo_spec.js6
-rw-r--r--spec/frontend/commit/commit_pipeline_status_component_spec.js4
-rw-r--r--spec/frontend/commit/pipelines/pipelines_spec.js82
-rw-r--r--spec/frontend/confidential_merge_request/components/dropdown_spec.js31
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js8
-rw-r--r--spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js1
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/actions_spec.js32
-rw-r--r--spec/frontend/cycle_analytics/stage_nav_item_spec.js4
-rw-r--r--spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js6
-rw-r--r--spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap18
-rw-r--r--spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap20
-rw-r--r--spec/frontend/design_management/components/__snapshots__/design_scaler_spec.js.snap6
-rw-r--r--spec/frontend/design_management/components/__snapshots__/image_spec.js.snap10
-rw-r--r--spec/frontend/design_management/components/design_note_pin_spec.js17
-rw-r--r--spec/frontend/design_management/components/design_overlay_spec.js6
-rw-r--r--spec/frontend/design_management/components/design_sidebar_spec.js30
-rw-r--r--spec/frontend/design_management/components/design_todo_button_spec.js2
-rw-r--r--spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap2
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap6
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap9
-rw-r--r--spec/frontend/design_management/components/toolbar/design_navigation_spec.js4
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap3
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap68
-rw-r--r--spec/frontend/design_management/mock_data/apollo_mock.js29
-rw-r--r--spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap23
-rw-r--r--spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap47
-rw-r--r--spec/frontend/design_management/pages/index_spec.js94
-rw-r--r--spec/frontend/design_management/router_spec.js2
-rw-r--r--spec/frontend/design_management/utils/design_management_utils_spec.js4
-rw-r--r--spec/frontend/diff_comments_store_spec.js136
-rw-r--r--spec/frontend/diffs/components/app_spec.js4
-rw-r--r--spec/frontend/diffs/components/collapsed_files_warning_spec.js2
-rw-r--r--spec/frontend/diffs/components/commit_item_spec.js6
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js55
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js10
-rw-r--r--spec/frontend/diffs/components/diff_row_utils_spec.js203
-rw-r--r--spec/frontend/diffs/components/diff_table_cell_spec.js279
-rw-r--r--spec/frontend/diffs/components/edit_button_spec.js75
-rw-r--r--spec/frontend/diffs/components/inline_diff_table_row_spec.js33
-rw-r--r--spec/frontend/diffs/components/parallel_diff_table_row_spec.js26
-rw-r--r--spec/frontend/diffs/mock_data/diff_discussions.js3
-rw-r--r--spec/frontend/diffs/mock_data/diff_file.js5
-rw-r--r--spec/frontend/diffs/mock_data/diff_file_unreadable.js5
-rw-r--r--spec/frontend/diffs/store/actions_spec.js8
-rw-r--r--spec/frontend/diffs/store/getters_spec.js90
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js8
-rw-r--r--spec/frontend/editor/editor_lite_spec.js147
-rw-r--r--spec/frontend/emoji/emoji_spec.js157
-rw-r--r--spec/frontend/environment.js12
-rw-r--r--spec/frontend/environments/enable_review_app_modal_spec.js (renamed from spec/frontend/environments/enable_review_app_button_spec.js) 20
-rw-r--r--spec/frontend/environments/environment_actions_spec.js15
-rw-r--r--spec/frontend/environments/environments_app_spec.js99
-rw-r--r--spec/frontend/environments/folder/environments_folder_view_spec.js17
-rw-r--r--spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js12
-rw-r--r--spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js159
-rw-r--r--spec/frontend/feature_flags/components/edit_feature_flag_spec.js183
-rw-r--r--spec/frontend/feature_flags/components/environments_dropdown_spec.js147
-rw-r--r--spec/frontend/feature_flags/components/feature_flags_spec.js371
-rw-r--r--spec/frontend/feature_flags/components/feature_flags_tab_spec.js168
-rw-r--r--spec/frontend/feature_flags/components/feature_flags_table_spec.js266
-rw-r--r--spec/frontend/feature_flags/components/form_spec.js493
-rw-r--r--spec/frontend/feature_flags/components/new_environments_dropdown_spec.js105
-rw-r--r--spec/frontend/feature_flags/components/new_feature_flag_spec.js136
-rw-r--r--spec/frontend/feature_flags/components/strategies/default_spec.js10
-rw-r--r--spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js116
-rw-r--r--spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js51
-rw-r--r--spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js50
-rw-r--r--spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js78
-rw-r--r--spec/frontend/feature_flags/components/strategies/users_with_id_spec.js38
-rw-r--r--spec/frontend/feature_flags/components/strategy_parameters_spec.js83
-rw-r--r--spec/frontend/feature_flags/components/strategy_spec.js264
-rw-r--r--spec/frontend/feature_flags/components/user_lists_table_spec.js98
-rw-r--r--spec/frontend/feature_flags/mock_data.js155
-rw-r--r--spec/frontend/feature_flags/store/edit/actions_spec.js303
-rw-r--r--spec/frontend/feature_flags/store/edit/mutations_spec.js134
-rw-r--r--spec/frontend/feature_flags/store/helpers_spec.js514
-rw-r--r--spec/frontend/feature_flags/store/index/actions_spec.js563
-rw-r--r--spec/frontend/feature_flags/store/index/mutations_spec.js307
-rw-r--r--spec/frontend/feature_flags/store/new/actions_spec.js192
-rw-r--r--spec/frontend/feature_flags/store/new/mutations_spec.js49
-rw-r--r--spec/frontend/fixtures/blob.rb10
-rw-r--r--spec/frontend/fixtures/releases.rb146
-rw-r--r--spec/frontend/fixtures/snippet.rb1
-rw-r--r--spec/frontend/fixtures/static/issue_sidebar_label.html26
-rw-r--r--spec/frontend/fixtures/static/pipeline_graph.html2
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js117
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap22
-rw-r--r--spec/frontend/group_settings/components/shared_runners_form_spec.js169
-rw-r--r--spec/frontend/groups/components/group_item_spec.js2
-rw-r--r--spec/frontend/groups/components/item_actions_spec.js125
-rw-r--r--spec/frontend/groups/components/item_caret_spec.js58
-rw-r--r--spec/frontend/groups/components/item_stats_spec.js131
-rw-r--r--spec/frontend/groups/components/item_stats_value_spec.js111
-rw-r--r--spec/frontend/groups/components/item_type_icon_spec.js80
-rw-r--r--spec/frontend/groups/members/components/app_spec.js89
-rw-r--r--spec/frontend/groups/members/index_spec.js26
-rw-r--r--spec/frontend/groups/members/utils_spec.js51
-rw-r--r--spec/frontend/helpers/dom_shims/create_object_url.js3
-rw-r--r--spec/frontend/helpers/dom_shims/index.js1
-rw-r--r--spec/frontend/helpers/emoji.js88
-rw-r--r--spec/frontend/helpers/experimentation_helper.js14
-rw-r--r--spec/frontend/helpers/keep_alive_component_helper.js29
-rw-r--r--spec/frontend/helpers/keep_alive_component_helper_spec.js32
-rw-r--r--spec/frontend/helpers/local_storage_helper.js2
-rw-r--r--spec/frontend/helpers/local_storage_helper_spec.js4
-rw-r--r--spec/frontend/helpers/startup_css_helper_spec.js2
-rw-r--r--spec/frontend/helpers/vue_test_utils_helper.js7
-rw-r--r--spec/frontend/helpers/wait_for_text.js3
-rw-r--r--spec/frontend/ide/components/commit_sidebar/actions_spec.js4
-rw-r--r--spec/frontend/ide/components/commit_sidebar/form_spec.js37
-rw-r--r--spec/frontend/ide/components/ide_review_spec.js78
-rw-r--r--spec/frontend/ide/components/ide_side_bar_spec.js81
-rw-r--r--spec/frontend/ide/components/ide_tree_list_spec.js8
-rw-r--r--spec/frontend/ide/components/ide_tree_spec.js41
-rw-r--r--spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap2
-rw-r--r--spec/frontend/ide/components/jobs/detail/scroll_button_spec.js2
-rw-r--r--spec/frontend/ide/components/new_dropdown/upload_spec.js17
-rw-r--r--spec/frontend/ide/components/repo_commit_section_spec.js21
-rw-r--r--spec/frontend/ide/lib/errors_spec.js46
-rw-r--r--spec/frontend/ide/lib/languages/hcl_spec.js290
-rw-r--r--spec/frontend/ide/stores/actions/file_spec.js14
-rw-r--r--spec/frontend/ide/stores/getters_spec.js10
-rw-r--r--spec/frontend/ide/stores/modules/commit/actions_spec.js82
-rw-r--r--spec/frontend/ide/stores/mutations/file_spec.js26
-rw-r--r--spec/frontend/ide/stores/utils_spec.js10
-rw-r--r--spec/frontend/ide/utils_spec.js40
-rw-r--r--spec/frontend/incidents/components/incidents_list_spec.js302
-rw-r--r--spec/frontend/incidents/mocks/incidents.json6
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap27
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap3
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap55
-rw-r--r--spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js7
-rw-r--r--spec/frontend/integrations/edit/components/confirmation_modal_spec.js51
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js23
-rw-r--r--spec/frontend/integrations/edit/mock_data.js1
-rw-r--r--spec/frontend/invite_member/components/invite_member_modal_spec.js63
-rw-r--r--spec/frontend/invite_member/components/invite_member_trigger_mock_data.js7
-rw-r--r--spec/frontend/invite_member/components/invite_member_trigger_spec.js48
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js115
-rw-r--r--spec/frontend/invite_members/components/invite_members_trigger_spec.js58
-rw-r--r--spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js5
-rw-r--r--spec/frontend/issuable/related_issues/components/issue_token_spec.js257
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_block_spec.js5
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_list_spec.js5
-rw-r--r--spec/frontend/issuable_create/components/issuable_form_spec.js1
-rw-r--r--spec/frontend/issuable_list/mock_data.js12
-rw-r--r--spec/frontend/issuable_show/components/issuable_body_spec.js140
-rw-r--r--spec/frontend/issuable_show/components/issuable_description_spec.js41
-rw-r--r--spec/frontend/issuable_show/components/issuable_edit_form_spec.js122
-rw-r--r--spec/frontend/issuable_show/components/issuable_header_spec.js132
-rw-r--r--spec/frontend/issuable_show/components/issuable_show_root_spec.js123
-rw-r--r--spec/frontend/issuable_show/components/issuable_title_spec.js100
-rw-r--r--spec/frontend/issuable_show/mock_data.js34
-rw-r--r--spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js199
-rw-r--r--spec/frontend/issue_show/components/incidents/highlight_bar_spec.js74
-rw-r--r--spec/frontend/issue_show/components/incidents/incident_tabs_spec.js17
-rw-r--r--spec/frontend/issue_show/issue_spec.js8
-rw-r--r--spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap8
-rw-r--r--spec/frontend/jobs/components/job_container_item_spec.js2
-rw-r--r--spec/frontend/jobs/components/log/line_spec.js65
-rw-r--r--spec/frontend/jobs/store/utils_spec.js8
-rw-r--r--spec/frontend/labels_issue_sidebar_spec.js98
-rw-r--r--spec/frontend/lib/dompurify_spec.js98
-rw-r--r--spec/frontend/lib/utils/axios_startup_calls_spec.js49
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js65
-rw-r--r--spec/frontend/lib/utils/experimentation_spec.js20
-rw-r--r--spec/frontend/lib/utils/number_utility_spec.js11
-rw-r--r--spec/frontend/lib/utils/text_markdown_spec.js99
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js71
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js16
-rw-r--r--spec/frontend/logs/components/log_simple_filters_spec.js8
-rw-r--r--spec/frontend/merge_request_spec.js63
-rw-r--r--spec/frontend/milestones/stores/actions_spec.js140
-rw-r--r--spec/frontend/milestones/stores/getter_spec.js15
-rw-r--r--spec/frontend/milestones/stores/mutations_spec.js159
-rw-r--r--spec/frontend/mini_pipeline_graph_dropdown_spec.js2
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap1
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap181
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_spec.js4
-rw-r--r--spec/frontend/monitoring/components/group_empty_state_spec.js33
-rw-r--r--spec/frontend/monitoring/router_spec.js3
-rw-r--r--spec/frontend/notes/components/discussion_counter_spec.js28
-rw-r--r--spec/frontend/notes/components/discussion_filter_spec.js38
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js17
-rw-r--r--spec/frontend/notes/components/sort_discussion_spec.js22
-rw-r--r--spec/frontend/notes/components/timeline_toggle_spec.js117
-rw-r--r--spec/frontend/notes/stores/actions_spec.js9
-rw-r--r--spec/frontend/notes/stores/getters_spec.js13
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js3
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap214
-rw-r--r--spec/frontend/packages/details/components/composer_installation_spec.js56
-rw-r--r--spec/frontend/packages/details/store/getters_spec.js34
-rw-r--r--spec/frontend/packages/details/utils_spec.js24
-rw-r--r--spec/frontend/packages/list/coming_soon/helpers_spec.js36
-rw-r--r--spec/frontend/packages/list/coming_soon/mock_data.js90
-rw-r--r--spec/frontend/packages/list/coming_soon/packages_coming_soon_spec.js138
-rw-r--r--spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap756
-rw-r--r--spec/frontend/packages/list/components/packages_list_app_spec.js1
-rw-r--r--spec/frontend/packages/list/components/packages_title_spec.js71
-rw-r--r--spec/frontend/packages/list/stores/mutations_spec.js1
-rw-r--r--spec/frontend/packages/mock_data.js4
-rw-r--r--spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap26
-rw-r--r--spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap3
-rw-r--r--spec/frontend/packages/shared/components/package_list_row_spec.js8
-rw-r--r--spec/frontend/packages/shared/components/package_path_spec.js86
-rw-r--r--spec/frontend/packages/shared/utils_spec.js2
-rw-r--r--spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap3
-rw-r--r--spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap91
-rw-r--r--spec/frontend/pages/projects/graphs/code_coverage_spec.js18
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js21
-rw-r--r--spec/frontend/performance_bar/index_spec.js1
-rw-r--r--spec/frontend/pipeline_new/components/pipeline_new_form_spec.js165
-rw-r--r--spec/frontend/pipeline_new/mock_data.js6
-rw-r--r--spec/frontend/pipelines/components/dag/dag_graph_spec.js2
-rw-r--r--spec/frontend/pipelines/components/dag/dag_spec.js9
-rw-r--r--spec/frontend/pipelines/components/dag/drawing_utils_spec.js2
-rw-r--r--spec/frontend/pipelines/components/dag/parsing_utils_spec.js2
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js102
-rw-r--r--spec/frontend/pipelines/graph/job_item_spec.js5
-rw-r--r--spec/frontend/pipelines/graph/job_name_component_spec.js7
-rw-r--r--spec/frontend/pipelines/header_component_spec.js177
-rw-r--r--spec/frontend/pipelines/legacy_header_component_spec.js116
-rw-r--r--spec/frontend/pipelines/mock_data.js78
-rw-r--r--spec/frontend/pipelines/pipeline_graph/mock_data.js21
-rw-r--r--spec/frontend/pipelines/pipeline_graph/utils_spec.js307
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js383
-rw-r--r--spec/frontend/pipelines/pipelines_table_row_spec.js2
-rw-r--r--spec/frontend/pipelines/test_reports/mock_data.js19
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js30
-rw-r--r--spec/frontend/project_find_file_spec.js5
-rw-r--r--spec/frontend/projects/commit_box/info/load_branches_spec.js68
-rw-r--r--spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap1
-rw-r--r--spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap3
-rw-r--r--spec/frontend/projects/settings/access_dropdown_spec.js21
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js4
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js6
-rw-r--r--spec/frontend/registry/explorer/components/details_page/partial_cleanup_alert_spec.js71
-rw-r--r--spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js23
-rw-r--r--spec/frontend/registry/explorer/components/list_page/registry_header_spec.js90
-rw-r--r--spec/frontend/registry/explorer/pages/details_spec.js65
-rw-r--r--spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap7
-rw-r--r--spec/frontend/registry/settings/components/registry_settings_app_spec.js107
-rw-r--r--spec/frontend/registry/settings/components/settings_form_spec.js302
-rw-r--r--spec/frontend/registry/settings/graphql/cache_updated_spec.js56
-rw-r--r--spec/frontend/registry/settings/mock_data.js40
-rw-r--r--spec/frontend/registry/settings/store/actions_spec.js90
-rw-r--r--spec/frontend/registry/settings/store/getters_spec.js72
-rw-r--r--spec/frontend/registry/settings/store/mutations_spec.js80
-rw-r--r--spec/frontend/registry/shared/__snapshots__/utils_spec.js.snap101
-rw-r--r--spec/frontend/registry/shared/components/expiration_policy_fields_spec.js20
-rw-r--r--spec/frontend/registry/shared/stubs.js11
-rw-r--r--spec/frontend/registry/shared/utils_spec.js37
-rw-r--r--spec/frontend/related_merge_requests/components/related_merge_requests_spec.js5
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap230
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js59
-rw-r--r--spec/frontend/releases/components/app_index_spec.js63
-rw-r--r--spec/frontend/releases/components/app_show_spec.js8
-rw-r--r--spec/frontend/releases/components/asset_links_form_spec.js23
-rw-r--r--spec/frontend/releases/components/evidence_block_spec.js8
-rw-r--r--spec/frontend/releases/components/release_block_assets_spec.js13
-rw-r--r--spec/frontend/releases/components/release_block_footer_spec.js4
-rw-r--r--spec/frontend/releases/components/release_block_header_spec.js6
-rw-r--r--spec/frontend/releases/components/release_block_metadata_spec.js67
-rw-r--r--spec/frontend/releases/components/release_block_milestone_info_spec.js26
-rw-r--r--spec/frontend/releases/components/release_block_spec.js111
-rw-r--r--spec/frontend/releases/components/release_skeleton_loader_spec.js15
-rw-r--r--spec/frontend/releases/components/releases_pagination_graphql_spec.js10
-rw-r--r--spec/frontend/releases/components/releases_pagination_rest_spec.js8
-rw-r--r--spec/frontend/releases/mock_data.js335
-rw-r--r--spec/frontend/releases/stores/getters_spec.js22
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js50
-rw-r--r--spec/frontend/releases/stores/modules/detail/mutations_spec.js25
-rw-r--r--spec/frontend/releases/stores/modules/list/actions_spec.js336
-rw-r--r--spec/frontend/releases/stores/modules/list/mutations_spec.js44
-rw-r--r--spec/frontend/releases/util_spec.js68
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap58
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js6
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js2
-rw-r--r--spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap5
-rw-r--r--spec/frontend/repository/log_tree_spec.js8
-rw-r--r--spec/frontend/repository/utils/icon_spec.js23
-rw-r--r--spec/frontend/right_sidebar_spec.js13
-rw-r--r--spec/frontend/search/components/state_filter_spec.js104
-rw-r--r--spec/frontend/search/dropdown_filter/components/dropdown_filter_spec.js196
-rw-r--r--spec/frontend/search/dropdown_filter/mock_data.js5
-rw-r--r--spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap12
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_form_spec.js4
-rw-r--r--spec/frontend/sentry/sentry_config_spec.js2
-rw-r--r--spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap2
-rw-r--r--spec/frontend/serverless/components/missing_prometheus_spec.js4
-rw-r--r--spec/frontend/serverless/components/url_spec.js2
-rw-r--r--spec/frontend/sidebar/assignee_title_spec.js17
-rw-r--r--spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js3
-rw-r--r--spec/frontend/sidebar/confidential/edit_form_buttons_spec.js15
-rw-r--r--spec/frontend/sidebar/lock/edit_form_buttons_spec.js14
-rw-r--r--spec/frontend/sidebar/lock/issuable_lock_form_spec.js13
-rw-r--r--spec/frontend/sidebar/reviewer_title_spec.js116
-rw-r--r--spec/frontend/sidebar/reviewers_spec.js169
-rw-r--r--spec/frontend/sidebar/sidebar_assignees_spec.js1
-rw-r--r--spec/frontend/sidebar/sidebar_labels_spec.js35
-rw-r--r--spec/frontend/sidebar/sidebar_store_spec.js37
-rw-r--r--spec/frontend/snippet/snippet_bundle_spec.js87
-rw-r--r--spec/frontend/snippet/snippet_edit_spec.js44
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap5
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap2
-rw-r--r--spec/frontend/snippets/components/edit_spec.js8
-rw-r--r--spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js25
-rw-r--r--spec/frontend/snippets/components/snippet_blob_edit_spec.js18
-rw-r--r--spec/frontend/snippets/components/snippet_blob_view_spec.js6
-rw-r--r--spec/frontend/snippets_spec.js70
-rw-r--r--spec/frontend/static_site_editor/components/edit_meta_controls_spec.js99
-rw-r--r--spec/frontend/static_site_editor/components/edit_meta_modal_spec.js80
-rw-r--r--spec/frontend/static_site_editor/components/front_matter_controls_spec.js11
-rw-r--r--spec/frontend/static_site_editor/graphql/resolvers/has_submitted_changes_spec.js27
-rw-r--r--spec/frontend/static_site_editor/mock_data.js11
-rw-r--r--spec/frontend/static_site_editor/pages/home_spec.js99
-rw-r--r--spec/frontend/static_site_editor/pages/success_spec.js110
-rw-r--r--spec/frontend/static_site_editor/services/front_matterify_spec.js47
-rw-r--r--spec/frontend/static_site_editor/services/submit_content_changes_spec.js53
-rw-r--r--spec/frontend/static_site_editor/services/templater_spec.js8
-rw-r--r--spec/frontend/test_setup.js3
-rw-r--r--spec/frontend/tracking_spec.js2
-rw-r--r--spec/frontend/user_lists/components/add_user_modal_spec.js50
-rw-r--r--spec/frontend/user_lists/components/edit_user_list_spec.js150
-rw-r--r--spec/frontend/user_lists/components/new_user_list_spec.js93
-rw-r--r--spec/frontend/user_lists/components/user_list_form_spec.js40
-rw-r--r--spec/frontend/user_lists/components/user_list_spec.js196
-rw-r--r--spec/frontend/user_lists/store/edit/actions_spec.js121
-rw-r--r--spec/frontend/user_lists/store/edit/mutations_spec.js61
-rw-r--r--spec/frontend/user_lists/store/new/actions_spec.js69
-rw-r--r--spec/frontend/user_lists/store/new/mutations_spec.js38
-rw-r--r--spec/frontend/user_lists/store/show/actions_spec.js117
-rw-r--r--spec/frontend/user_lists/store/show/mutations_spec.js86
-rw-r--r--spec/frontend/user_lists/store/utils_spec.js23
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js22
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js14
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js15
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js5
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js5
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_spec.js5
-rw-r--r--spec/frontend/vue_mr_widget/mock_data.js1
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js102
-rw-r--r--spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap8
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap2
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/editor_lite_spec.js.snap14
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap8
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap37
-rw-r--r--spec/frontend/vue_shared/components/actions_button_spec.js28
-rw-r--r--spec/frontend/vue_shared/components/alert_detail_table_spec.js74
-rw-r--r--spec/frontend/vue_shared/components/alert_details_table_spec.js139
-rw-r--r--spec/frontend/vue_shared/components/clipboard_button_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/confirm_modal_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/deprecated_modal_2_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/editor_lite_spec.js144
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js448
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/mock_data.js50
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/mutations_spec.js116
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js11
-rw-r--r--spec/frontend/vue_shared/components/local_storage_sync_spec.js150
-rw-r--r--spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap1
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_spec.js213
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/members/action_buttons/access_request_action_buttons_spec.js108
-rw-r--r--spec/frontend/vue_shared/components/members/action_buttons/approve_access_request_button_spec.js74
-rw-r--r--spec/frontend/vue_shared/components/members/action_buttons/invite_action_buttons_spec.js85
-rw-r--r--spec/frontend/vue_shared/components/members/action_buttons/leave_button_spec.js59
-rw-r--r--spec/frontend/vue_shared/components/members/action_buttons/remove_group_link_button_spec.js64
-rw-r--r--spec/frontend/vue_shared/components/members/action_buttons/remove_member_button_spec.js66
-rw-r--r--spec/frontend/vue_shared/components/members/action_buttons/resend_invite_button_spec.js66
-rw-r--r--spec/frontend/vue_shared/components/members/action_buttons/user_action_buttons_spec.js89
-rw-r--r--spec/frontend/vue_shared/components/members/avatars/group_avatar_spec.js46
-rw-r--r--spec/frontend/vue_shared/components/members/avatars/invite_avatar_spec.js38
-rw-r--r--spec/frontend/vue_shared/components/members/avatars/user_avatar_spec.js115
-rw-r--r--spec/frontend/vue_shared/components/members/mock_data.js70
-rw-r--r--spec/frontend/vue_shared/components/members/modals/leave_modal_spec.js91
-rw-r--r--spec/frontend/vue_shared/components/members/modals/remove_group_link_modal_spec.js106
-rw-r--r--spec/frontend/vue_shared/components/members/table/created_at_spec.js61
-rw-r--r--spec/frontend/vue_shared/components/members/table/expires_at_spec.js86
-rw-r--r--spec/frontend/vue_shared/components/members/table/member_action_buttons_spec.js43
-rw-r--r--spec/frontend/vue_shared/components/members/table/member_avatar_spec.js39
-rw-r--r--spec/frontend/vue_shared/components/members/table/member_source_spec.js71
-rw-r--r--spec/frontend/vue_shared/components/members/table/member_table_cell_spec.js251
-rw-r--r--spec/frontend/vue_shared/components/members/table/members_table_spec.js141
-rw-r--r--spec/frontend/vue_shared/components/members/table/role_dropdown_spec.js150
-rw-r--r--spec/frontend/vue_shared/components/members/utils_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/mocks/items.json15
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/mocks/items_filters.json14
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js350
-rw-r--r--spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap7
-rw-r--r--spec/frontend/vue_shared/components/registry/list_item_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/registry/title_area_spec.js39
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js56
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js44
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js41
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js27
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/sanitize_html_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js23
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js7
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js15
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js13
-rw-r--r--spec/frontend/vue_shared/components/sidebar/toggle_sidebar_spec.js11
-rw-r--r--spec/frontend/vue_shared/components/split_button_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/todo_button_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js30
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js71
-rw-r--r--spec/frontend/vue_shared/directives/tooltip_spec.js169
-rw-r--r--spec/frontend/vue_shared/droplab_dropdown_button_spec.js132
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js118
-rw-r--r--spec/frontend/vuex_shared/modules/members/actions_spec.js110
-rw-r--r--spec/frontend/vuex_shared/modules/members/mutations_spec.js90
-rw-r--r--spec/frontend/vuex_shared/modules/members/utils_spec.js14
-rw-r--r--spec/frontend/whats_new/components/app_spec.js42
-rw-r--r--spec/frontend/whats_new/store/actions_spec.js33
-rw-r--r--spec/frontend/whats_new/store/mutations_spec.js7
-rw-r--r--spec/frontend/wikis_spec.js2
-rw-r--r--spec/frontend_integration/.eslintrc.yml2
-rw-r--r--spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap1
-rw-r--r--spec/frontend_integration/ide/ide_helper.js102
-rw-r--r--spec/frontend_integration/ide/ide_integration_spec.js47
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_mock_server.js9
-rw-r--r--spec/graphql/features/feature_flag_spec.rb4
-rw-r--r--spec/graphql/mutations/design_management/move_spec.rb2
-rw-r--r--spec/graphql/mutations/discussions/toggle_resolve_spec.rb4
-rw-r--r--spec/graphql/mutations/issues/create_spec.rb146
-rw-r--r--spec/graphql/mutations/issues/move_spec.rb41
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb17
-rw-r--r--spec/graphql/mutations/todos/mark_done_spec.rb3
-rw-r--r--spec/graphql/mutations/todos/restore_many_spec.rb33
-rw-r--r--spec/graphql/mutations/todos/restore_spec.rb11
-rw-r--r--spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb35
-rw-r--r--spec/graphql/resolvers/board_lists_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/board_resolver_spec.rb72
-rw-r--r--spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb19
-rw-r--r--spec/graphql/resolvers/concerns/looks_ahead_spec.rb14
-rw-r--r--spec/graphql/resolvers/group_milestones_resolver_spec.rb23
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb33
-rw-r--r--spec/graphql/resolvers/project_milestones_resolver_spec.rb61
-rw-r--r--spec/graphql/resolvers/projects_resolver_spec.rb65
-rw-r--r--spec/graphql/resolvers/snippets/blobs_resolver_spec.rb50
-rw-r--r--spec/graphql/resolvers/terraform/states_resolver_spec.rb33
-rw-r--r--spec/graphql/types/alert_management/alert_type_spec.rb1
-rw-r--r--spec/graphql/types/alert_management/status_enum_spec.rb8
-rw-r--r--spec/graphql/types/base_field_spec.rb4
-rw-r--r--spec/graphql/types/ci/detailed_status_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/group_type_spec.rb1
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/runner_architecture_type_spec.rb16
-rw-r--r--spec/graphql/types/ci/runner_platform_type_spec.rb17
-rw-r--r--spec/graphql/types/ci/stage_type_spec.rb1
-rw-r--r--spec/graphql/types/ci/status_action_type_spec.rb19
-rw-r--r--spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb11
-rw-r--r--spec/graphql/types/design_management/design_collection_type_spec.rb2
-rw-r--r--spec/graphql/types/environment_type_spec.rb15
-rw-r--r--spec/graphql/types/global_id_type_spec.rb26
-rw-r--r--spec/graphql/types/group_type_spec.rb1
-rw-r--r--spec/graphql/types/issue_sort_enum_spec.rb2
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb39
-rw-r--r--spec/graphql/types/package_type_enum_spec.rb2
-rw-r--r--spec/graphql/types/project_type_spec.rb9
-rw-r--r--spec/graphql/types/query_type_spec.rb11
-rw-r--r--spec/graphql/types/range_input_type_spec.rb43
-rw-r--r--spec/graphql/types/root_storage_statistics_type_spec.rb3
-rw-r--r--spec/graphql/types/snippet_type_spec.rb54
-rw-r--r--spec/graphql/types/terraform/state_type_spec.rb21
-rw-r--r--spec/graphql/types/timeframe_type_spec.rb38
-rw-r--r--spec/haml_lint/linter/documentation_links_spec.rb98
-rw-r--r--spec/helpers/analytics/unique_visits_helper_spec.rb9
-rw-r--r--spec/helpers/application_helper_spec.rb26
-rw-r--r--spec/helpers/application_settings_helper_spec.rb20
-rw-r--r--spec/helpers/blob_helper_spec.rb48
-rw-r--r--spec/helpers/boards_helper_spec.rb59
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb21
-rw-r--r--spec/helpers/clusters_helper_spec.rb44
-rw-r--r--spec/helpers/container_expiration_policies_helper_spec.rb25
-rw-r--r--spec/helpers/emails_helper_spec.rb112
-rw-r--r--spec/helpers/external_link_helper_spec.rb11
-rw-r--r--spec/helpers/feature_flags_helper_spec.rb21
-rw-r--r--spec/helpers/gitlab_routing_helper_spec.rb4
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb58
-rw-r--r--spec/helpers/icons_helper_spec.rb120
-rw-r--r--spec/helpers/invite_members_helper_spec.rb77
-rw-r--r--spec/helpers/issuables_helper_spec.rb80
-rw-r--r--spec/helpers/issues_helper_spec.rb21
-rw-r--r--spec/helpers/labels_helper_spec.rb50
-rw-r--r--spec/helpers/notes_helper_spec.rb59
-rw-r--r--spec/helpers/operations_helper_spec.rb4
-rw-r--r--spec/helpers/packages_helper_spec.rb35
-rw-r--r--spec/helpers/projects/alert_management_helper_spec.rb4
-rw-r--r--spec/helpers/projects/incidents_helper_spec.rb16
-rw-r--r--spec/helpers/projects_helper_spec.rb114
-rw-r--r--spec/helpers/releases_helper_spec.rb12
-rw-r--r--spec/helpers/search_helper_spec.rb116
-rw-r--r--spec/helpers/snippets_helper_spec.rb50
-rw-r--r--spec/helpers/startupjs_helper_spec.rb20
-rw-r--r--spec/helpers/tree_helper_spec.rb110
-rw-r--r--spec/helpers/user_callouts_helper_spec.rb22
-rw-r--r--spec/helpers/users_helper_spec.rb24
-rw-r--r--spec/helpers/visibility_level_helper_spec.rb24
-rw-r--r--spec/helpers/whats_new_helper_spec.rb47
-rw-r--r--spec/helpers/wiki_helper_spec.rb13
-rw-r--r--spec/initializers/sidekiq_spec.rb45
-rw-r--r--spec/lib/api/entities/snippet_spec.rb26
-rw-r--r--spec/lib/api/github/entities_spec.rb31
-rw-r--r--spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb2
-rw-r--r--spec/lib/api/helpers_spec.rb19
-rw-r--r--spec/lib/backup/files_spec.rb2
-rw-r--r--spec/lib/backup/repositories_spec.rb308
-rw-r--r--spec/lib/backup/repository_spec.rb232
-rw-r--r--spec/lib/banzai/filter/design_reference_filter_spec.rb20
-rw-r--r--spec/lib/banzai/filter/external_issue_reference_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/issue_reference_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/milestone_reference_filter_spec.rb44
-rw-r--r--spec/lib/banzai/reference_redactor_spec.rb7
-rw-r--r--spec/lib/feature/definition_spec.rb11
-rw-r--r--spec/lib/feature_spec.rb4
-rw-r--r--spec/lib/forever_spec.rb2
-rw-r--r--spec/lib/gitlab/alert_management/alert_params_spec.rb101
-rw-r--r--spec/lib/gitlab/alert_management/alert_status_counts_spec.rb4
-rw-r--r--spec/lib/gitlab/alert_management/payload/base_spec.rb103
-rw-r--r--spec/lib/gitlab/alert_management/payload/generic_spec.rb32
-rw-r--r--spec/lib/gitlab/alerting/alert_spec.rb299
-rw-r--r--spec/lib/gitlab/alerting/notification_payload_parser_spec.rb204
-rw-r--r--spec/lib/gitlab/analytics/unique_visits_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb26
-rw-r--r--spec/lib/gitlab/auth/current_user_mode_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/otp/strategies/devise_spec.rb16
-rw-r--r--spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb55
-rw-r--r--spec/lib/gitlab/auth/unique_ips_limiter_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/user_access_denied_reason_spec.rb8
-rw-r--r--spec/lib/gitlab/auth_spec.rb6
-rw-r--r--spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb61
-rw-r--r--spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb65
-rw-r--r--spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb17
-rw-r--r--spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb36
-rw-r--r--spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb8
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/bulk_import/client_spec.rb95
-rw-r--r--spec/lib/gitlab/checks/matching_merge_request_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/ansi2json/line_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/ansi2json_spec.rb38
-rw-r--r--spec/lib/gitlab/ci/artifact_file_reader_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb62
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb108
-rw-r--r--spec/lib/gitlab/ci/config/entry/include_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb147
-rw-r--r--spec/lib/gitlab/ci/config/entry/product/variables_spec.rb77
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/config/entry/variables_spec.rb127
-rw-r--r--spec/lib/gitlab/ci/cron_parser_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/lint_spec.rb43
-rw-r--r--spec/lib/gitlab/ci/parsers/test/junit_spec.rb110
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/reports/test_case_spec.rb55
-rw-r--r--spec/lib/gitlab/ci/reports/test_suite_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/runner/backoff_spec.rb126
-rw-r--r--spec/lib/gitlab/ci/status/bridge/common_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/status/bridge/factory_spec.rb61
-rw-r--r--spec/lib/gitlab/ci/status/canceled_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/created_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/failed_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/pending_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/preparing_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/running_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/scheduled_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/skipped_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/success_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb27
-rw-r--r--spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb46
-rw-r--r--spec/lib/gitlab/ci/trace/checksum_spec.rb121
-rw-r--r--spec/lib/gitlab/ci/trace/metrics_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/trace_spec.rb24
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/result_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb80
-rw-r--r--spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb12
-rw-r--r--spec/lib/gitlab/closing_issue_extractor_spec.rb19
-rw-r--r--spec/lib/gitlab/code_navigation_path_spec.rb14
-rw-r--r--spec/lib/gitlab/config/entry/composable_array_spec.rb69
-rw-r--r--spec/lib/gitlab/config/entry/composable_hash_spec.rb108
-rw-r--r--spec/lib/gitlab/conflict/file_spec.rb2
-rw-r--r--spec/lib/gitlab/cycle_analytics/events_spec.rb273
-rw-r--r--spec/lib/gitlab/danger/commit_linter_spec.rb14
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb26
-rw-r--r--spec/lib/gitlab/danger/roulette_spec.rb101
-rw-r--r--spec/lib/gitlab/danger/teammate_spec.rb5
-rw-r--r--spec/lib/gitlab/data_builder/deployment_spec.rb4
-rw-r--r--spec/lib/gitlab/database/background_migration_job_spec.rb2
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb131
-rw-r--r--spec/lib/gitlab/database/bulk_update_spec.rb139
-rw-r--r--spec/lib/gitlab/database/concurrent_reindex_spec.rb207
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb343
-rw-r--r--spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb17
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb6
-rw-r--r--spec/lib/gitlab/database/postgres_index_spec.rb116
-rw-r--r--spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb255
-rw-r--r--spec/lib/gitlab/database/reindexing/coordinator_spec.rb68
-rw-r--r--spec/lib/gitlab/database/reindexing/reindex_action_spec.rb86
-rw-r--r--spec/lib/gitlab/database/reindexing_spec.rb32
-rw-r--r--spec/lib/gitlab/database/similarity_score_spec.rb11
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb64
-rw-r--r--spec/lib/gitlab/database_spec.rb101
-rw-r--r--spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb34
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb23
-rw-r--r--spec/lib/gitlab/exclusive_lease_helpers_spec.rb15
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb177
-rw-r--r--spec/lib/gitlab/git/branch_spec.rb6
-rw-r--r--spec/lib/gitlab/git/diff_collection_spec.rb46
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb12
-rw-r--r--spec/lib/gitlab/git/object_pool_spec.rb4
-rw-r--r--spec/lib/gitlab/git/remote_mirror_spec.rb4
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb37
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb4
-rw-r--r--spec/lib/gitlab/git/wiki_spec.rb5
-rw-r--r--spec/lib/gitlab/git_access_snippet_spec.rb25
-rw-r--r--spec/lib/gitlab/git_access_spec.rb23
-rw-r--r--spec/lib/gitlab/git_access_wiki_spec.rb18
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb4
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb3
-rw-r--r--spec/lib/gitlab/gitpod_spec.rb31
-rw-r--r--spec/lib/gitlab/gl_repository/identifier_spec.rb12
-rw-r--r--spec/lib/gitlab/gl_repository/repo_type_spec.rb8
-rw-r--r--spec/lib/gitlab/gl_repository_spec.rb2
-rw-r--r--spec/lib/gitlab/gon_helper_spec.rb4
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb49
-rw-r--r--spec/lib/gitlab/graphql/markdown_field/resolver_spec.rb33
-rw-r--r--spec/lib/gitlab/graphql/markdown_field_spec.rb59
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb26
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb23
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb7
-rw-r--r--spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb2
-rw-r--r--spec/lib/gitlab/group_search_results_spec.rb7
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml13
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/group/relation_factory_spec.rb95
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/lfs_saver_spec.rb8
-rw-r--r--spec/lib/gitlab/import_export/project/relation_factory_spec.rb73
-rw-r--r--spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb53
-rw-r--r--spec/lib/gitlab/import_export/project/sample/sample_data_relation_tree_restorer_spec.rb87
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb35
-rw-r--r--spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/repo_restorer_spec.rb21
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml3
-rw-r--r--spec/lib/gitlab/issuables_count_for_state_spec.rb21
-rw-r--r--spec/lib/gitlab/job_waiter_spec.rb17
-rw-r--r--spec/lib/gitlab/kubernetes/kube_client_spec.rb32
-rw-r--r--spec/lib/gitlab/lfs/client_spec.rb132
-rw-r--r--spec/lib/gitlab/lfs_token_spec.rb2
-rw-r--r--spec/lib/gitlab/manifest_import/manifest_spec.rb14
-rw-r--r--spec/lib/gitlab/manifest_import/metadata_spec.rb62
-rw-r--r--spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb82
-rw-r--r--spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb121
-rw-r--r--spec/lib/gitlab/middleware/go_spec.rb16
-rw-r--r--spec/lib/gitlab/middleware/handle_null_bytes_spec.rb88
-rw-r--r--spec/lib/gitlab/middleware/rails_queue_duration_spec.rb2
-rw-r--r--spec/lib/gitlab/middleware/same_site_cookies_spec.rb4
-rw-r--r--spec/lib/gitlab/pagination/offset_pagination_spec.rb30
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb6
-rw-r--r--spec/lib/gitlab/project_template_spec.rb6
-rw-r--r--spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb2
-rw-r--r--spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb2
-rw-r--r--spec/lib/gitlab/prometheus/query_variables_spec.rb4
-rw-r--r--spec/lib/gitlab/redis/hll_spec.rb30
-rw-r--r--spec/lib/gitlab/regex_spec.rb221
-rw-r--r--spec/lib/gitlab/relative_positioning/mover_spec.rb17
-rw-r--r--spec/lib/gitlab/repo_path_spec.rb6
-rw-r--r--spec/lib/gitlab/repository_size_checker_spec.rb59
-rw-r--r--spec/lib/gitlab/repository_size_error_message_spec.rb6
-rw-r--r--spec/lib/gitlab/sample_data_template_spec.rb66
-rw-r--r--spec/lib/gitlab/search/recent_issues_spec.rb6
-rw-r--r--spec/lib/gitlab/search/recent_merge_requests_spec.rb6
-rw-r--r--spec/lib/gitlab/search_results_spec.rb58
-rw-r--r--spec/lib/gitlab/sidekiq_cluster_spec.rb6
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb7
-rw-r--r--spec/lib/gitlab/snippet_search_results_spec.rb6
-rw-r--r--spec/lib/gitlab/sql/pattern_spec.rb37
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb245
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb38
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb101
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb53
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb50
-rw-r--r--spec/lib/gitlab/static_site_editor/config/file_config_spec.rb82
-rw-r--r--spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb22
-rw-r--r--spec/lib/gitlab/subscription_portal_spec.rb42
-rw-r--r--spec/lib/gitlab/themes_spec.rb14
-rw-r--r--spec/lib/gitlab/tracking_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb12
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb28
-rw-r--r--spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb210
-rw-r--r--spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb10
-rw-r--r--spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb4
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb119
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb17
-rw-r--r--spec/lib/gitlab/visibility_level_checker_spec.rb37
-rw-r--r--spec/lib/gitlab/webpack/manifest_spec.rb113
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb34
-rw-r--r--spec/lib/gitlab_danger_spec.rb2
-rw-r--r--spec/lib/google_api/auth_spec.rb8
-rw-r--r--spec/lib/grafana/time_window_spec.rb6
-rw-r--r--spec/lib/marginalia_spec.rb25
-rw-r--r--spec/lib/pager_duty/webhook_payload_parser_spec.rb84
-rw-r--r--spec/lib/safe_zip/extract_spec.rb36
-rw-r--r--spec/mailers/abuse_report_mailer_spec.rb10
-rw-r--r--spec/mailers/emails/merge_requests_spec.rb33
-rw-r--r--spec/mailers/emails/projects_spec.rb119
-rw-r--r--spec/mailers/notify_spec.rb118
-rw-r--r--spec/migrations/20200929052138_create_initial_versions_for_pre_versioning_terraform_states_spec.rb46
-rw-r--r--spec/migrations/20201014205300_drop_backfill_jira_tracker_deployment_type_jobs_spec.rb58
-rw-r--r--spec/migrations/add_partial_index_to_ci_builds_table_on_user_id_name_spec.rb22
-rw-r--r--spec/migrations/backfill_status_page_published_incidents_spec.rb2
-rw-r--r--spec/migrations/cleanup_group_import_states_with_null_user_id_spec.rb101
-rw-r--r--spec/migrations/ensure_filled_file_store_on_package_files_spec.rb40
-rw-r--r--spec/migrations/migrate_compliance_framework_enum_to_database_framework_record_spec.rb70
-rw-r--r--spec/migrations/schedule_blocked_by_links_replacement_spec.rb37
-rw-r--r--spec/migrations/schedule_migrate_u2f_webauthn_spec.rb58
-rw-r--r--spec/migrations/set_job_waiter_ttl_spec.rb30
-rw-r--r--spec/models/alert_management/alert_spec.rb171
-rw-r--r--spec/models/alert_management/http_integration_spec.rb92
-rw-r--r--spec/models/analytics/instance_statistics/measurement_spec.rb30
-rw-r--r--spec/models/application_record_spec.rb8
-rw-r--r--spec/models/application_setting/term_spec.rb5
-rw-r--r--spec/models/application_setting_spec.rb48
-rw-r--r--spec/models/audit_event_spec.rb7
-rw-r--r--spec/models/authentication_event_spec.rb36
-rw-r--r--spec/models/blob_viewer/markup_spec.rb38
-rw-r--r--spec/models/bulk_import_spec.rb18
-rw-r--r--spec/models/bulk_imports/configuration_spec.rb17
-rw-r--r--spec/models/bulk_imports/entity_spec.rb85
-rw-r--r--spec/models/ci/bridge_spec.rb91
-rw-r--r--spec/models/ci/build_pending_state_spec.rb27
-rw-r--r--spec/models/ci/build_spec.rb162
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb138
-rw-r--r--spec/models/ci/deleted_object_spec.rb95
-rw-r--r--spec/models/ci/freeze_period_status_spec.rb4
-rw-r--r--spec/models/ci/job_artifact_spec.rb4
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb4
-rw-r--r--spec/models/ci/pipeline_spec.rb63
-rw-r--r--spec/models/ci_platform_metric_spec.rb6
-rw-r--r--spec/models/clusters/agent_spec.rb14
-rw-r--r--spec/models/clusters/applications/fluentd_spec.rb2
-rw-r--r--spec/models/clusters/applications/ingress_spec.rb2
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb4
-rw-r--r--spec/models/clusters/applications/runner_spec.rb4
-rw-r--r--spec/models/clusters/cluster_spec.rb1
-rw-r--r--spec/models/clusters/platforms/kubernetes_spec.rb5
-rw-r--r--spec/models/commit_status_spec.rb133
-rw-r--r--spec/models/concerns/avatarable_spec.rb4
-rw-r--r--spec/models/concerns/bulk_insertable_associations_spec.rb6
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb6
-rw-r--r--spec/models/concerns/case_sensitivity_spec.rb4
-rw-r--r--spec/models/concerns/checksummable_spec.rb16
-rw-r--r--spec/models/concerns/counter_attribute_spec.rb30
-rw-r--r--spec/models/concerns/each_batch_spec.rb6
-rw-r--r--spec/models/concerns/featurable_spec.rb2
-rw-r--r--spec/models/concerns/has_user_type_spec.rb8
-rw-r--r--spec/models/concerns/issuable_spec.rb76
-rw-r--r--spec/models/concerns/mentionable_spec.rb6
-rw-r--r--spec/models/concerns/milestoneable_spec.rb2
-rw-r--r--spec/models/concerns/milestoneish_spec.rb8
-rw-r--r--spec/models/concerns/reactive_caching_spec.rb11
-rw-r--r--spec/models/concerns/resolvable_discussion_spec.rb6
-rw-r--r--spec/models/concerns/routable_spec.rb2
-rw-r--r--spec/models/concerns/schedulable_spec.rb2
-rw-r--r--spec/models/concerns/subscribable_spec.rb22
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb2
-rw-r--r--spec/models/container_expiration_policy_spec.rb12
-rw-r--r--spec/models/container_repository_spec.rb27
-rw-r--r--spec/models/deploy_token_spec.rb25
-rw-r--r--spec/models/deployment_spec.rb65
-rw-r--r--spec/models/design_management/design_at_version_spec.rb23
-rw-r--r--spec/models/design_management/design_collection_spec.rb12
-rw-r--r--spec/models/design_management/design_spec.rb9
-rw-r--r--spec/models/environment_spec.rb13
-rw-r--r--spec/models/environment_status_spec.rb12
-rw-r--r--spec/models/event_spec.rb50
-rw-r--r--spec/models/group_import_state_spec.rb1
-rw-r--r--spec/models/group_spec.rb371
-rw-r--r--spec/models/import_failure_spec.rb2
-rw-r--r--spec/models/integration_spec.rb18
-rw-r--r--spec/models/issue/metrics_spec.rb14
-rw-r--r--spec/models/issue_email_participant_spec.rb19
-rw-r--r--spec/models/issue_spec.rb158
-rw-r--r--spec/models/iteration_spec.rb45
-rw-r--r--spec/models/member_spec.rb76
-rw-r--r--spec/models/merge_request_diff_spec.rb53
-rw-r--r--spec/models/merge_request_spec.rb140
-rw-r--r--spec/models/milestone_release_spec.rb2
-rw-r--r--spec/models/namespace_setting_spec.rb68
-rw-r--r--spec/models/namespace_spec.rb184
-rw-r--r--spec/models/notification_setting_spec.rb1
-rw-r--r--spec/models/operations/feature_flag_spec.rb18
-rw-r--r--spec/models/operations/feature_flags/strategy_spec.rb176
-rw-r--r--spec/models/packages/package_spec.rb29
-rw-r--r--spec/models/pages_deployment_spec.rb17
-rw-r--r--spec/models/plan_limits_spec.rb1
-rw-r--r--spec/models/preloaders/merge_request_diff_preloader_spec.rb29
-rw-r--r--spec/models/project_feature_usage_spec.rb2
-rw-r--r--spec/models/project_repository_spec.rb5
-rw-r--r--spec/models/project_repository_storage_move_spec.rb12
-rw-r--r--spec/models/project_services/chat_message/deployment_message_spec.rb11
-rw-r--r--spec/models/project_services/chat_message/issue_message_spec.rb4
-rw-r--r--spec/models/project_services/prometheus_service_spec.rb12
-rw-r--r--spec/models/project_spec.rb101
-rw-r--r--spec/models/project_statistics_spec.rb52
-rw-r--r--spec/models/project_tracing_setting_spec.rb40
-rw-r--r--spec/models/project_wiki_spec.rb1
-rw-r--r--spec/models/repository_spec.rb21
-rw-r--r--spec/models/resource_label_event_spec.rb36
-rw-r--r--spec/models/resource_milestone_event_spec.rb1
-rw-r--r--spec/models/resource_state_event_spec.rb16
-rw-r--r--spec/models/resource_weight_event_spec.rb76
-rw-r--r--spec/models/service_spec.rb144
-rw-r--r--spec/models/snippet_input_action_spec.rb2
-rw-r--r--spec/models/snippet_repository_spec.rb39
-rw-r--r--spec/models/snippet_spec.rb17
-rw-r--r--spec/models/snippet_statistics_spec.rb19
-rw-r--r--spec/models/terraform/state_spec.rb56
-rw-r--r--spec/models/terraform/state_version_spec.rb2
-rw-r--r--spec/models/todo_spec.rb46
-rw-r--r--spec/models/user_spec.rb102
-rw-r--r--spec/models/wiki_directory_spec.rb78
-rw-r--r--spec/models/wiki_page_spec.rb293
-rw-r--r--spec/models/wiki_spec.rb14
-rw-r--r--spec/policies/ci/bridge_policy_spec.rb39
-rw-r--r--spec/policies/design_management/design_policy_spec.rb36
-rw-r--r--spec/policies/global_policy_spec.rb76
-rw-r--r--spec/policies/group_policy_spec.rb70
-rw-r--r--spec/policies/project_policy_spec.rb2
-rw-r--r--spec/policies/terraform/state_policy_spec.rb33
-rw-r--r--spec/presenters/ci/pipeline_presenter_spec.rb17
-rw-r--r--spec/presenters/event_presenter_spec.rb30
-rw-r--r--spec/presenters/label_presenter_spec.rb14
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb21
-rw-r--r--spec/presenters/packages/detail/package_presenter_spec.rb2
-rw-r--r--spec/presenters/project_presenter_spec.rb51
-rw-r--r--spec/presenters/projects/prometheus/alert_presenter_spec.rb346
-rw-r--r--spec/presenters/release_presenter_spec.rb8
-rw-r--r--spec/presenters/sentry_error_presenter_spec.rb8
-rw-r--r--spec/presenters/snippet_blob_presenter_spec.rb122
-rw-r--r--spec/presenters/snippet_presenter_spec.rb21
-rw-r--r--spec/requests/api/admin/instance_clusters_spec.rb18
-rw-r--r--spec/requests/api/api_guard/response_coercer_middleware_spec.rb55
-rw-r--r--spec/requests/api/ci/runner/jobs_artifacts_spec.rb30
-rw-r--r--spec/requests/api/ci/runner/jobs_put_spec.rb45
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb3
-rw-r--r--spec/requests/api/commits_spec.rb58
-rw-r--r--spec/requests/api/composer_packages_spec.rb28
-rw-r--r--spec/requests/api/debian_group_packages_spec.rb39
-rw-r--r--spec/requests/api/debian_project_packages_spec.rb46
-rw-r--r--spec/requests/api/doorkeeper_access_spec.rb8
-rw-r--r--spec/requests/api/feature_flag_scopes_spec.rb319
-rw-r--r--spec/requests/api/feature_flags_spec.rb1130
-rw-r--r--spec/requests/api/feature_flags_user_lists_spec.rb371
-rw-r--r--spec/requests/api/features_spec.rb2
-rw-r--r--spec/requests/api/files_spec.rb6
-rw-r--r--spec/requests/api/generic_packages_spec.rb419
-rw-r--r--spec/requests/api/graphql/boards/board_lists_query_spec.rb15
-rw-r--r--spec/requests/api/graphql/gitlab_schema_spec.rb4
-rw-r--r--spec/requests/api/graphql/group/merge_requests_spec.rb122
-rw-r--r--spec/requests/api/graphql/instance_statistics_measurements_spec.rb7
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/add_spec.rb5
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/remove_spec.rb5
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb5
-rw-r--r--spec/requests/api/graphql/mutations/boards/create_spec.rb16
-rw-r--r--spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb77
-rw-r--r--spec/requests/api/graphql/mutations/issues/create_spec.rb48
-rw-r--r--spec/requests/api/graphql/mutations/issues/move_spec.rb73
-rw-r--r--spec/requests/api/graphql/mutations/issues/update_spec.rb15
-rw-r--r--spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb32
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/note_spec.rb8
-rw-r--r--spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb6
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb21
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb31
-rw-r--r--spec/requests/api/graphql/mutations/todos/mark_done_spec.rb6
-rw-r--r--spec/requests/api/graphql/mutations/todos/restore_spec.rb6
-rw-r--r--spec/requests/api/graphql/project/alert_management/alerts_spec.rb13
-rw-r--r--spec/requests/api/graphql/project/issue/designs/notes_spec.rb16
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb35
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb75
-rw-r--r--spec/requests/api/graphql/project/milestones_spec.rb202
-rw-r--r--spec/requests/api/graphql/user_query_spec.rb40
-rw-r--r--spec/requests/api/graphql_spec.rb10
-rw-r--r--spec/requests/api/group_clusters_spec.rb18
-rw-r--r--spec/requests/api/group_container_repositories_spec.rb3
-rw-r--r--spec/requests/api/group_packages_spec.rb6
-rw-r--r--spec/requests/api/groups_spec.rb133
-rw-r--r--spec/requests/api/helpers_spec.rb63
-rw-r--r--spec/requests/api/internal/base_spec.rb334
-rw-r--r--spec/requests/api/internal/lfs_spec.rb93
-rw-r--r--spec/requests/api/jobs_spec.rb16
-rw-r--r--spec/requests/api/lint_spec.rb249
-rw-r--r--spec/requests/api/maven_packages_spec.rb74
-rw-r--r--spec/requests/api/members_spec.rb31
-rw-r--r--spec/requests/api/merge_requests_spec.rb51
-rw-r--r--spec/requests/api/npm_packages_spec.rb10
-rw-r--r--spec/requests/api/project_clusters_spec.rb18
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb9
-rw-r--r--spec/requests/api/project_packages_spec.rb13
-rw-r--r--spec/requests/api/project_repository_storage_moves_spec.rb11
-rw-r--r--spec/requests/api/project_snippets_spec.rb125
-rw-r--r--spec/requests/api/projects_spec.rb2
-rw-r--r--spec/requests/api/pypi_packages_spec.rb156
-rw-r--r--spec/requests/api/releases_spec.rb185
-rw-r--r--spec/requests/api/repositories_spec.rb4
-rw-r--r--spec/requests/api/search_spec.rb67
-rw-r--r--spec/requests/api/services_spec.rb30
-rw-r--r--spec/requests/api/settings_spec.rb10
-rw-r--r--spec/requests/api/snippets_spec.rb175
-rw-r--r--spec/requests/api/terraform/state_spec.rb2
-rw-r--r--spec/requests/api/terraform/state_version_spec.rb210
-rw-r--r--spec/requests/api/unleash_spec.rb608
-rw-r--r--spec/requests/api/usage_data_spec.rb4
-rw-r--r--spec/requests/api/users_spec.rb144
-rw-r--r--spec/requests/git_http_spec.rb26
-rw-r--r--spec/requests/projects/cycle_analytics_events_spec.rb2
-rw-r--r--spec/requests/rack_attack_global_spec.rb2
-rw-r--r--spec/requests/request_profiler_spec.rb2
-rw-r--r--spec/requests/user_activity_spec.rb94
-rw-r--r--spec/requests/user_sends_null_bytes_spec.rb14
-rw-r--r--spec/requests/whats_new_controller_spec.rb35
-rw-r--r--spec/routing/admin_routing_spec.rb6
-rw-r--r--spec/routing/group_routing_spec.rb4
-rw-r--r--spec/routing/instance_statistics_routing_spec.rb11
-rw-r--r--spec/routing/project_routing_spec.rb21
-rw-r--r--spec/routing/routing_spec.rb29
-rw-r--r--spec/rubocop/cop/api/base_spec.rb35
-rw-r--r--spec/rubocop/cop/api/grape_api_instance_spec.rb29
-rw-r--r--spec/rubocop/cop/code_reuse/active_record_spec.rb6
-rw-r--r--spec/rubocop/cop/graphql/gid_expected_type_spec.rb26
-rw-r--r--spec/rubocop/cop/graphql/id_type_spec.rb36
-rw-r--r--spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb10
-rw-r--r--spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb22
-rw-r--r--spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb2
-rw-r--r--spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb16
-rw-r--r--spec/rubocop/cop/rspec/expect_gitlab_tracking_spec.rb65
-rw-r--r--spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb132
-rw-r--r--spec/rubocop/cop/rspec/timecop_travel_spec.rb52
-rw-r--r--spec/serializers/blob_entity_spec.rb2
-rw-r--r--spec/serializers/ci/trigger_entity_spec.rb70
-rw-r--r--spec/serializers/ci/trigger_serializer_spec.rb15
-rw-r--r--spec/serializers/cluster_serializer_spec.rb1
-rw-r--r--spec/serializers/deployment_entity_spec.rb4
-rw-r--r--spec/serializers/diff_file_base_entity_spec.rb55
-rw-r--r--spec/serializers/diffs_entity_spec.rb10
-rw-r--r--spec/serializers/discussion_entity_spec.rb8
-rw-r--r--spec/serializers/feature_flag_entity_spec.rb22
-rw-r--r--spec/serializers/feature_flag_serializer_spec.rb23
-rw-r--r--spec/serializers/feature_flag_summary_entity_spec.rb21
-rw-r--r--spec/serializers/feature_flag_summary_serializer_spec.rb22
-rw-r--r--spec/serializers/feature_flags_client_serializer_spec.rb17
-rw-r--r--spec/serializers/group_group_link_entity_spec.rb22
-rw-r--r--spec/serializers/import/bulk_import_entity_spec.rb26
-rw-r--r--spec/serializers/label_serializer_spec.rb3
-rw-r--r--spec/serializers/merge_request_poll_cached_widget_entity_spec.rb61
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb33
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb50
-rw-r--r--spec/serializers/paginated_diff_entity_spec.rb10
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb9
-rw-r--r--spec/serializers/test_case_entity_spec.rb2
-rw-r--r--spec/services/admin/propagate_integration_service_spec.rb142
-rw-r--r--spec/services/admin/propagate_service_template_spec.rb125
-rw-r--r--spec/services/alert_management/alerts/update_service_spec.rb6
-rw-r--r--spec/services/alert_management/process_prometheus_alert_service_spec.rb38
-rw-r--r--spec/services/audit_event_service_spec.rb13
-rw-r--r--spec/services/bulk_create_integration_service_spec.rb107
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb57
-rw-r--r--spec/services/ci/build_report_result_service_spec.rb21
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb14
-rw-r--r--spec/services/ci/create_pipeline_service/cache_spec.rb15
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb61
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb27
-rw-r--r--spec/services/ci/delete_objects_service_spec.rb133
-rw-r--r--spec/services/ci/destroy_expired_job_artifacts_service_spec.rb13
-rw-r--r--spec/services/ci/expire_pipeline_cache_service_spec.rb2
-rw-r--r--spec/services/ci/list_config_variables_service_spec.rb77
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service.rb10
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb4
-rw-r--r--spec/services/ci/pipelines/create_artifact_service_spec.rb10
-rw-r--r--spec/services/ci/play_bridge_service_spec.rb56
-rw-r--r--spec/services/ci/play_manual_stage_service_spec.rb42
-rw-r--r--spec/services/ci/retry_build_service_spec.rb37
-rw-r--r--spec/services/ci/update_build_queue_service_spec.rb16
-rw-r--r--spec/services/ci/update_build_state_service_spec.rb109
-rw-r--r--spec/services/clusters/gcp/finalize_creation_service_spec.rb15
-rw-r--r--spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb20
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb8
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb70
-rw-r--r--spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb32
-rw-r--r--spec/services/deployments/create_service_spec.rb10
-rw-r--r--spec/services/deployments/update_environment_service_spec.rb (renamed from spec/services/deployments/after_create_service_spec.rb)17
-rw-r--r--spec/services/design_management/copy_design_collection/copy_service_spec.rb259
-rw-r--r--spec/services/design_management/copy_design_collection/queue_service_spec.rb51
-rw-r--r--spec/services/design_management/delete_designs_service_spec.rb18
-rw-r--r--spec/services/design_management/generate_image_versions_service_spec.rb51
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb43
-rw-r--r--spec/services/feature_flags/create_service_spec.rb79
-rw-r--r--spec/services/feature_flags/destroy_service_spec.rb61
-rw-r--r--spec/services/feature_flags/disable_service_spec.rb91
-rw-r--r--spec/services/feature_flags/enable_service_spec.rb153
-rw-r--r--spec/services/feature_flags/update_service_spec.rb250
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb22
-rw-r--r--spec/services/git/wiki_push_service_spec.rb18
-rw-r--r--spec/services/groups/create_service_spec.rb96
-rw-r--r--spec/services/groups/import_export/import_service_spec.rb9
-rw-r--r--spec/services/groups/transfer_service_spec.rb71
-rw-r--r--spec/services/groups/update_service_spec.rb44
-rw-r--r--spec/services/groups/update_shared_runners_service_spec.rb194
-rw-r--r--spec/services/incident_management/create_incident_label_service_spec.rb62
-rw-r--r--spec/services/incident_management/incidents/update_severity_service_spec.rb86
-rw-r--r--spec/services/issuable/bulk_update_service_spec.rb2
-rw-r--r--spec/services/issuable/clone/attributes_rewriter_spec.rb10
-rw-r--r--spec/services/issuable/common_system_notes_service_spec.rb8
-rw-r--r--spec/services/issues/build_service_spec.rb70
-rw-r--r--spec/services/issues/close_service_spec.rb23
-rw-r--r--spec/services/issues/move_service_spec.rb43
-rw-r--r--spec/services/issues/update_service_spec.rb2
-rw-r--r--spec/services/jira/requests/projects/list_service_spec.rb4
-rw-r--r--spec/services/keys/last_used_service_spec.rb2
-rw-r--r--spec/services/lfs/push_service_spec.rb48
-rw-r--r--spec/services/members/destroy_service_spec.rb18
-rw-r--r--spec/services/members/invitation_reminder_email_service_spec.rb78
-rw-r--r--spec/services/merge_requests/cleanup_refs_service_spec.rb11
-rw-r--r--spec/services/merge_requests/close_service_spec.rb67
-rw-r--r--spec/services/merge_requests/create_from_issue_service_spec.rb2
-rw-r--r--spec/services/merge_requests/export_csv_service_spec.rb115
-rw-r--r--spec/services/merge_requests/ff_merge_service_spec.rb104
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb22
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb11
-rw-r--r--spec/services/merge_requests/mergeability_check_service_spec.rb43
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb161
-rw-r--r--spec/services/merge_requests/reopen_service_spec.rb20
-rw-r--r--spec/services/merge_requests/update_service_spec.rb53
-rw-r--r--spec/services/metrics/dashboard/custom_dashboard_service_spec.rb17
-rw-r--r--spec/services/milestones/destroy_service_spec.rb2
-rw-r--r--spec/services/milestones/promote_service_spec.rb2
-rw-r--r--spec/services/milestones/transfer_service_spec.rb2
-rw-r--r--spec/services/namespace_settings/update_service_spec.rb48
-rw-r--r--spec/services/notes/create_service_spec.rb10
-rw-r--r--spec/services/notes/update_service_spec.rb2
-rw-r--r--spec/services/notification_service_spec.rb104
-rw-r--r--spec/services/packages/composer/composer_json_service_spec.rb4
-rw-r--r--spec/services/packages/create_event_service_spec.rb54
-rw-r--r--spec/services/packages/generic/create_package_file_service_spec.rb54
-rw-r--r--spec/services/packages/generic/find_or_create_package_service_spec.rb88
-rw-r--r--spec/services/projects/after_rename_service_spec.rb2
-rw-r--r--spec/services/projects/alerting/notify_service_spec.rb49
-rw-r--r--spec/services/projects/autocomplete_service_spec.rb4
-rw-r--r--spec/services/projects/container_repository/cleanup_tags_service_spec.rb2
-rw-r--r--spec/services/projects/container_repository/delete_tags_service_spec.rb58
-rw-r--r--spec/services/projects/create_service_spec.rb196
-rw-r--r--spec/services/projects/destroy_service_spec.rb2
-rw-r--r--spec/services/projects/fork_service_spec.rb4
-rw-r--r--spec/services/projects/hashed_storage/base_attachment_service_spec.rb2
-rw-r--r--spec/services/projects/move_access_service_spec.rb8
-rw-r--r--spec/services/projects/move_project_group_links_service_spec.rb14
-rw-r--r--spec/services/projects/operations/update_service_spec.rb92
-rw-r--r--spec/services/projects/overwrite_project_service_spec.rb6
-rw-r--r--spec/services/projects/transfer_service_spec.rb31
-rw-r--r--spec/services/projects/unlink_fork_service_spec.rb2
-rw-r--r--spec/services/projects/update_pages_service_spec.rb34
-rw-r--r--spec/services/projects/update_remote_mirror_service_spec.rb71
-rw-r--r--spec/services/projects/update_service_spec.rb34
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb76
-rw-r--r--spec/services/repositories/destroy_service_spec.rb20
-rw-r--r--spec/services/repository_archive_clean_up_service_spec.rb22
-rw-r--r--spec/services/resource_access_tokens/create_service_spec.rb63
-rw-r--r--spec/services/resource_access_tokens/revoke_service_spec.rb74
-rw-r--r--spec/services/search/global_service_spec.rb30
-rw-r--r--spec/services/search/group_service_spec.rb32
-rw-r--r--spec/services/search_service_spec.rb6
-rw-r--r--spec/services/snippets/repository_validation_service_spec.rb6
-rw-r--r--spec/services/snippets/update_service_spec.rb95
-rw-r--r--spec/services/static_site_editor/config_service_spec.rb86
-rw-r--r--spec/services/system_note_service_spec.rb24
-rw-r--r--spec/services/system_notes/incident_service_spec.rb59
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb142
-rw-r--r--spec/services/system_notes/time_tracking_service_spec.rb201
-rw-r--r--spec/services/todos/destroy/entity_leave_service_spec.rb164
-rw-r--r--spec/services/users/approve_service_spec.rb106
-rw-r--r--spec/services/users/block_service_spec.rb10
-rw-r--r--spec/services/users/build_service_spec.rb20
-rw-r--r--spec/services/users/destroy_service_spec.rb8
-rw-r--r--spec/services/users/validate_otp_service_spec.rb34
-rw-r--r--spec/services/web_hooks/destroy_service_spec.rb56
-rw-r--r--spec/simplecov_env.rb3
-rw-r--r--spec/spec_helper.rb20
-rw-r--r--spec/support/capybara.rb8
-rw-r--r--spec/support/counter_attribute.rb6
-rw-r--r--spec/support/factory_bot.rb4
-rw-r--r--spec/support/google_api/cloud_platform_helpers.rb12
-rw-r--r--spec/support/helpers/api_internal_base_helpers.rb85
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb8
-rw-r--r--spec/support/helpers/drag_to_helper.rb7
-rw-r--r--spec/support/helpers/features/blob_spec_helpers.rb22
-rw-r--r--spec/support/helpers/features/canonical_link_helpers.rb28
-rw-r--r--spec/support/helpers/features/snippet_helpers.rb64
-rw-r--r--spec/support/helpers/git_http_helpers.rb26
-rw-r--r--spec/support/helpers/graphql_helpers.rb6
-rw-r--r--spec/support/helpers/javascript_fixtures_helpers.rb11
-rw-r--r--spec/support/helpers/kubernetes_helpers.rb87
-rw-r--r--spec/support/helpers/multipart_helpers.rb2
-rw-r--r--spec/support/helpers/rack_attack_spec_helpers.rb4
-rw-r--r--spec/support/helpers/search_helpers.rb10
-rw-r--r--spec/support/helpers/snippet_helpers.rb2
-rw-r--r--spec/support/helpers/snowplow_helpers.rb12
-rw-r--r--spec/support/helpers/stub_experiments.rb4
-rw-r--r--spec/support/helpers/stub_feature_flags.rb4
-rw-r--r--spec/support/helpers/stub_object_storage.rb18
-rw-r--r--spec/support/helpers/stubbed_feature.rb26
-rw-r--r--spec/support/helpers/usage_data_helpers.rb10
-rw-r--r--spec/support/helpers/wait_for_requests.rb11
-rw-r--r--spec/support/helpers/wiki_helpers.rb10
-rw-r--r--spec/support/matchers/be_sorted.rb21
-rw-r--r--spec/support/migrations_helpers/cluster_helpers.rb10
-rw-r--r--spec/support/migrations_helpers/namespaces_helper.rb2
-rw-r--r--spec/support/migrations_helpers/schema_version_finder.rb34
-rw-r--r--spec/support/models/merge_request_without_merge_request_diff.rb7
-rw-r--r--spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/email_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb6
-rw-r--r--spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb13
-rw-r--r--spec/support/shared_contexts/mailers/notify_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb15
-rw-r--r--spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb106
-rw-r--r--spec/support/shared_examples/controllers/cache_control_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/destroy_hook_shared_examples.rb36
-rw-r--r--spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/unique_hll_events_examples.rb49
-rw-r--r--spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb69
-rw-r--r--spec/support/shared_examples/features/editable_merge_request_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb68
-rw-r--r--spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb47
-rw-r--r--spec/support/shared_examples/features/navbar_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/packages_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/page_description_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb95
-rw-r--r--spec/support/shared_examples/features/snippets_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb (renamed from spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb)4
-rw-r--r--spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb245
-rw-r--r--spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb110
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb231
-rw-r--r--spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb (renamed from spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb)22
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb62
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb (renamed from spec/features/projects/wiki/user_views_wiki_page_spec.rb)44
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb (renamed from spec/features/projects/wiki/user_views_wiki_pages_spec.rb)18
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb68
-rw-r--r--spec/support/shared_examples/graphql/mutations/boards_create_shared_examples.rb75
-rw-r--r--spec/support/shared_examples/graphql/mutations/spammable_mutation_fields_examples.rb47
-rw-r--r--spec/support/shared_examples/graphql/notes_creation_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb107
-rw-r--r--spec/support/shared_examples/lib/gitlab/repository_size_checker_shared_examples.rb51
-rw-r--r--spec/support/shared_examples/lib/gitlab/search/recent_items.rb43
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_confidential_filter_shared_examples.rb69
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/lib/gitlab/search_state_filter_shared_examples.rb (renamed from spec/support/shared_examples/lib/gitlab/search_issue_state_filter_shared_examples.rb)0
-rw-r--r--spec/support/shared_examples/mailers/notify_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/models/concerns/shardable_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/models/concerns/timebox_shared_examples.rb86
-rw-r--r--spec/support/shared_examples/models/mentionable_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/models/project_latest_successful_build_for_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/models/relative_positioning_shared_examples.rb182
-rw-r--r--spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/models/snippet_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/models/throttled_touch_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/update_project_statistics_shared_examples.rb260
-rw-r--r--spec/support/shared_examples/models/wiki_shared_examples.rb85
-rw-r--r--spec/support/shared_examples/policies/resource_access_token_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb309
-rw-r--r--spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/requests/api/packages_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/requests/api/snippets_shared_examples.rb179
-rw-r--r--spec/support/shared_examples/requests/api/tracking_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/requests/rack_attack_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/requests/snippet_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/requests/user_activity_shared_examples.rb97
-rw-r--r--spec/support/shared_examples/serializers/note_entity_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/incident_shared_examples.rb71
-rw-r--r--spec/support/shared_examples/services/merge_request_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/packages_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/projects/urls_with_escaped_elements_shared_example.rb20
-rw-r--r--spec/support/shared_examples/validators/ip_address_validator_shared_examples.rb10
-rw-r--r--spec/support/test_reports/test_reports_helper.rb8
-rw-r--r--spec/support_specs/helpers/stub_feature_flags_spec.rb31
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb111
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb97
-rw-r--r--spec/tasks/gitlab/web_hook_rake_spec.rb4
-rw-r--r--spec/uploaders/file_uploader_spec.rb2
-rw-r--r--spec/uploaders/object_storage_spec.rb2
-rw-r--r--spec/uploaders/pages/deployment_uploader_spec.rb59
-rw-r--r--spec/uploaders/terraform/versioned_state_uploader_spec.rb20
-rw-r--r--spec/validators/ip_address_validator_spec.rb37
-rw-r--r--spec/views/admin/dashboard/index.html.haml_spec.rb2
-rw-r--r--spec/views/groups/edit.html.haml_spec.rb2
-rw-r--r--spec/views/jira_connect/subscriptions/index.html.haml_spec.rb30
-rw-r--r--spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb18
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb30
-rw-r--r--spec/views/profiles/preferences/show.html.haml_spec.rb10
-rw-r--r--spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb38
-rw-r--r--spec/views/projects/settings/operations/show.html.haml_spec.rb76
-rw-r--r--spec/views/projects/tracing/show.html.haml_spec.rb59
-rw-r--r--spec/views/search/_results.html.haml_spec.rb22
-rw-r--r--spec/views/shared/_label_row.html.haml_spec.rb139
-rw-r--r--spec/views/shared/milestones/_issuables.html.haml_spec.rb3
-rw-r--r--spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb12
-rw-r--r--spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb20
-rw-r--r--spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb12
-rw-r--r--spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb12
-rw-r--r--spec/workers/build_finished_worker_spec.rb2
-rw-r--r--spec/workers/ci/build_trace_chunk_flush_worker_spec.rb4
-rw-r--r--spec/workers/ci/delete_objects_worker_spec.rb49
-rw-r--r--spec/workers/ci/schedule_delete_objects_cron_worker_spec.rb15
-rw-r--r--spec/workers/cleanup_container_repository_worker_spec.rb23
-rw-r--r--spec/workers/concerns/limited_capacity/job_tracker_spec.rb100
-rw-r--r--spec/workers/concerns/limited_capacity/worker_spec.rb285
-rw-r--r--spec/workers/container_expiration_policy_worker_spec.rb46
-rw-r--r--spec/workers/deployments/drop_older_deployments_worker_spec.rb18
-rw-r--r--spec/workers/deployments/execute_hooks_worker_spec.rb51
-rw-r--r--spec/workers/deployments/link_merge_request_worker_spec.rb71
-rw-r--r--spec/workers/deployments/success_worker_spec.rb12
-rw-r--r--spec/workers/deployments/update_environment_worker_spec.rb63
-rw-r--r--spec/workers/design_management/copy_design_collection_worker_spec.rb39
-rw-r--r--spec/workers/design_management/new_version_worker_spec.rb4
-rw-r--r--spec/workers/disallow_two_factor_for_group_worker_spec.rb22
-rw-r--r--spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb17
-rw-r--r--spec/workers/export_csv_worker_spec.rb20
-rw-r--r--spec/workers/git_garbage_collect_worker_spec.rb58
-rw-r--r--spec/workers/group_export_worker_spec.rb10
-rw-r--r--spec/workers/group_import_worker_spec.rb63
-rw-r--r--spec/workers/incident_management/add_severity_system_note_worker_spec.rb60
-rw-r--r--spec/workers/incident_management/process_alert_worker_spec.rb2
-rw-r--r--spec/workers/incident_management/process_prometheus_alert_worker_spec.rb2
-rw-r--r--spec/workers/issuable_export_csv_worker_spec.rb73
-rw-r--r--spec/workers/member_invitation_reminder_emails_worker_spec.rb39
-rw-r--r--spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb2
-rw-r--r--spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb25
-rw-r--r--spec/workers/post_receive_spec.rb4
-rw-r--r--spec/workers/project_export_worker_spec.rb4
-rw-r--r--spec/workers/propagate_integration_group_worker_spec.rb44
-rw-r--r--spec/workers/propagate_integration_inherit_worker_spec.rb32
-rw-r--r--spec/workers/propagate_integration_project_worker_spec.rb44
-rw-r--r--spec/workers/web_hooks/destroy_worker_spec.rb59
1676 files changed, 63041 insertions, 19855 deletions
diff --git a/spec/bin/feature_flag_spec.rb b/spec/bin/feature_flag_spec.rb
index 41117880f95..185a03fc587 100644
--- a/spec/bin/feature_flag_spec.rb
+++ b/spec/bin/feature_flag_spec.rb
@@ -240,5 +240,18 @@ RSpec.describe 'bin/feature-flag' do
        end
      end
    end
+
+    describe '.read_ee_only' do
+      where(:type, :is_ee_only) do
+        :development | false
+        :licensed    | true
+      end
+
+      with_them do
+        let(:options) { OpenStruct.new(name: 'foo', type: type) }
+
+        it { expect(described_class.read_ee_only(options)).to eq(is_ee_only) }
+      end
+    end
  end
end
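
Note: the `.read_ee_only` examples added above use the rspec-parameterized table syntax (`where` / `with_them`). A minimal standalone sketch of that pattern follows; the module and its `read_ee_only` logic are hypothetical stand-ins, not the real option parser.

# Illustrative only: table-driven spec in the style of the added examples.
require 'ostruct'
require 'rspec-parameterized'

# Hypothetical module mirroring the shape of the method under test.
module FlagTypeReader
  def self.read_ee_only(options)
    options.type == :licensed
  end
end

RSpec.describe FlagTypeReader do
  using RSpec::Parameterized::TableSyntax

  # Each table row becomes one generated example with `type` and `is_ee_only` bound.
  where(:type, :is_ee_only) do
    :development | false
    :licensed    | true
  end

  with_them do
    let(:options) { OpenStruct.new(name: 'foo', type: type) }

    it { expect(described_class.read_ee_only(options)).to eq(is_ee_only) }
  end
end
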
diff --git a/spec/channels/application_cable/connection_spec.rb b/spec/channels/application_cable/connection_spec.rb
index e5f7ea1103c..7d60548f780 100644
--- a/spec/channels/application_cable/connection_spec.rb
+++ b/spec/channels/application_cable/connection_spec.rb
@@ -5,27 +5,39 @@ require 'spec_helper'
RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_shared_state do
let(:session_id) { Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d') }
- before do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash))
+ context 'when session cookie is set' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash))
+ end
+
+ cookies[Gitlab::Application.config.session_options[:key]] = session_id.public_id
end
- cookies[Gitlab::Application.config.session_options[:key]] = session_id.public_id
- end
+ context 'when user is logged in' do
+ let(:user) { create(:user) }
+ let(:session_hash) { { 'warden.user.user.key' => [[user.id], user.encrypted_password[0, 29]] } }
+
+ it 'sets current_user' do
+ connect
+
+ expect(connection.current_user).to eq(user)
+ end
- context 'when user is logged in' do
- let(:user) { create(:user) }
- let(:session_hash) { { 'warden.user.user.key' => [[user.id], user.encrypted_password[0, 29]] } }
+ context 'with a stale password' do
+ let(:partial_password_hash) { build(:user, password: 'some_old_password').encrypted_password[0, 29] }
+ let(:session_hash) { { 'warden.user.user.key' => [[user.id], partial_password_hash] } }
- it 'sets current_user' do
- connect
+ it 'sets current_user to nil' do
+ connect
- expect(connection.current_user).to eq(user)
+ expect(connection.current_user).to be_nil
+ end
+ end
end
- context 'with a stale password' do
- let(:partial_password_hash) { build(:user, password: 'some_old_password').encrypted_password[0, 29] }
- let(:session_hash) { { 'warden.user.user.key' => [[user.id], partial_password_hash] } }
+ context 'when user is not logged in' do
+ let(:session_hash) { {} }
it 'sets current_user to nil' do
connect
@@ -35,10 +47,18 @@ RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_shared_state do
end
end
- context 'when user is not logged in' do
- let(:session_hash) { {} }
+ context 'when session cookie is not set' do
+ it 'sets current_user to nil' do
+ connect
+
+ expect(connection.current_user).to be_nil
+ end
+ end
+ context 'when session cookie is an empty string' do
it 'sets current_user to nil' do
+ cookies[Gitlab::Application.config.session_options[:key]] = ''
+
connect
expect(connection.current_user).to be_nil
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
index 36938c74afa..430ba1205cb 100644
--- a/spec/config/object_store_settings_spec.rb
+++ b/spec/config/object_store_settings_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe ObjectStoreSettings do
'lfs' => { 'enabled' => true },
'artifacts' => { 'enabled' => true },
'external_diffs' => { 'enabled' => false },
+ 'pages' => { 'enabled' => true },
'object_store' => {
'enabled' => true,
'connection' => connection,
@@ -39,6 +40,9 @@ RSpec.describe ObjectStoreSettings do
'external_diffs' => {
'bucket' => 'external_diffs',
'enabled' => false
+ },
+ 'pages' => {
+ 'bucket' => 'pages'
}
}
}
@@ -64,6 +68,11 @@ RSpec.describe ObjectStoreSettings do
expect(settings.lfs['object_store']['proxy_download']).to be true
expect(settings.lfs['object_store']['remote_directory']).to eq('lfs-objects')
+ expect(settings.pages['enabled']).to be true
+ expect(settings.pages['object_store']['enabled']).to be true
+ expect(settings.pages['object_store']['connection']).to eq(connection)
+ expect(settings.pages['object_store']['remote_directory']).to eq('pages')
+
expect(settings.external_diffs['enabled']).to be false
expect(settings.external_diffs['object_store']['enabled']).to be false
expect(settings.external_diffs['object_store']['remote_directory']).to eq('external_diffs')
@@ -75,6 +84,12 @@ RSpec.describe ObjectStoreSettings do
expect { subject }.to raise_error(/Object storage for lfs must have a bucket specified/)
end
+ it 'does not raise error if pages bucket is missing' do
+ config['object_store']['objects']['pages'].delete('bucket')
+
+ expect { subject }.not_to raise_error
+ end
+
context 'with legacy config' do
let(:legacy_settings) do
{
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 4f223811be8..f71f859a704 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -15,6 +15,37 @@ RSpec.describe Admin::ApplicationSettingsController do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
+ describe 'GET #integrations' do
+ before do
+ sign_in(admin)
+ end
+
+ context 'when GitLab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?) { true }
+ end
+
+ it 'returns 404' do
+ get :integrations
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when not GitLab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?) { false }
+ end
+
+ it 'renders correct template' do
+ get :integrations
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('admin/application_settings/integrations')
+ end
+ end
+ end
+
describe 'GET #usage_data with no access' do
before do
stub_usage_data_connections
@@ -56,6 +87,13 @@ RSpec.describe Admin::ApplicationSettingsController do
sign_in(admin)
end
+ it 'updates the require_admin_approval_after_user_signup setting' do
+ put :update, params: { application_setting: { require_admin_approval_after_user_signup: true } }
+
+ expect(response).to redirect_to(general_admin_application_settings_path)
+ expect(ApplicationSetting.current.require_admin_approval_after_user_signup).to eq(true)
+ end
+
it 'updates the password_authentication_enabled_for_git setting' do
put :update, params: { application_setting: { password_authentication_enabled_for_git: "0" } }
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index d2a569a9d48..69bdc79c5f5 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -416,6 +416,7 @@ RSpec.describe Admin::ClustersController do
expect(cluster).to be_user
expect(cluster).to be_kubernetes
expect(cluster).to be_platform_kubernetes_rbac
+ expect(cluster).to be_namespace_per_environment
end
end
end
@@ -585,6 +586,7 @@ RSpec.describe Admin::ClustersController do
enabled: false,
name: 'my-new-cluster-name',
managed: false,
+ namespace_per_environment: false,
base_domain: domain
}
}
@@ -599,6 +601,7 @@ RSpec.describe Admin::ClustersController do
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
+ expect(cluster).not_to be_namespace_per_environment
expect(cluster.domain).to eq('test-domain.com')
end
@@ -624,6 +627,7 @@ RSpec.describe Admin::ClustersController do
enabled: false,
name: 'my-new-cluster-name',
managed: false,
+ namespace_per_environment: false,
domain: domain
}
}
@@ -637,6 +641,7 @@ RSpec.describe Admin::ClustersController do
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
+ expect(cluster).not_to be_namespace_per_environment
end
end
diff --git a/spec/controllers/admin/hooks_controller_spec.rb b/spec/controllers/admin/hooks_controller_spec.rb
index 8975f746dd7..17c4222530d 100644
--- a/spec/controllers/admin/hooks_controller_spec.rb
+++ b/spec/controllers/admin/hooks_controller_spec.rb
@@ -29,4 +29,12 @@ RSpec.describe Admin::HooksController do
expect(SystemHook.first).to have_attributes(hook_params)
end
end
+
+ describe 'DELETE #destroy' do
+ let!(:hook) { create(:system_hook) }
+ let!(:log) { create(:web_hook_log, web_hook: hook) }
+ let(:params) { { id: hook } }
+
+ it_behaves_like 'Web hook destroyer'
+ end
end
diff --git a/spec/controllers/admin/instance_review_controller_spec.rb b/spec/controllers/admin/instance_review_controller_spec.rb
new file mode 100644
index 00000000000..d15894eeb5d
--- /dev/null
+++ b/spec/controllers/admin/instance_review_controller_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::InstanceReviewController do
+ include UsageDataHelpers
+
+ let(:admin) { create(:admin) }
+ let(:subscriptions_url) { ::Gitlab::SubscriptionPortal::SUBSCRIPTIONS_URL }
+
+ before do
+ sign_in(admin)
+ end
+
+ context 'GET #index' do
+ let!(:group) { create(:group) }
+ let!(:projects) { create_list(:project, 2, group: group) }
+
+ subject { post :index }
+
+ context 'with usage ping enabled' do
+ before do
+ stub_application_setting(usage_ping_enabled: true)
+ stub_usage_data_connections
+ ::Gitlab::UsageData.data(force_refresh: true)
+ subject
+ end
+
+ it 'redirects to the customers app with correct params' do
+ params = { instance_review: {
+ email: admin.email,
+ last_name: admin.name,
+ version: ::Gitlab::VERSION,
+ users_count: 5,
+ projects_count: 2,
+ groups_count: 1,
+ issues_count: 0,
+ merge_requests_count: 0,
+ internal_pipelines_count: 0,
+ external_pipelines_count: 0,
+ labels_count: 0,
+ milestones_count: 0,
+ snippets_count: 0,
+ notes_count: 0
+ } }.to_query
+
+ expect(response).to redirect_to("#{subscriptions_url}/instance_review?#{params}")
+ end
+ end
+
+ context 'with usage ping disabled' do
+ before do
+ stub_application_setting(usage_ping_enabled: false)
+ subject
+ end
+
+ it 'redirects to the customers app with correct params' do
+ params = { instance_review: {
+ email: admin.email,
+ last_name: admin.name,
+ version: ::Gitlab::VERSION
+ } }.to_query
+
+ expect(response).to redirect_to("#{subscriptions_url}/instance_review?#{params}")
+ end
+ end
+ end
+end
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 4b1806a43d2..1a13d016b73 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -20,6 +20,18 @@ RSpec.describe Admin::IntegrationsController do
end
end
end
+
+ context 'when GitLab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?) { true }
+ end
+
+ it 'returns 404' do
+ get :edit, params: { id: Service.available_services_names.sample }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
describe '#update' do
@@ -43,7 +55,7 @@ RSpec.describe Admin::IntegrationsController do
end
it 'calls to PropagateIntegrationWorker' do
- expect(PropagateIntegrationWorker).to have_received(:perform_async).with(integration.id, false)
+ expect(PropagateIntegrationWorker).to have_received(:perform_async).with(integration.id)
end
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 013eee19409..3fffc50475c 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -151,4 +151,21 @@ RSpec.describe Admin::RunnersController do
expect(runner.active).to eq(false)
end
end
+
+ describe 'GET #runner_setup_scripts' do
+ it 'renders the setup scripts' do
+ get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("install")
+ expect(json_response).to have_key("register")
+ end
+
+ it 'renders errors if they occur' do
+ get :runner_setup_scripts, params: { os: 'foo', arch: 'bar' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to have_key("errors")
+ end
+ end
end
diff --git a/spec/controllers/admin/sessions_controller_spec.rb b/spec/controllers/admin/sessions_controller_spec.rb
index 35982e57034..5fa7a7f278d 100644
--- a/spec/controllers/admin/sessions_controller_spec.rb
+++ b/spec/controllers/admin/sessions_controller_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe Admin::SessionsController, :do_not_mock_admin_mode do
# triggering the auth form will request admin mode
get :new
- Timecop.freeze(Gitlab::Auth::CurrentUserMode::ADMIN_MODE_REQUESTED_GRACE_PERIOD.from_now) do
+ travel_to(Gitlab::Auth::CurrentUserMode::ADMIN_MODE_REQUESTED_GRACE_PERIOD.from_now) do
post :create, params: { user: { password: user.password } }
expect(response).to redirect_to(new_admin_session_path)
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index 6301da74f4a..5312a0db7f5 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -23,6 +23,12 @@ RSpec.describe Admin::UsersController do
expect(assigns(:users)).to eq([admin])
end
+
+ it 'eager loads authorized projects association' do
+ get :index
+
+ expect(assigns(:users).first.association(:authorized_projects)).to be_loaded
+ end
end
describe 'GET :id' do
@@ -96,6 +102,58 @@ RSpec.describe Admin::UsersController do
end
end
+ describe 'PUT #approve' do
+ let(:user) { create(:user, :blocked_pending_approval) }
+
+ subject { put :approve, params: { id: user.username } }
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(admin_approval_for_new_user_signups: false)
+ end
+
+ it 'responds with access denied' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when feature is enabled' do
+ before do
+ stub_feature_flags(admin_approval_for_new_user_signups: true)
+ end
+
+ context 'when successful' do
+ it 'activates the user' do
+ subject
+
+ user.reload
+
+ expect(user).to be_active
+ expect(flash[:notice]).to eq('Successfully approved')
+ end
+ end
+
+ context 'when unsuccessful' do
+ let(:user) { create(:user, :blocked) }
+
+ it 'displays the error' do
+ subject
+
+ expect(flash[:alert]).to eq('The user you are trying to approve is not pending an approval')
+ end
+
+ it 'does not activate the user' do
+ subject
+
+ user.reload
+ expect(user).not_to be_active
+ end
+ end
+ end
+ end
+
describe 'PUT #activate' do
shared_examples 'a request that activates the user' do
it 'activates the user' do
@@ -184,6 +242,17 @@ RSpec.describe Admin::UsersController do
expect(flash[:notice]).to eq('Error occurred. A blocked user cannot be deactivated')
end
end
+
+ context 'for an internal user' do
+ it 'does not deactivate the user' do
+ internal_user = User.alert_bot
+
+ put :deactivate, params: { id: internal_user.username }
+
+ expect(internal_user.reload.deactivated?).to be_falsey
+ expect(flash[:notice]).to eq('Internal users cannot be deactivated')
+ end
+ end
end
describe 'PUT block/:id' do
@@ -321,7 +390,7 @@ RSpec.describe Admin::UsersController do
describe 'POST update' do
context 'when the password has changed' do
- def update_password(user, password = User.random_password, password_confirmation = password)
+ def update_password(user, password = User.random_password, password_confirmation = password, format = :html)
params = {
id: user.to_param,
user: {
@@ -330,7 +399,7 @@ RSpec.describe Admin::UsersController do
}
}
- post :update, params: params
+ post :update, params: params, format: format
end
context 'when admin changes their own password' do
@@ -429,6 +498,23 @@ RSpec.describe Admin::UsersController do
.not_to change { user.reload.encrypted_password }
end
end
+
+ context 'when the update fails' do
+ let(:password) { User.random_password }
+
+ before do
+ expect_next_instance_of(Users::UpdateService) do |service|
+ allow(service).to receive(:execute).and_return({ message: 'failed', status: :error })
+ end
+ end
+
+ it 'returns a 500 error' do
+ expect { update_password(admin, password, password, :json) }
+ .not_to change { admin.reload.password_expired? }
+
+ expect(response).to have_gitlab_http_status(:error)
+ end
+ end
end
context 'admin notes' do
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 188a4cb04af..d95aac2f386 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -416,13 +416,13 @@ RSpec.describe ApplicationController do
end
it 'returns false if the grace period has expired' do
- Timecop.freeze(3.hours.from_now) do
+ travel_to(3.hours.from_now) do
expect(subject).to be_falsey
end
end
it 'returns true if the grace period is still active' do
- Timecop.freeze(1.hour.from_now) do
+ travel_to(1.hour.from_now) do
expect(subject).to be_truthy
end
end
@@ -844,6 +844,8 @@ RSpec.describe ApplicationController do
describe '#set_current_context' do
controller(described_class) do
+ feature_category :issue_tracking
+
def index
Labkit::Context.with_context do |context|
render json: context.to_h
@@ -893,6 +895,12 @@ RSpec.describe ApplicationController do
expect(json_response['meta.caller_id']).to eq('AnonymousController#index')
end
+ it 'sets the feature_category as defined in the controller' do
+ get :index, format: :json
+
+ expect(json_response['meta.feature_category']).to eq('issue_tracking')
+ end
+
it 'assigns the context to a variable for logging' do
get :index, format: :json
diff --git a/spec/controllers/boards/lists_controller_spec.rb b/spec/controllers/boards/lists_controller_spec.rb
index c72d9e5053a..9b09f46d17e 100644
--- a/spec/controllers/boards/lists_controller_spec.rb
+++ b/spec/controllers/boards/lists_controller_spec.rb
@@ -260,6 +260,17 @@ RSpec.describe Boards::ListsController do
end
end
+ context 'with an error service response' do
+ it 'returns an unprocessable entity response' do
+ allow(Boards::Lists::DestroyService).to receive(:new)
+ .and_return(double(execute: ServiceResponse.error(message: 'error')))
+
+ remove_board_list user: user, board: board, list: planning
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
def remove_board_list(user:, board:, list:)
sign_in(user)
diff --git a/spec/controllers/concerns/controller_with_feature_category/config_spec.rb b/spec/controllers/concerns/controller_with_feature_category/config_spec.rb
deleted file mode 100644
index 9b8ffd2baab..00000000000
--- a/spec/controllers/concerns/controller_with_feature_category/config_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require "fast_spec_helper"
-require "rspec-parameterized"
-require_relative "../../../../app/controllers/concerns/controller_with_feature_category/config"
-
-RSpec.describe ControllerWithFeatureCategory::Config do
- describe "#matches?" do
- using RSpec::Parameterized::TableSyntax
-
- where(:only_actions, :except_actions, :if_proc, :unless_proc, :test_action, :expected) do
- nil | nil | nil | nil | "action" | true
- [:included] | nil | nil | nil | "action" | false
- [:included] | nil | nil | nil | "included" | true
- nil | [:excluded] | nil | nil | "excluded" | false
- nil | nil | true | nil | "action" | true
- [:included] | nil | true | nil | "action" | false
- [:included] | nil | true | nil | "included" | true
- nil | [:excluded] | true | nil | "excluded" | false
- nil | nil | false | nil | "action" | false
- [:included] | nil | false | nil | "action" | false
- [:included] | nil | false | nil | "included" | false
- nil | [:excluded] | false | nil | "excluded" | false
- nil | nil | nil | true | "action" | false
- [:included] | nil | nil | true | "action" | false
- [:included] | nil | nil | true | "included" | false
- nil | [:excluded] | nil | true | "excluded" | false
- nil | nil | nil | false | "action" | true
- [:included] | nil | nil | false | "action" | false
- [:included] | nil | nil | false | "included" | true
- nil | [:excluded] | nil | false | "excluded" | false
- nil | nil | true | false | "action" | true
- [:included] | nil | true | false | "action" | false
- [:included] | nil | true | false | "included" | true
- nil | [:excluded] | true | false | "excluded" | false
- nil | nil | false | true | "action" | false
- [:included] | nil | false | true | "action" | false
- [:included] | nil | false | true | "included" | false
- nil | [:excluded] | false | true | "excluded" | false
- end
-
- with_them do
- let(:config) do
- if_to_proc = if_proc.nil? ? nil : -> (_) { if_proc }
- unless_to_proc = unless_proc.nil? ? nil : -> (_) { unless_proc }
-
- described_class.new(:category, only_actions, except_actions, if_to_proc, unless_to_proc)
- end
-
- specify { expect(config.matches?(test_action)).to be(expected) }
- end
- end
-end
diff --git a/spec/controllers/concerns/controller_with_feature_category_spec.rb b/spec/controllers/concerns/controller_with_feature_category_spec.rb
index e603a7d14c4..55e84755f5c 100644
--- a/spec/controllers/concerns/controller_with_feature_category_spec.rb
+++ b/spec/controllers/concerns/controller_with_feature_category_spec.rb
@@ -2,7 +2,6 @@
require 'fast_spec_helper'
require_relative "../../../app/controllers/concerns/controller_with_feature_category"
-require_relative "../../../app/controllers/concerns/controller_with_feature_category/config"
RSpec.describe ControllerWithFeatureCategory do
describe ".feature_category_for_action" do
@@ -14,17 +13,15 @@ RSpec.describe ControllerWithFeatureCategory do
let(:controller) do
Class.new(base_controller) do
- feature_category :baz
- feature_category :foo, except: %w(update edit)
- feature_category :bar, only: %w(index show)
- feature_category :quux, only: %w(destroy)
- feature_category :quuz, only: %w(destroy)
+ feature_category :foo, %w(update edit)
+ feature_category :bar, %w(index show)
+ feature_category :quux, %w(destroy)
end
end
let(:subclass) do
Class.new(controller) do
- feature_category :qux, only: %w(index)
+ feature_category :baz, %w(subclass_index)
end
end
@@ -33,34 +30,31 @@ RSpec.describe ControllerWithFeatureCategory do
end
it "returns the expected category", :aggregate_failures do
- expect(controller.feature_category_for_action("update")).to eq(:baz)
- expect(controller.feature_category_for_action("hello")).to eq(:foo)
+ expect(controller.feature_category_for_action("update")).to eq(:foo)
expect(controller.feature_category_for_action("index")).to eq(:bar)
+ expect(controller.feature_category_for_action("destroy")).to eq(:quux)
end
- it "returns the closest match for categories defined in subclasses" do
- expect(subclass.feature_category_for_action("index")).to eq(:qux)
- expect(subclass.feature_category_for_action("show")).to eq(:bar)
+ it "returns the expected category for categories defined in subclasses" do
+ expect(subclass.feature_category_for_action("subclass_index")).to eq(:baz)
end
- it "returns the last defined feature category when multiple match" do
- expect(controller.feature_category_for_action("destroy")).to eq(:quuz)
- end
-
- it "raises an error when using including and excluding the same action" do
+ it "raises an error when defining for the controller and for individual actions" do
expect do
Class.new(base_controller) do
- feature_category :hello, only: [:world], except: [:world]
+ feature_category :hello
+ feature_category :goodbye, [:world]
end
- end.to raise_error(%r(cannot configure both `only` and `except`))
+ end.to raise_error(ArgumentError, "hello is defined for all actions, but other categories are set")
end
- it "raises an error when using unknown arguments" do
+ it "raises an error when multiple calls define the same action" do
expect do
Class.new(base_controller) do
- feature_category :hello, hello: :world
+ feature_category :hello, [:world]
+ feature_category :goodbye, ["world"]
end
- end.to raise_error(%r(unknown arguments))
+ end.to raise_error(ArgumentError, "Actions have multiple feature categories: world")
end
end
end
diff --git a/spec/controllers/concerns/issuable_collections_spec.rb b/spec/controllers/concerns/issuable_collections_spec.rb
index befdd760965..6fa273bf3d7 100644
--- a/spec/controllers/concerns/issuable_collections_spec.rb
+++ b/spec/controllers/concerns/issuable_collections_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe IssuableCollections do
+ using RSpec::Parameterized::TableSyntax
+
let(:user) { create(:user) }
let(:controller) do
@@ -25,13 +27,35 @@ RSpec.describe IssuableCollections do
end
describe '#page_count_for_relation' do
- let(:params) { { state: 'opened' } }
+ let(:relation) { double(:relation, limit_value: 20) }
+
+ context 'row count is known' do
+ let(:params) { { state: 'opened' } }
+
+ it 'returns the number of pages' do
+ pages = controller.send(:page_count_for_relation, relation, 28)
+
+ expect(pages).to eq(2)
+ end
+ end
+
+ context 'row_count is unknown' do
+ where(:page_param, :expected) do
+ nil | 2
+ 1 | 2
+ '1' | 2
+ 2 | 3
+ end
- it 'returns the number of pages' do
- relation = double(:relation, limit_value: 20)
- pages = controller.send(:page_count_for_relation, relation, 28)
+ with_them do
+ let(:params) { { state: 'opened', page: page_param } }
- expect(pages).to eq(2)
+ it 'returns current page + 1 if the row count is unknown' do
+ pages = controller.send(:page_count_for_relation, relation, -1)
+
+ expect(pages).to eq(expected)
+ end
+ end
end
end
diff --git a/spec/controllers/concerns/redis_tracking_spec.rb b/spec/controllers/concerns/redis_tracking_spec.rb
index 3795fca5576..831f5ad7bb1 100644
--- a/spec/controllers/concerns/redis_tracking_spec.rb
+++ b/spec/controllers/concerns/redis_tracking_spec.rb
@@ -3,15 +3,19 @@
require "spec_helper"
RSpec.describe RedisTracking do
- let(:event_name) { 'g_compliance_dashboard' }
- let(:feature) { 'g_compliance_dashboard_feature' }
+ let(:feature) { 'approval_rule' }
let(:user) { create(:user) }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
controller(ApplicationController) do
include RedisTracking
skip_before_action :authenticate_user!, only: :show
- track_redis_hll_event :index, :show, name: 'i_analytics_dev_ops_score', feature: :g_compliance_dashboard_feature, feature_default_enabled: true
+ track_redis_hll_event :index, :show, name: 'g_compliance_approval_rules', feature: :approval_rule, feature_default_enabled: true,
+ if: [:custom_condition_one?, :custom_condition_two?]
def index
render html: 'index'
@@ -24,51 +28,94 @@ RSpec.describe RedisTracking do
def show
render html: 'show'
end
- end
- context 'with feature disabled' do
- it 'does not track the event' do
- stub_feature_flags(feature => false)
+ private
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ def custom_condition_one?
+ true
+ end
- get :index
+ def custom_condition_two?
+ true
end
end
- context 'with usage ping disabled' do
+ def expect_tracking
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ .with(instance_of(String), 'g_compliance_approval_rules')
+ end
+
+ def expect_no_tracking
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ end
+
+ context 'with feature disabled' do
it 'does not track the event' do
- stub_feature_flags(feature => true)
- allow(Gitlab::CurrentSettings).to receive(:usage_ping_enabled?).and_return(false)
+ stub_feature_flags(feature => false)
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ expect_no_tracking
get :index
end
end
- context 'with feature enabled and usage ping enabled' do
+ context 'with feature enabled' do
before do
stub_feature_flags(feature => true)
- allow(Gitlab::CurrentSettings).to receive(:usage_ping_enabled?).and_return(true)
end
context 'when user is logged in' do
- it 'tracks the event' do
+ before do
sign_in(user)
+ end
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ it 'tracks the event' do
+ expect_tracking
get :index
end
it 'passes default_enabled flag' do
- sign_in(user)
-
expect(controller).to receive(:metric_feature_enabled?).with(feature.to_sym, true)
get :index
end
+
+ it 'tracks the event if DNT is not enabled' do
+ request.headers['DNT'] = '0'
+
+ expect_tracking
+
+ get :index
+ end
+
+ it 'does not track the event if DNT is enabled' do
+ request.headers['DNT'] = '1'
+
+ expect_no_tracking
+
+ get :index
+ end
+
+ it 'does not track the event if the format is not HTML' do
+ expect_no_tracking
+
+ get :index, format: :json
+ end
+
+ it 'does not track the event if a custom condition returns false' do
+ expect(controller).to receive(:custom_condition_two?).and_return(false)
+
+ expect_no_tracking
+
+ get :index
+ end
+
+ it 'does not track the event for untracked actions' do
+ expect_no_tracking
+
+ get :new
+ end
end
context 'when user is not logged in and there is a visitor_id' do
@@ -81,26 +128,18 @@ RSpec.describe RedisTracking do
it 'tracks the event' do
cookies[:visitor_id] = { value: visitor_id, expires: 24.months }
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ expect_tracking
get :show
end
end
context 'when user is not logged in and there is no visitor_id' do
- it 'does not tracks the event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ it 'does not track the event' do
+ expect_no_tracking
get :index
end
end
-
- context 'for untracked action' do
- it 'does not tracks the event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
-
- get :new
- end
- end
end
end
diff --git a/spec/controllers/dashboard/labels_controller_spec.rb b/spec/controllers/dashboard/labels_controller_spec.rb
index 415cb821545..e7091664d1a 100644
--- a/spec/controllers/dashboard/labels_controller_spec.rb
+++ b/spec/controllers/dashboard/labels_controller_spec.rb
@@ -3,27 +3,32 @@
require 'spec_helper'
RSpec.describe Dashboard::LabelsController do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
- let!(:label) { create(:label, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+
+ let_it_be(:label) { create(:label, project: project, title: 'some_label') }
+ let_it_be(:label_with_same_title) { create(:label, project: project_2, title: 'some_label') }
+ let_it_be(:unrelated_label) { create(:label, project: create(:project, :public)) }
+
+ before_all do
+ project.add_reporter(user)
+ project_2.add_reporter(user)
+ end
before do
sign_in(user)
- project.add_reporter(user)
end
describe "#index" do
- let!(:unrelated_label) { create(:label, project: create(:project, :public)) }
-
subject { get :index, format: :json }
- it 'returns global labels for projects the user has a relationship with' do
+ it 'returns labels with unique titles for projects the user has a relationship with' do
subject
expect(json_response).to be_kind_of(Array)
expect(json_response.size).to eq(1)
- expect(json_response[0]["id"]).to be_nil
- expect(json_response[0]["title"]).to eq(label.title)
+ expect(json_response[0]['title']).to eq(label.title)
end
it_behaves_like 'disabled when using an external authorization service'
diff --git a/spec/controllers/dashboard_controller_spec.rb b/spec/controllers/dashboard_controller_spec.rb
index c838affa239..9b78f841cce 100644
--- a/spec/controllers/dashboard_controller_spec.rb
+++ b/spec/controllers/dashboard_controller_spec.rb
@@ -15,6 +15,16 @@ RSpec.describe DashboardController do
describe 'GET issues' do
it_behaves_like 'issuables list meta-data', :issue, :issues
it_behaves_like 'issuables requiring filter', :issues
+
+ it 'lists only incidents and issues' do
+ issue = create(:incident, project: project, author: user)
+ incident = create(:incident, project: project, author: user)
+ create(:quality_test_case, project: project, author: user)
+
+ get :issues, params: { author_id: user.id }
+
+ expect(assigns(:issues)).to match_array([issue, incident])
+ end
end
describe 'GET merge requests' do
diff --git a/spec/controllers/every_controller_spec.rb b/spec/controllers/every_controller_spec.rb
index 4785ee9ed8f..b1519c4ef1e 100644
--- a/spec/controllers/every_controller_spec.rb
+++ b/spec/controllers/every_controller_spec.rb
@@ -17,20 +17,20 @@ RSpec.describe "Every controller" do
.compact
.select { |route| route[:controller].present? && route[:action].present? }
.map { |route| [constantize_controller(route[:controller]), route[:action]] }
- .reject { |route| route.first.nil? || !route.first.include?(ControllerWithFeatureCategory) }
+ .select { |(controller, action)| controller&.include?(ControllerWithFeatureCategory) }
+ .reject { |(controller, action)| controller == ApplicationController || controller == Devise::UnlocksController }
end
let_it_be(:routes_without_category) do
controller_actions.map do |controller, action|
- "#{controller}##{action}" unless controller.feature_category_for_action(action)
+ next if controller.feature_category_for_action(action)
+
+ "#{controller}##{action}"
end.compact
end
it "has feature categories" do
- pending("We'll work on defining categories for all controllers: "\
- "https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/463")
-
- expect(routes_without_category).to be_empty, "#{routes_without_category.first(10)} did not have a category"
+ expect(routes_without_category).to be_empty, "#{routes_without_category} did not have a category"
end
it "completed controllers don't get new routes without categories" do
@@ -74,9 +74,9 @@ RSpec.describe "Every controller" do
end
def actions_defined_in_feature_category_config(controller)
- feature_category_configs = controller.send(:class_attributes)[:feature_category_config]
- feature_category_configs.map do |config|
- Array(config.send(:only)) + Array(config.send(:except))
- end.flatten.uniq.map(&:to_s)
+ controller.send(:class_attributes)[:feature_category_config]
+ .values
+ .flatten
+ .map(&:to_s)
end
end
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index 405f1eae482..e4aea688a69 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -98,6 +98,12 @@ RSpec.describe GraphqlController do
expect(assigns(:context)[:is_sessionless_user]).to be false
end
end
+
+ it 'includes request object in context' do
+ post :execute
+
+ expect(assigns(:context)[:request]).to eq request
+ end
end
describe 'Admin Mode' do
@@ -150,9 +156,11 @@ RSpec.describe GraphqlController do
describe '#append_info_to_payload' do
let(:graphql_query) { graphql_query_for('project', { 'fullPath' => 'foo' }, %w(id name)) }
+ let(:mock_store) { { graphql_logs: { foo: :bar } } }
let(:log_payload) { {} }
before do
+ allow(RequestStore).to receive(:store).and_return(mock_store)
allow(controller).to receive(:append_info_to_payload).and_wrap_original do |method, *|
method.call(log_payload)
end
@@ -162,7 +170,7 @@ RSpec.describe GraphqlController do
post :execute, params: { query: graphql_query, operationName: 'Foo' }
expect(controller).to have_received(:append_info_to_payload)
- expect(log_payload.dig(:metadata, :graphql, :operation_name)).to eq('Foo')
+ expect(log_payload.dig(:metadata, :graphql)).to eq({ operation_name: 'Foo', foo: :bar })
end
end
end
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index 81d5bc7770f..140b7b0f2a8 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -271,6 +271,7 @@ RSpec.describe Groups::ClustersController do
expect(cluster).to be_kubernetes
expect(cluster.provider_gcp).to be_legacy_abac
expect(cluster).to be_managed
+ expect(cluster).to be_namespace_per_environment
end
context 'when legacy_abac param is false' do
@@ -358,6 +359,7 @@ RSpec.describe Groups::ClustersController do
expect(cluster).to be_user
expect(cluster).to be_kubernetes
expect(cluster).to be_managed
+ expect(cluster).to be_namespace_per_environment
end
end
@@ -387,6 +389,7 @@ RSpec.describe Groups::ClustersController do
expect(cluster).to be_user
expect(cluster).to be_kubernetes
expect(cluster).to be_platform_kubernetes_rbac
+ expect(cluster).to be_namespace_per_environment
end
end
@@ -716,6 +719,7 @@ RSpec.describe Groups::ClustersController do
enabled: false,
name: 'my-new-cluster-name',
managed: false,
+ namespace_per_environment: false,
domain: domain
}
}
@@ -729,6 +733,7 @@ RSpec.describe Groups::ClustersController do
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
+ expect(cluster).not_to be_namespace_per_environment
end
end
diff --git a/spec/controllers/groups/group_links_controller_spec.rb b/spec/controllers/groups/group_links_controller_spec.rb
index 07299382230..c411d9cfb63 100644
--- a/spec/controllers/groups/group_links_controller_spec.rb
+++ b/spec/controllers/groups/group_links_controller_spec.rb
@@ -15,6 +15,21 @@ RSpec.describe Groups::GroupLinksController do
shared_with_group.add_developer(group_member)
end
+ shared_examples 'placeholder is passed as `id` parameter' do |action|
+ it 'returns a 404' do
+ post(
+ action,
+ params: {
+ group_id: shared_group,
+ id: ':id'
+ },
+ format: :json
+ )
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
describe '#create' do
let(:shared_with_group_id) { shared_with_group.id }
let(:shared_group_access) { GroupGroupLink.default_access }
@@ -125,6 +140,8 @@ RSpec.describe Groups::GroupLinksController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ include_examples 'placeholder is passed as `id` parameter', :create
end
describe '#update' do
@@ -136,10 +153,15 @@ RSpec.describe Groups::GroupLinksController do
let(:expiry_date) { 1.month.from_now.to_date }
subject do
- post(:update, params: { group_id: shared_group,
- id: link.id,
- group_link: { group_access: Gitlab::Access::GUEST,
- expires_at: expiry_date } })
+ post(
+ :update,
+ params: {
+ group_id: shared_group,
+ id: link.id,
+ group_link: { group_access: Gitlab::Access::GUEST, expires_at: expiry_date }
+ },
+ format: :json
+ )
end
context 'when user has admin access to the shared group' do
@@ -160,6 +182,26 @@ RSpec.describe Groups::GroupLinksController do
expect(link.expires_at).to eq(expiry_date)
end
+ context 'when `expires_at` is set' do
+ it 'returns correct json response' do
+ travel_to Time.now.utc.beginning_of_day
+
+ subject
+
+ expect(json_response).to eq({ "expires_in" => "about 1 month", "expires_soon" => false })
+ end
+ end
+
+ context 'when `expires_at` is not set' do
+ let(:expiry_date) { nil }
+
+ it 'returns empty json response' do
+ subject
+
+ expect(json_response).to be_empty
+ end
+ end
+
it 'updates project permissions' do
expect { subject }.to change { group_member.can?(:create_release, project) }.from(true).to(false)
end
@@ -172,6 +214,8 @@ RSpec.describe Groups::GroupLinksController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ include_examples 'placeholder is passed as `id` parameter', :update
end
describe '#destroy' do
@@ -207,5 +251,7 @@ RSpec.describe Groups::GroupLinksController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ include_examples 'placeholder is passed as `id` parameter', :destroy
end
end
diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb
index 4b9dd3629f1..5425a437c80 100644
--- a/spec/controllers/groups/group_members_controller_spec.rb
+++ b/spec/controllers/groups/group_members_controller_spec.rb
@@ -233,6 +233,42 @@ RSpec.describe Groups::GroupMembersController do
end
end
end
+
+ context 'expiration date' do
+ let(:expiry_date) { 1.month.from_now.to_date }
+
+ before do
+ travel_to Time.now.utc.beginning_of_day
+
+ put(
+ :update,
+ params: {
+ group_member: { expires_at: expiry_date },
+ group_id: group,
+ id: requester
+ },
+ format: :json
+ )
+ end
+
+ context 'when `expires_at` is set' do
+ it 'returns correct json response' do
+ expect(json_response).to eq({
+ "expires_in" => "about 1 month",
+ "expires_soon" => false,
+ "expires_at_formatted" => expiry_date.to_time.in_time_zone.to_s(:medium)
+ })
+ end
+ end
+
+ context 'when `expires_at` is not set' do
+ let(:expiry_date) { nil }
+
+ it 'returns empty json response' do
+ expect(json_response).to be_empty
+ end
+ end
+ end
end
describe 'DELETE destroy' do
@@ -441,7 +477,7 @@ RSpec.describe Groups::GroupMembersController do
group_id: group,
id: membership
},
- format: :js
+ format: :json
expect(response).to have_gitlab_http_status(:ok)
end
diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb
index 20ee19b01d1..33041f1af9f 100644
--- a/spec/controllers/groups/labels_controller_spec.rb
+++ b/spec/controllers/groups/labels_controller_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe Groups::LabelsController do
before do
group.add_owner(user)
+      # feature flags are enabled by default in specs, so we disable this one
+ stub_feature_flags(show_inherited_labels: false)
sign_in(user)
end
@@ -32,11 +34,41 @@ RSpec.describe Groups::LabelsController do
subgroup.add_owner(user)
end
- it 'returns ancestor group labels' do
- get :index, params: { group_id: subgroup, include_ancestor_groups: true, only_group_labels: true }, format: :json
+ RSpec.shared_examples 'returns ancestor group labels' do
+ it 'returns ancestor group labels' do
+ get :index, params: params, format: :json
- label_ids = json_response.map {|label| label['title']}
- expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title])
+ label_ids = json_response.map {|label| label['title']}
+ expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title])
+ end
+ end
+
+ context 'when include_ancestor_groups true' do
+ let(:params) { { group_id: subgroup, include_ancestor_groups: true, only_group_labels: true } }
+
+ it_behaves_like 'returns ancestor group labels'
+ end
+
+ context 'when include_ancestor_groups false' do
+ let(:params) { { group_id: subgroup, only_group_labels: true } }
+
+ it 'does not return ancestor group labels', :aggregate_failures do
+ get :index, params: params, format: :json
+
+ label_ids = json_response.map {|label| label['title']}
+ expect(label_ids).to match_array([subgroup_label_1.title])
+ expect(label_ids).not_to include([group_label_1.title])
+ end
+ end
+
+ context 'when show_inherited_labels enabled' do
+ let(:params) { { group_id: subgroup } }
+
+ before do
+ stub_feature_flags(show_inherited_labels: true)
+ end
+
+ it_behaves_like 'returns ancestor group labels'
end
end
@@ -56,4 +88,43 @@ RSpec.describe Groups::LabelsController do
expect(response).to have_gitlab_http_status(:ok)
end
end
+
+ describe 'DELETE #destroy' do
+ context 'when current user has ability to destroy the label' do
+ before do
+ sign_in(user)
+ end
+
+ it 'removes the label' do
+ label = create(:group_label, group: group)
+ delete :destroy, params: { group_id: group.to_param, id: label.to_param }
+
+ expect { label.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+      context 'when label is successfully destroyed' do
+ it 'redirects to the group labels page' do
+ label = create(:group_label, group: group)
+ delete :destroy, params: { group_id: group.to_param, id: label.to_param }
+
+ expect(response).to redirect_to(group_labels_path)
+ end
+ end
+ end
+
+ context 'when current_user does not have ability to destroy the label' do
+ let(:another_user) { create(:user) }
+
+ before do
+ sign_in(another_user)
+ end
+
+ it 'responds with status 404' do
+ label = create(:group_label, group: group)
+ delete :destroy, params: { group_id: group.to_param, id: label.to_param }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index 5c7b88a218a..2c85fe482e2 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Groups::MilestonesController do
let(:user) { create(:user) }
let(:title) { '肯定不是中文的问题' }
let(:milestone) { create(:milestone, project: project) }
- let(:milestone_path) { group_milestone_path(group, milestone.safe_title, title: milestone.title) }
let(:milestone_params) do
{
@@ -25,6 +24,12 @@ RSpec.describe Groups::MilestonesController do
project.add_maintainer(user)
end
+ it_behaves_like 'milestone tabs' do
+ let(:milestone) { create(:milestone, group: group) }
+ let(:milestone_path) { group_milestone_path(group, milestone.iid) }
+ let(:request_params) { { group_id: group, id: milestone.iid } }
+ end
+
describe '#index' do
describe 'as HTML' do
render_views
diff --git a/spec/controllers/groups/registry/repositories_controller_spec.rb b/spec/controllers/groups/registry/repositories_controller_spec.rb
index ddac8fc5002..ae982b02a4f 100644
--- a/spec/controllers/groups/registry/repositories_controller_spec.rb
+++ b/spec/controllers/groups/registry/repositories_controller_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Groups::Registry::RepositoriesController do
it_behaves_like 'with name parameter'
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_repositories'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
context 'with project in subgroup' do
let_it_be(:test_group) { create(:group, parent: group ) }
diff --git a/spec/controllers/groups/settings/ci_cd_controller_spec.rb b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
index f11bb66caab..880d5fe8951 100644
--- a/spec/controllers/groups/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
@@ -225,4 +225,25 @@ RSpec.describe Groups::Settings::CiCdController do
end
end
end
+
+ describe 'GET #runner_setup_scripts' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'renders the setup scripts' do
+ get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64', group_id: group }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("install")
+ expect(json_response).to have_key("register")
+ end
+
+ it 'renders errors if they occur' do
+ get :runner_setup_scripts, params: { os: 'foo', arch: 'bar', group_id: group }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to have_key("errors")
+ end
+ end
end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 35d8c0b7c6d..df7e018b35e 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -2,18 +2,18 @@
require 'spec_helper'
-RSpec.describe GroupsController do
+RSpec.describe GroupsController, factory_default: :keep do
include ExternalAuthorizationServiceHelpers
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
- let(:group) { create(:group, :public) }
- let(:project) { create(:project, namespace: group) }
- let!(:group_member) { create(:group_member, group: group, user: user) }
- let!(:owner) { group.add_owner(create(:user)).user }
- let!(:maintainer) { group.add_maintainer(create(:user)).user }
- let!(:developer) { group.add_developer(create(:user)).user }
- let!(:guest) { group.add_guest(create(:user)).user }
+ let_it_be_with_refind(:group) { create_default(:group, :public) }
+ let_it_be_with_refind(:project) { create(:project, namespace: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:group_member) { create(:group_member, group: group, user: user) }
+ let_it_be(:owner) { group.add_owner(create(:user)).user }
+ let_it_be(:maintainer) { group.add_maintainer(create(:user)).user }
+ let_it_be(:developer) { group.add_developer(create(:user)).user }
+ let_it_be(:guest) { group.add_guest(create(:user)).user }
shared_examples 'member with ability to create subgroups' do
it 'renders the new page' do
@@ -57,7 +57,6 @@ RSpec.describe GroupsController do
describe 'GET #show' do
before do
sign_in(user)
- project
end
let(:format) { :html }
@@ -82,7 +81,6 @@ RSpec.describe GroupsController do
describe 'GET #details' do
before do
sign_in(user)
- project
end
let(:format) { :html }
@@ -131,12 +129,9 @@ RSpec.describe GroupsController do
end
describe 'GET #activity' do
- render_views
-
context 'as json' do
before do
sign_in(user)
- project
end
it 'includes events from all projects in group and subgroups', :sidekiq_might_not_need_inline do
@@ -157,10 +152,6 @@ RSpec.describe GroupsController do
end
context 'when user has no permission to see the event' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
-
let(:project_with_restricted_access) do
create(:project, :public, issues_access_level: ProjectFeature::PRIVATE, group: group)
end
@@ -398,8 +389,8 @@ RSpec.describe GroupsController do
end
describe 'GET #issues', :sidekiq_might_not_need_inline do
- let(:issue_1) { create(:issue, project: project, title: 'foo') }
- let(:issue_2) { create(:issue, project: project, title: 'bar') }
+ let_it_be(:issue_1) { create(:issue, project: project, title: 'foo') }
+ let_it_be(:issue_2) { create(:issue, project: project, title: 'bar') }
before do
create_list(:award_emoji, 3, awardable: issue_2)
@@ -409,6 +400,15 @@ RSpec.describe GroupsController do
sign_in(user)
end
+ it 'lists only incidents and issues' do
+ incident = create(:incident, project: project)
+ create(:quality_test_case, project: project)
+
+ get :issues, params: { id: group.to_param }
+
+ expect(assigns(:issues)).to match_array([issue_1, issue_2, incident])
+ end
+
context 'sorting by votes' do
it 'sorts most popular issues' do
get :issues, params: { id: group.to_param, sort: 'upvotes_desc' }
@@ -551,9 +551,38 @@ RSpec.describe GroupsController do
end
end
- context 'when there is a conflicting group path' do
- render_views
+ context "updating default_branch_name" do
+ let(:example_branch_name) { "example_branch_name" }
+
+ subject(:update_action) do
+ put :update,
+ params: {
+ id: group.to_param,
+ group: { default_branch_name: example_branch_name }
+ }
+ end
+
+ it "updates the attribute" do
+ expect { subject }
+ .to change { group.namespace_settings.reload.default_branch_name }
+ .from(nil)
+ .to(example_branch_name)
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ context "to empty string" do
+ let(:example_branch_name) { '' }
+
+ it "does not update the attribute" do
+ subject
+
+ expect(group.namespace_settings.reload.default_branch_name).not_to eq('')
+ end
+ end
+ end
+
+ context 'when there is a conflicting group path' do
let!(:conflict_group) { create(:group, path: SecureRandom.hex(12) ) }
let!(:old_name) { group.name }
@@ -794,6 +823,7 @@ RSpec.describe GroupsController do
context 'when transferring to a subgroup goes right' do
let(:new_parent_group) { create(:group, :public) }
+ let(:group) { create(:group, :public) }
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
let!(:new_parent_group_member) { create(:group_member, :owner, group: new_parent_group, user: user) }
@@ -805,11 +835,8 @@ RSpec.describe GroupsController do
}
end
- it 'returns a notice' do
+ it 'returns a notice and redirects to the new path' do
expect(flash[:notice]).to eq("Group '#{group.name}' was successfully transferred.")
- end
-
- it 'redirects to the new path' do
expect(response).to redirect_to("/#{new_parent_group.path}/#{group.path}")
end
end
@@ -826,17 +853,15 @@ RSpec.describe GroupsController do
}
end
- it 'returns a notice' do
+ it 'returns a notice and redirects to the new path' do
expect(flash[:notice]).to eq("Group '#{group.name}' was successfully transferred.")
- end
-
- it 'redirects to the new path' do
expect(response).to redirect_to("/#{group.path}")
end
end
context 'When the transfer goes wrong' do
let(:new_parent_group) { create(:group, :public) }
+ let(:group) { create(:group, :public) }
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
let!(:new_parent_group_member) { create(:group_member, :owner, group: new_parent_group, user: user) }
@@ -850,17 +875,15 @@ RSpec.describe GroupsController do
}
end
- it 'returns an alert' do
+ it 'returns an alert and redirects to the current path' do
expect(flash[:alert]).to eq "Transfer failed: namespace directory cannot be moved"
- end
-
- it 'redirects to the current path' do
expect(response).to redirect_to(edit_group_path(group))
end
end
context 'when the user is not allowed to transfer the group' do
let(:new_parent_group) { create(:group, :public) }
+ let(:group) { create(:group, :public) }
let!(:group_member) { create(:group_member, :guest, group: group, user: user) }
let!(:new_parent_group_member) { create(:group_member, :guest, group: new_parent_group, user: user) }
@@ -879,6 +902,7 @@ RSpec.describe GroupsController do
context 'transferring when a project has container images' do
let(:group) { create(:group, :public, :nested) }
+ let(:project) { create(:project, namespace: group) }
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
before do
@@ -979,6 +1003,8 @@ RSpec.describe GroupsController do
end
context 'when there is no file available to download' do
+ let(:admin) { create(:admin) }
+
before do
sign_in(admin)
end
@@ -1149,9 +1175,7 @@ RSpec.describe GroupsController do
describe "GET #activity as JSON" do
include DesignManagementTestHelpers
- render_views
- let(:project) { create(:project, :public, group: group) }
let(:other_project) { create(:project, :public, group: group) }
def get_activity
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index 3049396dd0f..9ac42cbc3ec 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe HelpController do
+ include StubVersion
+
let(:user) { create(:user) }
before do
@@ -108,8 +110,56 @@ RSpec.describe HelpController do
end
it 'renders HTML' do
- expect(response).to render_template('show.html.haml')
- expect(response.media_type).to eq 'text/html'
+ aggregate_failures do
+ expect(response).to render_template('show.html.haml')
+ expect(response.media_type).to eq 'text/html'
+ end
+ end
+ end
+
+ context 'when a custom help_page_documentation_url is set' do
+ before do
+ stub_application_setting(help_page_documentation_base_url: documentation_base_url)
+ stub_version(gitlab_version, 'deadbeaf')
+ end
+
+ subject { get :show, params: { path: path }, format: 'html' }
+
+ let(:gitlab_version) { '13.4.0-ee' }
+ let(:documentation_base_url) { 'https://docs.gitlab.com' }
+ let(:path) { 'ssh/README' }
+
+ it 'redirects user to custom documentation url with a specified version' do
+ is_expected.to redirect_to("#{documentation_base_url}/13.4/ee/#{path}.html")
+ end
+
+ context 'when documentation url ends with a slash' do
+ let(:documentation_base_url) { 'https://docs.gitlab.com/' }
+
+ it 'redirects user to custom documentation url without slash duplicates' do
+ is_expected.to redirect_to("https://docs.gitlab.com/13.4/ee/#{path}.html")
+ end
+ end
+
+ context 'when it is a pre-release' do
+ let(:gitlab_version) { '13.4.0-pre' }
+
+ it 'redirects user to custom documentation url without a version' do
+ is_expected.to redirect_to("#{documentation_base_url}/ee/#{path}.html")
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(help_page_documentation_redirect: false)
+ end
+
+ it 'renders HTML' do
+ aggregate_failures do
+ is_expected.to render_template('show.html.haml')
+ expect(response.media_type).to eq 'text/html'
+ end
+ end
end
end
@@ -129,9 +179,12 @@ RSpec.describe HelpController do
path: 'user/img/markdown_logo'
},
format: :png
- expect(response).to be_successful
- expect(response.media_type).to eq 'image/png'
- expect(response.headers['Content-Disposition']).to match(/^inline;/)
+
+ aggregate_failures do
+ expect(response).to be_successful
+ expect(response.media_type).to eq 'image/png'
+ expect(response.headers['Content-Disposition']).to match(/^inline;/)
+ end
end
end
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
new file mode 100644
index 00000000000..f3850ff844e
--- /dev/null
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -0,0 +1,179 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::BulkImportsController do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when user is signed in' do
+ context 'when bulk_import feature flag is enabled' do
+ before do
+ stub_feature_flags(bulk_import: true)
+ end
+
+ describe 'POST configure' do
+ context 'when no params are passed in' do
+ it 'clears out existing session' do
+ post :configure
+
+ expect(session[:bulk_import_gitlab_access_token]).to be_nil
+ expect(session[:bulk_import_gitlab_url]).to be_nil
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(status_import_bulk_import_url)
+ end
+ end
+
+ it 'sets the session variables' do
+ token = 'token'
+ url = 'https://gitlab.example'
+
+ post :configure, params: { bulk_import_gitlab_access_token: token, bulk_import_gitlab_url: url }
+
+ expect(session[:bulk_import_gitlab_access_token]).to eq(token)
+ expect(session[:bulk_import_gitlab_url]).to eq(url)
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(status_import_bulk_import_url)
+ end
+
+ it 'strips access token with spaces' do
+ token = 'token'
+
+ post :configure, params: { bulk_import_gitlab_access_token: " #{token} " }
+
+ expect(session[:bulk_import_gitlab_access_token]).to eq(token)
+ expect(controller).to redirect_to(status_import_bulk_import_url)
+ end
+ end
+
+ describe 'GET status' do
+ let(:client) { Gitlab::BulkImport::Client.new(uri: 'http://gitlab.example', token: 'token') }
+
+ describe 'serialized group data' do
+ let(:client_response) do
+ [
+ { 'id' => 1, 'full_name' => 'group1', 'full_path' => 'full/path/group1' },
+ { 'id' => 2, 'full_name' => 'group2', 'full_path' => 'full/path/group2' }
+ ]
+ end
+
+ before do
+ allow(controller).to receive(:client).and_return(client)
+ allow(client).to receive(:get).with('groups', top_level_only: true).and_return(client_response)
+ end
+
+ it 'returns serialized group data' do
+ get :status, format: :json
+
+ expect(response.parsed_body).to eq({ importable_data: client_response }.as_json)
+ end
+ end
+
+ context 'when the host URL is local or not HTTP' do
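+ # Each URL below defines its own before/it example pair asserting the request is blocked.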
+ %w[https://localhost:3000 http://192.168.0.1 ftp://testing].each do |url|
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+
+ session[:bulk_import_gitlab_access_token] = 'test'
+ session[:bulk_import_gitlab_url] = url
+ end
+
+ it 'denies network request' do
+ get :status
+
+ expect(controller).to redirect_to(new_group_path)
+ expect(flash[:alert]).to eq('Specified URL cannot be used: "Only allowed schemes are http, https"')
+ end
+ end
+
+ context 'when local requests are allowed' do
+ %w[https://localhost:3000 http://192.168.0.1].each do |url|
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
+
+ session[:bulk_import_gitlab_access_token] = 'test'
+ session[:bulk_import_gitlab_url] = url
+ end
+
+ it 'allows network request' do
+ get :status
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+ end
+
+ context 'when connection error occurs' do
+ before do
+ allow(controller).to receive(:client).and_return(client)
+ allow(client).to receive(:get).and_raise(Gitlab::BulkImport::Client::ConnectionError)
+ end
+
+ it 'returns 422' do
+ get :status, format: :json
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+
+ it 'clears session' do
+ get :status, format: :json
+
+ expect(session[:bulk_import_gitlab_url]).to be_nil
+ expect(session[:bulk_import_gitlab_access_token]).to be_nil
+ end
+ end
+ end
+ end
+
+ context 'when bulk_import feature flag is disabled' do
+ before do
+ stub_feature_flags(bulk_import: false)
+ end
+
+ context 'POST configure' do
+ it 'returns 404' do
+ post :configure
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'GET status' do
+ it 'returns 404' do
+ get :status
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
+ context 'when user is signed out' do
+ before do
+ sign_out(user)
+ end
+
+ context 'POST configure' do
+ it 'redirects to sign in page' do
+ post :configure
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'GET status' do
+ it 'redirects to sign in page' do
+ get :status
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/import/manifest_controller_spec.rb b/spec/controllers/import/manifest_controller_spec.rb
index ec8bd45b65c..6b21b45e698 100644
--- a/spec/controllers/import/manifest_controller_spec.rb
+++ b/spec/controllers/import/manifest_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Import::ManifestController do
+RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state do
include ImportSpecHelper
let_it_be(:user) { create(:user) }
@@ -16,42 +16,93 @@ RSpec.describe Import::ManifestController do
sign_in(user)
end
- def assign_session_group
- session[:manifest_import_repositories] = []
- session[:manifest_import_group_id] = group.id
+ describe 'POST upload' do
+ context 'with a valid manifest' do
+ it 'saves the manifest and redirects to the status page', :aggregate_failures do
+ post :upload, params: {
+ group_id: group.id,
+ manifest: fixture_file_upload('spec/fixtures/aosp_manifest.xml')
+ }
+
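+ # The uploaded manifest is persisted via Gitlab::ManifestImport::Metadata rather than the session.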
+ metadata = Gitlab::ManifestImport::Metadata.new(user)
+
+ expect(metadata.group_id).to eq(group.id)
+ expect(metadata.repositories.size).to eq(660)
+ expect(metadata.repositories.first).to include(name: 'platform/build', path: 'build/make')
+
+ expect(response).to redirect_to(status_import_manifest_path)
+ end
+ end
+
+ context 'with an invalid manifest' do
+ it 'displays an error' do
+ post :upload, params: {
+ group_id: group.id,
+ manifest: fixture_file_upload('spec/fixtures/invalid_manifest.xml')
+ }
+
+ expect(assigns(:errors)).to be_present
+ end
+ end
+
+ context 'when the user cannot create projects in the group' do
+ it 'displays an error' do
+ sign_in(create(:user))
+
+ post :upload, params: {
+ group_id: group.id,
+ manifest: fixture_file_upload('spec/fixtures/aosp_manifest.xml')
+ }
+
+ expect(assigns(:errors)).to be_present
+ end
+ end
end
describe 'GET status' do
- let(:repo1) { OpenStruct.new(id: 'test1', url: 'http://demo.host/test1') }
- let(:repo2) { OpenStruct.new(id: 'test2', url: 'http://demo.host/test2') }
+ let(:repo1) { { id: 'test1', url: 'http://demo.host/test1' } }
+ let(:repo2) { { id: 'test2', url: 'http://demo.host/test2' } }
let(:repos) { [repo1, repo2] }
- before do
- assign_session_group
+ shared_examples 'status action' do
+ it "returns variables for json request" do
+ project = create(:project, import_type: 'manifest', creator_id: user.id)
- session[:manifest_import_repositories] = repos
- end
+ get :status, format: :json
- it "returns variables for json request" do
- project = create(:project, import_type: 'manifest', creator_id: user.id)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
+ expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1[:id])
+ expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2[:id])
+ expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
+ end
- get :status, format: :json
+ it "does not show an already added project" do
+ project = create(:project, import_type: 'manifest', namespace: user.namespace, import_status: :finished, import_url: repo1[:url])
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
- expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1.id)
- expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2.id)
- expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
+ get :status, format: :json
+
+ expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
+ expect(json_response.dig("provider_repos").length).to eq(1)
+ expect(json_response.dig("provider_repos", 0, "id")).not_to eq(repo1[:id])
+ end
end
- it "does not show already added project" do
- project = create(:project, import_type: 'manifest', namespace: user.namespace, import_status: :finished, import_url: repo1.url)
+ context 'when the data is stored via Gitlab::ManifestImport::Metadata' do
+ before do
+ Gitlab::ManifestImport::Metadata.new(user).save(repos, group.id)
+ end
+
+ include_examples 'status action'
+ end
- get :status, format: :json
+ context 'when the data is stored in the user session' do
+ before do
+ session[:manifest_import_repositories] = repos
+ session[:manifest_import_group_id] = group.id
+ end
- expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
- expect(json_response.dig("provider_repos").length).to eq(1)
- expect(json_response.dig("provider_repos", 0, "id")).not_to eq(repo1.id)
+ include_examples 'status action'
end
end
end
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index a083cfac981..75a972d2f95 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -17,8 +17,53 @@ RSpec.describe InvitesController, :snowplow do
}
end
- before do
- controller.instance_variable_set(:@member, member)
+ shared_examples 'invalid token' do
+ context 'when invite token is not valid' do
+ let(:params) { { id: '_bogus_token_' } }
+
+ it 'renders the 404 page' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ shared_examples "tracks the 'accepted' event for the invitation reminders experiment" do
+ before do
+ stub_experiment(invitation_reminders: true)
+ allow(Gitlab::Experimentation).to receive(:enabled_for_attribute?).with(:invitation_reminders, member.invite_email).and_return(experimental_group)
+ end
+
+ context 'when in the control group' do
+ let(:experimental_group) { false }
+
+ it "tracks the 'accepted' event" do
+ request
+
+ expect_snowplow_event(
+ category: 'Growth::Acquisition::Experiment::InvitationReminders',
+ label: md5_member_global_id,
+ property: 'control_group',
+ action: 'accepted'
+ )
+ end
+ end
+
+ context 'when in the experimental group' do
+ let(:experimental_group) { true }
+
+ it "tracks the 'accepted' event" do
+ request
+
+ expect_snowplow_event(
+ category: 'Growth::Acquisition::Experiment::InvitationReminders',
+ label: md5_member_global_id,
+ property: 'experimental_group',
+ action: 'accepted'
+ )
+ end
+ end
end
describe 'GET #show' do
@@ -39,7 +84,7 @@ RSpec.describe InvitesController, :snowplow do
end
it 'forces re-confirmation if email does not match signed in user' do
- member.invite_email = 'bogus@email.com'
+ member.update!(invite_email: 'bogus@email.com')
expect do
request
@@ -64,8 +109,8 @@ RSpec.describe InvitesController, :snowplow do
it 'tracks the user as experiment group' do
request
- expect_snowplow_event(snowplow_event.merge(action: 'opened'))
- expect_snowplow_event(snowplow_event.merge(action: 'accepted'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'opened'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'accepted'))
end
end
@@ -76,10 +121,13 @@ RSpec.describe InvitesController, :snowplow do
it 'tracks the user as control group' do
request
- expect_snowplow_event(snowplow_event.merge(action: 'opened'))
- expect_snowplow_event(snowplow_event.merge(action: 'accepted'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'opened'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'accepted'))
end
end
+
+ it_behaves_like "tracks the 'accepted' event for the invitation reminders experiment"
+ it_behaves_like 'invalid token'
end
context 'when not logged in' do
@@ -125,7 +173,7 @@ RSpec.describe InvitesController, :snowplow do
it 'tracks the user as experiment group' do
request
- expect_snowplow_event(snowplow_event.merge(action: 'accepted'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'accepted'))
end
end
@@ -136,8 +184,31 @@ RSpec.describe InvitesController, :snowplow do
it 'tracks the user as control group' do
request
- expect_snowplow_event(snowplow_event.merge(action: 'accepted'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'accepted'))
end
end
+
+ it_behaves_like "tracks the 'accepted' event for the invitation reminders experiment"
+ it_behaves_like 'invalid token'
+ end
+
+ describe 'POST #decline for link in UI' do
+ before do
+ sign_in(user)
+ end
+
+ subject(:request) { post :decline, params: params }
+
+ it_behaves_like 'invalid token'
+ end
+
+ describe 'GET #decline for link in email' do
+ before do
+ sign_in(user)
+ end
+
+ subject(:request) { get :decline, params: params }
+
+ it_behaves_like 'invalid token'
end
end
diff --git a/spec/controllers/jira_connect/events_controller_spec.rb b/spec/controllers/jira_connect/events_controller_spec.rb
index d1a2dd6e7af..8a07f69e480 100644
--- a/spec/controllers/jira_connect/events_controller_spec.rb
+++ b/spec/controllers/jira_connect/events_controller_spec.rb
@@ -4,14 +4,20 @@ require 'spec_helper'
RSpec.describe JiraConnect::EventsController do
describe '#installed' do
- subject do
- post :installed, params: {
- clientKey: '1234',
- sharedSecret: 'secret',
+ let(:client_key) { '1234' }
+ let(:shared_secret) { 'secret' }
+ let(:params) do
+ {
+ clientKey: client_key,
+ sharedSecret: shared_secret,
baseUrl: 'https://test.atlassian.net'
}
end
+ subject do
+ post :installed, params: params
+ end
+
it 'saves the jira installation data' do
expect { subject }.to change { JiraConnectInstallation.count }.by(1)
end
@@ -19,15 +25,15 @@ RSpec.describe JiraConnect::EventsController do
it 'saves the correct values' do
subject
- installation = JiraConnectInstallation.find_by_client_key('1234')
+ installation = JiraConnectInstallation.find_by_client_key(client_key)
- expect(installation.shared_secret).to eq('secret')
+ expect(installation.shared_secret).to eq(shared_secret)
expect(installation.base_url).to eq('https://test.atlassian.net')
end
context 'client key already exists' do
it 'returns 422' do
- create(:jira_connect_installation, client_key: '1234')
+ create(:jira_connect_installation, client_key: client_key)
subject
@@ -35,6 +41,23 @@ RSpec.describe JiraConnect::EventsController do
end
end
+ context 'when it is a version update and shared_secret is not sent' do
+ let(:params) do
+ {
+ clientKey: client_key,
+ baseUrl: 'https://test.atlassian.net'
+ }
+ end
+
+ it 'validates the JWT token in the Authorization header and returns 200 without creating a new installation' do
+ create(:jira_connect_installation, client_key: client_key, shared_secret: shared_secret)
+ request.headers["Authorization"] = "Bearer #{Atlassian::Jwt.encode({ iss: client_key }, shared_secret)}"
+
+ expect { subject }.not_to change { JiraConnectInstallation.count }
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
describe '#uninstalled' do
let!(:installation) { create(:jira_connect_installation) }
let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/events/uninstalled', 'POST', 'https://gitlab.test') }
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index e08c92da87f..249e6322d1c 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -101,6 +101,19 @@ RSpec.describe ProfilesController, :request_store do
end
end
+ describe 'GET audit_log' do
+ it 'tracks search event', :snowplow do
+ sign_in(user)
+
+ get :audit_log
+
+ expect_snowplow_event(
+ category: 'ProfilesController',
+ action: 'search_audit_event'
+ )
+ end
+ end
+
describe 'PUT update_username' do
let(:namespace) { user.namespace }
let(:gitlab_shell) { Gitlab::Shell.new }
diff --git a/spec/controllers/projects/alert_management_controller_spec.rb b/spec/controllers/projects/alert_management_controller_spec.rb
index 6a1952f949b..d80147b5c59 100644
--- a/spec/controllers/projects/alert_management_controller_spec.rb
+++ b/spec/controllers/projects/alert_management_controller_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Projects::AlertManagementController do
let(:role) { :reporter }
it 'shows 404' do
- get :index, params: { namespace_id: project.namespace, project_id: project }
+ get :details, params: { namespace_id: project.namespace, project_id: project, id: id }
expect(response).to have_gitlab_http_status(:not_found)
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index b998dee09b2..a56425c2a22 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -349,7 +349,7 @@ RSpec.describe Projects::BlobController do
end
it_behaves_like 'tracking unique hll events', :track_editor_edit_actions do
- subject { put :update, params: default_params, format: format }
+ subject(:request) { put :update, params: default_params }
let(:target_id) { 'g_edit_by_sfe' }
let(:expected_type) { instance_of(Integer) }
@@ -465,7 +465,7 @@ RSpec.describe Projects::BlobController do
end
it_behaves_like 'tracking unique hll events', :track_editor_edit_actions do
- subject { post :create, params: default_params, format: format }
+ subject(:request) { post :create, params: default_params }
let(:target_id) { 'g_edit_by_sfe' }
let(:expected_type) { instance_of(Integer) }
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 51a451570c5..52cd6869b04 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -251,6 +251,7 @@ RSpec.describe Projects::ClustersController do
cluster: {
name: 'new-cluster',
managed: '1',
+ namespace_per_environment: '0',
provider_gcp_attributes: {
gcp_project_id: 'gcp-project-12345',
legacy_abac: legacy_abac_param
@@ -278,6 +279,7 @@ RSpec.describe Projects::ClustersController do
expect(project.clusters.first).to be_kubernetes
expect(project.clusters.first.provider_gcp).to be_legacy_abac
expect(project.clusters.first.managed?).to be_truthy
+ expect(project.clusters.first.namespace_per_environment?).to be_falsy
end
context 'when legacy_abac param is false' do
@@ -369,6 +371,7 @@ RSpec.describe Projects::ClustersController do
expect(project.clusters.first).to be_user
expect(project.clusters.first).to be_kubernetes
+ expect(project.clusters.first).to be_namespace_per_environment
end
end
@@ -400,6 +403,7 @@ RSpec.describe Projects::ClustersController do
expect(cluster).to be_user
expect(cluster).to be_kubernetes
expect(cluster).to be_platform_kubernetes_rbac
+ expect(cluster).to be_namespace_per_environment
end
end
@@ -726,6 +730,7 @@ RSpec.describe Projects::ClustersController do
enabled: false,
name: 'my-new-cluster-name',
managed: false,
+ namespace_per_environment: false,
platform_kubernetes_attributes: {
namespace: 'my-namespace'
}
@@ -742,6 +747,7 @@ RSpec.describe Projects::ClustersController do
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
+ expect(cluster).not_to be_namespace_per_environment
expect(cluster.platform_kubernetes.namespace).to eq('my-namespace')
end
diff --git a/spec/controllers/projects/feature_flags_clients_controller_spec.rb b/spec/controllers/projects/feature_flags_clients_controller_spec.rb
new file mode 100644
index 00000000000..f527d2ba430
--- /dev/null
+++ b/spec/controllers/projects/feature_flags_clients_controller_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::FeatureFlagsClientsController do
+ include Gitlab::Routing
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ describe 'POST reset_token.json' do
+ subject(:reset_token) do
+ post :reset_token,
+ params: { namespace_id: project.namespace, project_id: project },
+ format: :json
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when user is a project maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'and a feature flags client exists' do
+ it 'regenerates feature flags client token' do
+ project.create_operations_feature_flags_client!
+ expect { reset_token }.to change { project.reload.feature_flags_client_token }
+
+ expect(json_response['token']).to eq(project.feature_flags_client_token)
+ end
+ end
+
+ context 'but a feature flags client does not exist' do
+ it 'returns 404' do
+ reset_token
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when user is not a project maintainer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns 404' do
+ reset_token
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/feature_flags_controller_spec.rb b/spec/controllers/projects/feature_flags_controller_spec.rb
new file mode 100644
index 00000000000..96eeb6f239f
--- /dev/null
+++ b/spec/controllers/projects/feature_flags_controller_spec.rb
@@ -0,0 +1,1604 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::FeatureFlagsController do
+ include Gitlab::Routing
+ include FeatureFlagHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let(:user) { developer }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET index' do
+ render_views
+
+ subject { get(:index, params: view_params) }
+
+ context 'when there are no feature flags' do
+ it 'responds with success' do
+ is_expected.to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'for a list of feature flags' do
+ let!(:feature_flags) { create_list(:operations_feature_flag, 50, project: project) }
+
+ it 'responds with success' do
+ is_expected.to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when the user is a reporter' do
+ let(:user) { reporter }
+
+ it 'responds with not found' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'GET #index.json' do
+ subject { get(:index, params: view_params, format: :json) }
+
+ let!(:feature_flag_active) do
+ create(:operations_feature_flag, project: project, active: true, name: 'feature_flag_a')
+ end
+
+ let!(:feature_flag_inactive) do
+ create(:operations_feature_flag, project: project, active: false, name: 'feature_flag_b')
+ end
+
+ it 'returns all feature flags as json response' do
+ subject
+
+ expect(json_response['feature_flags'].count).to eq(2)
+ expect(json_response['feature_flags'].first['name']).to eq(feature_flag_active.name)
+ expect(json_response['feature_flags'].second['name']).to eq(feature_flag_inactive.name)
+ end
+
+ it 'returns CRUD paths' do
+ subject
+
+ expected_edit_path = edit_project_feature_flag_path(project, feature_flag_active)
+ expected_update_path = project_feature_flag_path(project, feature_flag_active)
+ expected_destroy_path = project_feature_flag_path(project, feature_flag_active)
+
+ feature_flag_json = json_response['feature_flags'].first
+
+ expect(feature_flag_json['edit_path']).to eq(expected_edit_path)
+ expect(feature_flag_json['update_path']).to eq(expected_update_path)
+ expect(feature_flag_json['destroy_path']).to eq(expected_destroy_path)
+ end
+
+ it 'returns the summary of feature flags' do
+ subject
+
+ expect(json_response['count']['all']).to eq(2)
+ expect(json_response['count']['enabled']).to eq(1)
+ expect(json_response['count']['disabled']).to eq(1)
+ end
+
+ it 'matches json schema' do
+ is_expected.to match_response_schema('feature_flags')
+ end
+
+ it 'returns false for active when the feature flag is inactive even if it has an active scope' do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag_inactive,
+ environment_scope: 'production',
+ active: true)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ feature_flag_json = json_response['feature_flags'].second
+
+ expect(feature_flag_json['active']).to eq(false)
+ end
+
+ it 'returns the feature flag iid' do
+ subject
+
+ feature_flag_json = json_response['feature_flags'].first
+
+ expect(feature_flag_json['iid']).to eq(feature_flag_active.iid)
+ end
+
+ context 'when scope is specified' do
+ let(:view_params) do
+ { namespace_id: project.namespace, project_id: project, scope: scope }
+ end
+
+ context 'when all feature flags are requested' do
+ let(:scope) { 'all' }
+
+ it 'returns all feature flags' do
+ subject
+
+ expect(json_response['feature_flags'].count).to eq(2)
+ end
+ end
+
+ context 'when enabled feature flags are requested' do
+ let(:scope) { 'enabled' }
+
+ it 'returns enabled feature flags' do
+ subject
+
+ expect(json_response['feature_flags'].count).to eq(1)
+ expect(json_response['feature_flags'].first['active']).to be_truthy
+ end
+ end
+
+ context 'when disabled feature flags are requested' do
+ let(:scope) { 'disabled' }
+
+ it 'returns disabled feature flags' do
+ subject
+
+ expect(json_response['feature_flags'].count).to eq(1)
+ expect(json_response['feature_flags'].first['active']).to be_falsy
+ end
+ end
+ end
+
+ context 'when feature flags have additional scopes' do
+ let!(:feature_flag_active_scope) do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag_active,
+ environment_scope: 'production',
+ active: false)
+ end
+
+ let!(:feature_flag_inactive_scope) do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag_inactive,
+ environment_scope: 'staging',
+ active: false)
+ end
+
+ it 'returns a correct summary' do
+ subject
+
+ expect(json_response['count']['all']).to eq(2)
+ expect(json_response['count']['enabled']).to eq(1)
+ expect(json_response['count']['disabled']).to eq(1)
+ end
+
+ it 'recognizes feature flag 1 as active' do
+ subject
+
+ expect(json_response['feature_flags'].first['active']).to be_truthy
+ end
+
+ it 'recognizes feature flag 2 as inactive' do
+ subject
+
+ expect(json_response['feature_flags'].second['active']).to be_falsy
+ end
+
+ it 'has ordered scopes' do
+ subject
+
+ expect(json_response['feature_flags'][0]['scopes'][0]['id'])
+ .to be < json_response['feature_flags'][0]['scopes'][1]['id']
+ expect(json_response['feature_flags'][1]['scopes'][0]['id'])
+ .to be < json_response['feature_flags'][1]['scopes'][1]['id']
+ end
+
+ it 'does not have N+1 problem' do
+ recorded = ActiveRecord::QueryRecorder.new { subject }
+
+ related_count = recorded.log
+ .count { |query| query.include?('operations_feature_flag') }
+
+ expect(related_count).to be_within(5).of(2)
+ end
+ end
+
+ context 'with version 1 and 2 feature flags' do
+ let!(:new_version_feature_flag) do
+ create(:operations_feature_flag, :new_version_flag, project: project, name: 'feature_flag_c')
+ end
+
+ it 'returns all feature flags as json response' do
+ subject
+
+ expect(json_response['feature_flags'].count).to eq(3)
+ end
+
+ it 'returns only version 1 flags when new version flags are disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expected = [feature_flag_active.name, feature_flag_inactive.name].sort
+ expect(json_response['feature_flags'].map { |f| f['name'] }.sort).to eq(expected)
+ end
+ end
+ end
+
+ describe 'GET new' do
+ render_views
+
+ subject { get(:new, params: view_params) }
+
+ it 'renders the form' do
+ is_expected.to have_gitlab_http_status(:ok)
+ end
+ end
+
+ describe 'GET #show.json' do
+ subject { get(:show, params: params, format: :json) }
+
+ let!(:feature_flag) do
+ create(:operations_feature_flag, project: project)
+ end
+
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid
+ }
+ end
+
+ it 'returns the feature flag as json response' do
+ subject
+
+ expect(json_response['name']).to eq(feature_flag.name)
+ expect(json_response['active']).to eq(feature_flag.active)
+ expect(json_response['version']).to eq('legacy_flag')
+ end
+
+ it 'matches json schema' do
+ is_expected.to match_response_schema('feature_flag')
+ end
+
+ it 'routes based on iid' do
+ other_project = create(:project)
+ other_project.add_developer(user)
+ other_feature_flag = create(:operations_feature_flag, project: other_project,
+ name: 'other_flag')
+ params = {
+ namespace_id: other_project.namespace,
+ project_id: other_project,
+ iid: other_feature_flag.iid
+ }
+
+ get(:show, params: params, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq(other_feature_flag.name)
+ end
+
+ it 'routes based on iid when new version flags are disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+ other_project = create(:project)
+ other_project.add_developer(user)
+ other_feature_flag = create(:operations_feature_flag, project: other_project,
+ name: 'other_flag')
+ params = {
+ namespace_id: other_project.namespace,
+ project_id: other_project,
+ iid: other_feature_flag.iid
+ }
+
+ get(:show, params: params, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq(other_feature_flag.name)
+ end
+
+ context 'when feature flag is not found' do
+ let!(:feature_flag) { }
+
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: 1
+ }
+ end
+
+ it 'returns 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when feature flags have additional scopes' do
+ context 'when there is at least one active scope' do
+ let!(:feature_flag) do
+ create(:operations_feature_flag, project: project, active: false)
+ end
+
+ let!(:feature_flag_scope_production) do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag,
+ environment_scope: 'review/*',
+ active: true)
+ end
+
+ it 'returns false for active' do
+ subject
+
+ expect(json_response['active']).to eq(false)
+ end
+ end
+
+ context 'when all scopes are inactive' do
+ let!(:feature_flag) do
+ create(:operations_feature_flag, project: project, active: false)
+ end
+
+ let!(:feature_flag_scope_production) do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag,
+ environment_scope: 'production',
+ active: false)
+ end
+
+ it 'recognizes the feature flag as inactive' do
+ subject
+
+ expect(json_response['active']).to be_falsy
+ end
+ end
+ end
+
+ context 'with a version 2 feature flag' do
+ let!(:new_version_feature_flag) do
+ create(:operations_feature_flag, :new_version_flag, project: project)
+ end
+
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: new_version_feature_flag.iid
+ }
+ end
+
+ it 'returns the feature flag' do
+ subject
+
+ expect(json_response['name']).to eq(new_version_feature_flag.name)
+ expect(json_response['active']).to eq(new_version_feature_flag.active)
+ expect(json_response['version']).to eq('new_version_flag')
+ end
+
+ it 'returns a 404 when new version flags are disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns strategies ordered by id' do
+ first_strategy = create(:operations_strategy, feature_flag: new_version_feature_flag)
+ second_strategy = create(:operations_strategy, feature_flag: new_version_feature_flag)
+
+ subject
+
+ expect(json_response['strategies'].map { |s| s['id'] }).to eq([first_strategy.id, second_strategy.id])
+ end
+ end
+ end
+
+ describe 'POST create.json' do
+ subject { post(:create, params: params, format: :json) }
+
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true
+ }
+ }
+ end
+
+ it 'returns 200' do
+ is_expected.to have_gitlab_http_status(:ok)
+ end
+
+ it 'creates a new feature flag' do
+ subject
+
+ expect(json_response['name']).to eq('my_feature_flag')
+ expect(json_response['active']).to be_truthy
+ end
+
+ it 'creates a default scope' do
+ subject
+
+ expect(json_response['scopes'].count).to eq(1)
+ expect(json_response['scopes'].first['environment_scope']).to eq('*')
+ expect(json_response['scopes'].first['active']).to be_truthy
+ end
+
+ it 'matches json schema' do
+ is_expected.to match_response_schema('feature_flag')
+ end
+
+ context 'when a feature flag with the same name already exists' do
+ before do
+ create(:operations_feature_flag, name: 'my_feature_flag', project: project)
+ end
+
+ it 'returns 400' do
+ is_expected.to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns an error message' do
+ subject
+
+ expect(json_response['message']).to include('Name has already been taken')
+ end
+ end
+
+ context 'without the active parameter' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag'
+ }
+ }
+ end
+
+ it 'creates a flag with active set to true' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('feature_flag')
+ expect(json_response['active']).to eq(true)
+ expect(Operations::FeatureFlag.last.active).to eq(true)
+ end
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when creating an additional scope' do
+ let(:params) do
+ view_params.merge({
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ scopes_attributes: [{ environment_scope: '*', active: true },
+ { environment_scope: 'production', active: false }]
+ }
+ })
+ end
+
+ it 'creates feature flag scopes successfully' do
+ expect { subject }.to change { Operations::FeatureFlagScope.count }.by(2)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'creates feature flag scopes in the correct order' do
+ subject
+
+ expect(json_response['scopes'].first['environment_scope']).to eq('*')
+ expect(json_response['scopes'].second['environment_scope']).to eq('production')
+ end
+
+ context 'when default scope is not placed first' do
+ let(:params) do
+ view_params.merge({
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ scopes_attributes: [{ environment_scope: 'production', active: false },
+ { environment_scope: '*', active: true }]
+ }
+ })
+ end
+
+ it 'returns 400' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message'])
+ .to include('Default scope has to be the first element')
+ end
+ end
+ end
+
+ context 'when creating an additional scope with a percentage rollout' do
+ it 'creates a strategy for the scope' do
+ params = view_params.merge({
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ scopes_attributes: [{ environment_scope: '*', active: true },
+ { environment_scope: 'production', active: false,
+ strategies: [{ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '42' } }] }]
+ }
+ })
+
+ post(:create, params: params, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ production_strategies_json = json_response['scopes'].second['strategies']
+ expect(production_strategies_json).to eq([{
+ 'name' => 'gradualRolloutUserId',
+ 'parameters' => { "groupId" => "default", "percentage" => "42" }
+ }])
+ end
+ end
+
+ context 'when creating an additional scope with a userWithId strategy' do
+ it 'creates a strategy for the scope' do
+ params = view_params.merge({
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ scopes_attributes: [{ environment_scope: '*', active: true },
+ { environment_scope: 'production', active: false,
+ strategies: [{ name: 'userWithId',
+ parameters: { userIds: '123,4,6722' } }] }]
+ }
+ })
+
+ post(:create, params: params, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ production_strategies_json = json_response['scopes'].second['strategies']
+ expect(production_strategies_json).to eq([{
+ 'name' => 'userWithId',
+ 'parameters' => { "userIds" => "123,4,6722" }
+ }])
+ end
+ end
+
+ context 'when creating an additional scope without a strategy' do
+ it 'creates a default strategy' do
+ params = view_params.merge({
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ scopes_attributes: [{ environment_scope: '*', active: true }]
+ }
+ })
+
+ post(:create, params: params, format: :json)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ default_strategies_json = json_response['scopes'].first['strategies']
+ expect(default_strategies_json).to eq([{ "name" => "default", "parameters" => {} }])
+ end
+ end
+
+ context 'when creating a version 2 feature flag' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ version: 'new_version_flag'
+ }
+ }
+ end
+
+ it 'creates a new feature flag' do
+ subject
+
+ expect(json_response['name']).to eq('my_feature_flag')
+ expect(json_response['active']).to be_truthy
+ expect(json_response['version']).to eq('new_version_flag')
+ end
+ end
+
+ context 'when creating a version 2 feature flag with strategies and scopes' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ version: 'new_version_flag',
+ strategies_attributes: [{
+ name: 'userWithId',
+ parameters: { userIds: 'user1' },
+ scopes_attributes: [{ environment_scope: '*' }]
+ }]
+ }
+ }
+ end
+
+ it 'creates a new feature flag with the strategies and scopes' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq('my_feature_flag')
+ expect(json_response['active']).to eq(true)
+ expect(json_response['strategies'].count).to eq(1)
+
+ strategy_json = json_response['strategies'].first
+ expect(strategy_json).to have_key('id')
+ expect(strategy_json['name']).to eq('userWithId')
+ expect(strategy_json['parameters']).to eq({ 'userIds' => 'user1' })
+ expect(strategy_json['scopes'].count).to eq(1)
+
+ scope_json = strategy_json['scopes'].first
+ expect(scope_json).to have_key('id')
+ expect(scope_json['environment_scope']).to eq('*')
+ end
+ end
+
+ context 'when creating a version 2 feature flag with a gradualRolloutUserId strategy' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ version: 'new_version_flag',
+ strategies_attributes: [{
+ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '15' },
+ scopes_attributes: [{ environment_scope: 'production' }]
+ }]
+ }
+ }
+ end
+
+ it 'creates the new strategy' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ strategy_json = json_response['strategies'].first
+ expect(strategy_json['name']).to eq('gradualRolloutUserId')
+ expect(strategy_json['parameters']).to eq({ 'groupId' => 'default', 'percentage' => '15' })
+ expect(strategy_json['scopes'].count).to eq(1)
+
+ scope_json = strategy_json['scopes'].first
+ expect(scope_json['environment_scope']).to eq('production')
+ end
+ end
+
+ context 'when creating a version 2 feature flag with a flexibleRollout strategy' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ version: 'new_version_flag',
+ strategies_attributes: [{
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default', rollout: '15', stickiness: 'DEFAULT' },
+ scopes_attributes: [{ environment_scope: 'production' }]
+ }]
+ }
+ }
+ end
+
+ it 'creates the new strategy' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ strategy_json = json_response['strategies'].first
+ expect(strategy_json['name']).to eq('flexibleRollout')
+ expect(strategy_json['parameters']).to eq({ 'groupId' => 'default', 'rollout' => '15', 'stickiness' => 'DEFAULT' })
+ expect(strategy_json['scopes'].count).to eq(1)
+
+ scope_json = strategy_json['scopes'].first
+ expect(scope_json['environment_scope']).to eq('production')
+ end
+ end
+
+ context 'when creating a version 2 feature flag with a gitlabUserList strategy' do
+ let!(:user_list) do
+ create(:operations_feature_flag_user_list, project: project,
+ name: 'My List', user_xids: 'user1,user2')
+ end
+
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ version: 'new_version_flag',
+ strategies_attributes: [{
+ name: 'gitlabUserList',
+ parameters: {},
+ user_list_id: user_list.id,
+ scopes_attributes: [{ environment_scope: 'production' }]
+ }]
+ }
+ }
+ end
+
+ it 'creates the new strategy' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies']).to match([a_hash_including({
+ 'name' => 'gitlabUserList',
+ 'parameters' => {},
+ 'user_list' => {
+ 'id' => user_list.id,
+ 'iid' => user_list.iid,
+ 'name' => 'My List',
+ 'user_xids' => 'user1,user2'
+ },
+ 'scopes' => [a_hash_including({
+ 'environment_scope' => 'production'
+ })]
+ })])
+ end
+ end
+
+ context 'when version parameter is invalid' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ version: 'bad_version'
+ }
+ }
+ end
+
+ it 'returns a 400' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'message' => 'Version is invalid' })
+ expect(Operations::FeatureFlag.count).to eq(0)
+ end
+ end
+
+ context 'when version 2 flags are disabled' do
+ context 'and attempting to create a version 2 flag' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true,
+ version: 'new_version_flag'
+ }
+ }
+ end
+
+ it 'returns a 400' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(Operations::FeatureFlag.count).to eq(0)
+ end
+ end
+
+ context 'and attempting to create a version 1 flag' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ operations_feature_flag: {
+ name: 'my_feature_flag',
+ active: true
+ }
+ }
+ end
+
+ it 'creates the flag' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(Operations::FeatureFlag.count).to eq(1)
+ expect(json_response['version']).to eq('legacy_flag')
+ end
+ end
+ end
+ end
+
+ describe 'DELETE destroy.json' do
+ subject { delete(:destroy, params: params, format: :json) }
+
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid
+ }
+ end
+
+ it 'returns 200' do
+ is_expected.to have_gitlab_http_status(:ok)
+ end
+
+ it 'deletes one feature flag' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(-1)
+ end
+
+ it 'destroys the default scope' do
+ expect { subject }.to change { Operations::FeatureFlagScope.count }.by(-1)
+ end
+
+ it 'matches json schema' do
+ is_expected.to match_response_schema('feature_flag')
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the feature flag does not exist' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: 0
+ }
+ end
+
+ it 'returns not found' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when there is an additional scope' do
+ let!(:scope) { create_scope(feature_flag, 'production', false) }
+
+ it 'destroys the default scope and production scope' do
+ expect { subject }.to change { Operations::FeatureFlagScope.count }.by(-2)
+ end
+ end
+
+ context 'with a version 2 flag' do
+ let!(:new_version_flag) { create(:operations_feature_flag, :new_version_flag, project: project) }
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: new_version_flag.iid
+ }
+ end
+
+ it 'deletes the flag' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(-1)
+ end
+
+ context 'when new version flags are disabled' do
+ it 'returns a 404' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ expect { subject }.not_to change { Operations::FeatureFlag.count }
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
+ describe 'PUT update.json' do
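+ # Helper that issues a JSON PUT to the update action with the given feature flag attributes.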
+ def put_request(feature_flag, feature_flag_params)
+ params = {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: feature_flag_params
+ }
+
+ put(:update, params: params, format: :json, as: :json)
+ end
+
+ before do
+ stub_feature_flags(
+ feature_flags_legacy_read_only: false,
+ feature_flags_legacy_read_only_override: false
+ )
+ end
+
+ subject { put(:update, params: params, format: :json) }
+
+ let!(:feature_flag) do
+ create(:operations_feature_flag,
+ :legacy_flag,
+ name: 'ci_live_trace',
+ active: true,
+ project: project)
+ end
+
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ name: 'ci_new_live_trace'
+ }
+ }
+ end
+
+ it 'returns 200' do
+ is_expected.to have_gitlab_http_status(:ok)
+ end
+
+ it 'updates the name of the feature flag' do
+ subject
+
+ expect(json_response['name']).to eq('ci_new_live_trace')
+ end
+
+ it 'matches json schema' do
+ is_expected.to match_response_schema('feature_flag')
+ end
+
+ context 'when updating the active attribute' do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ active: false
+ }
+ }
+ end
+
+ it 'updates active from true to false' do
+ expect { subject }
+ .to change { feature_flag.reload.active }.from(true).to(false)
+ end
+
+ it "does not change the default scope's active attribute" do
+ expect { subject }
+ .not_to change { feature_flag.default_scope.reload.active }.from(true)
+ end
+
+ it 'updates active from false to true when an inactive feature flag has an active scope' do
+ feature_flag = create(:operations_feature_flag, project: project, name: 'my_flag', active: false)
+ create(:operations_feature_flag_scope, feature_flag: feature_flag, environment_scope: 'production', active: true)
+
+ put_request(feature_flag, { active: true })
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('feature_flag')
+ expect(json_response['active']).to eq(true)
+ expect(feature_flag.reload.active).to eq(true)
+ expect(feature_flag.default_scope.reload.active).to eq(false)
+ end
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns 404' do
+ is_expected.to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context "when creating an additional scope for the production environment" do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ scopes_attributes: [{ environment_scope: 'production', active: false }]
+ }
+ }
+ end
+
+ it 'creates a production scope' do
+ expect { subject }.to change { feature_flag.reload.scopes.count }.by(1)
+
+ expect(json_response['scopes'].last['environment_scope']).to eq('production')
+ expect(json_response['scopes'].last['active']).to be_falsy
+ end
+ end
+
+ context "when creating a default scope" do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ scopes_attributes: [{ environment_scope: '*', active: false }]
+ }
+ }
+ end
+
+ it 'returns 400' do
+ is_expected.to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context "when updating the default scope's active value" do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ scopes_attributes: [
+ {
+ id: feature_flag.default_scope.id,
+ environment_scope: '*',
+ active: false
+ }
+ ]
+ }
+ }
+ end
+
+ it "updates successfully" do
+ subject
+
+ expect(json_response['scopes'].first['environment_scope']).to eq('*')
+ expect(json_response['scopes'].first['active']).to be_falsy
+ end
+ end
+
+ context "when changing the default scope's environment scope" do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ scopes_attributes: [
+ {
+ id: feature_flag.default_scope.id,
+ environment_scope: 'review/*'
+ }
+ ]
+ }
+ }
+ end
+
+ it 'returns 400' do
+ is_expected.to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context "when destroying the default scope" do
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ scopes_attributes: [
+ {
+ id: feature_flag.default_scope.id,
+ _destroy: 1
+ }
+ ]
+ }
+ }
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActiveRecord::ReadOnlyRecord)
+ end
+ end
+
+ context "when destroying a production scope" do
+ let!(:production_scope) { create_scope(feature_flag, 'production', true) }
+ let(:params) do
+ {
+ namespace_id: project.namespace,
+ project_id: project,
+ iid: feature_flag.iid,
+ operations_feature_flag: {
+ scopes_attributes: [
+ {
+ id: production_scope.id,
+ _destroy: 1
+ }
+ ]
+ }
+ }
+ end
+
+ it 'destroys successfully' do
+ subject
+
+ scopes = json_response['scopes']
+ expect(scopes.any? { |scope| scope['environment_scope'] == 'production' })
+ .to be_falsy
+ end
+ end
+
+ describe "updating the strategy" do
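+ # create_scope comes from FeatureFlagHelpers; here it builds a production scope on the flag with the given strategies.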
+ it 'creates a default strategy' do
+ scope = create_scope(feature_flag, 'production', true, [])
+
+ put_request(feature_flag, scopes_attributes: [{
+ id: scope.id,
+ strategies: [{ name: 'default', parameters: {} }]
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ scope_json = json_response['scopes'].find do |s|
+ s['environment_scope'] == 'production'
+ end
+ expect(scope_json['strategies']).to eq([{
+ "name" => "default",
+ "parameters" => {}
+ }])
+ end
+
+ it 'creates a gradualRolloutUserId strategy' do
+ scope = create_scope(feature_flag, 'production', true, [])
+
+ put_request(feature_flag, scopes_attributes: [{
+ id: scope.id,
+ strategies: [{ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: "70" } }]
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ scope_json = json_response['scopes'].find do |s|
+ s['environment_scope'] == 'production'
+ end
+ expect(scope_json['strategies']).to eq([{
+ "name" => "gradualRolloutUserId",
+ "parameters" => {
+ "groupId" => "default",
+ "percentage" => "70"
+ }
+ }])
+ end
+
+ it 'creates a userWithId strategy' do
+ scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
+
+ put_request(feature_flag, scopes_attributes: [{
+ id: scope.id,
+ strategies: [{ name: 'userWithId', parameters: { userIds: 'sam,fred' } }]
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ scope_json = json_response['scopes'].find do |s|
+ s['environment_scope'] == 'production'
+ end
+ expect(scope_json['strategies']).to eq([{
+ "name" => "userWithId",
+ "parameters" => { "userIds" => "sam,fred" }
+ }])
+ end
+
+ it 'updates an existing strategy' do
+ scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
+
+ put_request(feature_flag, scopes_attributes: [{
+ id: scope.id,
+ strategies: [{ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: "50" } }]
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ scope_json = json_response['scopes'].find do |s|
+ s['environment_scope'] == 'production'
+ end
+ expect(scope_json['strategies']).to eq([{
+ "name" => "gradualRolloutUserId",
+ "parameters" => {
+ "groupId" => "default",
+ "percentage" => "50"
+ }
+ }])
+ end
+
+ it 'clears an existing strategy' do
+ scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
+
+ put_request(feature_flag, scopes_attributes: [{
+ id: scope.id,
+ strategies: []
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ scope_json = json_response['scopes'].find do |s|
+ s['environment_scope'] == 'production'
+ end
+ expect(scope_json['strategies']).to eq([])
+ end
+
+ it 'accepts multiple strategies' do
+ scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
+
+ put_request(feature_flag, scopes_attributes: [{
+ id: scope.id,
+ strategies: [
+ { name: 'gradualRolloutUserId', parameters: { groupId: 'mygroup', percentage: '55' } },
+ { name: 'userWithId', parameters: { userIds: 'joe' } }
+ ]
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ scope_json = json_response['scopes'].find do |s|
+ s['environment_scope'] == 'production'
+ end
+ expect(scope_json['strategies'].length).to eq(2)
+ expect(scope_json['strategies']).to include({
+ "name" => "gradualRolloutUserId",
+ "parameters" => { "groupId" => "mygroup", "percentage" => "55" }
+ })
+ expect(scope_json['strategies']).to include({
+ "name" => "userWithId",
+ "parameters" => { "userIds" => "joe" }
+ })
+ end
+
+ it 'does not modify strategies when there is no strategies key in the params' do
+ scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
+
+ put_request(feature_flag, scopes_attributes: [{ id: scope.id }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ scope_json = json_response['scopes'].find do |s|
+ s['environment_scope'] == 'production'
+ end
+ expect(scope_json['strategies']).to eq([{
+ "name" => "default",
+ "parameters" => {}
+ }])
+ end
+
+ it 'leaves an existing strategy when there are no strategies in the params' do
+ scope = create_scope(feature_flag, 'production', true, [{ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '10' } }])
+
+ put_request(feature_flag, scopes_attributes: [{ id: scope.id }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ scope_json = json_response['scopes'].find do |s|
+ s['environment_scope'] == 'production'
+ end
+ expect(scope_json['strategies']).to eq([{
+ "name" => "gradualRolloutUserId",
+ "parameters" => { "groupId" => "default", "percentage" => "10" }
+ }])
+ end
+
+ it 'does not accept extra parameters in the strategy params' do
+ scope = create_scope(feature_flag, 'production', true, [{ name: 'default', parameters: {} }])
+
+ put_request(feature_flag, scopes_attributes: [{
+ id: scope.id,
+ strategies: [{ name: 'userWithId', parameters: { userIds: 'joe', groupId: 'default' } }]
+ }])
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(["Scopes strategies parameters are invalid"])
+ end
+ end
+
+ context 'when legacy feature flags are set to be read only' do
+ it 'does not update the flag' do
+ stub_feature_flags(feature_flags_legacy_read_only: true)
+
+ put_request(feature_flag, name: 'ci_new_live_trace')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq(["Legacy feature flags are read-only"])
+ end
+
+ it 'updates the flag if the legacy read-only override is enabled for a particular project' do
+ stub_feature_flags(
+ feature_flags_legacy_read_only: true,
+ feature_flags_legacy_read_only_override: project
+ )
+
+ put_request(feature_flag, name: 'ci_new_live_trace')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq('ci_new_live_trace')
+ end
+ end
+
+ context 'with a version 2 feature flag' do
+ let!(:new_version_flag) do
+ create(:operations_feature_flag,
+ :new_version_flag,
+ name: 'new-feature',
+ active: true,
+ project: project)
+ end
+
+ it 'creates a new strategy and scope' do
+ put_request(new_version_flag, strategies_attributes: [{
+ name: 'userWithId',
+ parameters: { userIds: 'user1' },
+ scopes_attributes: [{
+ environment_scope: 'production'
+ }]
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies'].count).to eq(1)
+ strategy_json = json_response['strategies'].first
+ expect(strategy_json['name']).to eq('userWithId')
+ expect(strategy_json['parameters']).to eq({
+ 'userIds' => 'user1'
+ })
+ expect(strategy_json['scopes'].count).to eq(1)
+ scope_json = strategy_json['scopes'].first
+ expect(scope_json['environment_scope']).to eq('production')
+ end
+
+ it 'creates a gradualRolloutUserId strategy' do
+ put_request(new_version_flag, strategies_attributes: [{
+ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '30' }
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies'].count).to eq(1)
+ strategy_json = json_response['strategies'].first
+ expect(strategy_json['name']).to eq('gradualRolloutUserId')
+ expect(strategy_json['parameters']).to eq({
+ 'groupId' => 'default',
+ 'percentage' => '30'
+ })
+ expect(strategy_json['scopes']).to eq([])
+ end
+
+ it 'creates a flexibleRollout strategy' do
+ put_request(new_version_flag, strategies_attributes: [{
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default', rollout: '30', stickiness: 'DEFAULT' }
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies'].count).to eq(1)
+ strategy_json = json_response['strategies'].first
+ expect(strategy_json['name']).to eq('flexibleRollout')
+ expect(strategy_json['parameters']).to eq({
+ 'groupId' => 'default',
+ 'rollout' => '30',
+ 'stickiness' => 'DEFAULT'
+ })
+ expect(strategy_json['scopes']).to eq([])
+ end
+
+ it 'creates a gitlabUserList strategy' do
+ user_list = create(:operations_feature_flag_user_list, project: project, name: 'My List', user_xids: 'user1,user2')
+
+ put_request(new_version_flag, strategies_attributes: [{
+ name: 'gitlabUserList',
+ parameters: {},
+ user_list_id: user_list.id
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies']).to match([a_hash_including({
+ 'id' => an_instance_of(Integer),
+ 'name' => 'gitlabUserList',
+ 'parameters' => {},
+ 'user_list' => {
+ 'id' => user_list.id,
+ 'iid' => user_list.iid,
+ 'name' => 'My List',
+ 'user_xids' => 'user1,user2'
+ },
+ 'scopes' => []
+ })])
+ end
+
+ it 'supports switching the associated user list for an existing gitlabUserList strategy' do
+ user_list = create(:operations_feature_flag_user_list, project: project, name: 'My List', user_xids: 'user1,user2')
+ strategy = create(:operations_strategy, feature_flag: new_version_flag, name: 'gitlabUserList', parameters: {}, user_list: user_list)
+ other_user_list = create(:operations_feature_flag_user_list, project: project, name: 'Other List', user_xids: 'user3')
+
+ put_request(new_version_flag, strategies_attributes: [{
+ id: strategy.id,
+ user_list_id: other_user_list.id
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies']).to eq([{
+ 'id' => strategy.id,
+ 'name' => 'gitlabUserList',
+ 'parameters' => {},
+ 'user_list' => {
+ 'id' => other_user_list.id,
+ 'iid' => other_user_list.iid,
+ 'name' => 'Other List',
+ 'user_xids' => 'user3'
+ },
+ 'scopes' => []
+ }])
+ end
+
+ it 'automatically dissociates the user list when switching the type of an existing gitlabUserList strategy' do
+ user_list = create(:operations_feature_flag_user_list, project: project, name: 'My List', user_xids: 'user1,user2')
+ strategy = create(:operations_strategy, feature_flag: new_version_flag, name: 'gitlabUserList', parameters: {}, user_list: user_list)
+
+ put_request(new_version_flag, strategies_attributes: [{
+ id: strategy.id,
+ name: 'gradualRolloutUserId',
+ parameters: {
+ groupId: 'default',
+ percentage: '25'
+ }
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies']).to eq([{
+ 'id' => strategy.id,
+ 'name' => 'gradualRolloutUserId',
+ 'parameters' => {
+ 'groupId' => 'default',
+ 'percentage' => '25'
+ },
+ 'scopes' => []
+ }])
+ end
+
+ it 'does not delete a user list when deleting a gitlabUserList strategy' do
+ user_list = create(:operations_feature_flag_user_list, project: project, name: 'My List', user_xids: 'user1,user2')
+ strategy = create(:operations_strategy, feature_flag: new_version_flag, name: 'gitlabUserList', parameters: {}, user_list: user_list)
+
+ put_request(new_version_flag, strategies_attributes: [{
+ id: strategy.id,
+ _destroy: true
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies']).to eq([])
+ expect(::Operations::FeatureFlags::Strategy.count).to eq(0)
+ expect(::Operations::FeatureFlags::StrategyUserList.count).to eq(0)
+ expect(::Operations::FeatureFlags::UserList.first).to eq(user_list)
+ end
+
+ it 'returns not found when trying to create a gitlabUserList strategy with an invalid user list id' do
+ put_request(new_version_flag, strategies_attributes: [{
+ name: 'gitlabUserList',
+ parameters: {},
+ user_list_id: 1
+ }])
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'updates an existing strategy' do
+ strategy = create(:operations_strategy, feature_flag: new_version_flag, name: 'default', parameters: {})
+
+ put_request(new_version_flag, strategies_attributes: [{
+ id: strategy.id,
+ name: 'userWithId',
+ parameters: { userIds: 'user2,user3' }
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies']).to eq([{
+ 'id' => strategy.id,
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user2,user3' },
+ 'scopes' => []
+ }])
+ end
+
+ it 'updates an existing scope' do
+ strategy = create(:operations_strategy, feature_flag: new_version_flag, name: 'default', parameters: {})
+ scope = create(:operations_scope, strategy: strategy, environment_scope: 'staging')
+
+ put_request(new_version_flag, strategies_attributes: [{
+ id: strategy.id,
+ scopes_attributes: [{
+ id: scope.id,
+ environment_scope: 'sandbox'
+ }]
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies'].first['scopes']).to eq([{
+ 'id' => scope.id,
+ 'environment_scope' => 'sandbox'
+ }])
+ end
+
+ it 'deletes an existing strategy' do
+ strategy = create(:operations_strategy, feature_flag: new_version_flag, name: 'default', parameters: {})
+
+ put_request(new_version_flag, strategies_attributes: [{
+ id: strategy.id,
+ _destroy: true
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies']).to eq([])
+ end
+
+ it 'deletes an existing scope' do
+ strategy = create(:operations_strategy, feature_flag: new_version_flag, name: 'default', parameters: {})
+ scope = create(:operations_scope, strategy: strategy, environment_scope: 'staging')
+
+ put_request(new_version_flag, strategies_attributes: [{
+ id: strategy.id,
+ scopes_attributes: [{
+ id: scope.id,
+ _destroy: true
+ }]
+ }])
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['strategies'].first['scopes']).to eq([])
+ end
+
+ it 'does not update the flag if version 2 flags are disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ put_request(new_version_flag, { name: 'some-other-name' })
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(new_version_flag.reload.name).to eq('new-feature')
+ end
+
+ it 'updates the flag when legacy feature flags are set to be read only' do
+ stub_feature_flags(feature_flags_legacy_read_only: true)
+
+ put_request(new_version_flag, name: 'some-other-name')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(new_version_flag.reload.name).to eq('some-other-name')
+ end
+ end
+ end
+
+ private
+
+ def view_params
+ { namespace_id: project.namespace, project_id: project }
+ end
+end
diff --git a/spec/controllers/projects/feature_flags_user_lists_controller_spec.rb b/spec/controllers/projects/feature_flags_user_lists_controller_spec.rb
new file mode 100644
index 00000000000..e0d1d3765b2
--- /dev/null
+++ b/spec/controllers/projects/feature_flags_user_lists_controller_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::FeatureFlagsUserListsController do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+
+ before_all do
+ project.add_reporter(reporter)
+ project.add_developer(developer)
+ end
+
+ def request_params(extra_params = {})
+ { namespace_id: project.namespace, project_id: project }.merge(extra_params)
+ end
+
+ describe 'GET #new' do
+ it 'redirects when the user is unauthenticated' do
+ get(:new, params: request_params)
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+
+ it 'returns not found if the user does not belong to the project' do
+ user = create(:user)
+ sign_in(user)
+
+ get(:new, params: request_params)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns not found for a reporter' do
+ sign_in(reporter)
+
+ get(:new, params: request_params)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'renders the new page for a developer' do
+ sign_in(developer)
+
+ get(:new, params: request_params)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ describe 'GET #edit' do
+ before do
+ sign_in(developer)
+ end
+
+ it 'renders the edit page for a developer' do
+ list = create(:operations_feature_flag_user_list, project: project)
+
+ get(:edit, params: request_params(iid: list.iid))
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'returns not found with an iid that does not exist' do
+ list = create(:operations_feature_flag_user_list, project: project)
+
+ get(:edit, params: request_params(iid: list.iid + 1))
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+    it 'returns not found for a list belonging to another project' do
+ other_project = create(:project)
+ list = create(:operations_feature_flag_user_list, project: other_project)
+
+ get(:edit, params: request_params(iid: list.iid))
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ describe 'GET #show' do
+ before do
+ sign_in(developer)
+ end
+
+ it 'renders the page for a developer' do
+ list = create(:operations_feature_flag_user_list, project: project)
+
+ get(:show, params: request_params(iid: list.iid))
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'returns not found with an iid that does not exist' do
+ list = create(:operations_feature_flag_user_list, project: project)
+
+ get(:show, params: request_params(iid: list.iid + 1))
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+    it 'returns not found for a list belonging to another project' do
+ other_project = create(:project)
+ list = create(:operations_feature_flag_user_list, project: other_project)
+
+ get(:show, params: request_params(iid: list.iid))
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+end
diff --git a/spec/controllers/projects/group_links_controller_spec.rb b/spec/controllers/projects/group_links_controller_spec.rb
index 762ef795f6e..3baadde46dc 100644
--- a/spec/controllers/projects/group_links_controller_spec.rb
+++ b/spec/controllers/projects/group_links_controller_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe Projects::GroupLinksController do
- let(:group) { create(:group, :private) }
- let(:group2) { create(:group, :private) }
- let(:project) { create(:project, :private, group: group2) }
- let(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:group2) { create(:group, :private) }
+ let_it_be(:project) { create(:project, :private, group: group2) }
+ let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
@@ -142,4 +142,47 @@ RSpec.describe Projects::GroupLinksController do
end
end
end
+
+ describe '#update' do
+ let_it_be(:link) do
+ create(
+ :project_group_link,
+ {
+ project: project,
+ group: group
+ }
+ )
+ end
+
+ let(:expiry_date) { 1.month.from_now.to_date }
+
+ before do
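+      # Freeze time so the "about 1 month" relative expiry strings asserted below stay deterministic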
+ travel_to Time.now.utc.beginning_of_day
+
+ put(
+ :update,
+ params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project.to_param,
+ id: link.id,
+ group_link: { group_access: Gitlab::Access::GUEST, expires_at: expiry_date }
+ },
+ format: :json
+ )
+ end
+
+ context 'when `expires_at` is set' do
+ it 'returns correct json response' do
+ expect(json_response).to eq({ "expires_in" => "about 1 month", "expires_soon" => false })
+ end
+ end
+
+ context 'when `expires_at` is not set' do
+ let(:expiry_date) { nil }
+
+ it 'returns empty json response' do
+ expect(json_response).to be_empty
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/hooks_controller_spec.rb b/spec/controllers/projects/hooks_controller_spec.rb
index bd543cebeec..b9c008d2950 100644
--- a/spec/controllers/projects/hooks_controller_spec.rb
+++ b/spec/controllers/projects/hooks_controller_spec.rb
@@ -48,6 +48,14 @@ RSpec.describe Projects::HooksController do
end
end
+ describe 'DELETE #destroy' do
+ let!(:hook) { create(:project_hook, project: project) }
+ let!(:log) { create(:web_hook_log, web_hook: hook) }
+ let(:params) { { namespace_id: project.namespace, project_id: project, id: hook } }
+
+ it_behaves_like 'Web hook destroyer'
+ end
+
describe '#test' do
let(:hook) { create(:project_hook, project: project) }
diff --git a/spec/controllers/projects/import/jira_controller_spec.rb b/spec/controllers/projects/import/jira_controller_spec.rb
index b82735a56b3..37a7fce0c23 100644
--- a/spec/controllers/projects/import/jira_controller_spec.rb
+++ b/spec/controllers/projects/import/jira_controller_spec.rb
@@ -12,7 +12,6 @@ RSpec.describe Projects::Import::JiraController do
def ensure_correct_config
sign_in(user)
project.add_maintainer(user)
- stub_feature_flags(jira_issue_import: true)
stub_jira_service_test
end
@@ -77,7 +76,6 @@ RSpec.describe Projects::Import::JiraController do
before do
sign_in(user)
project.add_maintainer(user)
- stub_feature_flags(jira_issue_import: true)
end
context 'when Jira service is not enabled for the project' do
diff --git a/spec/controllers/projects/incidents_controller_spec.rb b/spec/controllers/projects/incidents_controller_spec.rb
index 2baae0661cb..ddd15b9b1dd 100644
--- a/spec/controllers/projects/incidents_controller_spec.rb
+++ b/spec/controllers/projects/incidents_controller_spec.rb
@@ -3,44 +3,119 @@
require 'spec_helper'
RSpec.describe Projects::IncidentsController do
- let_it_be(:project) { create(:project) }
+ let_it_be_with_refind(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
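+  # A nil user stands in for an unauthenticated visitor; the `sign_in(user) if user` below is skipped in that case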
+ let_it_be(:anonymous) { nil }
before_all do
- project.add_developer(developer)
project.add_guest(guest)
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(user) if user
+ end
+
+ subject { make_request }
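+  # Each describe block below defines its own make_request, so `subject` exercises the endpoint under test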
+
+ shared_examples 'not found' do
+ include_examples 'returning response status', :not_found
+ end
+
+ shared_examples 'login required' do
+ it 'redirects to the login page' do
+ subject
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
end
describe 'GET #index' do
def make_request
- get :index, params: { namespace_id: project.namespace, project_id: project }
+ get :index, params: project_params
end
- it 'shows the page for user with developer role' do
- sign_in(developer)
- make_request
+ let(:user) { developer }
+
+ it 'shows the page' do
+ subject
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
end
context 'when user is unauthorized' do
- it 'redirects to the login page' do
- sign_out(developer)
- make_request
+ let(:user) { anonymous }
+
+ it_behaves_like 'login required'
+ end
+
+ context 'when user is a guest' do
+ let(:user) { guest }
+
+ it 'shows the page' do
+ subject
- expect(response).to redirect_to(new_user_session_path)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
end
end
+ end
+
+ describe 'GET #show' do
+ def make_request
+ get :show, params: project_params(id: resource)
+ end
+
+ let_it_be(:resource) { create(:incident, project: project) }
+ let(:user) { developer }
+
+ it 'renders incident page' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
+
+ expect(assigns(:incident)).to be_present
+ expect(assigns(:incident).author.association(:status)).to be_loaded
+ expect(assigns(:issue)).to be_present
+ expect(assigns(:noteable)).to eq(assigns(:incident))
+ end
+
+ context 'with non existing id' do
+ let(:resource) { non_existing_record_id }
+
+ it_behaves_like 'not found'
+ end
+
+ context 'for issue' do
+ let_it_be(:resource) { create(:issue, project: project) }
+
+ it_behaves_like 'not found'
+ end
context 'when user is a guest' do
- it 'shows 404' do
- sign_in(guest)
- make_request
+ let(:user) { guest }
- expect(response).to have_gitlab_http_status(:not_found)
+ it 'shows the page' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
end
end
+
+ context 'when unauthorized' do
+ let(:user) { anonymous }
+
+ it_behaves_like 'login required'
+ end
+ end
+
+ private
+
+ def project_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace, project_id: project)
end
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index ed5198bf015..f956baa0e22 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -388,15 +388,23 @@ RSpec.describe Projects::IssuesController do
# Rails router. A controller-style spec matches the wrong route, and
# session['user_return_to'] becomes incorrect.
describe 'Redirect after sign in', type: :request do
- context 'with an AJAX request' do
+ before_all do
+ project.add_developer(user)
+ end
+
+ before do
+ login_as(user)
+ end
+
+ context 'with a JSON request' do
it 'does not store the visited URL' do
- get project_issue_path(project, issue), xhr: true
+ get project_issue_path(project, issue, format: :json)
expect(session['user_return_to']).to be_blank
end
end
- context 'without an AJAX request' do
+ context 'with an HTML request' do
it 'stores the visited URL' do
get project_issue_path(project, issue)
@@ -1642,7 +1650,7 @@ RSpec.describe Projects::IssuesController do
end
it 'allows CSV export' do
- expect(ExportCsvWorker).to receive(:perform_async).with(viewer.id, project.id, anything)
+ expect(IssuableExportCsvWorker).to receive(:perform_async).with(:issue, viewer.id, project.id, anything)
request_csv
@@ -1657,7 +1665,7 @@ RSpec.describe Projects::IssuesController do
it 'redirects to the sign in page' do
request_csv
- expect(ExportCsvWorker).not_to receive(:perform_async)
+ expect(IssuableExportCsvWorker).not_to receive(:perform_async)
expect(response).to redirect_to(new_user_session_path)
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 94cce1964ca..80cb16966e5 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -121,13 +121,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:build).id).to eq(job.id)
end
-
- it 'has the correct build collection' do
- builds = assigns(:builds).map(&:id)
-
- expect(builds).to include(job.id, second_job.id)
- expect(builds).not_to include(third_job.id)
- end
end
context 'when job does not exist' do
@@ -204,16 +197,40 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
context 'with not expiry date' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
- it 'exposes needed information' do
- get_show_json
+ context 'when artifacts are unlocked' do
+ before do
+ job.pipeline.unlocked!
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('job/job_details')
- expect(json_response['artifact']['download_path']).to match(%r{artifacts/download})
- expect(json_response['artifact']['browse_path']).to match(%r{artifacts/browse})
- expect(json_response['artifact']).not_to have_key('keep_path')
- expect(json_response['artifact']).not_to have_key('expired')
- expect(json_response['artifact']).not_to have_key('expired_at')
+ it 'exposes needed information' do
+ get_show_json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('job/job_details')
+ expect(json_response['artifact']['download_path']).to match(%r{artifacts/download})
+ expect(json_response['artifact']['browse_path']).to match(%r{artifacts/browse})
+ expect(json_response['artifact']).not_to have_key('keep_path')
+ expect(json_response['artifact']).not_to have_key('expired')
+ expect(json_response['artifact']).not_to have_key('expired_at')
+ end
+ end
+
+ context 'when artifacts are locked' do
+ before do
+ job.pipeline.artifacts_locked!
+ end
+
+ it 'exposes needed information' do
+ get_show_json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('job/job_details')
+ expect(json_response['artifact']['download_path']).to match(%r{artifacts/download})
+ expect(json_response['artifact']['browse_path']).to match(%r{artifacts/browse})
+ expect(json_response['artifact']).not_to have_key('keep_path')
+ expect(json_response['artifact']).not_to have_key('expired')
+ expect(json_response['artifact']).not_to have_key('expired_at')
+ end
end
end
@@ -740,19 +757,21 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
name: 'master', project: project)
sign_in(user)
-
- post_play
end
context 'when job is playable' do
let(:job) { create(:ci_build, :playable, pipeline: pipeline) }
it 'redirects to the played job page' do
+ post_play
+
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(namespace_project_job_path(id: job.id))
end
it 'transits to pending' do
+ post_play
+
expect(job.reload).to be_pending
end
@@ -760,15 +779,54 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
let(:variable_attributes) { [{ key: 'first', secret_value: 'first' }] }
it 'assigns the job variables' do
+ post_play
+
expect(job.reload.job_variables.map(&:key)).to contain_exactly('first')
end
end
+
+ context 'when job is bridge' do
+ let(:downstream_project) { create(:project) }
+ let(:job) { create(:ci_bridge, :playable, pipeline: pipeline, downstream: downstream_project) }
+
+ before do
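+        # Developer access on the downstream project is presumably required for the user to play the bridge and trigger its downstream pipeline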
+ downstream_project.add_developer(user)
+ end
+
+ it 'redirects to the pipeline page' do
+ post_play
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(pipeline_path(pipeline))
+ end
+
+ it 'transits to pending' do
+ post_play
+
+ expect(job.reload).to be_pending
+ end
+
+ context 'when FF ci_manual_bridges is disabled' do
+ before do
+ stub_feature_flags(ci_manual_bridges: false)
+ end
+
+ it 'returns 404' do
+ post_play
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
context 'when job is not playable' do
let(:job) { create(:ci_build, pipeline: pipeline) }
it 'renders unprocessable_entity' do
+ post_play
+
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index f213d104747..8a3c55033cb 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Projects::LabelsController do
- let(:group) { create(:group) }
- let(:project) { create(:project, namespace: group) }
- let(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, namespace: group) }
+ let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
@@ -14,16 +14,21 @@ RSpec.describe Projects::LabelsController do
end
describe 'GET #index' do
- let!(:label_1) { create(:label, project: project, priority: 1, title: 'Label 1') }
- let!(:label_2) { create(:label, project: project, priority: 3, title: 'Label 2') }
- let!(:label_3) { create(:label, project: project, priority: 1, title: 'Label 3') }
- let!(:label_4) { create(:label, project: project, title: 'Label 4') }
- let!(:label_5) { create(:label, project: project, title: 'Label 5') }
-
- let!(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1') }
- let!(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') }
- let!(:group_label_3) { create(:group_label, group: group, title: 'Group Label 3') }
- let!(:group_label_4) { create(:group_label, group: group, title: 'Group Label 4') }
+ let_it_be(:label_1) { create(:label, project: project, priority: 1, title: 'Label 1') }
+ let_it_be(:label_2) { create(:label, project: project, priority: 3, title: 'Label 2') }
+ let_it_be(:label_3) { create(:label, project: project, priority: 1, title: 'Label 3') }
+ let_it_be(:label_4) { create(:label, project: project, title: 'Label 4') }
+ let_it_be(:label_5) { create(:label, project: project, title: 'Label 5') }
+
+ let_it_be(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1') }
+ let_it_be(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') }
+ let_it_be(:group_label_3) { create(:group_label, group: group, title: 'Group Label 3') }
+ let_it_be(:group_label_4) { create(:group_label, group: group, title: 'Group Label 4') }
+
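+    # Label groupings reused by the index assertions and shared examples below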
+    let_it_be(:group_labels) { [group_label_3, group_label_4] }
+    let_it_be(:project_labels) { [label_4, label_5] }
+    let_it_be(:group_priority_labels) { [group_label_1, group_label_2] }
+    let_it_be(:project_priority_labels) { [label_1, label_2, label_3] }
before do
create(:label_priority, project: project, label: group_label_1, priority: 3)
@@ -68,6 +73,60 @@ RSpec.describe Projects::LabelsController do
end
end
+ context 'with subgroups' do
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subgroup_label_1) { create(:group_label, group: subgroup, title: 'subgroup_label_1') }
+ let_it_be(:subgroup_label_2) { create(:group_label, group: subgroup, title: 'subgroup_label_2') }
+
+ before do
+ project.update!(namespace: subgroup)
+ subgroup.add_owner(user)
+ create(:label_priority, project: project, label: subgroup_label_2, priority: 1)
+ end
+
+ RSpec.shared_examples 'returns ancestor group labels' do
+ it 'returns ancestor group labels', :aggregate_failures do
+ get :index, params: params
+
+ expect(assigns(:labels)).to match_array([subgroup_label_1] + group_labels + project_labels)
+ expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + group_priority_labels + project_priority_labels)
+ end
+ end
+
+ context 'when show_inherited_labels disabled' do
+ before do
+ stub_feature_flags(show_inherited_labels: false)
+ end
+
+ context 'when include_ancestor_groups false' do
+ let(:params) { { namespace_id: project.namespace.to_param, project_id: project } }
+
+ it 'does not return ancestor group labels', :aggregate_failures do
+ get :index, params: params
+
+ expect(assigns(:labels)).to match_array([subgroup_label_1] + project_labels)
+ expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + project_priority_labels)
+ end
+ end
+
+ context 'when include_ancestor_groups true' do
+ let(:params) { { namespace_id: project.namespace.to_param, project_id: project, include_ancestor_groups: true } }
+
+ it_behaves_like 'returns ancestor group labels'
+ end
+ end
+
+ context 'when show_inherited_labels enabled' do
+ let(:params) { { namespace_id: project.namespace.to_param, project_id: project } }
+
+ before do
+ stub_feature_flags(show_inherited_labels: true)
+ end
+
+ it_behaves_like 'returns ancestor group labels'
+ end
+ end
+
def list_labels
get :index, params: { namespace_id: project.namespace.to_param, project_id: project }
end
@@ -75,7 +134,7 @@ RSpec.describe Projects::LabelsController do
describe 'POST #generate' do
context 'personal project' do
- let(:personal_project) { create(:project, namespace: user.namespace) }
+ let_it_be(:personal_project) { create(:project, namespace: user.namespace) }
it 'creates labels' do
post :generate, params: { namespace_id: personal_project.namespace.to_param, project_id: personal_project }
@@ -116,8 +175,8 @@ RSpec.describe Projects::LabelsController do
end
describe 'POST #promote' do
- let!(:promoted_label_name) { "Promoted Label" }
- let!(:label_1) { create(:label, title: promoted_label_name, project: project) }
+ let_it_be(:promoted_label_name) { "Promoted Label" }
+ let_it_be(:label_1) { create(:label, title: promoted_label_name, project: project) }
context 'not group reporters' do
it 'denies access' do
@@ -196,7 +255,7 @@ RSpec.describe Projects::LabelsController do
end
context 'when requesting a redirected path' do
- let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+ let_it_be(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
it 'redirects to the canonical path' do
get :index, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }
@@ -242,7 +301,7 @@ RSpec.describe Projects::LabelsController do
end
context 'when requesting a redirected path' do
- let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+ let_it_be(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
it 'returns not found' do
post :generate, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }
diff --git a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
index 5f636bd4340..c2cc3d10ea0 100644
--- a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe Projects::MergeRequests::ConflictsController do
expect(json_response).to include('old_path' => path,
'new_path' => path,
- 'blob_icon' => 'file-text-o',
+ 'blob_icon' => 'doc-text',
'blob_path' => a_string_ending_with(path),
'content' => content)
end
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index fa32d32f552..9e5d41b1075 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -17,7 +17,9 @@ RSpec.describe Projects::MilestonesController do
controller.instance_variable_set(:@project, project)
end
- it_behaves_like 'milestone tabs'
+ it_behaves_like 'milestone tabs' do
+ let(:request_params) { { namespace_id: project.namespace, project_id: project, id: milestone.iid } }
+ end
describe "#show" do
render_views
diff --git a/spec/controllers/projects/pipelines/stages_controller_spec.rb b/spec/controllers/projects/pipelines/stages_controller_spec.rb
index 6e8c08d95a1..a8b328c7563 100644
--- a/spec/controllers/projects/pipelines/stages_controller_spec.rb
+++ b/spec/controllers/projects/pipelines/stages_controller_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::Pipelines::StagesController do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
+ let(:downstream_project) { create(:project, :repository) }
before do
sign_in(user)
@@ -17,6 +18,7 @@ RSpec.describe Projects::Pipelines::StagesController do
before do
create_manual_build(pipeline, 'test', 'rspec 1/2')
create_manual_build(pipeline, 'test', 'rspec 2/2')
+ create_manual_bridge(pipeline, 'test', 'trigger')
pipeline.reload
end
@@ -32,6 +34,7 @@ RSpec.describe Projects::Pipelines::StagesController do
context 'when user has access' do
before do
project.add_maintainer(user)
+ downstream_project.add_maintainer(user)
end
context 'when the stage does not exists' do
@@ -46,12 +49,12 @@ RSpec.describe Projects::Pipelines::StagesController do
context 'when the stage exists' do
it 'starts all manual jobs' do
- expect(pipeline.builds.manual.count).to eq(2)
+ expect(pipeline.processables.manual.count).to eq(3)
play_manual_stage!
expect(response).to have_gitlab_http_status(:ok)
- expect(pipeline.builds.manual.count).to eq(0)
+ expect(pipeline.processables.manual.count).to eq(0)
end
end
end
@@ -68,5 +71,9 @@ RSpec.describe Projects::Pipelines::StagesController do
def create_manual_build(pipeline, stage, name)
create(:ci_build, :manual, pipeline: pipeline, stage: stage, name: name)
end
+
+ def create_manual_bridge(pipeline, stage, name)
+ create(:ci_bridge, :manual, pipeline: pipeline, stage: stage, name: name, downstream: downstream_project)
+ end
end
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index c3be7de25a8..0720124ea57 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -1148,4 +1148,84 @@ RSpec.describe Projects::PipelinesController do
}
end
end
+
+ describe 'GET config_variables.json' do
+ let(:result) { YAML.dump(ci_config) }
+
+ before do
+ stub_gitlab_ci_yml_for_sha(sha, result)
+ end
+
+ context 'when sending a valid sha' do
+ let(:sha) { 'master' }
+ let(:ci_config) do
+ {
+ variables: {
+ KEY1: { value: 'val 1', description: 'description 1' }
+ },
+ test: {
+ stage: 'test',
+ script: 'echo'
+ }
+ }
+ end
+
+ it 'returns variable list' do
+ get_config_variables
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['KEY1']).to eq({ 'value' => 'val 1', 'description' => 'description 1' })
+ end
+ end
+
+ context 'when sending an invalid sha' do
+ let(:sha) { 'invalid-sha' }
+ let(:ci_config) { nil }
+
+ it 'returns empty json' do
+ get_config_variables
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({})
+ end
+ end
+
+ context 'when sending an invalid config' do
+ let(:sha) { 'master' }
+ let(:ci_config) do
+ {
+ variables: {
+ KEY1: { value: 'val 1', description: 'description 1' }
+ },
+ test: {
+ stage: 'invalid',
+ script: 'echo'
+ }
+ }
+ end
+
+ it 'returns empty result' do
+ get_config_variables
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({})
+ end
+ end
+
+ private
+
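+    # Stubs the repository's .gitlab-ci.yml lookup so each context controls the CI config returned for the given sha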
+ def stub_gitlab_ci_yml_for_sha(sha, result)
+ allow_any_instance_of(Repository)
+ .to receive(:gitlab_ci_yml_for)
+ .with(sha, '.gitlab-ci.yml')
+ .and_return(result)
+ end
+
+ def get_config_variables
+ get :config_variables, params: { namespace_id: project.namespace,
+ project_id: project,
+ sha: sha },
+ format: :json
+ end
+ end
end
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index ae05e2d2631..74311fa89f3 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -228,6 +228,43 @@ RSpec.describe Projects::ProjectMembersController do
end
end
end
+
+ context 'expiration date' do
+ let(:expiry_date) { 1.month.from_now.to_date }
+
+ before do
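+        # Freeze time so the "about 1 month" relative expiry strings asserted below stay deterministic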
+ travel_to Time.now.utc.beginning_of_day
+
+ put(
+ :update,
+ params: {
+ project_member: { expires_at: expiry_date },
+ namespace_id: project.namespace,
+ project_id: project,
+ id: requester
+ },
+ format: :json
+ )
+ end
+
+ context 'when `expires_at` is set' do
+ it 'returns correct json response' do
+ expect(json_response).to eq({
+ "expires_in" => "about 1 month",
+ "expires_soon" => false,
+ "expires_at_formatted" => expiry_date.to_time.in_time_zone.to_s(:medium)
+ })
+ end
+ end
+
+ context 'when `expires_at` is not set' do
+ let(:expiry_date) { nil }
+
+ it 'returns empty json response' do
+ expect(json_response).to be_empty
+ end
+ end
+ end
end
describe 'DELETE destroy' do
@@ -536,4 +573,19 @@ RSpec.describe Projects::ProjectMembersController do
end
end
end
+
+ describe 'POST resend_invite' do
+ let(:member) { create(:project_member, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it 'is successful' do
+ post :resend_invite, params: { namespace_id: project.namespace, project_id: project, id: member }
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
end
diff --git a/spec/controllers/projects/registry/tags_controller_spec.rb b/spec/controllers/projects/registry/tags_controller_spec.rb
index 6adee35b60a..59df9e78a3c 100644
--- a/spec/controllers/projects/registry/tags_controller_spec.rb
+++ b/spec/controllers/projects/registry/tags_controller_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe Projects::Registry::TagsController do
it 'tracks the event' do
expect_delete_tags(%w[test.])
- expect(controller).to receive(:track_event).with(:delete_tag)
+ expect(controller).to receive(:track_event).with(:delete_tag, {})
destroy_tag('test.')
end
diff --git a/spec/controllers/projects/releases/evidences_controller_spec.rb b/spec/controllers/projects/releases/evidences_controller_spec.rb
index d5a9665d6a5..0ec4cdf2a31 100644
--- a/spec/controllers/projects/releases/evidences_controller_spec.rb
+++ b/spec/controllers/projects/releases/evidences_controller_spec.rb
@@ -113,18 +113,6 @@ RSpec.describe Projects::Releases::EvidencesController do
it_behaves_like 'does not show the issue in evidence'
- context 'when the issue is confidential' do
- let(:issue) { create(:issue, :confidential, project: project) }
-
- it_behaves_like 'does not show the issue in evidence'
- end
-
- context 'when the user is the author of the confidential issue' do
- let(:issue) { create(:issue, :confidential, project: project, author: user) }
-
- it_behaves_like 'does not show the issue in evidence'
- end
-
context 'when project is private' do
let(:project) { create(:project, :repository, :private) }
@@ -143,32 +131,16 @@ RSpec.describe Projects::Releases::EvidencesController do
it_behaves_like 'does not show the issue in evidence'
- context 'when the issue is confidential' do
- let(:issue) { create(:issue, :confidential, project: project) }
-
- it_behaves_like 'does not show the issue in evidence'
- end
-
- context 'when the user is the author of the confidential issue' do
- let(:issue) { create(:issue, :confidential, project: project, author: user) }
-
- it_behaves_like 'does not show the issue in evidence'
- end
-
context 'when project is private' do
let(:project) { create(:project, :repository, :private) }
- it 'returns evidence ' do
- subject
-
- expect(json_response).to eq(evidence.summary)
- end
+ it_behaves_like 'does not show the issue in evidence'
end
context 'when project restricts the visibility of issues to project members only' do
let(:project) { create(:project, :repository, :issues_private) }
- it_behaves_like 'evidence not found'
+ it_behaves_like 'does not show the issue in evidence'
end
end
diff --git a/spec/controllers/projects/releases_controller_spec.rb b/spec/controllers/projects/releases_controller_spec.rb
index 45beccfeef5..420d818daeb 100644
--- a/spec/controllers/projects/releases_controller_spec.rb
+++ b/spec/controllers/projects/releases_controller_spec.rb
@@ -194,14 +194,6 @@ RSpec.describe Projects::ReleasesController do
end
end
- context 'when feature flag `release_show_page` is disabled' do
- before do
- stub_feature_flags(release_show_page: false)
- end
-
- it_behaves_like 'not found'
- end
-
context 'when release does not exist' do
let(:tag) { 'non-existent-tag' }
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index 66f20bd50c4..2443a823070 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -73,4 +73,45 @@ RSpec.describe Projects::RunnersController do
expect(runner.active).to eq(false)
end
end
+
+ describe '#toggle_shared_runners' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+
+ it 'toggles shared_runners_enabled when the group allows shared runners' do
+ project.update!(shared_runners_enabled: true)
+
+ post :toggle_shared_runners, params: params
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(project.shared_runners_enabled).to eq(false)
+ end
+
+ it 'toggles shared_runners_enabled when the group disallows shared runners but allows overrides' do
+ group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true)
+ project.update!(shared_runners_enabled: false)
+
+ post :toggle_shared_runners, params: params
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(project.shared_runners_enabled).to eq(true)
+ end
+
+ it 'does not enable if the group disallows shared runners' do
+ group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false)
+ project.update!(shared_runners_enabled: false)
+
+ post :toggle_shared_runners, params: params
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(project.shared_runners_enabled).to eq(false)
+ expect(flash[:alert]).to eq("Cannot enable shared runners because parent group does not allow it")
+ end
+ end
end
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index 7f558ad9231..75135839a06 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -206,7 +206,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.5.0' do
before do
- prepare_knative_stubs(knative_05_service(knative_stub_options))
+ prepare_knative_stubs(knative_05_service(**knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -214,7 +214,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.6.0' do
before do
- prepare_knative_stubs(knative_06_service(knative_stub_options))
+ prepare_knative_stubs(knative_06_service(**knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -222,7 +222,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.7.0' do
before do
- prepare_knative_stubs(knative_07_service(knative_stub_options))
+ prepare_knative_stubs(knative_07_service(**knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -230,7 +230,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.9.0' do
before do
- prepare_knative_stubs(knative_09_service(knative_stub_options))
+ prepare_knative_stubs(knative_09_service(**knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -275,7 +275,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.5.0' do
before do
- prepare_knative_stubs(knative_05_service(knative_stub_options))
+ prepare_knative_stubs(knative_05_service(**knative_stub_options))
end
include_examples 'GET #index with data'
@@ -283,7 +283,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.6.0' do
before do
- prepare_knative_stubs(knative_06_service(knative_stub_options))
+ prepare_knative_stubs(knative_06_service(**knative_stub_options))
end
include_examples 'GET #index with data'
@@ -291,7 +291,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.7.0' do
before do
- prepare_knative_stubs(knative_07_service(knative_stub_options))
+ prepare_knative_stubs(knative_07_service(**knative_stub_options))
end
include_examples 'GET #index with data'
@@ -299,7 +299,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.9.0' do
before do
- prepare_knative_stubs(knative_09_service(knative_stub_options))
+ prepare_knative_stubs(knative_09_service(**knative_stub_options))
end
include_examples 'GET #index with data'
diff --git a/spec/controllers/projects/settings/access_tokens_controller_spec.rb b/spec/controllers/projects/settings/access_tokens_controller_spec.rb
index 4743ab2b7c1..ff52b2a765a 100644
--- a/spec/controllers/projects/settings/access_tokens_controller_spec.rb
+++ b/spec/controllers/projects/settings/access_tokens_controller_spec.rb
@@ -5,27 +5,21 @@ require('spec_helper')
RSpec.describe Projects::Settings::AccessTokensController do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+ let_it_be(:bot_user) { create(:user, :project_bot) }
before_all do
project.add_maintainer(user)
+ project.add_maintainer(bot_user)
end
before do
sign_in(user)
end
- shared_examples 'feature unavailability' do
- context 'when flag is disabled' do
+ shared_examples 'feature unavailable' do
+ context 'user is not a maintainer' do
before do
- stub_feature_flags(resource_access_token: false)
- end
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when environment is Gitlab.com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
+ project.add_developer(user)
end
it { is_expected.to have_gitlab_http_status(:not_found) }
@@ -35,156 +29,25 @@ RSpec.describe Projects::Settings::AccessTokensController do
describe '#index' do
subject { get :index, params: { namespace_id: project.namespace, project_id: project } }
- it_behaves_like 'feature unavailability'
-
- context 'when feature is available' do
- let_it_be(:bot_user) { create(:user, :project_bot) }
- let_it_be(:active_project_access_token) { create(:personal_access_token, user: bot_user) }
- let_it_be(:inactive_project_access_token) { create(:personal_access_token, :revoked, user: bot_user) }
-
- before_all do
- project.add_maintainer(bot_user)
- end
-
- before do
- enable_feature
- end
-
- it 'retrieves active project access tokens' do
- subject
-
- expect(assigns(:active_project_access_tokens)).to contain_exactly(active_project_access_token)
- end
-
- it 'retrieves inactive project access tokens' do
- subject
-
- expect(assigns(:inactive_project_access_tokens)).to contain_exactly(inactive_project_access_token)
- end
-
- it 'lists all available scopes' do
- subject
-
- expect(assigns(:scopes)).to eq(Gitlab::Auth.resource_bot_scopes)
- end
-
- it 'retrieves newly created personal access token value' do
- token_value = 'random-value'
- allow(PersonalAccessToken).to receive(:redis_getdel).with("#{user.id}:#{project.id}").and_return(token_value)
-
- subject
-
- expect(assigns(:new_project_access_token)).to eq(token_value)
- end
- end
+ it_behaves_like 'feature unavailable'
+ it_behaves_like 'project access tokens available #index'
end
- describe '#create', :clean_gitlab_redis_shared_state do
- subject { post :create, params: { namespace_id: project.namespace, project_id: project }.merge(project_access_token: access_token_params) }
-
- let_it_be(:access_token_params) { {} }
-
- it_behaves_like 'feature unavailability'
-
- context 'when feature is available' do
- let_it_be(:access_token_params) { { name: 'Nerd bot', scopes: ["api"], expires_at: 1.month.since.to_date } }
-
- before do
- enable_feature
- end
-
- def created_token
- PersonalAccessToken.order(:created_at).last
- end
-
- it 'returns success message' do
- subject
-
- expect(response.flash[:notice]).to match(/\AYour new project access token has been created./i)
- end
-
- it 'creates project access token' do
- subject
-
- expect(created_token.name).to eq(access_token_params[:name])
- expect(created_token.scopes).to eq(access_token_params[:scopes])
- expect(created_token.expires_at).to eq(access_token_params[:expires_at])
- end
-
- it 'creates project bot user' do
- subject
-
- expect(created_token.user).to be_project_bot
- end
-
- it 'stores newly created token redis store' do
- expect(PersonalAccessToken).to receive(:redis_store!)
-
- subject
- end
+ describe '#create' do
+ let(:access_token_params) { { name: 'Nerd bot', scopes: ["api"], expires_at: Date.today + 1.month } }
- it { expect { subject }.to change { User.count }.by(1) }
- it { expect { subject }.to change { PersonalAccessToken.count }.by(1) }
-
- context 'when unsuccessful' do
- before do
- allow_next_instance_of(ResourceAccessTokens::CreateService) do |service|
- allow(service).to receive(:execute).and_return ServiceResponse.error(message: 'Failed!')
- end
- end
+ subject { post :create, params: { namespace_id: project.namespace, project_id: project }.merge(project_access_token: access_token_params) }
- it { expect(subject).to render_template(:index) }
- end
- end
+ it_behaves_like 'feature unavailable'
+ it_behaves_like 'project access tokens available #create'
end
- describe '#revoke' do
- subject { put :revoke, params: { namespace_id: project.namespace, project_id: project, id: project_access_token } }
-
- let_it_be(:bot_user) { create(:user, :project_bot) }
- let_it_be(:project_access_token) { create(:personal_access_token, user: bot_user) }
-
- before_all do
- project.add_maintainer(bot_user)
- end
-
- it_behaves_like 'feature unavailability'
+ describe '#revoke', :sidekiq_inline do
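+    # :sidekiq_inline runs background jobs synchronously; presumably needed so the revocation side effects checked by the shared examples take place within the request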
+ let(:project_access_token) { create(:personal_access_token, user: bot_user) }
- context 'when feature is available' do
- before do
- enable_feature
- end
-
- it 'revokes token access' do
- subject
-
- expect(project_access_token.reload.revoked?).to be true
- end
-
- it 'removed membership of bot user' do
- subject
-
- expect(project.reload.bots).not_to include(bot_user)
- end
-
- it 'blocks project bot user' do
- subject
-
- expect(bot_user.reload.blocked?).to be true
- end
-
- it 'converts issuables of the bot user to ghost user' do
- issue = create(:issue, author: bot_user)
-
- subject
-
- expect(issue.reload.author.ghost?).to be true
- end
- end
- end
+ subject { put :revoke, params: { namespace_id: project.namespace, project_id: project, id: project_access_token } }
- def enable_feature
- allow(Gitlab).to receive(:com?).and_return(false)
- stub_feature_flags(resource_access_token: true)
+ it_behaves_like 'feature unavailable'
+ it_behaves_like 'project access tokens available #revoke'
end
end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index 8498ff49826..7a6e11d53d4 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -230,6 +230,21 @@ RSpec.describe Projects::Settings::CiCdController do
end
end
+ context 'when forward_deployment_enabled is not specified' do
+ let(:params) { { ci_cd_settings_attributes: { forward_deployment_enabled: false } } }
+
+ before do
+ project.ci_cd_settings.update!(forward_deployment_enabled: nil)
+ end
+
+ it 'sets forward deployment enabled' do
+ subject
+
+ project.reload
+ expect(project.ci_forward_deployment_enabled).to eq(false)
+ end
+ end
+
context 'when max_artifacts_size is specified' do
let(:params) { { max_artifacts_size: 10 } }
@@ -266,4 +281,21 @@ RSpec.describe Projects::Settings::CiCdController do
end
end
end
+
+ describe 'GET #runner_setup_scripts' do
+ it 'renders the setup scripts' do
+ get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64', namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("install")
+ expect(json_response).to have_key("register")
+ end
+
+ it 'renders errors if they occur' do
+ get :runner_setup_scripts, params: { os: 'foo', arch: 'bar', namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to have_key("errors")
+ end
+ end
end
diff --git a/spec/controllers/projects/settings/operations_controller_spec.rb b/spec/controllers/projects/settings/operations_controller_spec.rb
index ca1b0d2fe15..9fc9da1265e 100644
--- a/spec/controllers/projects/settings/operations_controller_spec.rb
+++ b/spec/controllers/projects/settings/operations_controller_spec.rb
@@ -6,9 +6,12 @@ RSpec.describe Projects::Settings::OperationsController do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
+ before_all do
+ project.add_maintainer(user)
+ end
+
before do
sign_in(user)
- project.add_maintainer(user)
end
shared_examples 'PATCHable' do
@@ -163,10 +166,6 @@ RSpec.describe Projects::Settings::OperationsController do
context 'updating each incident management setting' do
let(:new_incident_management_settings) { {} }
- before do
- project.add_maintainer(user)
- end
-
shared_examples 'a gitlab tracking event' do |params, event_key|
it "creates a gitlab tracking event #{event_key}" do
new_incident_management_settings = params
@@ -194,10 +193,6 @@ RSpec.describe Projects::Settings::OperationsController do
end
describe 'POST #reset_pagerduty_token' do
- before do
- project.add_maintainer(user)
- end
-
context 'with existing incident management setting has active PagerDuty webhook' do
let!(:incident_management_setting) do
create(:project_incident_management_setting, project: project, pagerduty_active: true)
@@ -392,10 +387,6 @@ RSpec.describe Projects::Settings::OperationsController do
end
describe 'POST #reset_alerting_token' do
- before do
- project.add_maintainer(user)
- end
-
context 'with existing alerting setting' do
let!(:alerting_setting) do
create(:project_alerting_setting, project: project)
@@ -478,6 +469,104 @@ RSpec.describe Projects::Settings::OperationsController do
end
end
+ context 'tracing integration' do
+ describe 'GET #show' do
+ context 'with existing setting' do
+ let_it_be(:setting) do
+ create(:project_tracing_setting, project: project)
+ end
+
+ it 'loads existing setting' do
+ get :show, params: project_params(project)
+
+ expect(controller.helpers.tracing_setting).to eq(setting)
+ end
+ end
+
+ context 'without an existing setting' do
+ it 'builds a new setting' do
+ get :show, params: project_params(project)
+
+ expect(controller.helpers.tracing_setting).to be_new_record
+ end
+ end
+ end
+
+ describe 'PATCH #update' do
+ let_it_be(:external_url) { 'https://gitlab.com' }
+ let(:params) do
+ {
+ tracing_setting_attributes: {
+ external_url: external_url
+ }
+ }
+ end
+
+ it_behaves_like 'PATCHable'
+
+ describe 'gitlab tracking', :snowplow do
+ shared_examples 'event tracking' do
+ it 'tracks an event' do
+ expect_snowplow_event(
+ category: 'project:operations:tracing',
+ action: 'external_url_populated'
+ )
+ end
+ end
+
+ shared_examples 'no event tracking' do
+ it 'does not track an event' do
+ expect_no_snowplow_event
+ end
+ end
+
+ before do
+ make_request
+ end
+
+ subject(:make_request) do
+ patch :update, params: project_params(project, params), format: :json
+ end
+
+ context 'without existing setting' do
+ context 'when creating a new setting' do
+ it_behaves_like 'event tracking'
+ end
+
+ context 'with invalid external_url' do
+ let_it_be(:external_url) { nil }
+
+ it_behaves_like 'no event tracking'
+ end
+ end
+
+ context 'with existing setting' do
+ let_it_be(:existing_setting) do
+ create(:project_tracing_setting,
+ project: project,
+ external_url: external_url)
+ end
+
+ context 'when changing external_url' do
+ let_it_be(:external_url) { 'https://example.com' }
+
+ it_behaves_like 'no event tracking'
+ end
+
+ context 'with unchanged external_url' do
+ it_behaves_like 'no event tracking'
+ end
+
+ context 'with invalid external_url' do
+ let_it_be(:external_url) { nil }
+
+ it_behaves_like 'no event tracking'
+ end
+ end
+ end
+ end
+ end
+
private
def project_params(project, params = {})
diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb
index d0e412dfdb8..6b394fab14c 100644
--- a/spec/controllers/projects/snippets_controller_spec.rb
+++ b/spec/controllers/projects/snippets_controller_spec.rb
@@ -82,215 +82,6 @@ RSpec.describe Projects::SnippetsController do
end
end
- describe 'POST #create' do
- def create_snippet(project, snippet_params = {}, additional_params = {})
- sign_in(user)
-
- project.add_developer(user)
-
- post :create, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- project_snippet: { title: 'Title', content: 'Content', description: 'Description' }.merge(snippet_params)
- }.merge(additional_params)
-
- Snippet.last
- end
-
- it 'creates the snippet correctly' do
- snippet = create_snippet(project, visibility_level: Snippet::PRIVATE)
-
- expect(snippet.title).to eq('Title')
- expect(snippet.content).to eq('Content')
- expect(snippet.description).to eq('Description')
- end
-
- context 'when the snippet is spam' do
- before do
- allow_next_instance_of(Spam::AkismetService) do |instance|
- allow(instance).to receive(:spam?).and_return(true)
- end
- end
-
- context 'when the snippet is private' do
- it 'creates the snippet' do
- expect { create_snippet(project, visibility_level: Snippet::PRIVATE) }
- .to change { Snippet.count }.by(1)
- end
- end
-
- context 'when the snippet is public' do
- it 'rejects the snippet' do
- expect { create_snippet(project, visibility_level: Snippet::PUBLIC) }
- .not_to change { Snippet.count }
- expect(response).to render_template(:new)
- end
-
- it 'creates a spam log' do
- expect { create_snippet(project, visibility_level: Snippet::PUBLIC) }
- .to log_spam(title: 'Title', user_id: user.id, noteable_type: 'ProjectSnippet')
- end
-
- it 'renders :new with reCAPTCHA disabled' do
- stub_application_setting(recaptcha_enabled: false)
-
- create_snippet(project, visibility_level: Snippet::PUBLIC)
-
- expect(response).to render_template(:new)
- end
-
- context 'reCAPTCHA enabled' do
- before do
- stub_application_setting(recaptcha_enabled: true)
- end
-
- it 'renders :verify with reCAPTCHA enabled' do
- create_snippet(project, visibility_level: Snippet::PUBLIC)
-
- expect(response).to render_template(:verify)
- end
-
- it 'renders snippet page when reCAPTCHA verified' do
- spammy_title = 'Whatever'
-
- spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
- create_snippet(project,
- { visibility_level: Snippet::PUBLIC },
- { spam_log_id: spam_logs.last.id,
- recaptcha_verification: true })
-
- expect(response).to redirect_to(project_snippet_path(project, Snippet.last))
- end
- end
- end
- end
- end
-
- describe 'PUT #update' do
- let(:visibility_level) { Snippet::PUBLIC }
- let(:snippet) { create :project_snippet, author: user, project: project, visibility_level: visibility_level }
-
- def update_snippet(snippet_params = {}, additional_params = {})
- sign_in(user)
-
- project.add_developer(user)
-
- put :update, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: snippet,
- project_snippet: { title: 'Title', content: 'Content' }.merge(snippet_params)
- }.merge(additional_params)
-
- snippet.reload
- end
-
- context 'when the snippet is spam' do
- before do
- allow_next_instance_of(Spam::AkismetService) do |instance|
- allow(instance).to receive(:spam?).and_return(true)
- end
- end
-
- context 'when the snippet is private' do
- let(:visibility_level) { Snippet::PRIVATE }
-
- it 'updates the snippet' do
- expect { update_snippet(title: 'Foo') }
- .to change { snippet.reload.title }.to('Foo')
- end
- end
-
- context 'when the snippet is public' do
- it 'rejects the snippet' do
- expect { update_snippet(title: 'Foo') }
- .not_to change { snippet.reload.title }
- end
-
- it 'creates a spam log' do
- expect { update_snippet(title: 'Foo') }
- .to log_spam(title: 'Foo', user_id: user.id, noteable_type: 'ProjectSnippet')
- end
-
- it 'renders :edit with reCAPTCHA disabled' do
- stub_application_setting(recaptcha_enabled: false)
-
- update_snippet(title: 'Foo')
-
- expect(response).to render_template(:edit)
- end
-
- context 'reCAPTCHA enabled' do
- before do
- stub_application_setting(recaptcha_enabled: true)
- end
-
- it 'renders :verify with reCAPTCHA enabled' do
- update_snippet(title: 'Foo')
-
- expect(response).to render_template(:verify)
- end
-
- it 'renders snippet page when reCAPTCHA verified' do
- spammy_title = 'Whatever'
-
- spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
- snippet = update_snippet({ title: spammy_title },
- { spam_log_id: spam_logs.last.id,
- recaptcha_verification: true })
-
- expect(response).to redirect_to(project_snippet_path(project, snippet))
- end
- end
- end
-
- context 'when the private snippet is made public' do
- let(:visibility_level) { Snippet::PRIVATE }
-
- it 'rejects the snippet' do
- expect { update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC) }
- .not_to change { snippet.reload.title }
- end
-
- it 'creates a spam log' do
- expect { update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC) }
- .to log_spam(title: 'Foo', user_id: user.id, noteable_type: 'ProjectSnippet')
- end
-
- it 'renders :edit with reCAPTCHA disabled' do
- stub_application_setting(recaptcha_enabled: false)
-
- update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC)
-
- expect(response).to render_template(:edit)
- end
-
- context 'reCAPTCHA enabled' do
- before do
- stub_application_setting(recaptcha_enabled: true)
- end
-
- it 'renders :verify' do
- update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC)
-
- expect(response).to render_template(:verify)
- end
-
- it 'renders snippet page' do
- spammy_title = 'Whatever'
-
- spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
- snippet = update_snippet({ title: spammy_title, visibility_level: Snippet::PUBLIC },
- { spam_log_id: spam_logs.last.id,
- recaptcha_verification: true })
-
- expect(response).to redirect_to(project_snippet_path(project, snippet))
- end
- end
- end
- end
- end
-
describe 'POST #mark_as_spam' do
let_it_be(:snippet) { create(:project_snippet, :private, project: project, author: user) }
@@ -329,12 +120,6 @@ RSpec.describe Projects::SnippetsController do
expect(assigns(:snippet)).to eq(project_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
-
- it 'renders the blob from the repository' do
- subject
-
- expect(assigns(:blob)).to eq(project_snippet.blobs.first)
- end
end
%w[show raw].each do |action|
@@ -395,6 +180,16 @@ RSpec.describe Projects::SnippetsController do
end
end
+ describe 'GET #show as JSON' do
+ it 'renders the blob from the repository' do
+ project_snippet = create(:project_snippet, :public, :repository, project: project, author: user)
+
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }, format: :json
+
+ expect(assigns(:blob)).to eq(project_snippet.blobs.first)
+ end
+ end
+
describe "GET #show for embeddable content" do
let(:project_snippet) { create(:project_snippet, :repository, snippet_permission, project: project, author: user) }
let(:extra_params) { {} }
@@ -533,62 +328,4 @@ RSpec.describe Projects::SnippetsController do
it_behaves_like 'content disposition headers'
end
end
-
- describe 'DELETE #destroy' do
- let_it_be(:snippet) { create(:project_snippet, :private, project: project, author: user) }
-
- let(:params) do
- {
- namespace_id: project.namespace.to_param,
- project_id: project,
- id: snippet.to_param
- }
- end
-
- subject { delete :destroy, params: params }
-
- context 'when current user has ability to destroy the snippet' do
- before do
- sign_in(user)
- end
-
- it 'removes the snippet' do
- subject
-
- expect { snippet.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
-
- context 'when snippet is succesfuly destroyed' do
- it 'redirects to the project snippets page' do
- subject
-
- expect(response).to redirect_to(project_snippets_path(project))
- end
- end
-
- context 'when snippet is not destroyed' do
- before do
- allow(snippet).to receive(:destroy).and_return(false)
- controller.instance_variable_set(:@snippet, snippet)
- end
-
- it 'renders the snippet page with errors' do
- subject
-
- expect(flash[:alert]).to eq('Failed to remove snippet.')
- expect(response).to redirect_to(project_snippet_path(project, snippet))
- end
- end
- end
-
- context 'when current_user does not have ability to destroy the snippet' do
- it 'responds with status 404' do
- sign_in(other_user)
-
- subject
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
end
diff --git a/spec/controllers/projects/static_site_editor_controller_spec.rb b/spec/controllers/projects/static_site_editor_controller_spec.rb
index 7883c7e6f81..6ea730cbf27 100644
--- a/spec/controllers/projects/static_site_editor_controller_spec.rb
+++ b/spec/controllers/projects/static_site_editor_controller_spec.rb
@@ -5,9 +5,11 @@ require 'spec_helper'
RSpec.describe Projects::StaticSiteEditorController do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { create(:user) }
- let(:data) { instance_double(Hash) }
+ let(:data) { { key: 'value' } }
describe 'GET show' do
+ render_views
+
let(:default_params) do
{
namespace_id: project.namespace,
@@ -50,41 +52,83 @@ RSpec.describe Projects::StaticSiteEditorController do
end
end
- %w[developer maintainer].each do |role|
- context "as #{role}" do
- before_all do
- project.add_role(user, role)
+ context "as developer" do
+ before do
+ allow(Gitlab::UsageDataCounters::StaticSiteEditorCounter).to receive(:increment_views_count)
+ project.add_role(user, 'developer')
+ sign_in(user)
+ get :show, params: default_params
+ end
+
+ it 'increases the views counter' do
+ expect(Gitlab::UsageDataCounters::StaticSiteEditorCounter).to have_received(:increment_views_count)
+ end
+
+ it 'renders the edit page' do
+ expect(response).to render_template(:show)
+ end
+
+ it 'assigns ref and path variables' do
+ expect(assigns(:ref)).to eq('master')
+ expect(assigns(:path)).to eq('README.md')
+ end
+
+ context 'when combination of ref and path is incorrect' do
+ let(:default_params) { super().merge(id: 'unknown') }
+
+ it 'responds with 404 page' do
+ expect(response).to have_gitlab_http_status(:not_found)
end
+ end
+
+ context 'when invalid config file' do
+ let(:service_response) { ServiceResponse.error(message: 'invalid') }
- before do
- sign_in(user)
- get :show, params: default_params
+ it 'redirects to project page and flashes error message' do
+ expect(response).to redirect_to(project_path(project))
+ expect(response).to set_flash[:alert].to('invalid')
end
+ end
- it 'renders the edit page' do
- expect(response).to render_template(:show)
+ context 'with a service response payload containing multiple data types' do
+ let(:data) do
+ {
+ a_string: 'string',
+ an_array: [
+ {
+ foo: 'bar'
+ }
+ ],
+ an_integer: 123,
+ a_hash: {
+ a_deeper_hash: {
+ foo: 'bar'
+ }
+ },
+ a_boolean: true
+ }
end
- it 'assigns a required variables' do
- expect(assigns(:data)).to eq(data)
- expect(assigns(:ref)).to eq('master')
- expect(assigns(:path)).to eq('README.md')
+ let(:assigns_data) { assigns(:data) }
+
+ it 'leaves data values which are strings as strings' do
+ expect(assigns_data[:a_string]).to eq('string')
end
- context 'when combination of ref and path is incorrect' do
- let(:default_params) { super().merge(id: 'unknown') }
+ it 'leaves data values which are integers as integers' do
+ expect(assigns_data[:an_integer]).to eq(123)
+ end
- it 'responds with 404 page' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ it 'serializes data values which are booleans to JSON' do
+ expect(assigns_data[:a_boolean]).to eq('true')
end
- context 'when invalid config file' do
- let(:service_response) { ServiceResponse.error(message: 'invalid') }
+ it 'serializes data values which are arrays to JSON' do
+ expect(assigns_data[:an_array]).to eq('[{"foo":"bar"}]')
+ end
- it 'returns 422' do
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
+ it 'serializes data values which are hashes to JSON' do
+ expect(assigns_data[:a_hash]).to eq('{"a_deeper_hash":{"foo":"bar"}}')
end
end
end
diff --git a/spec/controllers/projects/tags_controller_spec.rb b/spec/controllers/projects/tags_controller_spec.rb
index d213d003bed..57760088183 100644
--- a/spec/controllers/projects/tags_controller_spec.rb
+++ b/spec/controllers/projects/tags_controller_spec.rb
@@ -131,4 +131,25 @@ RSpec.describe Projects::TagsController do
end
end
end
+
+ describe 'DELETE #destroy' do
+ let(:tag) { project.repository.add_tag(user, 'fake-tag', 'master') }
+ let(:request) do
+ delete(:destroy, params: { id: tag.name, namespace_id: project.namespace.to_param, project_id: project })
+ end
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ it 'deletes tag' do
+ request
+
+ expect(response).to be_successful
+ expect(response.body).to include("Tag was removed")
+
+ expect(project.repository.find_tag(tag.name)).not_to be_present
+ end
+ end
end
diff --git a/spec/controllers/projects/tracings_controller_spec.rb b/spec/controllers/projects/tracings_controller_spec.rb
new file mode 100644
index 00000000000..1f8a68cc861
--- /dev/null
+++ b/spec/controllers/projects/tracings_controller_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::TracingsController do
+ let_it_be(:user) { create(:user) }
+
+ describe 'GET show' do
+ shared_examples 'user with read access' do |visibility_level|
+ let(:project) { create(:project, visibility_level) }
+
+ %w[developer maintainer].each do |role|
+ context "with a #{visibility_level} project and #{role} role" do
+ before do
+ project.add_role(user, role)
+ end
+
+ it 'renders OK' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
+ end
+ end
+ end
+ end
+
+ shared_examples 'user without read access' do |visibility_level|
+ let(:project) { create(:project, visibility_level) }
+
+ %w[guest reporter].each do |role|
+ context "with a #{visibility_level} project and #{role} role" do
+ before do
+ project.add_role(user, role)
+ end
+
+ it 'returns 404' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ context 'with maintainer role' do
+ it_behaves_like 'user with read access', :public
+ it_behaves_like 'user with read access', :internal
+ it_behaves_like 'user with read access', :private
+ end
+
+ context 'without maintainer role' do
+ it_behaves_like 'user without read access', :public
+ it_behaves_like 'user without read access', :internal
+ it_behaves_like 'user without read access', :private
+ end
+ end
+end
diff --git a/spec/controllers/projects/web_ide_terminals_controller_spec.rb b/spec/controllers/projects/web_ide_terminals_controller_spec.rb
index 3eb3d5da351..09c471d2885 100644
--- a/spec/controllers/projects/web_ide_terminals_controller_spec.rb
+++ b/spec/controllers/projects/web_ide_terminals_controller_spec.rb
@@ -9,17 +9,20 @@ RSpec.describe Projects::WebIdeTerminalsController do
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
- let_it_be(:project) { create(:project, :private, :repository, namespace: owner.namespace) }
+ let_it_be(:project) do
+ create(:project, :private, :repository, namespace: owner.namespace).tap do |project|
+ project.add_maintainer(maintainer)
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ project.add_guest(guest)
+ end
+ end
+
let(:pipeline) { create(:ci_pipeline, project: project, source: :webide, config_source: :webide_source, user: user) }
let(:job) { create(:ci_build, pipeline: pipeline, user: user, project: project) }
let(:user) { maintainer }
before do
- project.add_maintainer(maintainer)
- project.add_developer(developer)
- project.add_reporter(reporter)
- project.add_guest(guest)
-
sign_in(user)
end
@@ -158,11 +161,11 @@ RSpec.describe Projects::WebIdeTerminalsController do
end
context 'access rights' do
- before do
- subject
+ it_behaves_like 'terminal access rights' do
+ before do
+ subject
+ end
end
-
- it_behaves_like 'terminal access rights'
end
it 'increases the web ide terminal counter' do
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index e4374a8f104..0640f9e5724 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -213,13 +213,13 @@ RSpec.describe ProjectsController do
expect(assigns(:issuable_meta_data)).not_to be_nil
end
- it 'shows customize workflow page if wiki and issues are disabled' do
+ it 'shows activity page if wiki and issues are disabled' do
project.project_feature.update_attribute(:wiki_access_level, ProjectFeature::DISABLED)
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
get :show, params: { namespace_id: project.namespace, id: project }
- expect(response).to render_template("projects/_customize_workflow")
+ expect(response).to render_template("projects/_activity")
end
it 'shows activity if enabled by user' do
diff --git a/spec/controllers/registrations/experience_levels_controller_spec.rb b/spec/controllers/registrations/experience_levels_controller_spec.rb
index ee1acf3d93d..4be67f29107 100644
--- a/spec/controllers/registrations/experience_levels_controller_spec.rb
+++ b/spec/controllers/registrations/experience_levels_controller_spec.rb
@@ -85,16 +85,49 @@ RSpec.describe Registrations::ExperienceLevelsController do
end
end
- context 'when a namespace_path is sent' do
- it { is_expected.to have_gitlab_http_status(:redirect) }
- it { is_expected.to redirect_to(group_path(namespace)) }
- end
+ describe 'redirection' do
+ let(:project) { build(:project, namespace: namespace, creator: user, path: 'project-path') }
+ let(:issues_board) { build(:board, id: 123, project: project) }
+
+ before do
+ stub_experiment_for_user(
+ onboarding_issues: true,
+ default_to_issues_board: default_to_issues_board_xp?
+ )
+ allow_next_instance_of(LearnGitlab) do |learn_gitlab|
+ allow(learn_gitlab).to receive(:available?).and_return(learn_gitlab_available?)
+ allow(learn_gitlab).to receive(:project).and_return(project)
+ allow(learn_gitlab).to receive(:board).and_return(issues_board)
+ end
+ end
+
+ context 'when namespace_path param is missing' do
+ let(:params) { super().merge(namespace_path: nil) }
+
+ where(
+ default_to_issues_board_xp?: [true, false],
+ learn_gitlab_available?: [true, false]
+ )
- context 'when no namespace_path is sent' do
- let(:params) { super().merge(namespace_path: nil) }
+ with_them do
+ it { is_expected.to redirect_to('/') }
+ end
+ end
- it { is_expected.to have_gitlab_http_status(:redirect) }
- it { is_expected.to redirect_to(root_path) }
+ context 'when we have a namespace_path param' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:default_to_issues_board_xp?, :learn_gitlab_available?, :path) do
+ true | true | '/group-path/project-path/-/boards/123'
+ true | false | '/group-path'
+ false | true | '/group-path'
+ false | false | '/group-path'
+ end
+
+ with_them do
+ it { is_expected.to redirect_to(path) }
+ end
+ end
end
describe 'applying the chosen level' do
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 60957dc72e6..501d8d4a78d 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -37,65 +37,123 @@ RSpec.describe RegistrationsController do
expect(response).to redirect_to(new_user_session_path(anchor: 'register-pane'))
end
end
+ end
- context 'with sign up flow and terms_opt_in experiment being enabled' do
- before do
- stub_experiment(signup_flow: true, terms_opt_in: true)
- end
+ describe '#create' do
+ let(:base_user_params) { { first_name: 'first', last_name: 'last', username: 'new_username', email: 'new@user.com', password: 'Any_password' } }
+ let(:user_params) { { user: base_user_params } }
+
+ subject { post(:create, params: user_params) }
- context 'when user is not part of the experiment' do
+ context '`blocked_pending_approval` state' do
+ context 'when the feature is enabled' do
before do
- stub_experiment_for_user(signup_flow: true, terms_opt_in: false)
+ stub_feature_flags(admin_approval_for_new_user_signups: true)
end
- it 'tracks event with right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Growth::Acquisition::Experiment::TermsOptIn',
- 'start',
- label: anything,
- property: 'control_group'
- )
+ context 'when the `require_admin_approval_after_user_signup` setting is turned on' do
+ before do
+ stub_application_setting(require_admin_approval_after_user_signup: true)
+ end
- subject
+ it 'signs up the user in `blocked_pending_approval` state' do
+ subject
+ created_user = User.find_by(email: 'new@user.com')
+
+ expect(created_user).to be_present
+ expect(created_user.blocked_pending_approval?).to eq(true)
+ end
+
+ it 'does not log in the user after sign up' do
+ subject
+
+ expect(controller.current_user).to be_nil
+ end
+
+ it 'shows flash message after signing up' do
+ subject
+
+ expect(response).to redirect_to(new_user_session_path(anchor: 'login-pane'))
+ expect(flash[:notice])
+ .to eq('You have signed up successfully. However, we could not sign you in because your account is awaiting approval from your GitLab administrator.')
+ end
+
+ context 'email confirmation' do
+ context 'when `send_user_confirmation_email` is true' do
+ before do
+ stub_application_setting(send_user_confirmation_email: true)
+ end
+
+ it 'does not send a confirmation email' do
+ expect { subject }
+ .not_to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ end
+ end
+ end
+ end
+
+ context 'when the `require_admin_approval_after_user_signup` setting is turned off' do
+ before do
+ stub_application_setting(require_admin_approval_after_user_signup: false)
+ end
+
+ it 'signs up the user in `active` state' do
+ subject
+ created_user = User.find_by(email: 'new@user.com')
+
+ expect(created_user).to be_present
+ expect(created_user.active?).to eq(true)
+ end
+
+ it 'does not show any flash message after signing up' do
+ subject
+
+ expect(flash[:notice]).to be_nil
+ end
+
+ context 'email confirmation' do
+ context 'when `send_user_confirmation_email` is true' do
+ before do
+ stub_application_setting(send_user_confirmation_email: true)
+ end
+
+ it 'sends a confirmation email' do
+ expect { subject }
+ .to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ end
+ end
+ end
end
end
- context 'when user is part of the experiment' do
+ context 'when the feature is disabled' do
before do
- stub_experiment_for_user(signup_flow: true, terms_opt_in: true)
+ stub_feature_flags(admin_approval_for_new_user_signups: false)
end
- it 'tracks event with right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Growth::Acquisition::Experiment::TermsOptIn',
- 'start',
- label: anything,
- property: 'experimental_group'
- )
+ context 'when the `require_admin_approval_after_user_signup` setting is turned on' do
+ before do
+ stub_application_setting(require_admin_approval_after_user_signup: true)
+ end
- subject
+ it 'signs up the user in `active` state' do
+ subject
+
+ created_user = User.find_by(email: 'new@user.com')
+ expect(created_user).to be_present
+ expect(created_user.active?).to eq(true)
+ end
end
end
end
- end
-
- describe '#create' do
- let(:base_user_params) { { name: 'new_user', username: 'new_username', email: 'new@user.com', password: 'Any_password' } }
- let(:user_params) { { user: base_user_params } }
context 'email confirmation' do
- around do |example|
- perform_enqueued_jobs do
- example.run
- end
- end
-
context 'when send_user_confirmation_email is false' do
it 'signs the user in' do
stub_application_setting(send_user_confirmation_email: false)
- expect { post(:create, params: user_params) }.not_to change { ActionMailer::Base.deliveries.size }
- expect(subject.current_user).not_to be_nil
+ expect { subject }.not_to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ expect(controller.current_user).not_to be_nil
end
end
@@ -111,10 +169,8 @@ RSpec.describe RegistrationsController do
end
it 'does not authenticate the user and sends a confirmation email' do
- post(:create, params: user_params)
-
- expect(ActionMailer::Base.deliveries.last.to.first).to eq(user_params[:user][:email])
- expect(subject.current_user).to be_nil
+ expect { subject }.to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ expect(controller.current_user).to be_nil
end
end
@@ -125,9 +181,8 @@ RSpec.describe RegistrationsController do
end
it 'authenticates the user and sends a confirmation email' do
- post(:create, params: user_params)
-
- expect(ActionMailer::Base.deliveries.last.to.first).to eq(user_params[:user][:email])
+ expect { subject }.to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ expect(controller.current_user).to be_present
expect(response).to redirect_to(users_sign_up_welcome_path)
end
end
@@ -137,7 +192,7 @@ RSpec.describe RegistrationsController do
it 'redirects to sign_in' do
stub_application_setting(signup_enabled: false)
- expect { post(:create, params: user_params) }.not_to change(User, :count)
+ expect { subject }.not_to change(User, :count)
expect(response).to redirect_to(new_user_session_path)
end
end
@@ -158,14 +213,14 @@ RSpec.describe RegistrationsController do
it 'displays an error when the reCAPTCHA is not solved' do
allow_any_instance_of(described_class).to receive(:verify_recaptcha).and_return(false)
- post(:create, params: user_params)
+ subject
expect(response).to render_template(:new)
expect(flash[:alert]).to eq(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
end
it 'redirects to the welcome page when the reCAPTCHA is solved' do
- post(:create, params: user_params)
+ subject
expect(response).to redirect_to(users_sign_up_welcome_path)
end
@@ -258,102 +313,26 @@ RSpec.describe RegistrationsController do
end
end
- context 'when terms are enforced' do
- before do
- enforce_terms
- end
-
- it 'redirects back with a notice when the checkbox was not checked' do
- post :create, params: user_params
-
- expect(flash[:alert]).to eq(_('You must accept our Terms of Service and privacy policy in order to register an account'))
- end
-
- it 'creates the user with agreement when terms are accepted' do
- post :create, params: user_params.merge(terms_opt_in: '1')
-
- expect(subject.current_user).to be_present
- expect(subject.current_user.terms_accepted?).to be(true)
- end
-
- context 'when experiment terms_opt_in is enabled' do
+ context 'terms of service' do
+ context 'when terms are enforced' do
before do
- stub_experiment(terms_opt_in: true)
- end
-
- context 'when user is part of the experiment' do
- before do
- stub_experiment_for_user(terms_opt_in: true)
- end
-
- it 'creates the user with accepted terms' do
- post :create, params: user_params
-
- expect(subject.current_user).to be_present
- expect(subject.current_user.terms_accepted?).to be(true)
- end
+ enforce_terms
end
- context 'when user is not part of the experiment' do
- before do
- stub_experiment_for_user(terms_opt_in: false)
- end
-
- it 'creates the user without accepted terms' do
- post :create, params: user_params
+ it 'creates the user with accepted terms' do
+ subject
- expect(flash[:alert]).to eq(_('You must accept our Terms of Service and privacy policy in order to register an account'))
- end
+ expect(controller.current_user).to be_present
+ expect(controller.current_user.terms_accepted?).to be(true)
end
end
- end
-
- describe 'tracking data' do
- context 'with sign up flow and terms_opt_in experiment being enabled' do
- subject { post :create, params: user_params }
-
- before do
- stub_experiment(signup_flow: true, terms_opt_in: true)
- end
-
- it 'records user for the terms_opt_in experiment' do
- expect(controller).to receive(:record_experiment_user).with(:terms_opt_in)
+ context 'when terms are not enforced' do
+ it 'creates the user without accepted terms' do
subject
- end
- context 'when user is not part of the experiment' do
- before do
- stub_experiment_for_user(signup_flow: true, terms_opt_in: false)
- end
-
- it 'tracks event with right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Growth::Acquisition::Experiment::TermsOptIn',
- 'end',
- label: anything,
- property: 'control_group'
- )
-
- subject
- end
- end
-
- context 'when user is part of the experiment' do
- before do
- stub_experiment_for_user(signup_flow: true, terms_opt_in: true)
- end
-
- it 'tracks event with right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Growth::Acquisition::Experiment::TermsOptIn',
- 'end',
- label: anything,
- property: 'experimental_group'
- )
-
- subject
- end
+ expect(controller.current_user).to be_present
+ expect(controller.current_user.terms_accepted?).to be(false)
end
end
end
@@ -361,30 +340,21 @@ RSpec.describe RegistrationsController do
it "logs a 'User Created' message" do
expect(Gitlab::AppLogger).to receive(:info).with(/\AUser Created: username=new_username email=new@user.com.+\z/).and_call_original
- post(:create, params: user_params)
+ subject
end
it 'handles when params are new_user' do
post(:create, params: { new_user: base_user_params })
- expect(subject.current_user).not_to be_nil
+ expect(controller.current_user).not_to be_nil
end
- context 'with the experimental signup flow enabled and the user is part of the experimental group' do
- before do
- stub_experiment(signup_flow: true)
- stub_experiment_for_user(signup_flow: true)
- end
-
- let(:base_user_params) { { first_name: 'First', last_name: 'Last', username: 'new_username', email: 'new@user.com', password: 'Any_password' } }
-
- it 'sets name from first and last name' do
- post :create, params: { new_user: base_user_params }
+ it 'sets name from first and last name' do
+ post :create, params: { new_user: base_user_params }
- expect(User.last.first_name).to eq(base_user_params[:first_name])
- expect(User.last.last_name).to eq(base_user_params[:last_name])
- expect(User.last.name).to eq("#{base_user_params[:first_name]} #{base_user_params[:last_name]}")
- end
+ expect(User.last.first_name).to eq(base_user_params[:first_name])
+ expect(User.last.last_name).to eq(base_user_params[:last_name])
+ expect(User.last.name).to eq("#{base_user_params[:first_name]} #{base_user_params[:last_name]}")
end
end
@@ -507,10 +477,16 @@ RSpec.describe RegistrationsController do
patch :update_registration, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
end
- before do
- sign_in(create(:user))
+ context 'without a signed in user' do
+ it { is_expected.to redirect_to new_user_registration_path }
end
- it { is_expected.to redirect_to(dashboard_projects_path)}
+ context 'with a signed in user' do
+ before do
+ sign_in(create(:user))
+ end
+
+ it { is_expected.to redirect_to(dashboard_projects_path)}
+ end
end
end
diff --git a/spec/controllers/runner_setup_controller_spec.rb b/spec/controllers/runner_setup_controller_spec.rb
new file mode 100644
index 00000000000..0b237500907
--- /dev/null
+++ b/spec/controllers/runner_setup_controller_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RunnerSetupController do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET #platforms' do
+ it 'renders the platforms' do
+ get :platforms
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("windows")
+ expect(json_response).to have_key("kubernetes")
+ end
+ end
+end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index f244392bbad..a0cb696828d 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -183,7 +183,7 @@ RSpec.describe SearchController do
end
it_behaves_like 'tracking unique hll events', :search_track_unique_users do
- subject { get :show, params: { scope: 'projects', search: 'term' }, format: format }
+ subject(:request) { get :show, params: { scope: 'projects', search: 'term' } }
let(:target_id) { 'i_search_total' }
let(:expected_type) { instance_of(String) }
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 688539f2a03..75bcc32e6f3 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -78,6 +78,9 @@ RSpec.describe SessionsController do
end
context 'when using standard authentications' do
+ let(:user) { create(:user) }
+ let(:post_action) { post(:create, params: { user: { login: user.username, password: user.password } }) }
+
context 'invalid password' do
it 'does not authenticate user' do
post(:create, params: { user: { login: 'invalid', password: 'invalid' } })
@@ -87,6 +90,36 @@ RSpec.describe SessionsController do
end
end
+ context 'a blocked user' do
+ it 'does not authenticate the user' do
+ user.block!
+ post_action
+
+ expect(@request.env['warden']).not_to be_authenticated
+ expect(flash[:alert]).to include('Your account has been blocked')
+ end
+ end
+
+ context 'a `blocked pending approval` user' do
+ it 'does not authenticate the user' do
+ user.block_pending_approval!
+ post_action
+
+ expect(@request.env['warden']).not_to be_authenticated
+ expect(flash[:alert]).to include('Your account is pending approval from your GitLab administrator and hence blocked')
+ end
+ end
+
+ context 'an internal user' do
+ it 'does not authenticate the user' do
+ user.ghost!
+ post_action
+
+ expect(@request.env['warden']).not_to be_authenticated
+ expect(flash[:alert]).to include('Your account does not have the required permission to login')
+ end
+ end
+
context 'when using valid password', :clean_gitlab_redis_shared_state do
let(:user) { create(:user) }
let(:user_params) { { login: user.username, password: user.password } }
diff --git a/spec/controllers/snippets_controller_spec.rb b/spec/controllers/snippets_controller_spec.rb
index 6517922d92a..1ccba7f9114 100644
--- a/spec/controllers/snippets_controller_spec.rb
+++ b/spec/controllers/snippets_controller_spec.rb
@@ -4,6 +4,8 @@ require 'spec_helper'
RSpec.describe SnippetsController do
let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:public_snippet) { create(:personal_snippet, :public, :repository, author: user) }
describe 'GET #index' do
let(:base_params) { { username: user.username } }
@@ -12,10 +14,6 @@ RSpec.describe SnippetsController do
it_behaves_like 'paginated collection' do
let(:collection) { Snippet.all }
let(:params) { { username: user.username } }
-
- before do
- create(:personal_snippet, :public, author: user)
- end
end
it 'renders snippets of a user when username is present' do
@@ -86,12 +84,6 @@ RSpec.describe SnippetsController do
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
-
- it 'renders the blob from the repository' do
- subject
-
- expect(assigns(:blob)).to eq(personal_snippet.blobs.first)
- end
end
context 'when the personal snippet is private' do
@@ -103,8 +95,7 @@ RSpec.describe SnippetsController do
end
context 'when signed in user is not the author' do
- let(:other_author) { create(:author) }
- let(:other_personal_snippet) { create(:personal_snippet, :private, author: other_author) }
+ let(:other_personal_snippet) { create(:personal_snippet, :private, author: other_user) }
it 'responds with status 404' do
get :show, params: { id: other_personal_snippet.to_param }
@@ -164,7 +155,7 @@ RSpec.describe SnippetsController do
end
context 'when the personal snippet is public' do
- let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository, author: user) }
+ let(:personal_snippet) { public_snippet }
context 'when signed in' do
before do
@@ -172,22 +163,22 @@ RSpec.describe SnippetsController do
end
it_behaves_like 'successful response' do
- subject { get :show, params: { id: personal_snippet.to_param } }
+ subject { get :show, params: { id: public_snippet.to_param } }
end
it 'responds with status 200 when embeddable content is requested' do
- get :show, params: { id: personal_snippet.to_param }, format: :js
+ get :show, params: { id: public_snippet.to_param }, format: :js
- expect(assigns(:snippet)).to eq(personal_snippet)
+ expect(assigns(:snippet)).to eq(public_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when not signed in' do
it 'renders the snippet' do
- get :show, params: { id: personal_snippet.to_param }
+ get :show, params: { id: public_snippet.to_param }
- expect(assigns(:snippet)).to eq(personal_snippet)
+ expect(assigns(:snippet)).to eq(public_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -200,7 +191,7 @@ RSpec.describe SnippetsController do
end
it 'responds with status 404' do
- get :show, params: { id: 'doesntexist' }
+ get :show, params: { id: non_existing_record_id }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -208,260 +199,43 @@ RSpec.describe SnippetsController do
context 'when not signed in' do
it 'responds with status 404' do
- get :show, params: { id: 'doesntexist' }
+ get :show, params: { id: non_existing_record_id }
expect(response).to redirect_to(new_user_session_path)
end
end
end
- end
-
- describe 'POST #create' do
- def create_snippet(snippet_params = {}, additional_params = {})
- sign_in(user)
-
- post :create, params: {
- personal_snippet: { title: 'Title', content: 'Content', description: 'Description' }.merge(snippet_params)
- }.merge(additional_params)
-
- Snippet.last
- end
-
- it 'creates the snippet correctly' do
- snippet = create_snippet(visibility_level: Snippet::PRIVATE)
-
- expect(snippet.title).to eq('Title')
- expect(snippet.content).to eq('Content')
- expect(snippet.description).to eq('Description')
- end
-
- context 'when user is not allowed to create a personal snippet' do
- let(:user) { create(:user, :external) }
-
- it 'responds with status 404' do
- aggregate_failures do
- expect do
- create_snippet(visibility_level: Snippet::PUBLIC)
- end.not_to change { Snippet.count }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'when the controller receives the files param' do
- let(:files) { %w(foo bar) }
-
- it 'passes the files param to the snippet create service' do
- expect(Snippets::CreateService).to receive(:new).with(nil, user, hash_including(files: files)).and_call_original
-
- create_snippet({ title: nil }, { files: files })
- end
- end
-
- context 'when the snippet is spam' do
- before do
- allow_next_instance_of(Spam::AkismetService) do |instance|
- allow(instance).to receive(:spam?).and_return(true)
- end
- end
-
- context 'when the snippet is private' do
- it 'creates the snippet' do
- expect { create_snippet(visibility_level: Snippet::PRIVATE) }
- .to change { Snippet.count }.by(1)
- end
- end
-
- context 'when the snippet is public' do
- it 'rejects the snippet' do
- expect { create_snippet(visibility_level: Snippet::PUBLIC) }
- .not_to change { Snippet.count }
- end
-
- it 'creates a spam log' do
- expect { create_snippet(visibility_level: Snippet::PUBLIC) }
- .to log_spam(title: 'Title', user: user, noteable_type: 'PersonalSnippet')
- end
-
- it 'renders :new with reCAPTCHA disabled' do
- stub_application_setting(recaptcha_enabled: false)
-
- create_snippet(visibility_level: Snippet::PUBLIC)
-
- expect(response).to render_template(:new)
- end
-
- context 'reCAPTCHA enabled' do
- before do
- stub_application_setting(recaptcha_enabled: true)
- end
-
- it 'renders :verify' do
- create_snippet(visibility_level: Snippet::PUBLIC)
-
- expect(response).to render_template(:verify)
- end
-
- it 'renders snippet page' do
- spammy_title = 'Whatever'
-
- spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
- snippet = create_snippet({ title: spammy_title },
- { spam_log_id: spam_logs.last.id,
- recaptcha_verification: true })
-
- expect(response).to redirect_to(snippet_path(snippet))
- end
- end
- end
- end
- end
-
- describe 'PUT #update' do
- let(:project) { create :project }
- let(:visibility_level) { Snippet::PUBLIC }
- let(:snippet) { create :personal_snippet, author: user, project: project, visibility_level: visibility_level }
-
- def update_snippet(snippet_params = {}, additional_params = {})
- sign_in(user)
-
- put :update, params: {
- id: snippet.id,
- personal_snippet: { title: 'Title', content: 'Content' }.merge(snippet_params)
- }.merge(additional_params)
-
- snippet.reload
- end
-
- context 'when the snippet is spam' do
- before do
- allow_next_instance_of(Spam::AkismetService) do |instance|
- allow(instance).to receive(:spam?).and_return(true)
- end
- end
-
- context 'when the snippet is private' do
- let(:visibility_level) { Snippet::PRIVATE }
-
- it 'updates the snippet' do
- expect { update_snippet(title: 'Foo') }
- .to change { snippet.reload.title }.to('Foo')
- end
- end
-
- context 'when a private snippet is made public' do
- let(:visibility_level) { Snippet::PRIVATE }
-
- it 'rejects the snippet' do
- expect { update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC) }
- .not_to change { snippet.reload.title }
- end
-
- it 'creates a spam log' do
- expect { update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC) }
- .to log_spam(title: 'Foo', user: user, noteable_type: 'PersonalSnippet')
- end
-
- it 'renders :edit with reCAPTCHA disabled' do
- stub_application_setting(recaptcha_enabled: false)
-
- update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC)
-
- expect(response).to render_template(:edit)
- end
-
- context 'reCAPTCHA enabled' do
- before do
- stub_application_setting(recaptcha_enabled: true)
- end
-
- it 'renders :verify' do
- update_snippet(title: 'Foo', visibility_level: Snippet::PUBLIC)
-
- expect(response).to render_template(:verify)
- end
-
- it 'renders snippet page when reCAPTCHA verified' do
- spammy_title = 'Whatever'
-
- spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
- snippet = update_snippet({ title: spammy_title, visibility_level: Snippet::PUBLIC },
- { spam_log_id: spam_logs.last.id,
- recaptcha_verification: true })
-
- expect(response).to redirect_to(snippet_path(snippet))
- end
- end
- end
-
- context 'when the snippet is public' do
- it 'rejects the snippet' do
- expect { update_snippet(title: 'Foo') }
- .not_to change { snippet.reload.title }
- end
-
- it 'creates a spam log' do
- expect {update_snippet(title: 'Foo') }
- .to log_spam(title: 'Foo', user: user, noteable_type: 'PersonalSnippet')
- end
-
- it 'renders :edit with reCAPTCHA disabled' do
- stub_application_setting(recaptcha_enabled: false)
-
- update_snippet(title: 'Foo')
-
- expect(response).to render_template(:edit)
- end
-
- context 'recaptcha enabled' do
- before do
- stub_application_setting(recaptcha_enabled: true)
- end
-
- it 'renders :verify' do
- update_snippet(title: 'Foo')
-
- expect(response).to render_template(:verify)
- end
-
- it 'renders snippet page when reCAPTCHA verified' do
- spammy_title = 'Whatever'
- spam_logs = create_list(:spam_log, 2, user: user, title: spammy_title)
- snippet = update_snippet({ title: spammy_title },
- { spam_log_id: spam_logs.last.id,
- recaptcha_verification: true })
+ context 'when requesting JSON' do
+ it 'renders the blob from the repository' do
+ get :show, params: { id: public_snippet.to_param }, format: :json
- expect(response).to redirect_to(snippet_path(snippet))
- end
- end
+ expect(assigns(:blob)).to eq(public_snippet.blobs.first)
end
end
end
describe 'POST #mark_as_spam' do
- let(:snippet) { create(:personal_snippet, :public, author: user) }
-
before do
allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive_messages(submit_spam: true)
end
+
stub_application_setting(akismet_enabled: true)
end
def mark_as_spam
admin = create(:admin)
- create(:user_agent_detail, subject: snippet)
+ create(:user_agent_detail, subject: public_snippet)
sign_in(admin)
- post :mark_as_spam, params: { id: snippet.id }
+ post :mark_as_spam, params: { id: public_snippet.id }
end
it 'updates the snippet' do
mark_as_spam
- expect(snippet.reload).not_to be_submittable_as_spam
+ expect(public_snippet.reload).not_to be_submittable_as_spam
end
end
@@ -489,9 +263,7 @@ RSpec.describe SnippetsController do
shared_examples 'CRLF line ending' do
let(:content) { "first line\r\nsecond line\r\nthird line" }
let(:formatted_content) { content.gsub(/\r\n/, "\n") }
- let(:snippet) do
- create(:personal_snippet, :public, :repository, author: user, content: content)
- end
+ let(:snippet) { public_snippet }
before do
allow_next_instance_of(Blob) do |instance|
@@ -560,8 +332,7 @@ RSpec.describe SnippetsController do
end
context 'when signed in user is not the author' do
- let(:other_author) { create(:author) }
- let(:other_personal_snippet) { create(:personal_snippet, :private, author: other_author) }
+ let(:other_personal_snippet) { create(:personal_snippet, :private, author: other_user) }
it 'responds with status 404' do
get :raw, params: { id: other_personal_snippet.to_param }
@@ -605,7 +376,7 @@ RSpec.describe SnippetsController do
end
context 'when the personal snippet is public' do
- let_it_be(:snippet) { create(:personal_snippet, :public, :repository, author: user) }
+ let(:snippet) { public_snippet }
context 'when signed in' do
before do
@@ -632,7 +403,7 @@ RSpec.describe SnippetsController do
end
it 'responds with status 404' do
- get :raw, params: { id: 'doesntexist' }
+ get :raw, params: { id: non_existing_record_id }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -640,7 +411,7 @@ RSpec.describe SnippetsController do
context 'when not signed in' do
it 'redirects to the sign in path' do
- get :raw, params: { id: 'doesntexist' }
+ get :raw, params: { id: non_existing_record_id }
expect(response).to redirect_to(new_user_session_path)
end
@@ -649,11 +420,10 @@ RSpec.describe SnippetsController do
end
context 'award emoji on snippets' do
- let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
- let(:another_user) { create(:user) }
+ let(:personal_snippet) { public_snippet }
before do
- sign_in(another_user)
+ sign_in(other_user)
end
describe 'POST #toggle_award_emoji' do
@@ -678,66 +448,12 @@ RSpec.describe SnippetsController do
end
describe 'POST #preview_markdown' do
- let(:snippet) { create(:personal_snippet, :public) }
-
it 'renders json in a correct format' do
sign_in(user)
- post :preview_markdown, params: { id: snippet, text: '*Markdown* text' }
+ post :preview_markdown, params: { id: public_snippet, text: '*Markdown* text' }
expect(json_response.keys).to match_array(%w(body references))
end
end
-
- describe 'DELETE #destroy' do
- let!(:snippet) { create :personal_snippet, author: user }
-
- context 'when current user has ability to destroy the snippet' do
- before do
- sign_in(user)
- end
-
- it 'removes the snippet' do
- delete :destroy, params: { id: snippet.to_param }
-
- expect { snippet.reload }.to raise_error(ActiveRecord::RecordNotFound)
- end
-
- context 'when snippet is succesfuly destroyed' do
- it 'redirects to the project snippets page' do
- delete :destroy, params: { id: snippet.to_param }
-
- expect(response).to redirect_to(dashboard_snippets_path)
- end
- end
-
- context 'when snippet is not destroyed' do
- before do
- allow(snippet).to receive(:destroy).and_return(false)
- controller.instance_variable_set(:@snippet, snippet)
- end
-
- it 'renders the snippet page with errors' do
- delete :destroy, params: { id: snippet.to_param }
-
- expect(flash[:alert]).to eq('Failed to remove snippet.')
- expect(response).to redirect_to(snippet_path(snippet))
- end
- end
- end
-
- context 'when current_user does not have ability to destroy the snippet' do
- let(:another_user) { create(:user) }
-
- before do
- sign_in(another_user)
- end
-
- it 'responds with status 404' do
- delete :destroy, params: { id: snippet.to_param }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 69cd08d82e1..06fafbddced 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Database schema' do
let(:columns_name_with_jsonb) { retrieve_columns_name_with_jsonb }
# List of columns historically missing a FK, don't add more columns
- # See: https://docs.gitlab.com/ce/development/foreign_keys.html#naming-foreign-keys
+ # See: https://docs.gitlab.com/ee/development/foreign_keys.html#naming-foreign-keys
IGNORED_FK_COLUMNS = {
abuse_reports: %w[reporter_id user_id],
application_settings: %w[performance_bar_allowed_group_id slack_app_id snowplow_app_id eks_account_id eks_access_key_id],
@@ -31,6 +31,7 @@ RSpec.describe 'Database schema' do
ci_trigger_requests: %w[commit_id],
cluster_providers_aws: %w[security_group_id vpc_id access_key_id],
cluster_providers_gcp: %w[gcp_project_id operation_id],
+ compliance_management_frameworks: %w[group_id],
commit_user_mentions: %w[commit_id],
deploy_keys_projects: %w[deploy_key_id],
deployments: %w[deployable_id environment_id user_id],
@@ -237,6 +238,42 @@ RSpec.describe 'Database schema' do
end
end
+ context 'primary keys' do
+ let(:exceptions) do
+ %i(
+ analytics_language_trend_repository_languages
+ approval_project_rules_protected_branches
+ ci_build_trace_sections
+ deployment_merge_requests
+ elasticsearch_indexed_namespaces
+ elasticsearch_indexed_projects
+ issue_assignees
+ issues_prometheus_alert_events
+ issues_self_managed_prometheus_alert_events
+ merge_request_context_commit_diff_files
+ merge_request_diff_commits
+ merge_request_diff_files
+ milestone_releases
+ project_authorizations
+ project_pages_metadata
+ push_event_payloads
+ repository_languages
+ user_interacted_projects
+ users_security_dashboard_projects
+ )
+ end
+
+ it 'expects every table to have a primary key defined' do
+ connection = ActiveRecord::Base.connection
+
+ problematic_tables = connection.tables.select do |table|
+ !connection.primary_key(table).present?
+ end.map(&:to_sym)
+
+ expect(problematic_tables - exceptions).to be_empty
+ end
+ end
+
private
def retrieve_columns_name_with_jsonb
diff --git a/spec/factories/alert_management/alerts.rb b/spec/factories/alert_management/alerts.rb
index d931947fff1..e36e4c38013 100644
--- a/spec/factories/alert_management/alerts.rb
+++ b/spec/factories/alert_management/alerts.rb
@@ -56,22 +56,22 @@ FactoryBot.define do
end
trait :triggered do
- status { AlertManagement::Alert::STATUSES[:triggered] }
+ status { AlertManagement::Alert.status_value(:triggered) }
without_ended_at
end
trait :acknowledged do
- status { AlertManagement::Alert::STATUSES[:acknowledged] }
+ status { AlertManagement::Alert.status_value(:acknowledged) }
without_ended_at
end
trait :resolved do
- status { AlertManagement::Alert::STATUSES[:resolved] }
+ status { AlertManagement::Alert.status_value(:resolved) }
with_ended_at
end
trait :ignored do
- status { AlertManagement::Alert::STATUSES[:ignored] }
+ status { AlertManagement::Alert.status_value(:ignored) }
without_ended_at
end
@@ -100,7 +100,7 @@ FactoryBot.define do
end
trait :prometheus do
- monitoring_tool { Gitlab::AlertManagement::AlertParams::MONITORING_TOOLS[:prometheus] }
+ monitoring_tool { Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus] }
payload do
{
annotations: {
@@ -123,5 +123,17 @@ FactoryBot.define do
with_description
low
end
+
+ trait :from_payload do
+ after(:build) do |alert|
+ alert_params = ::Gitlab::AlertManagement::Payload.parse(
+ alert.project,
+ alert.payload,
+ monitoring_tool: alert.monitoring_tool
+ ).alert_params
+
+ alert.assign_attributes(alert_params)
+ end
+ end
end
end
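A minimal usage sketch for the new :from_payload trait above, assuming the factory is named :alert_management_alert (the name sits outside this hunk) and the usual FactoryBot helpers are available:

    # Hypothetical spec usage: build an alert, then let the :from_payload
    # after(:build) hook re-derive its attributes from the :prometheus payload.
    project = create(:project)
    alert   = build(:alert_management_alert, :prometheus, :from_payload, project: project)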
diff --git a/spec/factories/alert_management/http_integrations.rb b/spec/factories/alert_management/http_integrations.rb
new file mode 100644
index 00000000000..9311cb3e114
--- /dev/null
+++ b/spec/factories/alert_management/http_integrations.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :alert_management_http_integration, class: 'AlertManagement::HttpIntegration' do
+ project
+ active { true }
+ name { 'DataDog' }
+ endpoint_identifier { SecureRandom.hex(4) }
+
+ trait :inactive do
+ active { false }
+ end
+ end
+end
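A minimal usage sketch for the factory defined above, assuming the standard FactoryBot helpers used elsewhere in this suite:

    # Hypothetical spec usage of the HTTP integration factory and its :inactive trait.
    project     = create(:project)
    integration = create(:alert_management_http_integration, project: project)
    disabled    = create(:alert_management_http_integration, :inactive, project: project)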
diff --git a/spec/factories/alerting/alert.rb b/spec/factories/alerting/alert.rb
deleted file mode 100644
index 285bb14efa2..00000000000
--- a/spec/factories/alerting/alert.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :alerting_alert, class: 'Gitlab::Alerting::Alert' do
- project
- payload { {} }
-
- transient do
- metric_id { nil }
-
- after(:build) do |alert, evaluator|
- unless alert.payload.key?('startsAt')
- alert.payload['startsAt'] = Time.now.rfc3339
- end
-
- if metric_id = evaluator.metric_id
- alert.payload['labels'] ||= {}
- alert.payload['labels']['gitlab_alert_id'] = metric_id.to_s
- end
- end
- end
-
- skip_create
- end
-end
diff --git a/spec/factories/authentication_event.rb b/spec/factories/authentication_event.rb
new file mode 100644
index 00000000000..ff539c6f5c4
--- /dev/null
+++ b/spec/factories/authentication_event.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :authentication_event do
+ user
+ provider { :standard }
+ user_name { 'Jane Doe' }
+ ip_address { '127.0.0.1' }
+ result { :failed }
+ end
+end
diff --git a/spec/factories/bulk_import.rb b/spec/factories/bulk_import.rb
new file mode 100644
index 00000000000..0231fe7cfef
--- /dev/null
+++ b/spec/factories/bulk_import.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :bulk_import, class: 'BulkImport' do
+ user
+ source_type { :gitlab }
+ end
+end
diff --git a/spec/factories/bulk_import/entities.rb b/spec/factories/bulk_import/entities.rb
new file mode 100644
index 00000000000..3bf6af92d00
--- /dev/null
+++ b/spec/factories/bulk_import/entities.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :bulk_import_entity, class: 'BulkImports::Entity' do
+ bulk_import
+
+ source_type { :group_entity }
+ sequence(:source_full_path) { |n| "source-path-#{n}" }
+
+ sequence(:destination_namespace) { |n| "destination-path-#{n}" }
+ destination_name { 'Imported Entity' }
+
+ trait(:group_entity) do
+ source_type { :group_entity }
+ end
+
+ trait(:project_entity) do
+ source_type { :project_entity }
+ end
+ end
+end
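A minimal usage sketch for the two factories above (bulk_import and bulk_import_entity), assuming standard FactoryBot helpers:

    # Hypothetical spec usage: an import with one group entity attached.
    import = create(:bulk_import)
    entity = create(:bulk_import_entity, :group_entity, bulk_import: import)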
diff --git a/spec/factories/ci/bridge.rb b/spec/factories/ci/bridge.rb
index 5a33a30921b..7727a468633 100644
--- a/spec/factories/ci/bridge.rb
+++ b/spec/factories/ci/bridge.rb
@@ -40,6 +40,10 @@ FactoryBot.define do
end
end
+ trait :created do
+ status { 'created' }
+ end
+
trait :started do
started_at { '2013-10-29 09:51:28 CET' }
end
@@ -62,5 +66,14 @@ FactoryBot.define do
trait :strategy_depend do
options { { trigger: { strategy: 'depend' } } }
end
+
+ trait :manual do
+ status { 'manual' }
+ self.when { 'manual' }
+ end
+
+ trait :playable do
+ manual
+ end
end
end
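A minimal usage sketch for the new :manual and :playable bridge traits above; the :ci_bridge factory name is inferred from its use later in this patch (spec/factories/ci/pipelines.rb), and the rest is assumed standard FactoryBot usage:

    # Hypothetical spec usage: a manual (hence playable) bridge job in a pipeline.
    pipeline = create(:ci_pipeline, project: create(:project))
    bridge   = create(:ci_bridge, :manual, pipeline: pipeline)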
diff --git a/spec/factories/ci/build_pending_states.rb b/spec/factories/ci/build_pending_states.rb
index 765b7f005b9..eddd74b1068 100644
--- a/spec/factories/ci/build_pending_states.rb
+++ b/spec/factories/ci/build_pending_states.rb
@@ -3,7 +3,7 @@
FactoryBot.define do
factory :ci_build_pending_state, class: 'Ci::BuildPendingState' do
build factory: :ci_build
- trace_checksum { 'crc32:12345678' }
+ trace_checksum { 'crc32:bc614e' }
state { 'success' }
end
end
diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb
index 7c348f4b7e4..d996b41b648 100644
--- a/spec/factories/ci/build_trace_chunks.rb
+++ b/spec/factories/ci/build_trace_chunks.rb
@@ -53,5 +53,18 @@ FactoryBot.define do
trait :fog_without_data do
data_store { :fog }
end
+
+ trait :persisted do
+ data_store { :database }
+
+ transient do
+ initial_data { 'test data' }
+ end
+
+ after(:build) do |chunk, evaluator|
+ Ci::BuildTraceChunks::Database.new.set_data(chunk, evaluator.initial_data)
+ chunk.checksum = chunk.class.crc32(evaluator.initial_data)
+ end
+ end
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index b3815b53c2b..73920b76025 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -384,7 +384,8 @@ FactoryBot.define do
key: 'cache_key',
untracked: false,
paths: ['vendor/*'],
- policy: 'pull-push'
+ policy: 'pull-push',
+ when: 'on_success'
}
}
end
diff --git a/spec/factories/ci/deleted_object.rb b/spec/factories/ci/deleted_object.rb
new file mode 100644
index 00000000000..c91d259ffeb
--- /dev/null
+++ b/spec/factories/ci/deleted_object.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_deleted_object, class: 'Ci::DeletedObject' do
+ pick_up_at { Time.current }
+ store_dir { SecureRandom.uuid }
+ file { fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip') }
+ end
+end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 6174bfbfbb7..4fa5dde4eff 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -15,15 +15,31 @@ FactoryBot.define do
# on pipeline factories to avoid circular references
transient { head_pipeline_of { nil } }
+ transient { child_of { nil } }
+
+ after(:build) do |pipeline, evaluator|
+ if evaluator.child_of
+ pipeline.project = evaluator.child_of.project
+ pipeline.source = :parent_pipeline
+ end
+ end
+
after(:create) do |pipeline, evaluator|
merge_request = evaluator.head_pipeline_of
merge_request&.update!(head_pipeline: pipeline)
+
+ if evaluator.child_of
+ bridge = create(:ci_bridge, pipeline: evaluator.child_of)
+ create(:ci_sources_pipeline,
+ source_job: bridge,
+ pipeline: pipeline)
+ end
end
factory :ci_pipeline do
transient { ci_ref_presence { true } }
- after(:build) do |pipeline, evaluator|
+ before(:create) do |pipeline, evaluator|
pipeline.ensure_ci_ref! if evaluator.ci_ref_presence && pipeline.ci_ref_id.nil?
end
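A minimal usage sketch for the new child_of transient above, assuming standard FactoryBot helpers:

    # Hypothetical spec usage: the after(:create) hook wires a :ci_bridge into the
    # parent pipeline plus a ci_sources_pipeline record linking parent and child.
    parent = create(:ci_pipeline, project: create(:project))
    child  = create(:ci_pipeline, child_of: parent)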
diff --git a/spec/factories/ci/test_case.rb b/spec/factories/ci/test_case.rb
index 0639aac566a..7f99f0e123e 100644
--- a/spec/factories/ci/test_case.rb
+++ b/spec/factories/ci/test_case.rb
@@ -2,6 +2,7 @@
FactoryBot.define do
factory :test_case, class: 'Gitlab::Ci::Reports::TestCase' do
+ suite_name { "rspec" }
name { "test-1" }
classname { "trace" }
file { "spec/trace_spec.rb" }
@@ -25,6 +26,7 @@ FactoryBot.define do
initialize_with do
new(
+ suite_name: suite_name,
name: name,
classname: classname,
file: file,
diff --git a/spec/factories/design_management/designs.rb b/spec/factories/design_management/designs.rb
index 66c33c9ece0..38d0545483c 100644
--- a/spec/factories/design_management/designs.rb
+++ b/spec/factories/design_management/designs.rb
@@ -75,7 +75,7 @@ FactoryBot.define do
end
# Use this trait if you want versions in a particular history, but don't
- # want to pay for gitlay calls.
+ # want to pay for gitaly calls.
trait :with_versions do
transient do
deleted { false }
diff --git a/spec/factories/events.rb b/spec/factories/events.rb
index ecbda5fbfd3..6c9f1ba0137 100644
--- a/spec/factories/events.rb
+++ b/spec/factories/events.rb
@@ -18,6 +18,7 @@ FactoryBot.define do
trait(:destroyed) { action { :destroyed } }
trait(:expired) { action { :expired } }
trait(:archived) { action { :archived } }
+ trait(:approved) { action { :approved } }
factory :closed_issue_event do
action { :closed }
@@ -55,6 +56,16 @@ FactoryBot.define do
action { :created }
target { design }
end
+
+ factory :project_created_event do
+ project factory: :project
+ action { :created }
+ end
+
+ factory :project_imported_event do
+ project factory: [:project, :with_import_url]
+ action { :created }
+ end
end
factory :push_event, class: 'PushEvent' do
diff --git a/spec/factories/group_import_states.rb b/spec/factories/group_import_states.rb
index 0b491d444fa..47d4b480b12 100644
--- a/spec/factories/group_import_states.rb
+++ b/spec/factories/group_import_states.rb
@@ -3,6 +3,7 @@
FactoryBot.define do
factory :group_import_state, class: 'GroupImportState', traits: %i[created] do
association :group, factory: :group
+ association :user, factory: :user
trait :created do
status { 0 }
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index 60d427dde00..17db69e4699 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -14,6 +14,8 @@ FactoryBot.define do
# https://gitlab.com/gitlab-org/gitlab-foss/issues/43292
raise "Don't set owner for groups, use `group.add_owner(user)` instead"
end
+
+ create(:namespace_settings, namespace: group)
end
trait :public do
@@ -21,7 +23,7 @@ FactoryBot.define do
end
trait :internal do
- visibility_level {Gitlab::VisibilityLevel::INTERNAL }
+ visibility_level { Gitlab::VisibilityLevel::INTERNAL }
end
trait :private do
diff --git a/spec/factories/instance_statistics/measurement.rb b/spec/factories/instance_statistics/measurement.rb
index fb180c23214..f9398cd3061 100644
--- a/spec/factories/instance_statistics/measurement.rb
+++ b/spec/factories/instance_statistics/measurement.rb
@@ -13,5 +13,13 @@ FactoryBot.define do
trait :group_count do
identifier { :groups }
end
+
+ trait :pipelines_succeeded_count do
+ identifier { :pipelines_succeeded }
+ end
+
+ trait :pipelines_skipped_count do
+ identifier { :pipelines_skipped }
+ end
end
end
diff --git a/spec/factories/issue_email_participants.rb b/spec/factories/issue_email_participants.rb
new file mode 100644
index 00000000000..730e224b01e
--- /dev/null
+++ b/spec/factories/issue_email_participants.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :issue_email_participant do
+ issue
+ email { generate(:email) }
+ end
+end
diff --git a/spec/factories/merge_request_diffs.rb b/spec/factories/merge_request_diffs.rb
index fdb7f52f3bd..481cabdae6d 100644
--- a/spec/factories/merge_request_diffs.rb
+++ b/spec/factories/merge_request_diffs.rb
@@ -2,13 +2,7 @@
FactoryBot.define do
factory :merge_request_diff do
- merge_request do
- build(:merge_request) do |merge_request|
- # MergeRequest should not create a MergeRequestDiff in the callback
- allow(merge_request).to receive(:ensure_merge_request_diff)
- end
- end
-
+ association :merge_request, factory: :merge_request_without_merge_request_diff
state { :collected }
commits_count { 1 }
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 6836d5d71f0..e5381071228 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -164,6 +164,10 @@ FactoryBot.define do
target_branch { generate(:branch) }
end
+ trait :unique_author do
+ author { association(:user) }
+ end
+
trait :with_coverage_reports do
after(:build) do |merge_request|
merge_request.head_pipeline = build(
@@ -286,5 +290,7 @@ FactoryBot.define do
merge_request.update!(labels: evaluator.labels)
end
end
+
+ factory :merge_request_without_merge_request_diff, class: 'MergeRequestWithoutMergeRequestDiff'
end
end
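A hedged sketch of the two additions above; branch and project defaults are assumed to come from the base :merge_request factory.

# Sketch only:
create(:merge_request_diff)             # now builds a :merge_request_without_merge_request_diff
create(:merge_request, :unique_author)  # author is a dedicated user association, not a shared default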
diff --git a/spec/factories/namespaces.rb b/spec/factories/namespaces.rb
index 0dcec086da9..0ec977b8234 100644
--- a/spec/factories/namespaces.rb
+++ b/spec/factories/namespaces.rb
@@ -63,5 +63,13 @@ FactoryBot.define do
)
end
end
+
+ trait :shared_runners_disabled do
+ shared_runners_enabled { false }
+ end
+
+ trait :allow_descendants_override_disabled_shared_runners do
+ allow_descendants_override_disabled_shared_runners { true }
+ end
end
end
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
index 52b2a32cd3b..e2c5b000988 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages.rb
@@ -21,6 +21,10 @@ FactoryBot.define do
end
end
+ factory :debian_package do
+ package_type { :debian }
+ end
+
factory :npm_package do
sequence(:name) { |n| "@#{project.root_namespace.path}/package-#{n}"}
version { '1.0.0' }
@@ -91,6 +95,12 @@ FactoryBot.define do
end
end
+ factory :golang_package do
+ sequence(:name) { |n| "golang.org/x/pkg-#{n}" }
+ sequence(:version) { |n| "v1.0.#{n}" }
+ package_type { :golang }
+ end
+
factory :conan_package do
conan_metadatum
@@ -141,160 +151,6 @@ FactoryBot.define do
package
end
- factory :package_file, class: 'Packages::PackageFile' do
- package
-
- file_name { 'somefile.txt' }
-
- transient do
- file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
- end
-
- after(:build) do |package_file, evaluator|
- package_file.file = fixture_file_upload(evaluator.file_fixture)
- end
-
- factory :conan_package_file do
- package { create(:conan_package, without_package_files: true) }
-
- transient do
- without_loaded_metadatum { false }
- end
-
- trait(:conan_recipe_file) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :recipe_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
- file_name { 'conanfile.py' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
-
- trait(:conan_recipe_manifest) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :recipe_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanmanifest.txt' }
- file_name { 'conanmanifest.txt' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
-
- trait(:conan_package_manifest) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :package_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/package_files/conanmanifest.txt' }
- file_name { 'conanmanifest.txt' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
-
- trait(:conan_package_info) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :package_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/package_files/conaninfo.txt' }
- file_name { 'conaninfo.txt' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
-
- trait(:conan_package) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :package_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/package_files/conan_package.tgz' }
- file_name { 'conan_package.tgz' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
- end
-
- trait(:jar) do
- file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.jar' }
- file_name { 'my-app-1.0-20180724.124855-1.jar' }
- file_sha1 { '4f0bfa298744d505383fbb57c554d4f5c12d88b3' }
- size { 100.kilobytes }
- end
-
- trait(:pom) do
- file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom' }
- file_name { 'my-app-1.0-20180724.124855-1.pom' }
- file_sha1 { '19c975abd49e5102ca6c74a619f21e0cf0351c57' }
- size { 200.kilobytes }
- end
-
- trait(:xml) do
- file_fixture { 'spec/fixtures/packages/maven/maven-metadata.xml' }
- file_name { 'maven-metadata.xml' }
- file_sha1 { '42b1bdc80de64953b6876f5a8c644f20204011b0' }
- size { 300.kilobytes }
- end
-
- trait(:npm) do
- file_fixture { 'spec/fixtures/packages/npm/foo-1.0.1.tgz' }
- file_name { 'foo-1.0.1.tgz' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- verified_at { Date.current }
- verification_checksum { '4437b5775e61455588a7e5187a2e5c58c680694260bbe5501c235ec690d17f83' }
- size { 400.kilobytes }
- end
-
- trait(:nuget) do
- package
- file_fixture { 'spec/fixtures/packages/nuget/package.nupkg' }
- file_name { 'package.nupkg' }
- file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
- size { 300.kilobytes }
- end
-
- trait(:pypi) do
- package
- file_fixture { 'spec/fixtures/packages/pypi/sample-project.tar.gz' }
- file_name { 'sample-project-1.0.0.tar.gz' }
- file_sha1 { '2c0cfbed075d3fae226f051f0cc771b533e01aff' }
- file_md5 { '0a7392d24f42f83068fa3767c5310052' }
- file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
- size { 1149.bytes }
- end
-
- trait(:object_storage) do
- file_store { Packages::PackageFileUploader::Store::REMOTE }
- end
-
- trait(:checksummed) do
- verification_checksum { 'abc' }
- end
-
- trait(:checksum_failure) do
- verification_failure { 'Could not calculate the checksum' }
- end
-
- factory :package_file_with_file, traits: [:jar]
- end
-
factory :maven_metadatum, class: 'Packages::Maven::Metadatum' do
association :package, package_type: :maven
path { 'my/company/app/my-app/1.0-SNAPSHOT' }
diff --git a/spec/factories/packages/package_file.rb b/spec/factories/packages/package_file.rb
new file mode 100644
index 00000000000..bcca48fb086
--- /dev/null
+++ b/spec/factories/packages/package_file.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :package_file, class: 'Packages::PackageFile' do
+ package
+
+ file_name { 'somefile.txt' }
+
+ transient do
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
+ end
+
+ after(:build) do |package_file, evaluator|
+ package_file.file = fixture_file_upload(evaluator.file_fixture)
+ end
+
+ factory :conan_package_file do
+ package { create(:conan_package, without_package_files: true) }
+
+ transient do
+ without_loaded_metadatum { false }
+ end
+
+ trait(:conan_recipe_file) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :recipe_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
+ file_name { 'conanfile.py' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_recipe_manifest) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :recipe_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanmanifest.txt' }
+ file_name { 'conanmanifest.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package_manifest) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conanmanifest.txt' }
+ file_name { 'conanmanifest.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package_info) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conaninfo.txt' }
+ file_name { 'conaninfo.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conan_package.tgz' }
+ file_name { 'conan_package.tgz' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+ end
+
+ trait(:jar) do
+ file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.jar' }
+ file_name { 'my-app-1.0-20180724.124855-1.jar' }
+ file_sha1 { '4f0bfa298744d505383fbb57c554d4f5c12d88b3' }
+ size { 100.kilobytes }
+ end
+
+ trait(:pom) do
+ file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom' }
+ file_name { 'my-app-1.0-20180724.124855-1.pom' }
+ file_sha1 { '19c975abd49e5102ca6c74a619f21e0cf0351c57' }
+ size { 200.kilobytes }
+ end
+
+ trait(:xml) do
+ file_fixture { 'spec/fixtures/packages/maven/maven-metadata.xml' }
+ file_name { 'maven-metadata.xml' }
+ file_sha1 { '42b1bdc80de64953b6876f5a8c644f20204011b0' }
+ size { 300.kilobytes }
+ end
+
+ trait(:npm) do
+ file_fixture { 'spec/fixtures/packages/npm/foo-1.0.1.tgz' }
+ file_name { 'foo-1.0.1.tgz' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ verified_at { Date.current }
+ verification_checksum { '4437b5775e61455588a7e5187a2e5c58c680694260bbe5501c235ec690d17f83' }
+ size { 400.kilobytes }
+ end
+
+ trait(:nuget) do
+ package
+ file_fixture { 'spec/fixtures/packages/nuget/package.nupkg' }
+ file_name { 'package.nupkg' }
+ file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
+ size { 300.kilobytes }
+ end
+
+ trait(:pypi) do
+ package
+ file_fixture { 'spec/fixtures/packages/pypi/sample-project.tar.gz' }
+ file_name { 'sample-project-1.0.0.tar.gz' }
+ file_sha1 { '2c0cfbed075d3fae226f051f0cc771b533e01aff' }
+ file_md5 { '0a7392d24f42f83068fa3767c5310052' }
+ file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ size { 1149.bytes }
+ end
+
+ trait(:generic) do
+ package
+ file_fixture { 'spec/fixtures/packages/generic/myfile.tar.gz' }
+ file_name { "#{package.name}.tar.gz" }
+ file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ size { 1149.bytes }
+ end
+
+ trait(:object_storage) do
+ file_store { Packages::PackageFileUploader::Store::REMOTE }
+ end
+
+ trait(:checksummed) do
+ verification_checksum { 'abc' }
+ end
+
+ trait(:checksum_failure) do
+ verification_failure { 'Could not calculate the checksum' }
+ end
+
+ factory :package_file_with_file, traits: [:jar]
+ end
+end
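A short usage sketch for the extracted package file factory; the :maven_package factory name and the fixture files are assumed from the rest of spec/factories/packages.rb rather than from this hunk.

# Sketch only: building package files for different package formats via traits.
create(:package_file, :jar, package: create(:maven_package))
create(:package_file, :npm, package: create(:npm_package))
create(:conan_package_file, :conan_recipe_file)
create(:conan_package_file, :conan_package, without_loaded_metadatum: true)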
diff --git a/spec/factories/pages_deployments.rb b/spec/factories/pages_deployments.rb
index 1bea003d683..f57852a8f94 100644
--- a/spec/factories/pages_deployments.rb
+++ b/spec/factories/pages_deployments.rb
@@ -3,10 +3,11 @@
FactoryBot.define do
factory :pages_deployment, class: 'PagesDeployment' do
project
- file_store { ObjectStorage::SUPPORTED_STORES.first }
- size { 1.megabytes }
- # TODO: replace with proper file uploaded in https://gitlab.com/gitlab-org/gitlab/-/issues/245295
- file { "dummy string" }
+ after(:build) do |deployment, _evaluator|
+ deployment.file = fixture_file_upload(
+ Rails.root.join("spec/fixtures/pages.zip")
+ )
+ end
end
end
diff --git a/spec/factories/project_repository_storage_moves.rb b/spec/factories/project_repository_storage_moves.rb
index 69fb3af45e6..c0068de5f58 100644
--- a/spec/factories/project_repository_storage_moves.rb
+++ b/spec/factories/project_repository_storage_moves.rb
@@ -5,7 +5,6 @@ FactoryBot.define do
project
source_storage_name { 'default' }
- destination_storage_name { 'default' }
trait :scheduled do
state { ProjectRepositoryStorageMove.state_machines[:state].states[:scheduled].value }
diff --git a/spec/factories/project_tracing_settings.rb b/spec/factories/project_tracing_settings.rb
new file mode 100644
index 00000000000..05c1529c18e
--- /dev/null
+++ b/spec/factories/project_tracing_settings.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_tracing_setting do
+ project
+ external_url { 'https://example.com' }
+ end
+end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 147413557d6..87e4a8e355d 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -15,7 +15,7 @@ FactoryBot.define do
# Associations
namespace
- creator { group ? create(:user) : namespace&.owner }
+ creator { group ? association(:user) : namespace&.owner }
transient do
# Nest Project Feature attributes
@@ -285,6 +285,12 @@ FactoryBot.define do
end
end
+ trait :with_import_url do
+ import_finished
+
+ import_url { generate(:url) }
+ end
+
trait(:wiki_enabled) { wiki_access_level { ProjectFeature::ENABLED } }
trait(:wiki_disabled) { wiki_access_level { ProjectFeature::DISABLED } }
trait(:wiki_private) { wiki_access_level { ProjectFeature::PRIVATE } }
diff --git a/spec/factories/prometheus_alert.rb b/spec/factories/prometheus_alert.rb
index 18cf1a20e0d..ad3868c38ed 100644
--- a/spec/factories/prometheus_alert.rb
+++ b/spec/factories/prometheus_alert.rb
@@ -7,11 +7,11 @@ FactoryBot.define do
threshold { 1 }
environment do |alert|
- build(:environment, project: alert.project)
+ association(:environment, project: alert.project)
end
prometheus_metric do |alert|
- build(:prometheus_metric, project: alert.project)
+ association(:prometheus_metric, project: alert.project)
end
trait :with_runbook_url do
diff --git a/spec/factories/prometheus_metrics.rb b/spec/factories/prometheus_metrics.rb
index 83e3845f1c3..503d392a524 100644
--- a/spec/factories/prometheus_metrics.rb
+++ b/spec/factories/prometheus_metrics.rb
@@ -9,6 +9,7 @@ FactoryBot.define do
group { :business }
project
legend { 'legend' }
+ dashboard_path { '.gitlab/dashboards/dashboard_path.yml' }
trait :common do
common { true }
diff --git a/spec/factories/resource_weight_events.rb b/spec/factories/resource_weight_events.rb
deleted file mode 100644
index cb9a34df332..00000000000
--- a/spec/factories/resource_weight_events.rb
+++ /dev/null
@@ -1,8 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :resource_weight_event do
- issue { create(:issue) }
- user { issue&.author || create(:user) }
- end
-end
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 9056fd97f13..13997080817 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -81,7 +81,7 @@ FactoryBot.define do
project_key { nil }
end
- after(:build) do |service, evaluator|
+ before(:create) do |service, evaluator|
if evaluator.create_data
create(:jira_tracker_data, service: service,
url: evaluator.url, api_url: evaluator.api_url, jira_issue_transition_id: evaluator.jira_issue_transition_id,
@@ -130,7 +130,7 @@ FactoryBot.define do
new_issue_url { 'http://new-issue.example.com' }
end
- after(:build) do |service, evaluator|
+ before(:create) do |service, evaluator|
if evaluator.create_data
create(:issue_tracker_data, service: service,
project_url: evaluator.project_url, issues_url: evaluator.issues_url, new_issue_url: evaluator.new_issue_url
@@ -151,7 +151,7 @@ FactoryBot.define do
project_identifier_code { 'PRJ-1' }
end
- after(:build) do |service, evaluator|
+ before(:create) do |service, evaluator|
create(:open_project_tracker_data, service: service,
url: evaluator.url, api_url: evaluator.api_url, token: evaluator.token,
closed_status_id: evaluator.closed_status_id, project_identifier_code: evaluator.project_identifier_code
diff --git a/spec/factories/terraform/state.rb b/spec/factories/terraform/state.rb
index 9decc89ef39..d80c1315e28 100644
--- a/spec/factories/terraform/state.rb
+++ b/spec/factories/terraform/state.rb
@@ -17,16 +17,6 @@ FactoryBot.define do
locked_by_user { create(:user) }
end
- trait(:checksummed) do
- with_file
- verification_checksum { 'abc' }
- end
-
- trait(:checksum_failure) do
- with_file
- verification_failure { 'Could not calculate the checksum' }
- end
-
trait :with_version do
after(:create) do |state|
create(:terraform_state_version, :with_file, terraform_state: state)
diff --git a/spec/factories/terraform/state_version.rb b/spec/factories/terraform/state_version.rb
index d1bd78215e3..b45bd01fd3c 100644
--- a/spec/factories/terraform/state_version.rb
+++ b/spec/factories/terraform/state_version.rb
@@ -7,5 +7,13 @@ FactoryBot.define do
sequence(:version)
file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate', 'application/json') }
+
+ trait(:checksummed) do
+ verification_checksum { 'abc' }
+ end
+
+ trait(:checksum_failure) do
+ verification_failure { 'Could not calculate the checksum' }
+ end
end
end
diff --git a/spec/factories/todos.rb b/spec/factories/todos.rb
index 0b5d00cff67..97a1265c46a 100644
--- a/spec/factories/todos.rb
+++ b/spec/factories/todos.rb
@@ -12,6 +12,10 @@ FactoryBot.define do
action { Todo::ASSIGNED }
end
+ trait :review_requested do
+ action { Todo::REVIEW_REQUESTED }
+ end
+
trait :mentioned do
action { Todo::MENTIONED }
end
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 5b20205a235..adca6eabb0e 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -7,12 +7,12 @@ FactoryBot.define do
initialize_with do
projects = create_list(:project, 3)
projects << create(:project, :repository)
+ group = create(:group)
create(:board, project: projects[0])
create(:jira_service, project: projects[0])
create(:jira_service, :without_properties_callback, project: projects[1])
create(:jira_service, :jira_cloud_service, project: projects[2])
- create(:jira_service, :without_properties_callback, project: projects[3],
- properties: { url: 'https://mysite.atlassian.net' })
+ create(:jira_service, :without_properties_callback, project: projects[3], properties: { url: 'https://mysite.atlassian.net' })
jira_label = create(:label, project: projects[0])
create(:jira_import_state, :finished, project: projects[0], label: jira_label, failed_to_import_count: 2, imported_issues_count: 7, total_issue_count: 9)
create(:jira_import_state, :finished, project: projects[1], label: jira_label, imported_issues_count: 3, total_issue_count: 3)
@@ -23,9 +23,11 @@ FactoryBot.define do
create(:service, project: projects[1], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'MattermostService', active: false)
+ create(:service, group: group, project: nil, type: 'MattermostService', active: true)
create(:service, :template, type: 'MattermostService', active: true)
matermost_instance = create(:service, :instance, type: 'MattermostService', active: true)
create(:service, project: projects[1], type: 'MattermostService', active: true, inherit_from_id: matermost_instance.id)
+ create(:service, group: group, project: nil, type: 'SlackService', active: true, inherit_from_id: matermost_instance.id)
create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
@@ -50,12 +52,14 @@ FactoryBot.define do
create(:protected_branch, project: projects[0])
create(:protected_branch, name: 'main', project: projects[0])
+ # Tracing
+ create(:project_tracing_setting, project: projects[0])
+
# Incident Labeled Issues
incident_label = create(:label, :incident, project: projects[0])
create(:labeled_issue, project: projects[0], labels: [incident_label])
- incident_group = create(:group)
incident_label_scoped_to_project = create(:label, :incident, project: projects[1])
- incident_label_scoped_to_group = create(:group_label, :incident, group: incident_group)
+ incident_label_scoped_to_group = create(:group_label, :incident, group: group)
create(:labeled_issue, project: projects[1], labels: [incident_label_scoped_to_project])
create(:labeled_issue, project: projects[1], labels: [incident_label_scoped_to_group])
@@ -97,16 +101,19 @@ FactoryBot.define do
create(:grafana_integration, project: projects[1], enabled: true)
create(:grafana_integration, project: projects[2], enabled: false)
- create(:package, project: projects[0])
- create(:package, project: projects[0])
- create(:package, project: projects[1])
+ create(:package, project: projects[0], created_at: 3.days.ago)
+ create(:package, project: projects[0], created_at: 3.days.ago)
+ create(:package, project: projects[1], created_at: 3.days.ago)
create(:package, created_at: 2.months.ago, project: projects[1])
+ # User Preferences
+ create(:user_preference, gitpod_enabled: true)
+
ProjectFeature.first.update_attribute('repository_access_level', 0)
# Create fresh & a month (28-days SMAU) old data
env = create(:environment, project: projects[3])
- [2, 29].each do |n|
+ [3, 31].each do |n|
deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
create(:deployment, :failed, deployment_options)
create(:deployment, :success, deployment_options)
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 1a8c5d7e40c..2e5b3be3bf2 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -23,6 +23,14 @@ FactoryBot.define do
after(:build) { |user, _| user.block! }
end
+ trait :blocked_pending_approval do
+ after(:build) { |user, _| user.block_pending_approval! }
+ end
+
+ trait :ldap_blocked do
+ after(:build) { |user, _| user.ldap_block! }
+ end
+
trait :bot do
user_type { :alert_bot }
end
diff --git a/spec/factories/wiki_pages.rb b/spec/factories/wiki_pages.rb
index cc866d336a4..3397277839e 100644
--- a/spec/factories/wiki_pages.rb
+++ b/spec/factories/wiki_pages.rb
@@ -9,7 +9,7 @@ FactoryBot.define do
content { 'Content for wiki page' }
format { :markdown }
message { nil }
- project { association(:project, :wiki_repo) }
+ project { association(:project) }
container { project }
wiki { association(:wiki, container: container) }
page { OpenStruct.new(url_path: title) }
@@ -18,6 +18,7 @@ FactoryBot.define do
initialize_with do
new(wiki, page).tap do |page|
page.attributes = {
+ slug: title&.tr(' ', '-'),
title: title,
content: content,
format: format
diff --git a/spec/factories/wikis.rb b/spec/factories/wikis.rb
index 96578fdcee6..86d98bfd756 100644
--- a/spec/factories/wikis.rb
+++ b/spec/factories/wikis.rb
@@ -3,8 +3,8 @@
FactoryBot.define do
factory :wiki do
transient do
- container { association(:project, :wiki_repo) }
- user { association(:user) }
+ container { association(:project) }
+ user { container.default_owner || association(:user) }
end
initialize_with { Wiki.for_container(container, user) }
@@ -12,7 +12,7 @@ FactoryBot.define do
factory :project_wiki do
transient do
- project { association(:project, :wiki_repo) }
+ project { association(:project) }
end
container { project }
diff --git a/spec/factories_spec.rb b/spec/factories_spec.rb
index f89aeb1c93d..7241af6e8c0 100644
--- a/spec/factories_spec.rb
+++ b/spec/factories_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'factories' do
- FactoryBot.factories.each do |factory|
+ shared_examples 'factory' do |factory|
describe "#{factory.name} factory" do
it 'does not raise error when built' do
expect { build(factory.name) }.not_to raise_error
@@ -22,4 +22,32 @@ RSpec.describe 'factories' do
end
end
end
+
+ # FactoryDefault speeds up specs by creating associations only once
+ # and reusing them in other factories.
+ #
+ # However, for some factories we cannot use FactoryDefault because the
+ # associations must be unique and cannot be reused.
+ skip_factory_defaults = %i[
+ fork_network_member
+ ].to_set.freeze
+
+ without_fd, with_fd = FactoryBot.factories
+ .partition { |factory| skip_factory_defaults.include?(factory.name) }
+
+ context 'with factory defaults', factory_default: :keep do
+ let_it_be(:namespace) { create_default(:namespace) }
+ let_it_be(:project) { create_default(:project, :repository) }
+ let_it_be(:user) { create_default(:user) }
+
+ with_fd.each do |factory|
+ it_behaves_like 'factory', factory
+ end
+ end
+
+ context 'without factory defaults' do
+ without_fd.each do |factory|
+ it_behaves_like 'factory', factory
+ end
+ end
end
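The FactoryDefault partition above builds on create_default from the factory defaults helper; a minimal sketch of the pattern under the factory_default: :keep tag, with the example model chosen purely for illustration.

# Sketch only: the default project is created once and reused by any factory
# that declares a plain `project` association.
RSpec.describe 'some issue-related behaviour', factory_default: :keep do
  let_it_be(:project) { create_default(:project) }

  it 'reuses the default project' do
    issue = create(:issue)              # no explicit project passed
    expect(issue.project).to eq(project)
  end
end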
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index f5c5a73c042..653a45a4bb8 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -194,7 +194,7 @@ RSpec.describe 'Admin Groups' do
expect(page).to have_content('Developer')
end
- accept_confirm { find(:css, 'li', text: current_user.name).find(:css, 'a.btn-remove').click }
+ accept_confirm { find(:css, 'li', text: current_user.name).find(:css, 'a.btn-danger').click }
visit group_group_members_path(group)
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
index 12046518aac..7cbba9ec674 100644
--- a/spec/features/admin/admin_mode/login_spec.rb
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'Admin Mode Login', :clean_gitlab_redis_shared_state, :do_not_moc
it 'allows login with valid code' do
# Cannot reuse the TOTP
- Timecop.travel(30.seconds.from_now) do
+ travel_to(30.seconds.from_now) do
enter_code(user.current_otp)
expect(current_path).to eq admin_root_path
@@ -58,7 +58,7 @@ RSpec.describe 'Admin Mode Login', :clean_gitlab_redis_shared_state, :do_not_moc
it 'blocks login with invalid code' do
# Cannot reuse the TOTP
- Timecop.travel(30.seconds.from_now) do
+ travel_to(30.seconds.from_now) do
enter_code('foo')
expect(page).to have_content('Invalid two-factor code')
@@ -67,7 +67,7 @@ RSpec.describe 'Admin Mode Login', :clean_gitlab_redis_shared_state, :do_not_moc
it 'allows login with invalid code, then valid code' do
# Cannot reuse the TOTP
- Timecop.travel(30.seconds.from_now) do
+ travel_to(30.seconds.from_now) do
enter_code('foo')
expect(page).to have_content('Invalid two-factor code')
@@ -163,7 +163,7 @@ RSpec.describe 'Admin Mode Login', :clean_gitlab_redis_shared_state, :do_not_moc
expect(page).to have_content('Two-Factor Authentication')
# Cannot reuse the TOTP
- Timecop.travel(30.seconds.from_now) do
+ travel_to(30.seconds.from_now) do
enter_code(user.current_otp)
expect(current_path).to eq admin_root_path
@@ -215,7 +215,7 @@ RSpec.describe 'Admin Mode Login', :clean_gitlab_redis_shared_state, :do_not_moc
expect(page).to have_content('Two-Factor Authentication')
# Cannot reuse the TOTP
- Timecop.travel(30.seconds.from_now) do
+ travel_to(30.seconds.from_now) do
enter_code(user.current_otp)
expect(current_path).to eq admin_root_path
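The swap from Timecop.travel to travel_to in this file relies on Rails' built-in time helpers; a small sketch, assuming ActiveSupport::Testing::TimeHelpers is already included via the spec setup.

# Sketch only: travel_to pins the clock inside the block and restores it afterwards,
# which is enough to make user.current_otp return a fresh TOTP code.
travel_to(30.seconds.from_now) do
  enter_code(user.current_otp)   # enter_code is a helper defined elsewhere in this spec
end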
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 38f0b813183..528dfad606e 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -130,6 +130,38 @@ RSpec.describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_n
expect(user_internal_regex['placeholder']).to eq 'Regex pattern'
end
+ context 'Change Sign-up restrictions' do
+ context 'Require Admin approval for new signup setting' do
+ context 'when feature is enabled' do
+ before do
+ stub_feature_flags(admin_approval_for_new_user_signups: true)
+ end
+
+ it 'changes the setting' do
+ page.within('.as-signup') do
+ check 'Require admin approval for new sign-ups'
+ click_button 'Save changes'
+ end
+
+ expect(current_settings.require_admin_approval_after_user_signup).to be_truthy
+ expect(page).to have_content "Application settings saved successfully"
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(admin_approval_for_new_user_signups: false)
+ end
+
+ it 'does not show the setting' do
+ page.within('.as-signup') do
+ expect(page).not_to have_selector('.application_setting_require_admin_approval_after_user_signup')
+ end
+ end
+ end
+ end
+ end
+
it 'Change Sign-in restrictions' do
page.within('.as-signin') do
fill_in 'Home page URL', with: 'https://about.gitlab.com/'
@@ -497,18 +529,23 @@ RSpec.describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_n
end
it 'Change Help page' do
+ stub_feature_flags(help_page_documentation_redirect: true)
+
new_support_url = 'http://example.com/help'
+ new_documentation_url = 'https://docs.gitlab.com'
page.within('.as-help-page') do
fill_in 'Help page text', with: 'Example text'
check 'Hide marketing-related entries from help'
fill_in 'Support page URL', with: new_support_url
+ fill_in 'Documentation pages URL', with: new_documentation_url
click_button 'Save changes'
end
expect(current_settings.help_page_text).to eq "Example text"
expect(current_settings.help_page_hide_commercial_content).to be_truthy
expect(current_settings.help_page_support_url).to eq new_support_url
+ expect(current_settings.help_page_documentation_base_url).to eq new_documentation_url
expect(page).to have_content "Application settings saved successfully"
end
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index a37210d2acc..e06e2d14f3c 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe "Admin::Users" do
expect(page).to have_content(current_user.last_activity_on.strftime("%e %b, %Y"))
expect(page).to have_content(user.email)
expect(page).to have_content(user.name)
+ expect(page).to have_content('Projects')
expect(page).to have_button('Block')
expect(page).to have_button('Deactivate')
expect(page).to have_button('Delete user')
@@ -48,6 +49,56 @@ RSpec.describe "Admin::Users" do
end
end
+ context 'user project count' do
+ before do
+ project = create(:project)
+ project.add_maintainer(current_user)
+ end
+
+ it 'displays count of users projects' do
+ visit admin_users_path
+
+ expect(page.find("[data-testid='user-project-count-#{current_user.id}']").text).to eq("1")
+ end
+ end
+
+ describe 'tabs' do
+ it 'has multiple tabs to filter users' do
+ expect(page).to have_link('Active', href: admin_users_path)
+ expect(page).to have_link('Admins', href: admin_users_path(filter: 'admins'))
+ expect(page).to have_link('2FA Enabled', href: admin_users_path(filter: 'two_factor_enabled'))
+ expect(page).to have_link('2FA Disabled', href: admin_users_path(filter: 'two_factor_disabled'))
+ expect(page).to have_link('External', href: admin_users_path(filter: 'external'))
+ expect(page).to have_link('Blocked', href: admin_users_path(filter: 'blocked'))
+ expect(page).to have_link('Deactivated', href: admin_users_path(filter: 'deactivated'))
+ expect(page).to have_link('Without projects', href: admin_users_path(filter: 'wop'))
+ end
+
+ context '`Pending approval` tab' do
+ context 'feature is enabled' do
+ before do
+ stub_feature_flags(admin_approval_for_new_user_signups: true)
+ visit admin_users_path
+ end
+
+ it 'shows the `Pending approval` tab' do
+ expect(page).to have_link('Pending approval', href: admin_users_path(filter: 'blocked_pending_approval'))
+ end
+ end
+
+ context 'feature is disabled' do
+ before do
+ stub_feature_flags(admin_approval_for_new_user_signups: false)
+ visit admin_users_path
+ end
+
+ it 'does not show the `Pending approval` tab' do
+ expect(page).not_to have_link('Pending approval', href: admin_users_path(filter: 'blocked_pending_approval'))
+ end
+ end
+ end
+ end
+
describe 'search and sort' do
before do
create(:user, name: 'Foo Bar', last_activity_on: 3.days.ago)
@@ -146,6 +197,27 @@ RSpec.describe "Admin::Users" do
expect(page).to have_content(user.email)
end
end
+
+ describe 'Pending approval filter' do
+ it 'counts users who are pending approval' do
+ create_list(:user, 2, :blocked_pending_approval)
+
+ visit admin_users_path
+
+ page.within('.filter-blocked-pending-approval small') do
+ expect(page).to have_content('2')
+ end
+ end
+
+ it 'filters by users who are pending approval' do
+ user = create(:user, :blocked_pending_approval)
+
+ visit admin_users_path
+ click_link 'Pending approval'
+
+ expect(page).to have_content(user.email)
+ end
+ end
end
describe "GET /admin/users/new" do
@@ -287,6 +359,23 @@ RSpec.describe "Admin::Users" do
expect(page).to have_button('Delete user and contributions')
end
+ context 'user pending approval' do
+ it 'shows user info' do
+ user = create(:user, :blocked_pending_approval)
+
+ visit admin_users_path
+ click_link 'Pending approval'
+ click_link user.name
+
+ expect(page).to have_content(user.name)
+ expect(page).to have_content('Pending approval')
+ expect(page).to have_link('Approve user')
+ expect(page).to have_button('Block user')
+ expect(page).to have_button('Delete user')
+ expect(page).to have_button('Delete user and contributions')
+ end
+ end
+
describe 'Impersonation' do
let(:another_user) { create(:user) }
@@ -606,7 +695,7 @@ RSpec.describe "Admin::Users" do
end
end
- describe 'show user keys' do
+ describe 'show user keys', :js do
let!(:key1) do
create(:key, user: user, title: "ssh-rsa Key1", key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4FIEBXGi4bPU8kzxMefudPIJ08/gNprdNTaO9BR/ndy3+58s2HCTw2xCHcsuBmq+TsAqgEidVq4skpqoTMB+Uot5Uzp9z4764rc48dZiI661izoREoKnuRQSsRqUTHg5wrLzwxlQbl1MVfRWQpqiz/5KjBC7yLEb9AbusjnWBk8wvC1bQPQ1uLAauEA7d836tgaIsym9BrLsMVnR4P1boWD3Xp1B1T/ImJwAGHvRmP/ycIqmKdSpMdJXwxcb40efWVj0Ibbe7ii9eeoLdHACqevUZi6fwfbymdow+FeqlkPoHyGg3Cu4vD/D8+8cRc7mE/zGCWcQ15Var83Tczour Key1")
end
@@ -629,7 +718,11 @@ RSpec.describe "Admin::Users" do
expect(page).to have_content(key2.title)
expect(page).to have_content(key2.key)
- click_link 'Remove'
+ click_button 'Delete'
+
+ page.within('.modal') do
+ page.click_button('Delete')
+ end
expect(page).not_to have_content(key2.title)
end
diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb
index b8851c28531..44642983a36 100644
--- a/spec/features/admin/admin_uses_repository_checks_spec.rb
+++ b/spec/features/admin/admin_uses_repository_checks_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe 'Admin uses repository checks', :request_store, :clean_gitlab_red
)
visit_admin_project_page(project)
- page.within('.alert') do
+ page.within('.gl-alert') do
expect(page.text).to match(/Last repository check \(just now\) failed/)
end
end
diff --git a/spec/features/admin/clusters/eks_spec.rb b/spec/features/admin/clusters/eks_spec.rb
index ef49aebc7c5..ad7122bf182 100644
--- a/spec/features/admin/clusters/eks_spec.rb
+++ b/spec/features/admin/clusters/eks_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Instance-level AWS EKS Cluster', :js do
before do
visit admin_clusters_path
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
end
context 'when user creates a cluster on AWS EKS' do
diff --git a/spec/features/admin/dashboard_spec.rb b/spec/features/admin/dashboard_spec.rb
index 4ffa5e3be0b..acb8fb54e11 100644
--- a/spec/features/admin/dashboard_spec.rb
+++ b/spec/features/admin/dashboard_spec.rb
@@ -28,11 +28,9 @@ RSpec.describe 'admin visits dashboard' do
describe 'Users statistic' do
let_it_be(:users_statistics) { create(:users_statistics) }
+ let_it_be(:users_count_label) { Gitlab.ee? ? 'Billable users 71' : 'Active users 71' }
it 'shows correct amounts of users', :aggregate_failures do
- expected_active_users_text = Gitlab.ee? ? 'Active users (Billable users) 71' : 'Active users 71'
-
- sign_in(create(:admin))
visit admin_dashboard_stats_path
expect(page).to have_content('Users without a Group and Project 23')
@@ -42,9 +40,9 @@ RSpec.describe 'admin visits dashboard' do
expect(page).to have_content('Users with highest role Maintainer 6')
expect(page).to have_content('Users with highest role Owner 5')
expect(page).to have_content('Bots 2')
- expect(page).to have_content(expected_active_users_text)
expect(page).to have_content('Blocked users 7')
expect(page).to have_content('Total users 78')
+ expect(page).to have_content(users_count_label)
end
end
end
diff --git a/spec/features/alert_management/alert_details_spec.rb b/spec/features/alert_management/alert_details_spec.rb
new file mode 100644
index 00000000000..d190e4b6939
--- /dev/null
+++ b/spec/features/alert_management/alert_details_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Alert details', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, status: 'triggered', title: 'Alert') }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit details_project_alert_management_path(project, alert)
+ wait_for_requests
+ end
+
+ context 'when a developer displays the alert' do
+ it 'shows the alert' do
+ page.within('.alert-management-details') do
+ expect(find('h2')).to have_content(alert.title)
+ end
+ end
+
+ it 'shows the alert tabs' do
+ page.within('.alert-management-details') do
+ alert_tabs = find('[data-testid="alertDetailsTabs"]')
+
+ expect(alert_tabs).to have_content('Alert details')
+ end
+ end
+
+ it 'shows the right sidebar mounted with correct widgets' do
+ page.within('.layout-page') do
+ sidebar = find('.right-sidebar')
+
+ expect(sidebar).to have_selector('.alert-status')
+ expect(sidebar).to have_selector('.alert-assignees')
+ expect(sidebar).to have_content('Triggered')
+ end
+ end
+
+ it 'updates the alert todo button from the right sidebar' do
+ expect(page).to have_selector('[data-testid="alert-todo-button"]')
+ todo_button = find('[data-testid="alert-todo-button"]')
+
+ expect(todo_button).to have_content('Add a To-Do')
+ find('[data-testid="alert-todo-button"]').click
+ wait_for_requests
+
+ expect(todo_button).to have_content('Mark as done')
+ end
+
+ it 'updates the alert status from the right sidebar' do
+ page.within('.alert-status') do
+ alert_status = find('[data-testid="status"]')
+
+ expect(alert_status).to have_content('Triggered')
+
+ find('.btn-link').click
+ find('.gl-new-dropdown-item', text: 'Acknowledged').click
+
+ wait_for_requests
+
+ expect(alert_status).to have_content('Acknowledged')
+ end
+ end
+
+ it 'updates the alert assignee from the right sidebar' do
+ page.within('.right-sidebar') do
+ alert_assignee = find('.alert-assignees')
+
+ expect(alert_assignee).to have_content('None - assign yourself')
+
+ find('[data-testid="unassigned-users"]').click
+
+ wait_for_requests
+
+ expect(alert_assignee).to have_content('Assignee Edit John Doe')
+ end
+ end
+ end
+end
diff --git a/spec/features/alert_management/alert_management_list_spec.rb b/spec/features/alert_management/alert_management_list_spec.rb
new file mode 100644
index 00000000000..c2514d80474
--- /dev/null
+++ b/spec/features/alert_management/alert_management_list_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Alert Management index', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, status: 'triggered') }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_alert_management_index_path(project)
+ wait_for_requests
+ end
+
+ context 'when a developer displays the alert list and the alert service is not enabled' do
+ it 'shows the alert page title' do
+ expect(page).to have_content('Alerts')
+ end
+
+ it 'shows the empty state by default' do
+ expect(page).to have_content('Surface alerts in GitLab')
+ end
+
+ it 'does not show the filtered search' do
+ page.within('.layout-page') do
+ expect(page).not_to have_css('[data-testid="search-icon"]')
+ end
+ end
+
+ it 'does not show the alert table' do
+ expect(page).not_to have_selector('.gl-table')
+ end
+ end
+
+ context 'when a developer displays the alert list and the alert service is enabled' do
+ let_it_be(:alerts_service) { create(:alerts_service, project: project) }
+
+ it 'shows the alert page title' do
+ expect(page).to have_content('Alerts')
+ end
+
+ it 'shows the filtered search' do
+ page.within('.layout-page') do
+ expect(page).to have_css('[data-testid="search-icon"]')
+ end
+ end
+
+ it 'shows the alert table' do
+ expect(page).to have_selector('.gl-table')
+ end
+ end
+end
diff --git a/spec/features/alert_management/user_filters_alerts_by_status_spec.rb b/spec/features/alert_management/user_filters_alerts_by_status_spec.rb
new file mode 100644
index 00000000000..ee516418cd6
--- /dev/null
+++ b/spec/features/alert_management/user_filters_alerts_by_status_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User filters Alert Management table by status', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:alerts_service) { create(:alerts_service, project: project) }
+ let_it_be(:alert1, reload: true) { create(:alert_management_alert, :triggered, project: project) }
+ let_it_be(:alert2, reload: true) { create(:alert_management_alert, :acknowledged, project: project) }
+ let_it_be(:alert3, reload: true) { create(:alert_management_alert, :acknowledged, project: project) }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_alert_management_index_path(project)
+ wait_for_requests
+ end
+
+ context 'when a developer displays the alert list and the alert service is enabled, they can filter the table by an alert status' do
+ it 'shows the alert table items with alert status of Open by default' do
+ expect(page).to have_selector('.gl-table')
+ expect(page).to have_content('Open 3')
+ end
+
+ it 'shows the alert table items with alert status of Acknowledged' do
+ find('.gl-tab-nav-item', text: 'Acknowledged').click
+
+ expect(page).to have_selector('.gl-tab-nav-item-active')
+ expect(find('.gl-tab-nav-item-active')).to have_content('Acknowledged 2')
+ expect(all('.dropdown-menu-selectable').count).to be(2)
+ end
+
+ it 'shows the alert table items with alert status of Triggered' do
+ find('.gl-tab-nav-item', text: 'Triggered').click
+ wait_for_requests
+
+ expect(page).to have_selector('.gl-tab-nav-item-active')
+ expect(find('.gl-tab-nav-item-active')).to have_content('Triggered 1')
+ expect(all('.dropdown-menu-selectable').count).to be(1)
+ end
+
+ it 'shows an empty table for a status with no alerts' do
+ find('.gl-tab-nav-item', text: 'Resolved').click
+ wait_for_requests
+
+ expect(page).to have_selector('.gl-tab-nav-item-active')
+ expect(find('.gl-tab-nav-item-active')).to have_content('Resolved 0')
+ expect(page).to have_content('No alerts to display.')
+ end
+ end
+end
diff --git a/spec/features/alert_management/user_searches_alerts_spec.rb b/spec/features/alert_management/user_searches_alerts_spec.rb
new file mode 100644
index 00000000000..568321de025
--- /dev/null
+++ b/spec/features/alert_management/user_searches_alerts_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User searches Alert Management alerts', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:alerts_service) { create(:alerts_service, project: project) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, status: 'triggered') }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_alert_management_index_path(project)
+ wait_for_requests
+ end
+
+ context 'when a developer displays the alert list and the alert service is enabled, they can search for an alert' do
+ it 'shows the incident table with an incident for a valid search filter bar' do
+ expect(page).to have_selector('.filtered-search-wrapper')
+ expect(page).to have_selector('.gl-table')
+ expect(page).to have_css('[data-testid="severityField"]')
+ expect(all('tbody tr').count).to be(1)
+ expect(page).not_to have_selector('.empty-state')
+ end
+ end
+end
diff --git a/spec/features/alert_management/user_updates_alert_status_spec.rb b/spec/features/alert_management/user_updates_alert_status_spec.rb
new file mode 100644
index 00000000000..8974796662c
--- /dev/null
+++ b/spec/features/alert_management/user_updates_alert_status_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User updates Alert Management status', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:alerts_service) { create(:alerts_service, project: project) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, status: 'triggered') }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_alert_management_index_path(project)
+ wait_for_requests
+ end
+
+ context 'when a developer+ displays the alert list and the alert service is enabled, they can update an alert status' do
+ it 'shows the alert table with an alert status dropdown' do
+ expect(page).to have_selector('.gl-table')
+ expect(find('.dropdown-menu-selectable')).to have_content('Triggered')
+ end
+
+ it 'updates the alert status' do
+ find('.dropdown-menu-selectable').click
+ find('.dropdown-item', text: 'Acknowledged').click
+ wait_for_requests
+
+ expect(find('.dropdown-menu-selectable')).to have_content('Acknowledged')
+ end
+ end
+end
diff --git a/spec/features/alert_management_spec.rb b/spec/features/alert_management_spec.rb
new file mode 100644
index 00000000000..2989f72e356
--- /dev/null
+++ b/spec/features/alert_management_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Alert management', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ context 'when visiting the alert details page' do
+ let!(:alert) { create(:alert_management_alert, :resolved, :with_fingerprint, title: 'dos-test', project: project, **options) }
+ let(:options) { {} }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when actor has permission to see the alert' do
+ let(:user) { developer }
+
+ it 'shows the alert details' do
+ visit(details_project_alert_management_path(project, alert))
+
+ within('.alert-management-details-table') do
+ expect(page).to have_content(alert.title)
+ end
+ end
+
+ context 'when alert belongs to an environment' do
+ let(:options) { { environment: environment } }
+ let!(:environment) { create(:environment, name: 'production', project: project) }
+
+ it 'shows the environment name' do
+ visit(details_project_alert_management_path(project, alert))
+
+ expect(page).to have_link(environment.name, href: project_environment_path(project, environment))
+ within('.alert-management-details-table') do
+ expect(page).to have_content(environment.name)
+ end
+ end
+
+ context 'when expose_environment_path_in_alert_details feature flag is disabled' do
+ before do
+ stub_feature_flags(expose_environment_path_in_alert_details: false)
+ end
+
+ it 'does not show the environment name' do
+ visit(details_project_alert_management_path(project, alert))
+
+ within('.alert-management-details-table') do
+ expect(page).to have_content(alert.title)
+ expect(page).not_to have_content(environment.name)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/boards/add_issues_modal_spec.rb b/spec/features/boards/add_issues_modal_spec.rb
index d432825e113..00efca5d3a8 100644
--- a/spec/features/boards/add_issues_modal_spec.rb
+++ b/spec/features/boards/add_issues_modal_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe 'Issue Boards add issue modal', :js do
it 'loads issues' do
page.within('.add-issues-modal') do
- page.within('.nav-links') do
+ page.within('.gl-tabs') do
expect(page).to have_content('2')
end
@@ -103,7 +103,13 @@ RSpec.describe 'Issue Boards add issue modal', :js do
click_button 'Cancel'
end
- accept_confirm { first('.board-delete').click }
+ page.within(find('.board:nth-child(2)')) do
+ find('button[title="List settings"]').click
+ end
+
+ page.within(find('.js-board-settings-sidebar')) do
+ accept_confirm { find('[data-testid="remove-list"]').click }
+ end
click_button('Add issues')
@@ -146,7 +152,7 @@ RSpec.describe 'Issue Boards add issue modal', :js do
page.within('.add-issues-modal') do
first('.board-card .board-card-number').click
- page.within('.nav-links') do
+ page.within('.gl-tabs') do
expect(page).to have_content('Selected issues 1')
end
end
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index e36378bd34e..06ec4e05828 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -24,33 +24,11 @@ RSpec.describe 'Issue Boards', :js do
context 'no lists' do
before do
visit project_board_path(project, board)
- wait_for_requests
- expect(page).to have_selector('.board', count: 3)
- end
-
- it 'shows blank state' do
- expect(page).to have_content('Welcome to your Issue Board!')
- end
-
- it 'shows tooltip on add issues button' do
- button = page.find('.filter-dropdown-container button', text: 'Add issues')
-
- expect(button[:"data-original-title"]).to eq("Please add a list to your board first")
- end
-
- it 'hides the blank state when clicking nevermind button' do
- page.within(find('.board-blank-state')) do
- click_button("Nevermind, I'll use my own")
- end
- expect(page).to have_selector('.board', count: 2)
end
it 'creates default lists' do
lists = ['Open', 'To Do', 'Doing', 'Closed']
- page.within(find('.board-blank-state')) do
- click_button('Add default lists')
- end
wait_for_requests
expect(page).to have_selector('.board', count: 4)
@@ -181,9 +159,7 @@ RSpec.describe 'Issue Boards', :js do
end
it 'allows user to delete board' do
- page.within(find('.board:nth-child(2)')) do
- accept_confirm { find('.board-delete').click }
- end
+ remove_list
wait_for_requests
@@ -196,9 +172,7 @@ RSpec.describe 'Issue Boards', :js do
find('.js-new-board-list').click
- page.within(find('.board:nth-child(2)')) do
- accept_confirm { find('.board-delete').click }
- end
+ remove_list
wait_for_requests
@@ -692,4 +666,14 @@ RSpec.describe 'Issue Boards', :js do
click_button(link_text)
end
end
+
+ def remove_list
+ page.within(find('.board:nth-child(2)')) do
+ find('button[title="List settings"]').click
+ end
+
+ page.within(find('.js-board-settings-sidebar')) do
+ accept_confirm { find('[data-testid="remove-list"]').click }
+ end
+ end
end
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 4b4cb444903..332c90df6d7 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe 'Issue Boards', :js do
end
context 'time tracking' do
- let(:compare_meter_tooltip) { find('.time-tracking .time-tracking-content .compare-meter')['data-original-title'] }
+ let(:compare_meter_tooltip) { find('.time-tracking .time-tracking-content .compare-meter')['title'] }
before do
issue2.timelogs.create(time_spent: 14400, user: user)
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 346f305f0d0..5f58fa420fb 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -180,7 +180,7 @@ RSpec.describe 'Contributions Calendar', :js do
before do
push_code_contribution
- Timecop.freeze(Date.yesterday) do
+ travel_to(Date.yesterday) do
Issues::CreateService.new(contributed_project, user, issue_params).execute
end
end
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 4f7f62d00a5..31d6bcda9e8 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -87,6 +87,7 @@ RSpec.describe 'Clusterable > Show page' do
within('#advanced-settings-section') do
expect(page).to have_content('Google Kubernetes Engine')
expect(page).to have_content('Manage your Kubernetes cluster by visiting')
+ expect_common_advanced_options
end
end
end
@@ -117,6 +118,7 @@ RSpec.describe 'Clusterable > Show page' do
within('#advanced-settings-section') do
expect(page).not_to have_content('Google Kubernetes Engine')
expect(page).not_to have_content('Manage your Kubernetes cluster by visiting')
+ expect_common_advanced_options
end
end
end
@@ -176,4 +178,14 @@ RSpec.describe 'Clusterable > Show page' do
let(:cluster) { create(:cluster, :provided_by_user, :instance) }
end
end
+
+ private
+
+ def expect_common_advanced_options
+ aggregate_failures do
+ expect(page).to have_content('Cluster management project')
+ expect(page).to have_content('Clear cluster cache')
+ expect(page).to have_content('Remove Kubernetes cluster integration')
+ end
+ end
end
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index e66a40720da..97ee891dbb8 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -140,6 +140,7 @@ RSpec.describe 'Commits' do
context 'when accessing internal project with disallowed access', :js do
before do
+ stub_feature_flags(graphql_pipeline_header: false)
project.update(
visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false)
diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb
index 7526a55a3c1..3cb7140d253 100644
--- a/spec/features/dashboard/issuables_counter_spec.rb
+++ b/spec/features/dashboard/issuables_counter_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
user.invalidate_cache_counts
- Timecop.travel(3.minutes.from_now) do
+ travel_to(3.minutes.from_now) do
visit issues_path
expect_counters('issues', '0')
@@ -39,7 +39,7 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
user.invalidate_cache_counts
- Timecop.travel(3.minutes.from_now) do
+ travel_to(3.minutes.from_now) do
visit merge_requests_path
expect_counters('merge_requests', '0')
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index 5331b5559d8..952a78ec79a 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -19,6 +19,12 @@ RSpec.describe 'Dashboard Merge Requests' do
sign_in(current_user)
end
+ it 'disables target branch filter' do
+ visit merge_requests_dashboard_path
+
+ expect(page).not_to have_selector('#js-dropdown-target-branch', visible: false)
+ end
+
context 'new merge request dropdown' do
let(:project_with_disabled_merge_requests) { create(:project, :merge_requests_disabled) }
diff --git a/spec/features/dashboard/todos/todos_filtering_spec.rb b/spec/features/dashboard/todos/todos_filtering_spec.rb
index f60b07c976e..b1464af4194 100644
--- a/spec/features/dashboard/todos/todos_filtering_spec.rb
+++ b/spec/features/dashboard/todos/todos_filtering_spec.rb
@@ -130,6 +130,7 @@ RSpec.describe 'Dashboard > User filters todos', :js do
before do
create(:todo, :build_failed, user: user_1, author: user_2, project: project_1)
create(:todo, :marked, user: user_1, author: user_2, project: project_1, target: issue1)
+ create(:todo, :review_requested, user: user_1, author: user_2, project: project_1, target: issue1)
end
it 'filters by Assigned' do
@@ -138,6 +139,12 @@ RSpec.describe 'Dashboard > User filters todos', :js do
expect_to_see_action(:assigned)
end
+ it 'filters by Review Requested' do
+ filter_action('Review requested')
+
+ expect_to_see_action(:review_requested)
+ end
+
it 'filters by Mentioned' do
filter_action('Mentioned')
@@ -168,6 +175,7 @@ RSpec.describe 'Dashboard > User filters todos', :js do
def expect_to_see_action(action_name)
action_names = {
assigned: ' assigned you ',
+ review_requested: ' requested a review of ',
mentioned: ' mentioned ',
marked: ' added a todo for ',
build_failed: ' build failed for '
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index cf773d2caed..0b4fed55f11 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -197,6 +197,21 @@ RSpec.describe 'Dashboard Todos' do
end
end
end
+
+ context 'review request todo' do
+ let(:merge_request) { create(:merge_request, title: "Fixes issue") }
+
+ before do
+ create(:todo, :review_requested, user: user, project: project, target: merge_request, author: user)
+ visit dashboard_todos_path
+ end
+
+      it 'shows you set yourself as a reviewer message' do
+ page.within('.js-todos-all') do
+ expect(page).to have_content("You requested a review of merge request #{merge_request.to_reference} \"Fixes issue\" at #{project.namespace.owner_name} / #{project.name} from yourself")
+ end
+ end
+ end
end
context 'User has done todos', :js do
@@ -213,7 +228,7 @@ RSpec.describe 'Dashboard Todos' do
describe 'restoring the todo' do
before do
within first('.todo') do
- click_link 'Add a To Do'
+ click_link 'Add a to do'
end
end
@@ -228,7 +243,7 @@ RSpec.describe 'Dashboard Todos' do
end
end
- context 'User has Todos with labels spanning multiple projects' do
+ context 'User has to dos with labels spanning multiple projects' do
before do
label1 = create(:label, project: project)
note1 = create(:note_on_issue, note: "Hello #{label1.to_reference(format: :name)}", noteable_id: issue.id, noteable_type: 'Issue', project: issue.project)
diff --git a/spec/features/discussion_comments/snippets_spec.rb b/spec/features/discussion_comments/snippets_spec.rb
index 50201bbdb21..b2d3fbf4b5d 100644
--- a/spec/features/discussion_comments/snippets_spec.rb
+++ b/spec/features/discussion_comments/snippets_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe 'Thread Comments Snippet', :js do
let_it_be(:snippet) { create(:project_snippet, :private, :repository, project: project, author: user) }
before do
- stub_feature_flags(snippets_vue: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb
index e705f2916da..49343cc7a57 100644
--- a/spec/features/expand_collapse_diffs_spec.rb
+++ b/spec/features/expand_collapse_diffs_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Expand and collapse diffs', :js do
let(:project) { create(:project, :repository) }
before do
+ stub_feature_flags(increased_diff_limits: false)
sign_in(create(:admin))
# Ensure that undiffable.md is in .gitattributes
diff --git a/spec/features/file_uploads/maven_package_spec.rb b/spec/features/file_uploads/maven_package_spec.rb
index c873a0e9a36..e87eec58618 100644
--- a/spec/features/file_uploads/maven_package_spec.rb
+++ b/spec/features/file_uploads/maven_package_spec.rb
@@ -25,5 +25,31 @@ RSpec.describe 'Upload a maven package', :api, :js do
it { expect(subject.code).to eq(200) }
end
+ RSpec.shared_examples 'for a maven sha1' do
+ let(:dummy_package) { double(Packages::Package) }
+ let(:api_path) { "/projects/#{project.id}/packages/maven/com/example/my-app/1.0/my-app-1.0-20180724.124855-1.jar.sha1" }
+
+ before do
+      # The sha verification done by the maven api compares:
+      # - the sha256 set by workhorse
+      # - the sha256 of the sha1 of the uploaded package file
+      # We're going to send `file` for the sha1 and stub the sha1 of the package file so that
+      # both sha256 values are the same
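+      # (Roughly: the sha256 of the uploaded .sha1 body must equal the sha256 of the
+      # stubbed file_sha1 value, which the expectations below arrange.)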
+ expect(::Packages::PackageFileFinder).to receive(:new).and_return(double(execute!: dummy_package))
+ expect(dummy_package).to receive(:file_sha1).and_return(File.read(file.path))
+ end
+
+ it { expect(subject.code).to eq(204) }
+ end
+
+ RSpec.shared_examples 'for a maven md5' do
+ let(:api_path) { "/projects/#{project.id}/packages/maven/com/example/my-app/1.0/my-app-1.0-20180724.124855-1.jar.md5" }
+ let(:file) { StringIO.new('dummy_package') }
+
+ it { expect(subject.code).to eq(200) }
+ end
+
it_behaves_like 'handling file uploads', 'for a maven package'
+ it_behaves_like 'handling file uploads', 'for a maven sha1'
+ it_behaves_like 'handling file uploads', 'for a maven md5'
end
diff --git a/spec/features/groups/clusters/eks_spec.rb b/spec/features/groups/clusters/eks_spec.rb
index 5a62741250a..c361c502cbb 100644
--- a/spec/features/groups/clusters/eks_spec.rb
+++ b/spec/features/groups/clusters/eks_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Group AWS EKS Cluster', :js do
before do
visit group_clusters_path(group)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
end
context 'when user creates a cluster on AWS EKS' do
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index 90253451d6b..97f8864aab2 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'User Cluster', :js do
before do
visit group_clusters_path(group)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
click_link 'Connect existing cluster'
end
@@ -66,6 +66,10 @@ RSpec.describe 'User Cluster', :js do
expect(page.find_field('cluster[platform_kubernetes_attributes][authorization_type]', disabled: true)).to be_checked
end
end
+
+ it 'user sees namespace per environment is enabled by default' do
+ expect(page).to have_checked_field('Namespace per environment')
+ end
end
context 'when user filled form with invalid parameters' do
@@ -125,7 +129,7 @@ RSpec.describe 'User Cluster', :js do
it 'user sees creation form with the successful message' do
expect(page).to have_content('Kubernetes cluster integration was successfully removed.')
- expect(page).to have_link('Add Kubernetes cluster')
+ expect(page).to have_link('Integrate with a cluster certificate')
end
end
end
diff --git a/spec/features/groups/members/leave_group_spec.rb b/spec/features/groups/members/leave_group_spec.rb
index 9eb5cc15c5e..32acf7edd2a 100644
--- a/spec/features/groups/members/leave_group_spec.rb
+++ b/spec/features/groups/members/leave_group_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe 'Groups > Members > Leave group' do
visit group_group_members_path(group)
- expect(find(:css, '.project-members-page li', text: user.name)).not_to have_selector(:css, 'a.btn-remove')
+ expect(find(:css, '.project-members-page li', text: user.name)).to have_no_selector(:css, 'a.btn-danger')
end
it 'owner can not leave the group by url param if they are the last owner', :js do
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index e3bbbd4d73b..33caa3af36d 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -6,62 +6,115 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
include Select2Helper
include Spec::Support::Helpers::Features::ListRowsHelpers
- let(:user) { create(:user) }
- let(:shared_with_group) { create(:group) }
- let(:shared_group) { create(:group) }
+ let_it_be(:user) { create(:user) }
before do
stub_feature_flags(vue_group_members_list: false)
- shared_group.add_owner(user)
sign_in(user)
end
- it 'add group to group' do
- visit group_group_members_path(shared_group)
+ context 'when group link does not exist' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_to_add) { create(:group) }
- add_group(shared_with_group.id, 'Reporter')
+ before do
+ group.add_owner(user)
+ visit group_group_members_path(group)
+ end
- click_groups_tab
+ it 'add group to group' do
+ add_group(group_to_add.id, 'Reporter')
- page.within(first_row) do
- expect(page).to have_content(shared_with_group.name)
- expect(page).to have_content('Reporter')
+ click_groups_tab
+
+ page.within(first_row) do
+ expect(page).to have_content(group_to_add.name)
+ expect(page).to have_content('Reporter')
+ end
end
end
- it 'remove group from group' do
- create(:group_group_link, shared_group: shared_group,
- shared_with_group: shared_with_group, group_access: ::Gitlab::Access::DEVELOPER)
+ context 'when group link exists' do
+ let_it_be(:shared_with_group) { create(:group) }
+ let_it_be(:shared_group) { create(:group) }
- visit group_group_members_path(shared_group)
+ let(:additional_link_attrs) { {} }
- click_groups_tab
+ let_it_be(:group_link, refind: true) do
+ create(
+ :group_group_link,
+ shared_group: shared_group,
+ shared_with_group: shared_with_group,
+ group_access: ::Gitlab::Access::DEVELOPER
+ )
+ end
- expect(page).to have_content(shared_with_group.name)
+ before do
+ travel_to Time.now.utc.beginning_of_day
+ group_link.update!(additional_link_attrs)
- accept_confirm do
- find(:css, '#tab-groups li', text: shared_with_group.name).find(:css, 'a.btn-remove').click
+ shared_group.add_owner(user)
+ visit group_group_members_path(shared_group)
end
- expect(page).not_to have_content(shared_with_group.name)
- end
+ it 'remove group from group' do
+ click_groups_tab
+
+ expect(page).to have_content(shared_with_group.name)
+
+ accept_confirm do
+ find(:css, '#tab-groups li', text: shared_with_group.name).find(:css, 'a.btn-danger').click
+ end
+
+ expect(page).not_to have_content(shared_with_group.name)
+ end
- it 'update group to owner level' do
- create(:group_group_link, shared_group: shared_group,
- shared_with_group: shared_with_group, group_access: ::Gitlab::Access::DEVELOPER)
+ it 'update group to owner level' do
+ click_groups_tab
- visit group_group_members_path(shared_group)
+ page.within(first_row) do
+ click_button('Developer')
+ click_link('Maintainer')
- click_groups_tab
+ wait_for_requests
- page.within(first_row) do
- click_button('Developer')
- click_link('Maintainer')
+ expect(page).to have_button('Maintainer')
+ end
+ end
+
+ it 'updates expiry date' do
+ click_groups_tab
+
+ expires_at_field = "member_expires_at_#{shared_with_group.id}"
+ fill_in "member_expires_at_#{shared_with_group.id}", with: 3.days.from_now.to_date
+ find_field(expires_at_field).native.send_keys :enter
wait_for_requests
- expect(page).to have_button('Maintainer')
+ page.within(find('li.group_member')) do
+ expect(page).to have_content('Expires in 3 days')
+ end
+ end
+
+ context 'when expiry date is set' do
+ let(:additional_link_attrs) { { expires_at: 3.days.from_now.to_date } }
+
+ it 'clears expiry date' do
+ click_groups_tab
+
+ page.within(find('li.group_member')) do
+ expect(page).to have_content('Expires in 3 days')
+
+ page.within(find('.js-edit-member-form')) do
+ find('.js-clear-input').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_content('Expires in')
+ end
+ end
end
end
diff --git a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
index d94cc85f411..dd708c243a8 100644
--- a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
@@ -6,65 +6,66 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js
include Select2Helper
include ActiveSupport::Testing::TimeHelpers
- let(:user1) { create(:user, name: 'John Doe') }
- let!(:new_member) { create(:user, name: 'Mary Jane') }
- let(:group) { create(:group) }
+ let_it_be(:user1) { create(:user, name: 'John Doe') }
+ let_it_be(:group) { create(:group) }
+ let(:new_member) { create(:user, name: 'Mary Jane') }
before do
stub_feature_flags(vue_group_members_list: false)
+ travel_to Time.now.utc.beginning_of_day
+
group.add_owner(user1)
sign_in(user1)
end
it 'expiration date is displayed in the members list' do
- travel_to Time.zone.parse('2016-08-06 08:00') do
- date = 4.days.from_now
- visit group_group_members_path(group)
-
- page.within '.invite-users-form' do
- select2(new_member.id, from: '#user_ids', multiple: true)
- fill_in 'expires_at', with: date.to_s(:medium) + "\n"
- click_on 'Invite'
- end
-
- page.within "#group_member_#{group_member_id(new_member)}" do
- expect(page).to have_content('Expires in 4 days')
- end
+ visit group_group_members_path(group)
+
+ page.within '.invite-users-form' do
+ select2(new_member.id, from: '#user_ids', multiple: true)
+
+ fill_in 'expires_at', with: 3.days.from_now.to_date
+ find_field('expires_at').native.send_keys :enter
+
+ click_on 'Invite'
+ end
+
+ page.within "#group_member_#{group_member_id}" do
+ expect(page).to have_content('Expires in 3 days')
end
end
- it 'change expiration date' do
- travel_to Time.zone.parse('2016-08-06 08:00') do
- date = 3.days.from_now
- group.add_developer(new_member)
+ it 'changes expiration date' do
+ group.add_developer(new_member)
+ visit group_group_members_path(group)
+
+ page.within "#group_member_#{group_member_id}" do
+ fill_in 'Expiration date', with: 3.days.from_now.to_date
+ find_field('Expiration date').native.send_keys :enter
- visit group_group_members_path(group)
+ wait_for_requests
- page.within "#group_member_#{group_member_id(new_member)}" do
- find('.js-access-expiration-date').set date.to_s(:medium) + "\n"
- wait_for_requests
- expect(page).to have_content('Expires in 3 days')
- end
+ expect(page).to have_content('Expires in 3 days')
end
end
- it 'remove expiration date' do
- travel_to Time.zone.parse('2016-08-06 08:00') do
- date = 3.days.from_now
- group_member = create(:group_member, :developer, user: new_member, group: group, expires_at: date.to_s(:medium))
+ it 'clears expiration date' do
+ create(:group_member, :developer, user: new_member, group: group, expires_at: 3.days.from_now.to_date)
+ visit group_group_members_path(group)
+
+ page.within "#group_member_#{group_member_id}" do
+ expect(page).to have_content('Expires in 3 days')
+
+ find('.js-clear-input').click
- visit group_group_members_path(group)
+ wait_for_requests
- page.within "#group_member_#{group_member.id}" do
- find('.js-clear-input').click
- wait_for_requests
- expect(page).not_to have_content('Expires in 3 days')
- end
+ expect(page).not_to have_content('Expires in')
end
end
- def group_member_id(user)
+ def group_member_id
group.members.find_by(user_id: new_member).id
end
end
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 60f1c404e78..e81f2370d10 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -72,4 +72,12 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
+
+ context 'when invite team members is not available' do
+ it 'does not display the js-invite-members-trigger' do
+ visit group_path(group)
+
+ expect(page).not_to have_selector('.js-invite-members-trigger')
+ end
+ end
end
diff --git a/spec/features/groups/packages_spec.rb b/spec/features/groups/packages_spec.rb
index d81e4aa70cf..60e0c08b3d4 100644
--- a/spec/features/groups/packages_spec.rb
+++ b/spec/features/groups/packages_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'Group Packages' do
it 'allows you to navigate to the project page' do
page.within('[data-qa-selector="packages-table"]') do
- click_link project.name
+ find('[data-qa-selector="package-path"]', text: project.name).click
end
expect(page).to have_current_path(project_path(project))
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index ec30f34199d..304573ecd6e 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -184,4 +184,17 @@ RSpec.describe 'Group show page' do
expect(page).to have_selector('.notifications-btn.disabled', visible: true)
end
end
+
+ context 'page og:description' do
+ let(:group) { create(:group, description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
+ let(:maintainer) { create(:user) }
+
+ before do
+ group.add_maintainer(maintainer)
+ sign_in(maintainer)
+ visit path
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+ end
end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
new file mode 100644
index 00000000000..3ec7717b649
--- /dev/null
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Incident details', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project, author: developer, description: 'description') }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_issues_incident_path(project, incident)
+ wait_for_requests
+ end
+
+ context 'when a developer+ displays the incident' do
+ it 'shows the incident' do
+ page.within('.issuable-details') do
+ expect(find('h2')).to have_content(incident.title)
+ end
+ end
+
+ it 'does not show design management' do
+ expect(page).not_to have_selector('.js-design-management')
+ end
+
+ it 'shows the incident tabs' do
+ page.within('.issuable-details') do
+ incident_tabs = find('[data-testid="incident-tabs"]')
+
+ expect(find('h2')).to have_content(incident.title)
+ expect(incident_tabs).to have_content('Summary')
+ expect(incident_tabs).to have_content(incident.description)
+ end
+ end
+
+ it 'shows the right sidebar mounted with type issue' do
+ page.within('.layout-page') do
+ sidebar = find('.right-sidebar')
+
+ expect(page).to have_selector('.right-sidebar[data-issuable-type="issue"]')
+ expect(sidebar).to have_selector('.incident-severity')
+ expect(sidebar).not_to have_selector('.milestone')
+ end
+ end
+ end
+end
diff --git a/spec/features/incidents/incidents_list_spec.rb b/spec/features/incidents/incidents_list_spec.rb
new file mode 100644
index 00000000000..c65c83b2804
--- /dev/null
+++ b/spec/features/incidents/incidents_list_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Incident Management index', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project) }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_incidents_path(project)
+ wait_for_requests
+ end
+
+ context 'when a developer displays the incident list' do
+ it 'shows the status tabs' do
+ expect(page).to have_selector('.gl-tabs')
+ end
+
+ it 'shows the filtered search' do
+ expect(page).to have_selector('.filtered-search-wrapper')
+ end
+
+    it 'shows the incident table' do
+ expect(page).to have_selector('.gl-table')
+ end
+
+    it 'shows the incident page title' do
+ expect(page).to have_content('Incidents')
+ end
+ end
+end
diff --git a/spec/features/incidents/user_creates_new_incident_spec.rb b/spec/features/incidents/user_creates_new_incident_spec.rb
new file mode 100644
index 00000000000..99a137b5852
--- /dev/null
+++ b/spec/features/incidents/user_creates_new_incident_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Incident Management index', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project) }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_guest(guest)
+ end
+
+ shared_examples 'create incident form' do
+ it 'shows the create new issue button' do
+ expect(page).to have_selector('.create-incident-button')
+ end
+
+ it 'when clicked shows the create issue page with the Incident type pre-selected' do
+ find('.create-incident-button').click
+ wait_for_all_requests
+
+ expect(page).to have_selector('.dropdown-menu-toggle')
+ expect(page).to have_selector('.js-issuable-type-filter-dropdown-wrap')
+
+ page.within('.js-issuable-type-filter-dropdown-wrap') do
+ expect(page).to have_content('Incident')
+ end
+ end
+ end
+
+ context 'when a developer displays the incident list' do
+ before do
+ sign_in(developer)
+
+ visit project_incidents_path(project)
+ wait_for_all_requests
+ end
+
+ it_behaves_like 'create incident form'
+ end
+
+ context 'when a guest displays the incident list' do
+ before do
+ sign_in(guest)
+
+ visit project_incidents_path(project)
+ wait_for_all_requests
+ end
+
+ it_behaves_like 'create incident form'
+ end
+end
diff --git a/spec/features/incidents/user_filters_incidents_by_status_spec.rb b/spec/features/incidents/user_filters_incidents_by_status_spec.rb
new file mode 100644
index 00000000000..661c737141b
--- /dev/null
+++ b/spec/features/incidents/user_filters_incidents_by_status_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User filters Incident Management table by status', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+
+ before_all do
+ project.add_developer(developer)
+
+ create_list(:incident, 2, project: project, state: 'opened')
+ create(:incident, project: project, state: 'closed')
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_incidents_path(project)
+ wait_for_requests
+ end
+
+ context 'when a developer displays the incident list they can filter the table by an incident status' do
+ def the_page_shows_the_nav_text_with_correct_count
+ expect(page).to have_selector('.gl-table')
+ expect(page).to have_content('All 3')
+ expect(page).to have_content('Open 2')
+ expect(page).to have_content('Closed 1')
+ end
+
+ it 'shows the incident table items with incident status of Open by default' do
+ expect(find('.gl-tab-nav-item-active')).to have_content('Open 2')
+ expect(all('tbody tr').count).to be(2)
+
+ the_page_shows_the_nav_text_with_correct_count
+ end
+
+ it 'shows the incident table items with incident status of Closed' do
+ find('.gl-tab-nav-item', text: 'Closed').click
+ wait_for_requests
+
+ expect(find('.gl-tab-nav-item-active')).to have_content('Closed 1')
+ expect(all('tbody tr').count).to be(1)
+
+ the_page_shows_the_nav_text_with_correct_count
+ end
+
+ it 'shows the incident table items with all status' do
+ find('.gl-tab-nav-item', text: 'All').click
+ wait_for_requests
+
+ expect(find('.gl-tab-nav-item-active')).to have_content('All 3')
+ expect(all('[data-testid="incident-assignees"]').count).to be(3)
+ expect(all('tbody tr').count).to be(3)
+
+ the_page_shows_the_nav_text_with_correct_count
+ end
+ end
+end
diff --git a/spec/features/incidents/user_searches_incidents_spec.rb b/spec/features/incidents/user_searches_incidents_spec.rb
new file mode 100644
index 00000000000..b8e3ff534c3
--- /dev/null
+++ b/spec/features/incidents/user_searches_incidents_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User searches Incident Management incidents', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project) }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_incidents_path(project)
+ wait_for_requests
+ end
+
+ context 'when a developer displays the incident list they can search for an incident' do
+ it 'shows the incident table with an incident for a valid search filter bar' do
+ expect(page).to have_selector('.filtered-search-wrapper')
+ expect(page).to have_selector('.gl-table')
+ expect(page).to have_selector('.incident-severity')
+ expect(all('tbody tr').count).to be(1)
+ expect(page).not_to have_selector('.empty-state')
+ end
+ end
+end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index 3954de56eea..8ccaf82536a 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -23,7 +23,8 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
def fill_in_sign_up_form(new_user)
- fill_in 'new_user_name', with: new_user.name
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
fill_in 'new_user_password', with: new_user.password
@@ -81,10 +82,10 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
end
- context 'when inviting a user using their email address' do
+ context 'when inviting a user' do
let(:new_user) { build_stubbed(:user) }
let(:invite_email) { new_user.email }
- let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email) }
+ let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email, created_by: owner) }
let!(:project_invite) { create(:project_member, :invited, project: project, invite_email: invite_email) }
context 'when user has not signed in yet' do
@@ -210,30 +211,43 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
context 'when declining the invitation' do
let(:send_email_confirmation) { true }
- context 'when signed in' do
- before do
- sign_in(user)
- visit invite_path(group_invite.raw_invite_token)
+ context 'as an existing user' do
+ let(:group_invite) { create(:group_member, user: user, group: group, created_by: owner) }
+
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ visit decline_invite_path(group_invite.raw_invite_token)
+ end
+
+ it 'declines application and redirects to dashboard' do
+ expect(current_path).to eq(dashboard_projects_path)
+ expect(page).to have_content('You have declined the invitation to join group Owned.')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
end
- it 'declines application and redirects to dashboard' do
- page.click_link 'Decline'
+ context 'when signed out' do
+ before do
+ visit decline_invite_path(group_invite.raw_invite_token)
+ end
- expect(current_path).to eq(dashboard_projects_path)
- expect(page).to have_content('You have declined the invitation to join group Owned.')
- expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ it 'declines application and redirects to sign in page' do
+ expect(current_path).to eq(new_user_session_path)
+ expect(page).to have_content('You have declined the invitation to join group Owned.')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
end
end
- context 'when signed out' do
+ context 'as a non-existing user' do
before do
visit decline_invite_path(group_invite.raw_invite_token)
end
- it 'declines application and redirects to sign in page' do
- expect(current_path).to eq(new_user_session_path)
-
- expect(page).to have_content('You have declined the invitation to join group Owned.')
+ it 'declines application and shows a decline page' do
+ expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
+ expect(page).to have_content('You successfully declined the invitation')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
diff --git a/spec/features/issuables/close_reopen_report_toggle_spec.rb b/spec/features/issuables/close_reopen_report_toggle_spec.rb
index 5ea89a7984f..6e99cfb3293 100644
--- a/spec/features/issuables/close_reopen_report_toggle_spec.rb
+++ b/spec/features/issuables/close_reopen_report_toggle_spec.rb
@@ -23,7 +23,15 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle' do
expect(container).to have_content("Close #{human_model_name}")
expect(container).to have_content('Report abuse')
expect(container).to have_content("Report #{human_model_name.pluralize} that are abusive, inappropriate or spam.")
- expect(container).to have_selector('.close-item.droplab-item-selected')
+
+ if issuable.is_a?(MergeRequest)
+ page.within('.js-issuable-close-dropdown') do
+ expect(page).to have_link('Close merge request')
+ end
+ else
+ expect(container).to have_selector('.close-item.droplab-item-selected')
+ end
+
expect(container).to have_selector('.report-item')
expect(container).not_to have_selector('.report-item.droplab-item-selected')
expect(container).not_to have_selector('.reopen-item')
@@ -123,7 +131,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle' do
it 'shows only the `Edit` button' do
expect(page).to have_link('Edit')
- expect(page).not_to have_link('Report abuse')
+ expect(page).to have_link('Report abuse')
expect(page).not_to have_button('Close merge request')
expect(page).not_to have_button('Reopen merge request')
end
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index b1ffaaa7c7e..3f00bdc478d 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -48,6 +48,14 @@ RSpec.describe 'issuable list', :js do
end
end
+ it 'displays a warning if counting the number of issues times out' do
+ allow_any_instance_of(IssuesFinder).to receive(:count_by_state).and_raise(ActiveRecord::QueryCanceled)
+
+ visit_issuable_list(:issue)
+
+ expect(page).to have_text('Open ? Closed ? All ?')
+ end
+
it "counts merge requests closing issues icons for each issue" do
visit_issuable_list(:issue)
diff --git a/spec/features/issuables/markdown_references/internal_references_spec.rb b/spec/features/issuables/markdown_references/internal_references_spec.rb
index aceaea8d2ed..07d4271eed7 100644
--- a/spec/features/issuables/markdown_references/internal_references_spec.rb
+++ b/spec/features/issuables/markdown_references/internal_references_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe "Internal references", :js do
add_note("##{public_project_issue.to_reference(private_project)}")
end
- context "when user doesn't have access to private project" do
+ context "when user doesn't have access to private project", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/257832' do
before do
sign_in(public_project_user)
@@ -52,7 +52,7 @@ RSpec.describe "Internal references", :js do
visit(project_issue_path(public_project, public_project_issue))
end
- it "doesn't show any references" do
+ it "doesn't show any references", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/257832' do
page.within(".issue-details") do
expect(page).not_to have_content("#merge-requests .merge-requests-title")
end
@@ -94,7 +94,7 @@ RSpec.describe "Internal references", :js do
add_note("##{public_project_merge_request.to_reference(private_project)}")
end
- context "when user doesn't have access to private project" do
+ context "when user doesn't have access to private project", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/257832' do
before do
sign_in(public_project_user)
@@ -121,7 +121,7 @@ RSpec.describe "Internal references", :js do
visit(project_merge_request_path(public_project, public_project_merge_request))
end
- it "doesn't show any references" do
+ it "doesn't show any references", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/257832' do
page.within(".merge-request-details") do
expect(page).not_to have_content("#merge-requests .merge-requests-title")
end
diff --git a/spec/features/issuables/merge_request_discussion_lock_spec.rb b/spec/features/issuables/merge_request_discussion_lock_spec.rb
new file mode 100644
index 00000000000..4e0265839f6
--- /dev/null
+++ b/spec/features/issuables/merge_request_discussion_lock_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+# TODO use shared examples to merge this spec with discussion_lock_spec.rb
+# https://gitlab.com/gitlab-org/gitlab/-/issues/255910
+
+require 'spec_helper'
+
+RSpec.describe 'Merge Request Discussion Lock', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, author: user) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when a user is a team member' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when the discussion is unlocked' do
+ it 'the user can lock the merge_request' do
+ visit project_merge_request_path(merge_request.project, merge_request)
+
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Lock')
+ end
+
+ expect(find('[data-testid="lock-status"]')).to have_content('Locked')
+ end
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ merge_request.update_attribute(:discussion_locked, true)
+ visit project_merge_request_path(merge_request.project, merge_request)
+ end
+
+ it 'the user can unlock the merge_request' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Unlock')
+ end
+
+ expect(find('[data-testid="lock-status"]')).to have_content('Unlocked')
+ end
+ end
+ end
+
+ context 'when a user is not a team member' do
+ context 'when the discussion is unlocked' do
+ before do
+ visit project_merge_request_path(merge_request.project, merge_request)
+ end
+
+ it 'the user can not lock the merge_request' do
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ end
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ merge_request.update_attribute(:discussion_locked, true)
+ visit project_merge_request_path(merge_request.project, merge_request)
+ end
+
+ it 'the user can not unlock the merge_request' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ end
+ end
+ end
+end
diff --git a/spec/features/issues/csv_spec.rb b/spec/features/issues/csv_spec.rb
index 8d06bf24f8b..c93693ec40a 100644
--- a/spec/features/issues/csv_spec.rb
+++ b/spec/features/issues/csv_spec.rb
@@ -31,13 +31,13 @@ RSpec.describe 'Issues csv' do
end
it 'triggers an email export' do
- expect(ExportCsvWorker).to receive(:perform_async).with(user.id, project.id, hash_including("project_id" => project.id))
+ expect(IssuableExportCsvWorker).to receive(:perform_async).with(:issue, user.id, project.id, hash_including("project_id" => project.id))
request_csv
end
it "doesn't send request params to ExportCsvWorker" do
- expect(ExportCsvWorker).to receive(:perform_async).with(anything, anything, hash_excluding("controller" => anything, "action" => anything))
+ expect(IssuableExportCsvWorker).to receive(:perform_async).with(:issue, anything, anything, hash_excluding("controller" => anything, "action" => anything))
request_csv
end
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 0165fba9ace..ff78b9e608f 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -6,7 +6,9 @@ RSpec.describe 'GFM autocomplete', :js do
let_it_be(:user_xss_title) { 'eve <img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;' }
let_it_be(:user_xss) { create(:user, name: user_xss_title, username: 'xss.user') }
let_it_be(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') }
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group, name: 'Ancestor') }
+ let_it_be(:child_group) { create(:group, parent: group, name: 'My group') }
+ let_it_be(:project) { create(:project, group: child_group) }
let_it_be(:label) { create(:label, project: project, title: 'special+') }
let(:issue) { create(:issue, project: project) }
@@ -530,7 +532,7 @@ RSpec.describe 'GFM autocomplete', :js do
expect(page).to have_selector('.tribute-container', visible: true)
- expect(find('.tribute-container ul', visible: true).text).to have_content(user_xss.username)
+ expect(find('.tribute-container ul', visible: true)).to have_text(user_xss.username)
end
it 'selects the first item for assignee dropdowns' do
@@ -558,6 +560,24 @@ RSpec.describe 'GFM autocomplete', :js do
expect(find('.tribute-container ul', visible: true)).to have_content(user.name)
end
+ context 'when autocompleting for groups' do
+ it 'shows the group when searching for the name of the group' do
+ page.within '.timeline-content-form' do
+ find('#note-body').native.send_keys('@mygroup')
+ end
+
+ expect(find('.tribute-container ul', visible: true)).to have_text('My group')
+ end
+
+ it 'does not show the group when searching for the name of the parent of the group' do
+ page.within '.timeline-content-form' do
+ find('#note-body').native.send_keys('@ancestor')
+ end
+
+ expect(find('.tribute-container ul', visible: true)).not_to have_text('My group')
+ end
+ end
+
context 'if a selected value has special characters' do
it 'wraps the result in double quotes' do
note = find('#note-body')
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 38d11ee2560..94a1de06488 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -16,129 +16,83 @@ RSpec.describe 'Issue Sidebar' do
sign_in(user)
end
- context 'assignee', :js do
+ context 'when concerning the assignee', :js do
let(:user2) { create(:user) }
let(:issue2) { create(:issue, project: project, author: user2) }
- context 'when invite_members_version_a experiment is enabled' do
- before do
- stub_experiment_for_user(invite_members_version_a: true)
- end
+ include_examples 'issuable invite members experiments' do
+ let(:issuable_path) { project_issue_path(project, issue2) }
+ end
- context 'when user can not see invite members' do
- before do
- project.add_developer(user)
- visit_issue(project, issue2)
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ visit_issue(project, issue2)
- find('.block.assignee .edit-link').click
+ find('.block.assignee .edit-link').click
- wait_for_requests
- end
+ wait_for_requests
+ end
- it 'does not see link to invite members' do
- page.within '.dropdown-menu-user' do
- expect(page).not_to have_link('Invite Members')
- end
+ it 'shows author in assignee dropdown' do
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_content(user2.name)
end
end
- context 'when user can see invite members' do
- before do
- project.add_maintainer(user)
- visit_issue(project, issue2)
-
- find('.block.assignee .edit-link').click
+ it 'shows author when filtering assignee dropdown' do
+ page.within '.dropdown-menu-user' do
+ find('.dropdown-input-field').set(user2.name)
wait_for_requests
- end
- it 'sees link to invite members' do
- page.within '.dropdown-menu-user' do
- expect(page).to have_link('Invite Members', href: project_project_members_path(project))
- expect(page).to have_selector('[data-track-event="click_invite_members"]')
- expect(page).to have_selector("[data-track-label='edit_assignee']")
- end
+ expect(page).to have_content(user2.name)
end
end
- end
-
- context 'when invite_members_version_a experiment is not enabled' do
- context 'when user is a developer' do
- before do
- project.add_developer(user)
- visit_issue(project, issue2)
- find('.block.assignee .edit-link').click
+ it 'assigns yourself' do
+ find('.block.assignee .dropdown-menu-toggle').click
- wait_for_requests
- end
-
- it 'shows author in assignee dropdown' do
- page.within '.dropdown-menu-user' do
- expect(page).to have_content(user2.name)
- end
- end
+ click_button 'assign yourself'
- it 'shows author when filtering assignee dropdown' do
- page.within '.dropdown-menu-user' do
- find('.dropdown-input-field').native.send_keys user2.name
- sleep 1 # Required to wait for end of input delay
+ wait_for_requests
- wait_for_requests
+ find('.block.assignee .edit-link').click
- expect(page).to have_content(user2.name)
- end
+ page.within '.dropdown-menu-user' do
+ expect(page.find('.dropdown-header')).to be_visible
+ expect(page.find('.dropdown-menu-user-link.is-active')).to have_content(user.name)
end
+ end
- it 'assigns yourself' do
- find('.block.assignee .dropdown-menu-toggle').click
-
- click_button 'assign yourself'
-
- wait_for_requests
+ it 'keeps your filtered term after filtering and dismissing the dropdown' do
+ find('.dropdown-input-field').set(user2.name)
- find('.block.assignee .edit-link').click
+ wait_for_requests
- page.within '.dropdown-menu-user' do
- expect(page.find('.dropdown-header')).to be_visible
- expect(page.find('.dropdown-menu-user-link.is-active')).to have_content(user.name)
- end
+ page.within '.dropdown-menu-user' do
+ expect(page).not_to have_content 'Unassigned'
+ click_link user2.name
end
- it 'keeps your filtered term after filtering and dismissing the dropdown' do
- find('.dropdown-input-field').native.send_keys user2.name
-
- wait_for_requests
-
- page.within '.dropdown-menu-user' do
- expect(page).not_to have_content 'Unassigned'
- click_link user2.name
- end
-
- find('.js-right-sidebar').click
- find('.block.assignee .edit-link').click
+ find('.js-right-sidebar').click
+ find('.block.assignee .edit-link').click
- expect(page.all('.dropdown-menu-user li').length).to eq(1)
- expect(find('.dropdown-input-field').value).to eq(user2.name)
- end
+ expect(page.all('.dropdown-menu-user li').length).to eq(1)
+ expect(find('.dropdown-input-field').value).to eq(user2.name)
end
+ end
- context 'when user is a maintainer' do
- before do
- project.add_maintainer(user)
- visit_issue(project, issue2)
+ it 'shows label text as "Apply" when assignees are changed' do
+ project.add_developer(user)
+ visit_issue(project, issue2)
- find('.block.assignee .edit-link').click
+ find('.block.assignee .edit-link').click
+ wait_for_requests
- wait_for_requests
- end
+ click_on 'Unassigned'
- it 'shows author in assignee dropdown and no invite link' do
- page.within '.dropdown-menu-user' do
- expect(page).not_to have_link('Invite Members')
- end
- end
- end
+ expect(page).to have_link('Apply')
end
end
diff --git a/spec/features/issues/todo_spec.rb b/spec/features/issues/todo_spec.rb
index 3de33049db0..315d1c911a2 100644
--- a/spec/features/issues/todo_spec.rb
+++ b/spec/features/issues/todo_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Manually create a todo item from issue', :js do
it 'creates todo when clicking button' do
page.within '.issuable-sidebar' do
- click_button 'Add a To Do'
+ click_button 'Add a to do'
expect(page).to have_content 'Mark as done'
end
@@ -32,7 +32,7 @@ RSpec.describe 'Manually create a todo item from issue', :js do
it 'marks a todo as done' do
page.within '.issuable-sidebar' do
- click_button 'Add a To Do'
+ click_button 'Add a to do'
click_button 'Mark as done'
end
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index 88b8e9624e2..de746415205 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -6,292 +6,356 @@ RSpec.describe "Issues > User edits issue", :js do
let_it_be(:project) { create(:project_empty_repo, :public) }
let_it_be(:project_with_milestones) { create(:project_empty_repo, :public) }
let_it_be(:user) { create(:user) }
- let_it_be(:issue) { create(:issue, project: project, author: user, assignees: [user]) }
+ let_it_be(:label_assigned) { create(:label, project: project, title: 'verisimilitude') }
+ let_it_be(:label_unassigned) { create(:label, project: project, title: 'syzygy') }
+ let_it_be(:issue) { create(:issue, project: project, author: user, assignees: [user], labels: [label_assigned]) }
let_it_be(:issue_with_milestones) { create(:issue, project: project_with_milestones, author: user, assignees: [user]) }
- let_it_be(:label) { create(:label, project: project) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:milestones) { create_list(:milestone, 25, project: project_with_milestones) }
- before do
- project.add_developer(user)
- project_with_milestones.add_developer(user)
- sign_in(user)
- end
-
- context "from edit page" do
+ context 'with authorized user' do
before do
- visit edit_project_issue_path(project, issue)
+ project.add_developer(user)
+ project_with_milestones.add_developer(user)
+ sign_in(user)
end
- it "previews content" do
- form = first(".gfm-form")
-
- page.within(form) do
- fill_in("Description", with: "Bug fixed :smile:")
- click_button("Preview")
+ context "from edit page" do
+ before do
+ visit edit_project_issue_path(project, issue)
end
- expect(form).to have_button("Write")
- end
-
- it 'allows user to select unassigned' do
- visit edit_project_issue_path(project, issue)
-
- expect(page).to have_content "Assignee #{user.name}"
+ it "previews content" do
+ form = first(".gfm-form")
- first('.js-user-search').click
- click_link 'Unassigned'
-
- click_button 'Save changes'
+ page.within(form) do
+ fill_in("Description", with: "Bug fixed :smile:")
+ click_button("Preview")
+ end
- page.within('.assignee') do
- expect(page).to have_content 'None - assign yourself'
+ expect(form).to have_button("Write")
end
- end
- context 'with due date' do
- before do
+ it 'allows user to select unassigned' do
visit edit_project_issue_path(project, issue)
- end
-
- it 'saves with due date' do
- date = Date.today.at_beginning_of_month.tomorrow
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
- find('#issuable-due-date').click
-
- page.within '.pika-single' do
- click_button date.day
- end
+ expect(page).to have_content "Assignee #{user.name}"
- expect(find('#issuable-due-date').value).to eq date.to_s
+ first('.js-user-search').click
+ click_link 'Unassigned'
click_button 'Save changes'
- page.within '.issuable-sidebar' do
- expect(page).to have_content date.to_s(:medium)
+ page.within('.assignee') do
+ expect(page).to have_content 'None - assign yourself'
end
end
- it 'warns about version conflict' do
- issue.update(title: "New title")
+ context 'with due date' do
+ before do
+ visit edit_project_issue_path(project, issue)
+ end
- fill_in 'issue_title', with: 'bug 345'
- fill_in 'issue_description', with: 'bug description'
+ it 'saves with due date' do
+ date = Date.today.at_beginning_of_month.tomorrow
- click_button 'Save changes'
+ fill_in 'issue_title', with: 'bug 345'
+ fill_in 'issue_description', with: 'bug description'
+ find('#issuable-due-date').click
- expect(page).to have_content 'Someone edited the issue the same time you did'
- end
- end
- end
+ page.within '.pika-single' do
+ click_button date.day
+ end
- context "from issue#show" do
- before do
- visit project_issue_path(project, issue)
- end
+ expect(find('#issuable-due-date').value).to eq date.to_s
+
+ click_button 'Save changes'
+
+ page.within '.issuable-sidebar' do
+ expect(page).to have_content date.to_s(:medium)
+ end
+ end
- describe 'update labels' do
- it 'will not send ajax request when no data is changed' do
- page.within '.labels' do
- click_on 'Edit'
+ it 'warns about version conflict' do
+ issue.update(title: "New title")
- find('.dropdown-title button').click
+ fill_in 'issue_title', with: 'bug 345'
+ fill_in 'issue_description', with: 'bug description'
- expect(page).not_to have_selector('.block-loading')
- expect(page).not_to have_selector('.gl-spinner')
+ click_button 'Save changes'
+
+ expect(page).to have_content 'Someone edited the issue the same time you did'
end
end
end
- describe 'update assignee' do
- context 'by authorized user' do
- def close_dropdown_menu_if_visible
- find('.dropdown-menu-toggle', visible: :all).tap do |toggle|
- toggle.click if toggle.visible?
+ context "from issue#show" do
+ before do
+ visit project_issue_path(project, issue)
+ end
+
+ describe 'update labels' do
+ it 'will not send ajax request when no data is changed' do
+ page.within '.labels' do
+ click_on 'Edit'
+
+ find('.dropdown-title button').click
+
+ expect(page).not_to have_selector('.block-loading')
+ expect(page).not_to have_selector('.gl-spinner')
end
end
- it 'allows user to select unassigned' do
- visit project_issue_path(project, issue)
+ it 'can add label to issue' do
+ page.within '.block.labels' do
+ expect(page).to have_text('verisimilitude')
+ expect(page).not_to have_text('syzygy')
- page.within('.assignee') do
- expect(page).to have_content "#{user.name}"
+ click_on 'Edit'
- click_link 'Edit'
- click_link 'Unassigned'
- first('.title').click
- expect(page).to have_content 'None - assign yourself'
+ wait_for_requests
+
+ click_on 'syzygy'
+ find('.dropdown-header-button').click
+
+ wait_for_requests
+
+ expect(page).to have_text('verisimilitude')
+ expect(page).to have_text('syzygy')
end
end
- it 'allows user to select an assignee' do
- issue2 = create(:issue, project: project, author: user)
- visit project_issue_path(project, issue2)
+ it 'can remove label from issue by clicking on the label `x` button' do
+ page.within '.block.labels' do
+ expect(page).to have_text('verisimilitude')
+
+ within '.gl-label' do
+ click_button
+ end
+
+ wait_for_requests
- page.within('.assignee') do
- expect(page).to have_content "None"
+ expect(page).not_to have_text('verisimilitude')
end
+ end
+ end
- page.within '.assignee' do
- click_link 'Edit'
+ describe 'update assignee' do
+ context 'by authorized user' do
+ def close_dropdown_menu_if_visible
+ find('.dropdown-menu-toggle', visible: :all).tap do |toggle|
+ toggle.click if toggle.visible?
+ end
end
- page.within '.dropdown-menu-user' do
- click_link user.name
+ it 'allows user to select unassigned' do
+ visit project_issue_path(project, issue)
+
+ page.within('.assignee') do
+ expect(page).to have_content "#{user.name}"
+
+ click_link 'Edit'
+ click_link 'Unassigned'
+ first('.title').click
+ expect(page).to have_content 'None - assign yourself'
+ end
end
- page.within('.assignee') do
- expect(page).to have_content user.name
+ it 'allows user to select an assignee' do
+ issue2 = create(:issue, project: project, author: user)
+ visit project_issue_path(project, issue2)
+
+ page.within('.assignee') do
+ expect(page).to have_content "None"
+ end
+
+ page.within '.assignee' do
+ click_link 'Edit'
+ end
+
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+
+ page.within('.assignee') do
+ expect(page).to have_content user.name
+ end
end
- end
- it 'allows user to unselect themselves' do
- issue2 = create(:issue, project: project, author: user, assignees: [user])
+ it 'allows user to unselect themselves' do
+ issue2 = create(:issue, project: project, author: user, assignees: [user])
- visit project_issue_path(project, issue2)
+ visit project_issue_path(project, issue2)
- page.within '.assignee' do
- expect(page).to have_content user.name
+ page.within '.assignee' do
+ expect(page).to have_content user.name
- click_link 'Edit'
- click_link user.name
+ click_link 'Edit'
+ click_link user.name
- close_dropdown_menu_if_visible
+ close_dropdown_menu_if_visible
- page.within '.value .assign-yourself' do
- expect(page).to have_content "None"
+ page.within '.value .assign-yourself' do
+ expect(page).to have_content "None"
+ end
end
end
end
- end
- context 'by unauthorized user' do
- let(:guest) { create(:user) }
+ context 'by unauthorized user' do
+ let(:guest) { create(:user) }
- before do
- project.add_guest(guest)
- end
+ before do
+ project.add_guest(guest)
+ end
- it 'shows assignee text' do
- sign_out(:user)
- sign_in(guest)
+ it 'shows assignee text' do
+ sign_out(:user)
+ sign_in(guest)
- visit project_issue_path(project, issue)
- expect(page).to have_content issue.assignees.first.name
+ visit project_issue_path(project, issue)
+ expect(page).to have_content issue.assignees.first.name
+ end
end
end
- end
- describe 'update milestone' do
- context 'by authorized user' do
- it 'allows user to select unassigned' do
- visit project_issue_path(project, issue)
+ describe 'update milestone' do
+ context 'by authorized user' do
+ it 'allows user to select unassigned' do
+ visit project_issue_path(project, issue)
- page.within('.milestone') do
- expect(page).to have_content "None"
- end
+ page.within('.milestone') do
+ expect(page).to have_content "None"
+ end
- find('.block.milestone .edit-link').click
- sleep 2 # wait for ajax stuff to complete
- first('.dropdown-content li').click
- sleep 2
- page.within('.milestone') do
- expect(page).to have_content 'None'
+ find('.block.milestone .edit-link').click
+ sleep 2 # wait for ajax stuff to complete
+ first('.dropdown-content li').click
+ sleep 2
+ page.within('.milestone') do
+ expect(page).to have_content 'None'
+ end
end
- end
- it 'allows user to de-select milestone' do
- visit project_issue_path(project, issue)
+ it 'allows user to de-select milestone' do
+ visit project_issue_path(project, issue)
- page.within('.milestone') do
- click_link 'Edit'
- click_link milestone.title
+ page.within('.milestone') do
+ click_link 'Edit'
+ click_link milestone.title
- page.within '.value' do
- expect(page).to have_content milestone.title
- end
+ page.within '.value' do
+ expect(page).to have_content milestone.title
+ end
- click_link 'Edit'
- click_link milestone.title
+ click_link 'Edit'
+ click_link milestone.title
- page.within '.value' do
- expect(page).to have_content 'None'
+ page.within '.value' do
+ expect(page).to have_content 'None'
+ end
end
end
- end
- it 'allows user to search milestone' do
- visit project_issue_path(project_with_milestones, issue_with_milestones)
+ it 'allows user to search milestone' do
+ visit project_issue_path(project_with_milestones, issue_with_milestones)
- page.within('.milestone') do
- click_link 'Edit'
- wait_for_requests
- # We need to enclose search string in quotes for exact match as all the milestone titles
- # within tests are prefixed with `My title`.
- find('.dropdown-input-field', visible: true).send_keys "\"#{milestones[0].title}\""
- wait_for_requests
+ page.within('.milestone') do
+ click_link 'Edit'
+ wait_for_requests
+ # We need to enclose search string in quotes for exact match as all the milestone titles
+ # within tests are prefixed with `My title`.
+ find('.dropdown-input-field', visible: true).send_keys "\"#{milestones[0].title}\""
+ wait_for_requests
- page.within '.dropdown-content' do
- expect(page).to have_content milestones[0].title
+ page.within '.dropdown-content' do
+ expect(page).to have_content milestones[0].title
+ end
end
end
end
- end
- context 'by unauthorized user' do
- let(:guest) { create(:user) }
+ context 'by unauthorized user' do
+ let(:guest) { create(:user) }
- before do
- project.add_guest(guest)
- issue.milestone = milestone
- issue.save
- end
+ before do
+ project.add_guest(guest)
+ issue.milestone = milestone
+ issue.save
+ end
- it 'shows milestone text' do
- sign_out(:user)
- sign_in(guest)
+ it 'shows milestone text' do
+ sign_out(:user)
+ sign_in(guest)
- visit project_issue_path(project, issue)
- expect(page).to have_content milestone.title
+ visit project_issue_path(project, issue)
+ expect(page).to have_content milestone.title
+ end
end
end
- end
- context 'update due date' do
- it 'adds due date to issue' do
- date = Date.today.at_beginning_of_month + 2.days
+ context 'update due date' do
+ it 'adds due date to issue' do
+ date = Date.today.at_beginning_of_month + 2.days
- page.within '.due_date' do
- click_link 'Edit'
+ page.within '.due_date' do
+ click_link 'Edit'
- page.within '.pika-single' do
- click_button date.day
- end
+ page.within '.pika-single' do
+ click_button date.day
+ end
- wait_for_requests
+ wait_for_requests
- expect(find('.value').text).to have_content date.strftime('%b %-d, %Y')
+ expect(find('.value').text).to have_content date.strftime('%b %-d, %Y')
+ end
end
- end
- it 'removes due date from issue' do
- date = Date.today.at_beginning_of_month + 2.days
+ it 'removes due date from issue' do
+ date = Date.today.at_beginning_of_month + 2.days
- page.within '.due_date' do
- click_link 'Edit'
+ page.within '.due_date' do
+ click_link 'Edit'
- page.within '.pika-single' do
- click_button date.day
+ page.within '.pika-single' do
+ click_button date.day
+ end
+
+ wait_for_requests
+
+ expect(page).to have_no_content 'None'
+
+ click_link 'remove due date'
+ expect(page).to have_content 'None'
end
+ end
+ end
+ end
+ end
- wait_for_requests
+ context 'with unauthorized user' do
+ before do
+ sign_in(user)
+ end
- expect(page).to have_no_content 'None'
+ context "from issue#show" do
+ before do
+ visit project_issue_path(project, issue)
+ end
- click_link 'remove due date'
- expect(page).to have_content 'None'
+ describe 'updating labels' do
+ it 'cannot edit labels' do
+ page.within '.block.labels' do
+ expect(page).not_to have_button('Edit')
+ end
+ end
+
+ it 'cannot remove label with a click as it has no `x` button' do
+ page.within '.block.labels' do
+ within '.gl-label' do
+ expect(page).not_to have_button
+ end
+ end
end
end
end
diff --git a/spec/features/issues/user_sees_live_update_spec.rb b/spec/features/issues/user_sees_live_update_spec.rb
index c9b751715bc..d27cdb774a5 100644
--- a/spec/features/issues/user_sees_live_update_spec.rb
+++ b/spec/features/issues/user_sees_live_update_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'Issues > User sees live update', :js do
expect(page).to have_css('.sidebar-item-warning-message')
within('.sidebar-item-warning-message') do
- find('.btn-close').click
+ find('[data-testid="confidential-toggle"]').click
end
wait_for_requests
diff --git a/spec/features/issues/user_views_issue_spec.rb b/spec/features/issues/user_views_issue_spec.rb
index 3f18764aa58..9b1c8be1513 100644
--- a/spec/features/issues/user_views_issue_spec.rb
+++ b/spec/features/issues/user_views_issue_spec.rb
@@ -5,7 +5,7 @@ require "spec_helper"
RSpec.describe "User views issue" do
let_it_be(:project) { create(:project_empty_repo, :public) }
let_it_be(:user) { create(:user) }
- let_it_be(:issue) { create(:issue, project: project, description: "# Description header", author: user) }
+ let_it_be(:issue) { create(:issue, project: project, description: "# Description header\n\n**Lorem** _ipsum_ dolor sit [amet](https://example.com)", author: user) }
let_it_be(:note) { create(:note, noteable: issue, project: project, author: user) }
before_all do
@@ -20,6 +20,8 @@ RSpec.describe "User views issue" do
it { expect(page).to have_header_with_correct_id_and_link(1, "Description header", "description-header") }
+ it_behaves_like 'page meta description', ' Description header Lorem ipsum dolor sit amet'
+
it 'shows the merge request and issue actions', :aggregate_failures do
expect(page).to have_link('New issue')
expect(page).to have_button('Create merge request')
diff --git a/spec/features/labels_hierarchy_spec.rb b/spec/features/labels_hierarchy_spec.rb
index eed9a6d1043..5d141580874 100644
--- a/spec/features/labels_hierarchy_spec.rb
+++ b/spec/features/labels_hierarchy_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe 'Labels Hierarchy', :js do
let!(:project_label_1) { create(:label, project: project_1, title: 'Label_4') }
before do
+ stub_feature_flags(graphql_board_lists: false)
grandparent.add_owner(user)
sign_in(user)
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index 40f6482c948..c8fc23bebf9 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -41,7 +41,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
write_comment
page.within('.review-bar-content') do
- click_button 'Finish review'
click_button 'Submit review'
end
@@ -64,18 +63,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
expect(page).to have_selector('.note:not(.draft-note)', text: 'Line is wrong')
end
- it 'discards review' do
- write_comment
-
- click_button 'Discard review'
-
- click_button 'Delete all pending comments'
-
- wait_for_requests
-
- expect(page).not_to have_selector('.draft-note-component')
- end
-
it 'deletes draft note' do
write_comment
@@ -149,7 +136,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
write_reply_to_discussion(resolve: true)
page.within('.review-bar-content') do
- click_button 'Finish review'
click_button 'Submit review'
end
@@ -192,7 +178,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
write_reply_to_discussion(button_text: 'Start a review', unresolve: true)
page.within('.review-bar-content') do
- click_button 'Finish review'
click_button 'Submit review'
end
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index 0e65cb358da..a98bfd1c8a4 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -26,7 +26,12 @@ RSpec.describe 'a maintainer edits files on a source-branch of an MR from a fork
visit project_merge_request_path(target_project, merge_request)
click_link 'Changes'
wait_for_requests
- first('.js-file-title').find('.js-edit-blob').click
+
+ page.within(first('.js-file-title')) do
+ find('.js-diff-more-actions').click
+ find('.js-edit-blob').click
+ end
+
wait_for_requests
end
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 3a199951b56..ad1ad067935 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -34,7 +34,8 @@ RSpec.describe 'User comments on a diff', :js do
page.within('.diff-files-holder > div:nth-child(3)') do
expect(page).to have_content('Line is wrong')
- find('.js-btn-vue-toggle-comments').click
+ find('.js-diff-more-actions').click
+ click_button 'Hide comments on this file'
expect(page).not_to have_content('Line is wrong')
end
@@ -67,7 +68,8 @@ RSpec.describe 'User comments on a diff', :js do
# Hide the comment.
page.within('.diff-files-holder > div:nth-child(3)') do
- find('.js-btn-vue-toggle-comments').click
+ find('.js-diff-more-actions').click
+ click_button 'Hide comments on this file'
expect(page).not_to have_content('Line is wrong')
end
@@ -80,7 +82,8 @@ RSpec.describe 'User comments on a diff', :js do
# Show the comment.
page.within('.diff-files-holder > div:nth-child(3)') do
- find('.js-btn-vue-toggle-comments').click
+ find('.js-diff-more-actions').click
+ click_button 'Show comments on this file'
end
# Now both the comments should be shown.
diff --git a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
index affd6f6b7b5..7d55a72c2b1 100644
--- a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
+++ b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js do
let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-menu li[data-user-id=\"#{assignee.id}\"]") }
let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item.find('a')['data-title'] || '' }
- context 'when invite_members_version_a experiment is not enabled' do
+ context 'when user is an owner' do
before do
stub_const('Autocomplete::UsersFinder::LIMIT', users_find_limit)
@@ -52,12 +52,6 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js do
it "shows assignee tooltip '#{expected_tooltip}" do
expect(sidebar_assignee_dropdown_tooltip).to eql(expected_tooltip)
end
-
- it 'does not show invite link' do
- page.within '.dropdown-menu-user' do
- expect(page).not_to have_link('Invite Members')
- end
- end
end
end
@@ -74,48 +68,15 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js do
end
end
- context 'when invite_members_version_a experiment is enabled' do
+ context 'with invite members experiment considerations' do
let_it_be(:user) { create(:user) }
before do
- stub_experiment_for_user(invite_members_version_a: true)
sign_in(user)
end
- context 'when user can not see invite members' do
- before do
- project.add_developer(user)
- visit project_merge_request_path(project, merge_request)
-
- find('.block.assignee .edit-link').click
-
- wait_for_requests
- end
-
- it 'does not see link to invite members' do
- page.within '.dropdown-menu-user' do
- expect(page).not_to have_link('Invite Members')
- end
- end
- end
-
- context 'when user can see invite members' do
- before do
- project.add_maintainer(user)
- visit project_merge_request_path(project, merge_request)
-
- find('.block.assignee .edit-link').click
-
- wait_for_requests
- end
-
- it 'sees link to invite members' do
- page.within '.dropdown-menu-user' do
- expect(page).to have_link('Invite Members', href: project_project_members_path(project))
- expect(page).to have_selector('[data-track-event="click_invite_members"]')
- expect(page).to have_selector("[data-track-label='edit_assignee']")
- end
- end
+ include_examples 'issuable invite members experiments' do
+ let(:issuable_path) { project_merge_request_path(project, merge_request) }
end
end
end
diff --git a/spec/features/merge_request/user_edits_mr_spec.rb b/spec/features/merge_request/user_edits_mr_spec.rb
index 397ca70f4a1..817b4e0b48e 100644
--- a/spec/features/merge_request/user_edits_mr_spec.rb
+++ b/spec/features/merge_request/user_edits_mr_spec.rb
@@ -21,24 +21,6 @@ RSpec.describe 'Merge request > User edits MR' do
it_behaves_like 'an editable merge request'
end
- context 'when merge_request_reviewers is turned on' do
- before do
- stub_feature_flags(merge_request_reviewers: true)
- end
-
- context 'non-fork merge request' do
- include_context 'merge request edit context'
- it_behaves_like 'an editable merge request with reviewers'
- end
-
- context 'for a forked project' do
- let(:source_project) { fork_project(target_project, nil, repository: true) }
-
- include_context 'merge request edit context'
- it_behaves_like 'an editable merge request with reviewers'
- end
- end
-
context 'when merge_request_reviewers is turned off' do
before do
stub_feature_flags(merge_request_reviewers: false)
diff --git a/spec/features/merge_request/user_expands_diff_spec.rb b/spec/features/merge_request/user_expands_diff_spec.rb
index 0340d9ccc3d..0cdc87de761 100644
--- a/spec/features/merge_request/user_expands_diff_spec.rb
+++ b/spec/features/merge_request/user_expands_diff_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'User expands diff', :js do
let(:merge_request) { create(:merge_request, source_branch: 'expand-collapse-files', source_project: project, target_project: project) }
before do
+ stub_feature_flags(increased_diff_limits: false)
visit(diffs_project_merge_request_path(project, merge_request))
wait_for_requests
diff --git a/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
new file mode 100644
index 00000000000..f5bca7cf015
--- /dev/null
+++ b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User marks merge request as draft', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project) }
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'toggles draft status' do
+ click_link 'Mark as draft'
+
+ expect(page).to have_content("Draft: #{merge_request.title}")
+
+ page.within('.detail-page-header-actions') do
+ click_link 'Mark as ready'
+ end
+
+ expect(page).to have_content(merge_request.title)
+ end
+end
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index 3dc49fb4dea..444d5371e7a 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
it_behaves_like 'Merge when pipeline succeeds activator'
end
- context 'when enabled after pipeline status changed' do
+ context 'when enabled after pipeline status changed', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/258667' do
before do
pipeline.run!
diff --git a/spec/features/merge_request/user_reopens_merge_request_spec.rb b/spec/features/merge_request/user_reopens_merge_request_spec.rb
index 7866ece84ac..4a05a3be59a 100644
--- a/spec/features/merge_request/user_reopens_merge_request_spec.rb
+++ b/spec/features/merge_request/user_reopens_merge_request_spec.rb
@@ -15,7 +15,11 @@ RSpec.describe 'User reopens a merge requests', :js do
end
it 'reopens a merge request' do
- click_button('Reopen merge request', match: :first)
+ find('.js-issuable-close-dropdown .dropdown-toggle').click
+
+ click_link('Reopen merge request', match: :first)
+
+ wait_for_requests
page.within('.status-box') do
expect(page).to have_content('Open')
diff --git a/spec/features/merge_request/user_resolves_wip_mr_spec.rb b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
index a9d4c4df507..b67167252e1 100644
--- a/spec/features/merge_request/user_resolves_wip_mr_spec.rb
+++ b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
@@ -35,7 +35,9 @@ RSpec.describe 'Merge request > User resolves Work in Progress', :js do
expect(page.find('.ci-widget-content')).to have_content("Pipeline ##{pipeline.id}")
expect(page).to have_content "This merge request is still a work in progress."
- click_button('Mark as ready')
+ page.within('.mr-state-widget') do
+ click_button('Mark as ready')
+ end
wait_for_requests
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index 7a3a14e61e3..a7713ed9964 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'Merge request > User sees diff', :js do
visit diffs_project_merge_request_path(project, merge_request)
# Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
- expect(page).to have_selector("[id=\"#{changelog_id}\"] a.js-edit-blob")
+ expect(page).to have_selector("[id=\"#{changelog_id}\"] .js-edit-blob", visible: false)
end
end
@@ -73,6 +73,7 @@ RSpec.describe 'Merge request > User sees diff', :js do
visit diffs_project_merge_request_path(project, merge_request)
# Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
+ find("[id=\"#{changelog_id}\"] .js-diff-more-actions").click
find("[id=\"#{changelog_id}\"] .js-edit-blob").click
expect(page).to have_selector('.js-fork-suggestion-button', count: 1)
diff --git a/spec/features/merge_request/user_sees_page_metadata_spec.rb b/spec/features/merge_request/user_sees_page_metadata_spec.rb
new file mode 100644
index 00000000000..7b3e07152a0
--- /dev/null
+++ b/spec/features/merge_request/user_sees_page_metadata_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User sees page metadata' do
+ let(:merge_request) { create(:merge_request, description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
+ let(:project) { merge_request.target_project }
+ let(:user) { project.creator }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+end
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 8e15ba6cf8d..107fc002ebd 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
wait_for_requests
- expect(page.find('.js-run-mr-pipeline')).to have_text('Run Pipeline')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run Pipeline')
end
end
@@ -66,7 +66,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
wait_for_requests
- expect(page.find('.js-run-mr-pipeline')).to have_text('Run Pipeline')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run Pipeline')
end
end
end
diff --git a/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
new file mode 100644
index 00000000000..93807512d9c
--- /dev/null
+++ b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User sees suggest pipeline', :js do
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.source_project }
+ let(:user) { project.creator }
+
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ stub_experiment(suggest_pipeline: true)
+ stub_experiment_for_user(suggest_pipeline: true)
+ project.add_maintainer(user)
+ sign_in(user)
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'shows the suggest pipeline widget and then allows dismissal correctly' do
+ expect(page).to have_content('Are you adding technical debt or code vulnerabilities?')
+
+ page.within '.mr-pipeline-suggest' do
+ find('[data-testid="close"]').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_content('Are you adding technical debt or code vulnerabilities?')
+
+ # Reload so we know the user callout was registered
+ visit page.current_url
+
+ expect(page).not_to have_content('Are you adding technical debt or code vulnerabilities?')
+ end
+end
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 39495832547..9268190c7e0 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -119,7 +119,8 @@ RSpec.describe 'User comments on a diff', :js do
it 'can add and remove suggestions from a batch' do
files.each_with_index do |file, index|
page.within("[id='#{file[:hash]}']") do
- find("button[title='Show full file']").click
+ find('.js-diff-more-actions').click
+ click_button 'Show full file'
wait_for_requests
click_diff_line(find("[id='#{file[:line_code]}']"))
@@ -130,7 +131,9 @@ RSpec.describe 'User comments on a diff', :js do
wait_for_requests
end
end
+ end
+ files.each_with_index do |file, index|
page.within("[id='#{file[:hash]}']") do
expect(page).not_to have_content('Applied')
@@ -247,7 +250,7 @@ RSpec.describe 'User comments on a diff', :js do
end
context 'multiple suggestions in a single note' do
- it 'suggestions are presented' do
+ it 'suggestions are presented', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/258989' do
click_diff_line(find("[id='#{sample_compare.changes[1][:line_code]}']"))
page.within('.js-discussion-note-form') do
diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb
index 448844ae57d..e8998f9457a 100644
--- a/spec/features/merge_request/user_views_open_merge_request_spec.rb
+++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb
@@ -22,7 +22,24 @@ RSpec.describe 'User views an open merge request' do
# returns the whole document, not the node's actual parent element
expect(find(:xpath, "#{node.path}/..").text).to eq(merge_request.description[2..-1])
- expect(page).to have_content(merge_request.title).and have_content(merge_request.description)
+ expect(page).to have_content(merge_request.title)
+ end
+
+ it 'has reviewers in sidebar' do
+ expect(page).to have_css('.reviewer')
+ end
+ end
+
+ context 'when merge_request_reviewers is turned off' do
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ stub_feature_flags(merge_request_reviewers: false)
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'has no reviewers in sidebar' do
+ expect(page).not_to have_css('.reviewer')
end
end
diff --git a/spec/features/merge_requests/user_filters_by_approvals_spec.rb b/spec/features/merge_requests/user_filters_by_approvals_spec.rb
new file mode 100644
index 00000000000..6dda9ca7952
--- /dev/null
+++ b/spec/features/merge_requests/user_filters_by_approvals_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge Requests > User filters', :js do
+ include FilteredSearchHelpers
+
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { project.creator }
+ let_it_be(:group_user) { create(:user) }
+ let_it_be(:first_user) { create(:user) }
+
+ before do
+ sign_in(user)
+ visit project_merge_requests_path(project)
+ end
+
+ context 'by "approved by"' do
+ let_it_be(:merge_request) { create(:merge_request, title: 'Bugfix3', source_project: project, source_branch: 'bugfix3') }
+
+ let_it_be(:merge_request_with_first_user_approval) do
+ create(:merge_request, source_project: project, title: 'Bugfix5').tap do |mr|
+ create(:approval, merge_request: mr, user: first_user)
+ end
+ end
+
+ let_it_be(:merge_request_with_group_user_approved) do
+ group = create(:group)
+ group.add_developer(group_user)
+
+ create(:merge_request, source_project: project, title: 'Bugfix6', source_branch: 'bugfix6').tap do |mr|
+ create(:approval, merge_request: mr, user: group_user)
+ end
+ end
+
+ context 'filtering by approved-by:none' do
+ it 'applies the filter' do
+ input_filtered_search('approved-by:=none')
+
+ expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
+
+ expect(page).not_to have_content 'Bugfix5'
+ expect(page).not_to have_content 'Bugfix6'
+ expect(page).to have_content 'Bugfix3'
+ end
+ end
+
+ context 'filtering by approved-by:any' do
+ it 'applies the filter' do
+ input_filtered_search('approved-by:=any')
+
+ expect(page).to have_issuable_counts(open: 2, closed: 0, all: 2)
+
+ expect(page).to have_content 'Bugfix5'
+ expect(page).not_to have_content 'Bugfix3'
+ end
+ end
+
+ context 'filtering by approved-by:@username' do
+ it 'applies the filter' do
+ input_filtered_search("approved-by:=@#{first_user.username}")
+
+ expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
+
+ expect(page).to have_content 'Bugfix5'
+ expect(page).not_to have_content 'Bugfix3'
+ end
+ end
+
+ context 'filtering by an approver from a group' do
+ it 'applies the filter' do
+ input_filtered_search("approved-by:=@#{group_user.username}")
+
+ expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
+
+ expect(page).to have_content 'Bugfix6'
+ expect(page).not_to have_content 'Bugfix5'
+ expect(page).not_to have_content 'Bugfix3'
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_requests/user_filters_by_deployments_spec.rb b/spec/features/merge_requests/user_filters_by_deployments_spec.rb
new file mode 100644
index 00000000000..157454d4e10
--- /dev/null
+++ b/spec/features/merge_requests/user_filters_by_deployments_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge Requests > User filters by deployments', :js do
+ include FilteredSearchHelpers
+
+ let!(:project) { create(:project, :public, :repository) }
+ let!(:user) { project.creator }
+ let!(:gstg) { create(:environment, project: project, name: 'gstg') }
+ let!(:gprd) { create(:environment, project: project, name: 'gprd') }
+
+ let(:mr1) do
+ create(
+ :merge_request,
+ :simple,
+ :merged,
+ author: user,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ let(:mr2) do
+ create(
+ :merge_request,
+ :simple,
+ :merged,
+ author: user,
+ source_project: project,
+ target_project: project
+ )
+ end
+
+ let(:deploy1) do
+ create(
+ :deployment,
+ :success,
+ deployable: nil,
+ environment: gstg,
+ project: project,
+ sha: mr1.diff_head_sha,
+ finished_at: Time.utc(2020, 10, 1, 0, 0)
+ )
+ end
+
+ let(:deploy2) do
+ create(
+ :deployment,
+ :success,
+ deployable: nil,
+ environment: gprd,
+ project: project,
+ sha: mr2.diff_head_sha,
+ finished_at: Time.utc(2020, 10, 2, 0, 0)
+ )
+ end
+
+ before do
+ deploy1.link_merge_requests(MergeRequest.where(id: mr1.id))
+ deploy2.link_merge_requests(MergeRequest.where(id: mr2.id))
+
+ sign_in(user)
+ visit(project_merge_requests_path(project, state: :merged))
+ end
+
+ describe 'filtering by deployed-before' do
+ it 'applies the filter' do
+ input_filtered_search('deployed-before:=2020-10-02')
+
+ expect(page).to have_issuable_counts(open: 0, merged: 1, all: 1)
+ expect(page).to have_content mr1.title
+ end
+ end
+
+ describe 'filtering by deployed-after' do
+ it 'applies the filter' do
+ input_filtered_search('deployed-after:=2020-10-01')
+
+ expect(page).to have_issuable_counts(open: 0, merged: 1, all: 1)
+ expect(page).to have_content mr2.title
+ end
+ end
+
+ describe 'filtering by environment' do
+ it 'applies the filter' do
+ input_filtered_search('environment:=gstg')
+
+ expect(page).to have_issuable_counts(open: 0, merged: 1, all: 1)
+ expect(page).to have_content mr1.title
+ end
+ end
+end
diff --git a/spec/features/merge_requests/user_lists_merge_requests_spec.rb b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
index 4531ef40901..36d28ae2822 100644
--- a/spec/features/merge_requests/user_lists_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
@@ -8,6 +8,10 @@ RSpec.describe 'Merge requests > User lists merge requests' do
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
+ let(:user2) { create(:user) }
+ let(:user3) { create(:user) }
+ let(:user4) { create(:user) }
+ let(:user5) { create(:user) }
before do
@fix = create(:merge_request,
@@ -15,6 +19,7 @@ RSpec.describe 'Merge requests > User lists merge requests' do
source_project: project,
source_branch: 'fix',
assignees: [user],
+ reviewers: [user, user2, user3, user4, user5],
milestone: create(:milestone, project: project, due_date: '2013-12-11'),
created_at: 1.minute.ago,
updated_at: 1.minute.ago)
@@ -23,6 +28,7 @@ RSpec.describe 'Merge requests > User lists merge requests' do
source_project: project,
source_branch: 'markdown',
assignees: [user],
+ reviewers: [user, user2, user3, user4],
milestone: create(:milestone, project: project, due_date: '2013-12-12'),
created_at: 2.minutes.ago,
updated_at: 2.minutes.ago)
@@ -34,6 +40,37 @@ RSpec.describe 'Merge requests > User lists merge requests' do
updated_at: 10.seconds.ago)
end
+ context 'when merge_request_reviewers is turned on' do
+ before do
+ stub_feature_flags(merge_request_reviewers: true)
+ visit_merge_requests(project, reviewer_id: user.id)
+ end
+
+ it 'has reviewers in MR list' do
+ expect(page).to have_css('.issuable-reviewers')
+ end
+
+ it 'shows reviewers avatar count badge if more_reviewers_count > 4' do
+ first_issuable_reviewers = first('.issuable-reviewers')
+
+ expect(first_issuable_reviewers).to have_content('2')
+ expect(first_issuable_reviewers).to have_css('.avatar-counter')
+ end
+
+ it 'does not show reviewers avatar count badge if more_reviewers_count <= 4' do
+ expect(page.all('.issuable-reviewers')[1]).not_to have_css('.avatar-counter')
+ end
+ end
+
+ context 'when merge_request_reviewers is turned off' do
+ it 'has no reviewers in MR list' do
+ stub_feature_flags(merge_request_reviewers: false)
+ visit_merge_requests(project, reviewer_id: user.id)
+
+ expect(page).not_to have_css('.issuable-reviewers')
+ end
+ end
+
it 'filters on no assignee' do
visit_merge_requests(project, assignee_id: IssuableFinder::Params::FILTER_NONE)
diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb
index 4a7f14d5a1b..fefa2916c30 100644
--- a/spec/features/milestone_spec.rb
+++ b/spec/features/milestone_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Milestone' do
find('input[name="commit"]').click
- expect(find('.alert-success')).to have_content('Assign some issues to this milestone.')
+ expect(find('[data-testid="no-issues-alert"]')).to have_content('Assign some issues to this milestone.')
expect(page).to have_content('Nov 16, 2016–Dec 16, 2016')
end
end
@@ -37,7 +37,7 @@ RSpec.describe 'Milestone' do
create(:issue, title: "Bugfix1", project: project, milestone: milestone, state: "closed")
visit project_milestone_path(project, milestone)
- expect(find('.alert-success')).to have_content('All issues for this milestone are closed. You may close this milestone now.')
+ expect(find('[data-testid="all-issues-closed-alert"]')).to have_content('All issues for this milestone are closed. You may close this milestone now.')
end
end
diff --git a/spec/features/milestones/user_views_milestone_spec.rb b/spec/features/milestones/user_views_milestone_spec.rb
index 420f8d49483..9c19f842427 100644
--- a/spec/features/milestones/user_views_milestone_spec.rb
+++ b/spec/features/milestones/user_views_milestone_spec.rb
@@ -4,15 +4,27 @@ require 'spec_helper'
RSpec.describe "User views milestone" do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:milestone) { create(:milestone, project: project, description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
let_it_be(:labels) { create_list(:label, 2, project: project) }
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
sign_in(user)
end
+ context 'page description' do
+ before do
+ visit(project_milestone_path(project, milestone))
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+ end
+
it "avoids N+1 database queries" do
issue_params = { project: project, assignees: [user], author: user, milestone: milestone, labels: labels }.freeze
@@ -25,7 +37,7 @@ RSpec.describe "User views milestone" do
expect { visit_milestone }.not_to exceed_query_limit(control)
end
- context 'limiting milestone issues' do
+ context 'issues list', :js do
before_all do
2.times do
create(:issue, milestone: milestone, project: project)
@@ -34,6 +46,28 @@ RSpec.describe "User views milestone" do
end
end
+ context 'for a project milestone' do
+ it 'does not show the project name' do
+ visit(project_milestone_path(project, milestone))
+
+ wait_for_requests
+
+ expect(page.find('#tab-issues')).not_to have_text(project.name)
+ end
+ end
+
+ context 'for a group milestone' do
+ let(:group_milestone) { create(:milestone, group: group) }
+
+ it 'shows the project name' do
+ create(:issue, project: project, milestone: group_milestone)
+
+ visit(group_milestone_path(group, group_milestone))
+
+ expect(page.find('#tab-issues')).to have_text(project.name)
+ end
+ end
+
context 'when issues on milestone are over DISPLAY_ISSUES_LIMIT' do
it "limits issues to display and shows warning" do
stub_const('Milestoneish::DISPLAY_ISSUES_LIMIT', 3)
@@ -56,6 +90,40 @@ RSpec.describe "User views milestone" do
end
end
+ context 'merge requests list', :js do
+ context 'for a project milestone' do
+ it 'does not show the project name' do
+ create(:merge_request, source_project: project, milestone: milestone)
+
+ visit(project_milestone_path(project, milestone))
+
+ within('.js-milestone-tabs') do
+ click_link('Merge Requests')
+ end
+
+ wait_for_requests
+
+ expect(page.find('#tab-merge-requests')).not_to have_text(project.name)
+ end
+ end
+
+ context 'for a group milestone' do
+ let(:group_milestone) { create(:milestone, group: group) }
+
+ it 'shows the project name' do
+ create(:merge_request, source_project: project, milestone: group_milestone)
+
+ visit(group_milestone_path(group, group_milestone))
+
+ within('.js-milestone-tabs') do
+ click_link('Merge Requests')
+ end
+
+ expect(page.find('#tab-merge-requests')).to have_text(project.name)
+ end
+ end
+ end
+
private
def visit_milestone
diff --git a/spec/features/milestones/user_views_milestones_spec.rb b/spec/features/milestones/user_views_milestones_spec.rb
index 3f606577121..f8b4b802a60 100644
--- a/spec/features/milestones/user_views_milestones_spec.rb
+++ b/spec/features/milestones/user_views_milestones_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe "User views milestones" do
.and have_content("Merge Requests")
end
- context "with issues" do
+ context "with issues", :js do
let_it_be(:issue) { create(:issue, project: project, milestone: milestone) }
let_it_be(:closed_issue) { create(:closed_issue, project: project, milestone: milestone) }
@@ -33,7 +33,6 @@ RSpec.describe "User views milestones" do
.and have_selector("#tab-issues li.issuable-row", count: 2)
.and have_content(issue.title)
.and have_content(closed_issue.title)
- .and have_selector("#tab-merge-requests")
end
end
diff --git a/spec/features/operations_sidebar_link_spec.rb b/spec/features/operations_sidebar_link_spec.rb
new file mode 100644
index 00000000000..32e2833dafb
--- /dev/null
+++ b/spec/features/operations_sidebar_link_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Operations dropdown sidebar' do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_role(user, role)
+ sign_in(user)
+ visit project_issues_path(project)
+ end
+
+ context 'user has guest role' do
+ let(:role) { :guest }
+
+ it 'has the correct `Operations` menu items' do
+ expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
+
+ expect(page).not_to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).not_to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
+ expect(page).not_to have_link(title: 'Environments', href: project_environments_path(project))
+ expect(page).not_to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).not_to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
+ expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link(title: 'Logs', href: project_logs_path(project))
+ expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
+ end
+ end
+
+ context 'user has reporter role' do
+ let(:role) { :reporter }
+
+ it 'has the correct `Operations` menu items' do
+ expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
+ expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
+ expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
+
+ expect(page).not_to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
+ expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link(title: 'Logs', href: project_logs_path(project))
+ expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
+ end
+ end
+
+ context 'user has developer role' do
+ let(:role) { :developer }
+
+ it 'has the correct `Operations` menu items' do
+ expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
+ expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
+ expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
+ expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
+ expect(page).to have_link(title: 'Logs', href: project_logs_path(project))
+
+ expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
+ end
+ end
+
+ context 'user has maintainer role' do
+ let(:role) { :maintainer }
+
+ it 'has the correct `Operations` menu items' do
+ expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
+ expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
+ expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
+ expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
+ expect(page).to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
+ expect(page).to have_link(title: 'Logs', href: project_logs_path(project))
+ expect(page).to have_link(title: 'Kubernetes', href: project_clusters_path(project))
+ end
+ end
+end
diff --git a/spec/features/profiles/keys_spec.rb b/spec/features/profiles/keys_spec.rb
index b5e784a749f..23bbe9c1587 100644
--- a/spec/features/profiles/keys_spec.rb
+++ b/spec/features/profiles/keys_spec.rb
@@ -71,21 +71,35 @@ RSpec.describe 'Profile > SSH Keys' do
expect(page).to have_content(key.title)
end
- it 'User removes a key via the key index' do
- create(:key, user: user)
- visit profile_keys_path
+ describe 'User removes a key', :js do
+ shared_examples 'removes key' do
+ it 'removes key' do
+ visit path
+ click_button('Delete')
- click_link('Remove')
+ page.within('.modal') do
+ page.click_button('Delete')
+ end
- expect(page).to have_content('Your SSH keys (0)')
- end
+ expect(page).to have_content('Your SSH keys (0)')
+ end
+ end
- it 'User removes a key via its details page' do
- key = create(:key, user: user)
- visit profile_key_path(key)
+ context 'via the key index' do
+ before do
+ create(:key, user: user)
+ end
+
+ let(:path) { profile_keys_path }
- click_link('Remove')
+ it_behaves_like 'removes key'
+ end
- expect(page).to have_content('Your SSH keys (0)')
+ context 'via its details page' do
+ let(:key) { create(:key, user: user) }
+ let(:path) { profile_key_path(key) }
+
+ it_behaves_like 'removes key'
+ end
end
end
diff --git a/spec/features/projects/activity/user_sees_design_comment_spec.rb b/spec/features/projects/activity/user_sees_design_comment_spec.rb
index e60deba65f0..3a8e2790858 100644
--- a/spec/features/projects/activity/user_sees_design_comment_spec.rb
+++ b/spec/features/projects/activity/user_sees_design_comment_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Projects > Activity > User sees design comment', :js do
let_it_be(:design) { create(:design, issue: issue) }
let(:design_activity) do
- "#{commenter.name} #{commenter.to_reference} commented on design"
+ "#{commenter.name} #{commenter.to_reference} commented on design #{design.to_reference}"
end
let(:issue_activity) do
diff --git a/spec/features/projects/badges/list_spec.rb b/spec/features/projects/badges/list_spec.rb
index 3382bdcd65f..d1e635f11c0 100644
--- a/spec/features/projects/badges/list_spec.rb
+++ b/spec/features/projects/badges/list_spec.rb
@@ -17,10 +17,10 @@ RSpec.describe 'list of badges' do
expect(page).to have_content 'Markdown'
expect(page).to have_content 'HTML'
expect(page).to have_content 'AsciiDoc'
- expect(page).to have_css('.highlight', count: 3)
+ expect(page).to have_css('.js-syntax-highlight', count: 3)
expect(page).to have_xpath("//img[@alt='pipeline status']")
- page.within('.highlight', match: :first) do
+ page.within('.js-syntax-highlight', match: :first) do
expect(page).to have_content 'badges/master/pipeline.svg'
end
end
@@ -32,10 +32,10 @@ RSpec.describe 'list of badges' do
expect(page).to have_content 'Markdown'
expect(page).to have_content 'HTML'
expect(page).to have_content 'AsciiDoc'
- expect(page).to have_css('.highlight', count: 3)
+ expect(page).to have_css('.js-syntax-highlight', count: 3)
expect(page).to have_xpath("//img[@alt='coverage report']")
- page.within('.highlight', match: :first) do
+ page.within('.js-syntax-highlight', match: :first) do
expect(page).to have_content 'badges/master/coverage.svg'
end
end
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 5aca994f53e..c30c8dda852 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Editing file blob', :js do
include TreeHelper
+ include BlobSpecHelpers
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master') }
@@ -20,9 +21,18 @@ RSpec.describe 'Editing file blob', :js do
sign_in(user)
end
- def edit_and_commit(commit_changes: true)
+ def edit_and_commit(commit_changes: true, is_diff: false)
+ set_default_button('edit')
+ refresh
wait_for_requests
- find('.js-edit-blob').click
+
+ if is_diff
+ first('.js-diff-more-actions').click
+ click_link('Edit in single-file editor')
+ else
+ click_link('Edit')
+ end
+
fill_editor(content: 'class NextFeature\\nend\\n')
if commit_changes
@@ -38,7 +48,7 @@ RSpec.describe 'Editing file blob', :js do
context 'from MR diff' do
before do
visit diffs_project_merge_request_path(project, merge_request)
- edit_and_commit
+ edit_and_commit(is_diff: true)
end
it 'returns me to the mr' do
diff --git a/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb b/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
index a271a4f43a8..fda2992af8d 100644
--- a/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
+++ b/spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe 'User creates blob in new project', :js do
+RSpec.describe 'User creates new blob', :js do
+ include WebIdeSpecHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project, :empty_repo) }
@@ -12,16 +14,19 @@ RSpec.describe 'User creates blob in new project', :js do
visit project_path(project)
end
- it 'allows the user to add a new file' do
+ it 'allows the user to add a new file in Web IDE' do
click_link 'New file'
- execute_script("monaco.editor.getModels()[0].setValue('Hello world')")
+ wait_for_requests
+
+ ide_create_new_file('dummy-file', content: "Hello world\n")
- fill_in(:file_name, with: 'dummy-file')
+ ide_commit
- click_button('Commit changes')
+ click_button('Commit')
- expect(page).to have_content('The file has been successfully created')
+ expect(page).to have_content('All changes are committed')
+ expect(project.repository.blob_at('master', 'dummy-file').data).to eql("Hello world\n")
end
end
diff --git a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
index 8b43687c71c..023e00a3e02 100644
--- a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
+++ b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled
describe 'viewing the new blob page' do
before do
- stub_feature_flags(suggest_pipeline: true)
+ stub_experiment_for_user(suggest_pipeline: true)
sign_in(user)
end
diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb
index 21a1d31bad4..c480c41709c 100644
--- a/spec/features/projects/branches/user_deletes_branch_spec.rb
+++ b/spec/features/projects/branches/user_deletes_branch_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe "User deletes branch", :js do
fill_in("branch-search", with: "improve/awesome").native.send_keys(:enter)
page.within(".js-branch-improve\\/awesome") do
- accept_alert { find(".btn-remove").click }
+ accept_alert { find(".btn-danger").click }
end
wait_for_requests
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index 0e2444c5434..dcad7ee66a3 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -21,11 +21,11 @@ RSpec.describe 'Branches' do
before do
# Add 4 stale branches
(1..4).reverse_each do |i|
- Timecop.freeze((threshold + i).ago) { create_file(message: "a commit in stale-#{i}", branch_name: "stale-#{i}") }
+ travel_to((threshold + i).ago) { create_file(message: "a commit in stale-#{i}", branch_name: "stale-#{i}") }
end
# Add 6 active branches
(1..6).each do |i|
- Timecop.freeze((threshold - i).ago) { create_file(message: "a commit in active-#{i}", branch_name: "active-#{i}") }
+ travel_to((threshold - i).ago) { create_file(message: "a commit in active-#{i}", branch_name: "active-#{i}") }
end
end
@@ -101,7 +101,7 @@ RSpec.describe 'Branches' do
visit project_branches_filtered_path(project, state: 'all')
expect(all('.all-branches').last).to have_selector('li', count: 20)
- accept_confirm { first('.js-branch-item .btn-remove').click }
+ accept_confirm { first('.js-branch-item .btn-danger').click }
expect(all('.all-branches').last).to have_selector('li', count: 19)
end
@@ -163,7 +163,7 @@ RSpec.describe 'Branches' do
expect(page).to have_content('fix')
expect(find('.all-branches')).to have_selector('li', count: 1)
- accept_confirm { find('.js-branch-fix .btn-remove').click }
+ accept_confirm { find('.js-branch-fix .btn-danger').click }
expect(page).not_to have_content('fix')
expect(find('.all-branches')).to have_selector('li', count: 0)
diff --git a/spec/features/projects/ci/lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index ce435151b84..eb2efb4357d 100644
--- a/spec/features/projects/ci/lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -8,117 +8,88 @@ RSpec.describe 'CI Lint', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
- shared_examples 'correct ci linting process' do
- describe 'YAML parsing' do
- shared_examples 'validates the YAML' do
- before do
- stub_feature_flags(ci_lint_vue: false)
- click_on 'Validate'
- end
+ let(:content_selector) { '.content .view-lines' }
- context 'YAML is correct' do
- let(:yaml_content) do
- File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
- end
+ before do
+ stub_feature_flags(ci_lint_vue: false)
+ project.add_developer(user)
+ sign_in(user)
- it 'parses Yaml and displays the jobs' do
- expect(page).to have_content('Status: syntax is correct')
+ visit project_ci_lint_path(project)
+ editor_set_value(yaml_content)
- within "table" do
- aggregate_failures do
- expect(page).to have_content('Job - rspec')
- expect(page).to have_content('Job - spinach')
- expect(page).to have_content('Deploy Job - staging')
- expect(page).to have_content('Deploy Job - production')
- end
- end
- end
- end
+ wait_for('YAML content') do
+ find(content_selector).text.present?
+ end
+ end
- context 'YAML is incorrect' do
- let(:yaml_content) { 'value: cannot have :' }
+ describe 'YAML parsing' do
+ shared_examples 'validates the YAML' do
+ before do
+ stub_feature_flags(ci_lint_vue: false)
+ click_on 'Validate'
+ end
- it 'displays information about an error' do
- expect(page).to have_content('Status: syntax is incorrect')
- expect(page).to have_selector(content_selector, text: yaml_content)
- end
+ context 'YAML is correct' do
+ let(:yaml_content) do
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
end
- end
- it_behaves_like 'validates the YAML'
+ it 'parses Yaml and displays the jobs' do
+ expect(page).to have_content('Status: syntax is correct')
- context 'when Dry Run is checked' do
- before do
- check 'Simulate a pipeline created for the default branch'
+ within "table" do
+ aggregate_failures do
+ expect(page).to have_content('Job - rspec')
+ expect(page).to have_content('Job - spinach')
+ expect(page).to have_content('Deploy Job - staging')
+ expect(page).to have_content('Deploy Job - production')
+ end
+ end
end
-
- it_behaves_like 'validates the YAML'
end
- describe 'YAML revalidate' do
- let(:yaml_content) { 'my yaml content' }
+ context 'YAML is incorrect' do
+ let(:yaml_content) { 'value: cannot have :' }
- it 'loads previous YAML content after validation' do
- expect(page).to have_field('content', with: 'my yaml content', visible: false, type: 'textarea')
+ it 'displays information about an error' do
+ expect(page).to have_content('Status: syntax is incorrect')
+ expect(page).to have_selector(content_selector, text: yaml_content)
end
end
end
- describe 'YAML clearing' do
+ it_behaves_like 'validates the YAML'
+
+ context 'when Dry Run is checked' do
before do
- click_on 'Clear'
+ check 'Simulate a pipeline created for the default branch'
end
- context 'YAML is present' do
- let(:yaml_content) do
- File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
- end
-
- it 'YAML content is cleared' do
- expect(page).to have_field('content', with: '', visible: false, type: 'textarea')
- end
- end
+ it_behaves_like 'validates the YAML'
end
- end
- context 'with ACE editor' do
- it_behaves_like 'correct ci linting process' do
- let(:content_selector) { '.ace_content' }
+ describe 'YAML revalidate' do
+ let(:yaml_content) { 'my yaml content' }
- before do
- stub_feature_flags(monaco_ci: false)
- stub_feature_flags(ci_lint_vue: false)
- project.add_developer(user)
- sign_in(user)
-
- visit project_ci_lint_path(project)
- find('#ci-editor')
- execute_script("ace.edit('ci-editor').setValue(#{yaml_content.to_json});")
-
- # Ace editor updates a hidden textarea and it happens asynchronously
- wait_for('YAML content') do
- find(content_selector).text.present?
- end
+ it 'loads previous YAML content after validation' do
+ expect(page).to have_field('content', with: 'my yaml content', visible: false, type: 'textarea')
end
end
end
- context 'with Editor Lite' do
- it_behaves_like 'correct ci linting process' do
- let(:content_selector) { '.content .view-lines' }
-
- before do
- stub_feature_flags(monaco_ci: true)
- stub_feature_flags(ci_lint_vue: false)
- project.add_developer(user)
- sign_in(user)
+ describe 'YAML clearing' do
+ before do
+ click_on 'Clear'
+ end
- visit project_ci_lint_path(project)
- editor_set_value(yaml_content)
+ context 'YAML is present' do
+ let(:yaml_content) do
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ end
- wait_for('YAML content') do
- find(content_selector).text.present?
- end
+ it 'YAML content is cleared' do
+ expect(page).to have_field('content', with: '', visible: false, type: 'textarea')
end
end
end
diff --git a/spec/features/projects/clusters/eks_spec.rb b/spec/features/projects/clusters/eks_spec.rb
index c5feef6c6f3..9f3f331cfab 100644
--- a/spec/features/projects/clusters/eks_spec.rb
+++ b/spec/features/projects/clusters/eks_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'AWS EKS Cluster', :js do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
end
context 'when user creates a cluster on AWS EKS' do
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 04339d20d77..a0519d88532 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
click_link 'Create new cluster'
click_link 'Google GKE'
end
@@ -143,7 +143,7 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Connect existing cluster'
end
@@ -162,7 +162,7 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
it 'user sees creation form with the successful message' do
expect(page).to have_content('Kubernetes cluster integration was successfully removed.')
- expect(page).to have_link('Add Kubernetes cluster')
+ expect(page).to have_link('Integrate with a cluster certificate')
end
end
end
@@ -178,7 +178,7 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
end
it 'user sees offer on cluster create page' do
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
expect(page).to have_css('.gcp-signup-offer')
end
@@ -192,10 +192,10 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
it 'user does not see offer after dismissing' do
expect(page).to have_css('.gcp-signup-offer')
- find('.gcp-signup-offer .close').click
+ find('.gcp-signup-offer .js-close').click
wait_for_requests
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
expect(page).not_to have_css('.gcp-signup-offer')
end
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 9d0dc65093e..748eba558aa 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'User Cluster', :js do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
click_link 'Connect existing cluster'
end
@@ -52,6 +52,10 @@ RSpec.describe 'User Cluster', :js do
it 'user sees RBAC is enabled by default' do
expect(page).to have_checked_field('RBAC-enabled cluster')
end
+
+ it 'user sees namespace per environment is enabled by default' do
+ expect(page).to have_checked_field('Namespace per environment')
+ end
end
context 'when user filled form with invalid parameters' do
@@ -112,7 +116,7 @@ RSpec.describe 'User Cluster', :js do
it 'user sees creation form with the successful message' do
expect(page).to have_content('Kubernetes cluster integration was successfully removed.')
- expect(page).to have_link('Add Kubernetes cluster')
+ expect(page).to have_link('Integrate with a cluster certificate')
end
end
end
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index d674fbc457e..6c6e65005f6 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe 'Clusters', :js do
before do
project.add_maintainer(user)
gitlab_sign_in(user)
- stub_feature_flags(clusters_list_redesign: false)
end
context 'when user does not have a cluster and visits cluster index page' do
@@ -20,7 +19,7 @@ RSpec.describe 'Clusters', :js do
end
it 'sees empty state' do
- expect(page).to have_link('Add Kubernetes cluster')
+ expect(page).to have_link('Integrate with a cluster certificate')
expect(page).to have_selector('.empty-state')
end
end
@@ -42,7 +41,7 @@ RSpec.describe 'Clusters', :js do
context 'when user filled form with environment scope' do
before do
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Connect existing cluster'
fill_in 'cluster_name', with: 'staging-cluster'
fill_in 'cluster_environment_scope', with: 'staging/*'
@@ -71,7 +70,7 @@ RSpec.describe 'Clusters', :js do
context 'when user updates duplicated environment scope' do
before do
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Connect existing cluster'
fill_in 'cluster_name', with: 'staging-cluster'
fill_in 'cluster_environment_scope', with: '*'
@@ -117,7 +116,7 @@ RSpec.describe 'Clusters', :js do
context 'when user filled form with environment scope' do
before do
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Create new cluster'
click_link 'Google GKE'
@@ -162,7 +161,7 @@ RSpec.describe 'Clusters', :js do
context 'when user updates duplicated environment scope' do
before do
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Create new cluster'
click_link 'Google GKE'
@@ -196,8 +195,7 @@ RSpec.describe 'Clusters', :js do
end
it 'user sees a table with one cluster' do
- # One is the header row, the other the cluster row
- expect(page).to have_selector('.gl-responsive-table-row', count: 2)
+ expect(page).to have_selector('[data-testid="cluster_list_table"] tbody tr', count: 1)
end
context 'when user clicks on a cluster' do
@@ -216,7 +214,7 @@ RSpec.describe 'Clusters', :js do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
click_link 'Create new cluster'
end
diff --git a/spec/features/projects/commit/builds_spec.rb b/spec/features/projects/commit/builds_spec.rb
index f97abc5bd8b..00ec9d49a10 100644
--- a/spec/features/projects/commit/builds_spec.rb
+++ b/spec/features/projects/commit/builds_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'project commit pipelines', :js do
context 'when no builds triggered yet' do
it 'shows the ID of the first pipeline' do
- page.within('.table-holder') do
+ page.within('.pipelines .ci-table') do
expect(page).to have_content project.ci_pipelines[0].id # pipeline ids
end
end
diff --git a/spec/features/projects/commit/user_comments_on_commit_spec.rb b/spec/features/projects/commit/user_comments_on_commit_spec.rb
index 87a022d74a3..0fa4975bb25 100644
--- a/spec/features/projects/commit/user_comments_on_commit_spec.rb
+++ b/spec/features/projects/commit/user_comments_on_commit_spec.rb
@@ -6,19 +6,22 @@ RSpec.describe "User comments on commit", :js do
include Spec::Support::Helpers::Features::NotesHelpers
include RepoHelpers
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:comment_text) { "XML attached" }
- before do
- sign_in(user)
+ before_all do
project.add_developer(user)
+ end
- visit(project_commit_path(project, sample_commit.id))
+ before do
+ sign_in(user)
end
context "when adding new comment" do
it "adds comment" do
+ visit(project_commit_path(project, sample_commit.id))
+
emoji_code = ":+1:"
page.within(".js-main-target-form") do
@@ -57,6 +60,8 @@ RSpec.describe "User comments on commit", :js do
context "when editing comment" do
before do
+ visit(project_commit_path(project, sample_commit.id))
+
add_note(comment_text)
end
@@ -87,6 +92,8 @@ RSpec.describe "User comments on commit", :js do
context "when deleting comment" do
before do
+ visit(project_commit_path(project, sample_commit.id))
+
add_note(comment_text)
end
@@ -108,4 +115,35 @@ RSpec.describe "User comments on commit", :js do
expect(page).not_to have_css(".note")
end
end
+
+ context 'when checking task lists' do
+ let(:note_with_task) do
+ <<-EOT.strip_heredoc
+
+ - [ ] Task 1
+ EOT
+ end
+
+ before do
+ create(:note_on_commit, project: project, commit_id: sample_commit.id, note: note_with_task, author: user)
+ create(:note_on_commit, project: project, commit_id: sample_commit.id, note: note_with_task, author: user)
+
+ visit(project_commit_path(project, sample_commit.id))
+ end
+
+ it 'allows the tasks to be checked' do
+ expect(page).to have_selector('li.task-list-item', count: 2)
+ expect(page).to have_selector('li.task-list-item input[checked]', count: 0)
+
+ all('.task-list-item-checkbox').each do |checkbox|
+ checkbox.click
+ end
+ wait_for_requests
+
+ visit(project_commit_path(project, sample_commit.id))
+
+ expect(page).to have_selector('li.task-list-item', count: 2)
+ expect(page).to have_selector('li.task-list-item input[checked]', count: 2)
+ end
+ end
end
diff --git a/spec/features/projects/compare_spec.rb b/spec/features/projects/compare_spec.rb
index 865ae3ad8cb..e387ea4d473 100644
--- a/spec/features/projects/compare_spec.rb
+++ b/spec/features/projects/compare_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe "Compare", :js do
click_button('Compare')
- page.within('.alert') do
+ page.within('.gl-alert') do
expect(page).to have_text("Too many changes to show. To preserve performance only 3 of 3+ files are displayed.")
end
end
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index fa10e429af2..1d7be7fa7a3 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -333,7 +333,7 @@ RSpec.describe 'Environment' do
visit project_branches_filtered_path(project, state: 'all', search: 'feature')
remove_branch_with_hooks(project, user, 'feature') do
- page.within('.js-branch-feature') { find('a.btn-remove').click }
+ page.within('.js-branch-feature') { find('a.btn-danger').click }
end
visit_environment(environment)
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 7f2ef61bcbe..8c032660726 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -372,7 +372,7 @@ RSpec.describe 'Environments page', :js do
let(:role) { :developer }
it 'developer creates a new environment with a valid name' do
- within(".top-area") { click_link 'New environment' }
+ within(".environments-section") { click_link 'New environment' }
fill_in('Name', with: 'production')
click_on 'Save'
@@ -380,7 +380,7 @@ RSpec.describe 'Environments page', :js do
end
it 'developer creates a new environment with invalid name' do
- within(".top-area") { click_link 'New environment' }
+ within(".environments-section") { click_link 'New environment' }
fill_in('Name', with: 'name,with,commas')
click_on 'Save'
diff --git a/spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb b/spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb
new file mode 100644
index 00000000000..2a81c706525
--- /dev/null
+++ b/spec/features/projects/feature_flag_user_lists/user_deletes_feature_flag_user_list_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User deletes feature flag user list', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+
+ before do
+ project.add_developer(developer)
+ sign_in(developer)
+ end
+
+ context 'with a list' do
+ before do
+ create(:operations_feature_flag_user_list, project: project, name: 'My List')
+ end
+
+ it 'deletes the list' do
+ visit(project_feature_flags_path(project, scope: 'userLists'))
+
+ delete_user_list_button.click
+ delete_user_list_modal_confirmation_button.click
+
+ expect(page).to have_text('Lists 0')
+ end
+ end
+
+ context 'with a list that is in use' do
+ before do
+ list = create(:operations_feature_flag_user_list, project: project, name: 'My List')
+ feature_flag = create(:operations_feature_flag, :new_version_flag, project: project)
+ create(:operations_strategy, feature_flag: feature_flag, name: 'gitlabUserList', user_list: list)
+ end
+
+ it 'does not delete the list' do
+ visit(project_feature_flags_path(project, scope: 'userLists'))
+
+ delete_user_list_button.click
+ delete_user_list_modal_confirmation_button.click
+
+ expect(page).to have_text('User list is associated with a strategy')
+ expect(page).to have_text('Lists 1')
+ expect(page).to have_text('My List')
+
+ alert_dismiss_button.click
+
+ expect(page).not_to have_text('User list is associated with a strategy')
+ end
+ end
+
+ def delete_user_list_button
+ find("button[data-testid='delete-user-list']")
+ end
+
+ def delete_user_list_modal_confirmation_button
+ find("button[data-testid='modal-confirm']")
+ end
+
+ def alert_dismiss_button
+ find("div[data-testid='serverErrors'] button")
+ end
+end
diff --git a/spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb b/spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb
new file mode 100644
index 00000000000..b37c2780827
--- /dev/null
+++ b/spec/features/projects/feature_flag_user_lists/user_edits_feature_flag_user_list_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User edits feature flag user list', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+
+ before do
+ project.add_developer(developer)
+ sign_in(developer)
+ end
+
+ it 'prefills the edit form with the list name' do
+ list = create(:operations_feature_flag_user_list, project: project, name: 'My List Name')
+
+ visit(edit_project_feature_flags_user_list_path(project, list))
+
+ expect(page).to have_field 'Name', with: 'My List Name'
+ end
+end
diff --git a/spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb b/spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb
new file mode 100644
index 00000000000..dfebe6408bd
--- /dev/null
+++ b/spec/features/projects/feature_flag_user_lists/user_sees_feature_flag_user_list_details_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User sees feature flag user list details', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+
+ before do
+ project.add_developer(developer)
+ sign_in(developer)
+ end
+
+ it 'displays the list name' do
+ list = create(:operations_feature_flag_user_list, project: project, name: 'My List')
+
+ visit(project_feature_flags_user_list_path(project, list))
+
+ expect(page).to have_text('My List')
+ end
+end
diff --git a/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb b/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
new file mode 100644
index 00000000000..830dda737b0
--- /dev/null
+++ b/spec/features/projects/feature_flags/user_creates_feature_flag_spec.rb
@@ -0,0 +1,200 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User creates feature flag', :js do
+ include FeatureFlagHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+
+ before do
+ project.add_developer(user)
+ stub_feature_flags(feature_flag_permissions: false)
+ sign_in(user)
+ end
+
+ it 'user creates a flag enabled for user ids' do
+ visit(new_project_feature_flag_path(project))
+ set_feature_flag_info('test_feature', 'Test feature')
+ within_strategy_row(1) do
+ select 'User IDs', from: 'Type'
+ fill_in 'User IDs', with: 'user1, user2'
+ environment_plus_button.click
+ environment_search_input.set('production')
+ environment_search_results.first.click
+ end
+ click_button 'Create feature flag'
+
+ expect_user_to_see_feature_flags_index_page
+ expect(page).to have_text('test_feature')
+ end
+
+ it 'user creates a flag with default environment scopes' do
+ visit(new_project_feature_flag_path(project))
+ set_feature_flag_info('test_flag', 'Test flag')
+ within_strategy_row(1) do
+ select 'All users', from: 'Type'
+ end
+ click_button 'Create feature flag'
+
+ expect_user_to_see_feature_flags_index_page
+ expect(page).to have_text('test_flag')
+
+ edit_feature_flag_button.click
+
+ within_strategy_row(1) do
+ expect(page).to have_text('All users')
+ expect(page).to have_text('All environments')
+ end
+ end
+
+ it 'removes the correct strategy when a strategy is deleted' do
+ visit(new_project_feature_flag_path(project))
+ click_button 'Add strategy'
+ within_strategy_row(1) do
+ select 'All users', from: 'Type'
+ end
+ within_strategy_row(2) do
+ select 'Percent of users', from: 'Type'
+ end
+ within_strategy_row(1) do
+ delete_strategy_button.click
+ end
+
+ within_strategy_row(1) do
+ expect(page).to have_select('Type', selected: 'Percent of users')
+ end
+ end
+
+ context 'with new version flags disabled' do
+ before do
+ stub_feature_flags(feature_flags_new_version: false)
+ end
+
+ context 'when creates without changing scopes' do
+ before do
+ visit(new_project_feature_flag_path(project))
+ set_feature_flag_info('ci_live_trace', 'For live trace')
+ click_button 'Create feature flag'
+ expect(page).to have_current_path(project_feature_flags_path(project))
+ end
+
+ it 'shows the created feature flag' do
+ within_feature_flag_row(1) do
+ expect(page.find('.feature-flag-name')).to have_content('ci_live_trace')
+ expect_status_toggle_button_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-qa-selector="feature-flag-scope-info-badge"]:nth-child(1)')).to have_content('*')
+ end
+ end
+ end
+ end
+
+ context 'when creates with disabling the default scope' do
+ before do
+ visit(new_project_feature_flag_path(project))
+ set_feature_flag_info('ci_live_trace', 'For live trace')
+
+ within_scope_row(1) do
+ within_status { find('.project-feature-toggle').click }
+ end
+
+ click_button 'Create feature flag'
+ end
+
+ it 'shows the created feature flag' do
+ within_feature_flag_row(1) do
+ expect(page.find('.feature-flag-name')).to have_content('ci_live_trace')
+ expect_status_toggle_button_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-qa-selector="feature-flag-scope-muted-badge"]:nth-child(1)')).to have_content('*')
+ end
+ end
+ end
+ end
+
+ context 'when creates with an additional scope' do
+ before do
+ visit(new_project_feature_flag_path(project))
+ set_feature_flag_info('mr_train', '')
+
+ within_scope_row(2) do
+ within_environment_spec do
+ find('.js-env-search > input').set("review/*")
+ find('.js-create-button').click
+ end
+ end
+
+ within_scope_row(2) do
+ within_status { find('.project-feature-toggle').click }
+ end
+
+ click_button 'Create feature flag'
+ end
+
+ it 'shows the created feature flag' do
+ within_feature_flag_row(1) do
+ expect(page.find('.feature-flag-name')).to have_content('mr_train')
+ expect_status_toggle_button_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-qa-selector="feature-flag-scope-info-badge"]:nth-child(1)')).to have_content('*')
+ expect(page.find('[data-qa-selector="feature-flag-scope-info-badge"]:nth-child(2)')).to have_content('review/*')
+ end
+ end
+ end
+ end
+
+ context 'when searches an environment name for scope creation' do
+ let!(:environment) { create(:environment, name: 'production', project: project) }
+
+ before do
+ visit(new_project_feature_flag_path(project))
+ set_feature_flag_info('mr_train', '')
+
+ within_scope_row(2) do
+ within_environment_spec do
+ find('.js-env-search > input').set('prod')
+ click_button 'production'
+ end
+ end
+
+ click_button 'Create feature flag'
+ end
+
+ it 'shows the created feature flag' do
+ within_feature_flag_row(1) do
+ expect(page.find('.feature-flag-name')).to have_content('mr_train')
+ expect_status_toggle_button_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-qa-selector="feature-flag-scope-info-badge"]:nth-child(1)')).to have_content('*')
+ expect(page.find('[data-qa-selector="feature-flag-scope-muted-badge"]:nth-child(2)')).to have_content('production')
+ end
+ end
+ end
+ end
+ end
+
+ private
+
+ def set_feature_flag_info(name, description)
+ fill_in 'Name', with: name
+ fill_in 'Description', with: description
+ end
+
+ def environment_plus_button
+ find('.js-new-environments-dropdown')
+ end
+
+ def environment_search_input
+ find('.js-new-environments-dropdown input')
+ end
+
+ def environment_search_results
+ all('.js-new-environments-dropdown button.dropdown-item')
+ end
+end
diff --git a/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb b/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb
new file mode 100644
index 00000000000..581709aacee
--- /dev/null
+++ b/spec/features/projects/feature_flags/user_deletes_feature_flag_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User deletes feature flag', :js do
+ include FeatureFlagHelpers
+
+ let(:user) { create(:user) }
+ let(:project) { create(:project, namespace: user.namespace) }
+
+ let!(:feature_flag) do
+ create_flag(project, 'ci_live_trace', false,
+ description: 'For live trace feature')
+ end
+
+ before do
+ project.add_developer(user)
+ stub_feature_flags(feature_flag_permissions: false)
+ sign_in(user)
+
+ visit(project_feature_flags_path(project))
+
+ find('.js-feature-flag-delete-button').click
+ click_button('Delete feature flag')
+ expect(page).to have_current_path(project_feature_flags_path(project))
+ end
+
+ it 'user does not see feature flag' do
+ expect(page).to have_no_content('ci_live_trace')
+ end
+end
diff --git a/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb b/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
new file mode 100644
index 00000000000..750f4dc5ef4
--- /dev/null
+++ b/spec/features/projects/feature_flags/user_sees_feature_flag_list_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User sees feature flag list', :js do
+ include FeatureFlagHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
+ before_all do
+ project.add_developer(user)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ context 'with legacy feature flags' do
+ before do
+ create_flag(project, 'ci_live_trace', false).tap do |feature_flag|
+ create_scope(feature_flag, 'review/*', true)
+ end
+ create_flag(project, 'drop_legacy_artifacts', false)
+ create_flag(project, 'mr_train', true).tap do |feature_flag|
+ create_scope(feature_flag, 'production', false)
+ end
+ stub_feature_flags(feature_flags_legacy_read_only_override: false)
+ end
+
+ it 'user sees the first flag' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(1) do
+ expect(page.find('.js-feature-flag-id')).to have_content('^1')
+ expect(page.find('.feature-flag-name')).to have_content('ci_live_trace')
+ expect_status_toggle_button_not_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-qa-selector="feature-flag-scope-muted-badge"]:nth-child(1)')).to have_content('*')
+ expect(page.find('[data-qa-selector="feature-flag-scope-info-badge"]:nth-child(2)')).to have_content('review/*')
+ end
+ end
+ end
+
+ it 'user sees the second flag' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(2) do
+ expect(page.find('.js-feature-flag-id')).to have_content('^2')
+ expect(page.find('.feature-flag-name')).to have_content('drop_legacy_artifacts')
+ expect_status_toggle_button_not_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-qa-selector="feature-flag-scope-muted-badge"]:nth-child(1)')).to have_content('*')
+ end
+ end
+ end
+
+ it 'user sees the third flag' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(3) do
+ expect(page.find('.js-feature-flag-id')).to have_content('^3')
+ expect(page.find('.feature-flag-name')).to have_content('mr_train')
+ expect_status_toggle_button_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('[data-qa-selector="feature-flag-scope-info-badge"]:nth-child(1)')).to have_content('*')
+ expect(page.find('[data-qa-selector="feature-flag-scope-muted-badge"]:nth-child(2)')).to have_content('production')
+ end
+ end
+ end
+
+ it 'user sees the status toggle disabled' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(1) do
+ expect_status_toggle_button_to_be_disabled
+ end
+ end
+
+ context 'when legacy feature flags are not read-only' do
+ before do
+ stub_feature_flags(feature_flags_legacy_read_only: false)
+ end
+
+ it 'user updates the status toggle' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(1) do
+ status_toggle_button.click
+
+ expect_status_toggle_button_to_be_checked
+ end
+ end
+ end
+
+ context 'when legacy feature flags are read-only but the override is active for a project' do
+ before do
+ stub_feature_flags(
+ feature_flags_legacy_read_only: true,
+ feature_flags_legacy_read_only_override: project
+ )
+ end
+
+ it 'user updates the status toggle' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(1) do
+ status_toggle_button.click
+
+ expect_status_toggle_button_to_be_checked
+ end
+ end
+ end
+ end
+
+ context 'with new version flags' do
+ before do
+ create(:operations_feature_flag, :new_version_flag, project: project,
+ name: 'my_flag', active: false)
+ end
+
+ it 'user updates the status toggle' do
+ visit(project_feature_flags_path(project))
+
+ within_feature_flag_row(1) do
+ status_toggle_button.click
+
+ expect_status_toggle_button_to_be_checked
+ end
+ end
+ end
+
+ context 'when there are no feature flags' do
+ before do
+ visit(project_feature_flags_path(project))
+ end
+
+ it 'shows empty page' do
+ expect(page).to have_text 'Get started with feature flags'
+ expect(page).to have_selector('.btn-success', text: 'New feature flag')
+ expect(page).to have_selector('[data-qa-selector="configure_feature_flags_button"]', text: 'Configure')
+ end
+ end
+end
diff --git a/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb b/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
new file mode 100644
index 00000000000..bc2d63e1953
--- /dev/null
+++ b/spec/features/projects/feature_flags/user_updates_feature_flag_spec.rb
@@ -0,0 +1,195 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User updates feature flag', :js do
+ include FeatureFlagHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
+ before_all do
+ project.add_developer(user)
+ end
+
+ before do
+ stub_feature_flags(
+ feature_flag_permissions: false,
+ feature_flags_legacy_read_only_override: false
+ )
+ sign_in(user)
+ end
+
+ context 'with a new version feature flag' do
+ let!(:feature_flag) do
+ create_flag(project, 'test_flag', false, version: Operations::FeatureFlag.versions['new_version_flag'],
+ description: 'For testing')
+ end
+
+ let!(:strategy) do
+ create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ end
+
+ let!(:scope) do
+ create(:operations_scope, strategy: strategy, environment_scope: '*')
+ end
+
+ it 'user adds a second strategy' do
+ visit(edit_project_feature_flag_path(project, feature_flag))
+
+ wait_for_requests
+
+ click_button 'Add strategy'
+ within_strategy_row(2) do
+ select 'Percent of users', from: 'Type'
+ fill_in 'Percentage', with: '15'
+ end
+ click_button 'Save changes'
+
+ edit_feature_flag_button.click
+
+ within_strategy_row(1) do
+ expect(page).to have_text 'All users'
+ expect(page).to have_text 'All environments'
+ end
+ within_strategy_row(2) do
+ expect(page).to have_text 'Percent of users'
+ expect(page).to have_field 'Percentage', with: '15'
+ expect(page).to have_text 'All environments'
+ end
+ end
+
+ it 'user toggles the flag on' do
+ visit(edit_project_feature_flag_path(project, feature_flag))
+ status_toggle_button.click
+ click_button 'Save changes'
+
+ within_feature_flag_row(1) do
+ expect_status_toggle_button_to_be_checked
+ end
+ end
+ end
+
+ context 'with a legacy feature flag' do
+ let!(:feature_flag) do
+ create_flag(project, 'ci_live_trace', true,
+ description: 'For live trace feature')
+ end
+
+ let!(:scope) { create_scope(feature_flag, 'review/*', true) }
+
+ context 'when legacy flags are editable' do
+ before do
+ stub_feature_flags(feature_flags_legacy_read_only: false)
+
+ visit(edit_project_feature_flag_path(project, feature_flag))
+ end
+
+ it 'user sees persisted default scope' do
+ within_scope_row(1) do
+ within_environment_spec do
+ expect(page).to have_content('* (All Environments)')
+ end
+
+ within_status do
+ expect(find('.project-feature-toggle')['aria-label'])
+ .to eq('Toggle Status: ON')
+ end
+ end
+ end
+
+ context 'when user updates the status of a scope' do
+ before do
+ within_scope_row(2) do
+ within_status { find('.project-feature-toggle').click }
+ end
+
+ click_button 'Save changes'
+ expect(page).to have_current_path(project_feature_flags_path(project))
+ end
+
+ it 'shows the updated feature flag' do
+ within_feature_flag_row(1) do
+ expect(page.find('.feature-flag-name')).to have_content('ci_live_trace')
+ expect_status_toggle_button_to_be_checked
+
+ within_feature_flag_scopes do
+ expect(page.find('.badge:nth-child(1)')).to have_content('*')
+ expect(page.find('.badge:nth-child(1)')['class']).to include('badge-info')
+ expect(page.find('.badge:nth-child(2)')).to have_content('review/*')
+ expect(page.find('.badge:nth-child(2)')['class']).to include('badge-muted')
+ end
+ end
+ end
+ end
+
+ context 'when user adds a new scope' do
+ before do
+ within_scope_row(3) do
+ within_environment_spec do
+ find('.js-env-search > input').set('production')
+ find('.js-create-button').click
+ end
+ end
+
+ click_button 'Save changes'
+ expect(page).to have_current_path(project_feature_flags_path(project))
+ end
+
+ it 'shows the newly created scope' do
+ within_feature_flag_row(1) do
+ within_feature_flag_scopes do
+ expect(page.find('.badge:nth-child(3)')).to have_content('production')
+ expect(page.find('.badge:nth-child(3)')['class']).to include('badge-muted')
+ end
+ end
+ end
+ end
+
+ context 'when user deletes a scope' do
+ before do
+ within_scope_row(2) do
+ within_delete { find('.js-delete-scope').click }
+ end
+
+ click_button 'Save changes'
+ expect(page).to have_current_path(project_feature_flags_path(project))
+ end
+
+ it 'shows the updated feature flag' do
+ within_feature_flag_row(1) do
+ within_feature_flag_scopes do
+ expect(page).to have_css('.badge:nth-child(1)')
+ expect(page).not_to have_css('.badge:nth-child(2)')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when legacy flags are read-only' do
+ it 'the user cannot edit the flag' do
+ visit(edit_project_feature_flag_path(project, feature_flag))
+
+ expect(page).to have_text 'This feature flag is read-only, and it will be removed in 14.0.'
+ expect(page).to have_css('button.js-ff-submit.disabled')
+ end
+ end
+
+ context 'when legacy flags are read-only, but the override is active for one project' do
+ it 'the user can edit the flag' do
+ stub_feature_flags(feature_flags_legacy_read_only_override: project)
+
+ visit(edit_project_feature_flag_path(project, feature_flag))
+ status_toggle_button.click
+ click_button 'Save changes'
+
+ expect(page).to have_current_path(project_feature_flags_path(project))
+ within_feature_flag_row(1) do
+ expect_status_toggle_button_not_to_be_checked
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index 8d3ca9d9fd1..467adb25a17 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -201,7 +201,7 @@ RSpec.describe 'Edit Project Settings' do
visit project_path(project)
- expect(page).to have_content "Customize your workflow!"
+ expect(page).to have_content "joined project"
end
it "hides project activity tabs" do
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index eed1e7aaf1b..d28e31c08dc 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > Project owner sees a link to create a license file in empty project', :js do
+ include WebIdeSpecHelpers
+
let(:project) { create(:project_empty_repo) }
let(:project_maintainer) { project.owner }
@@ -10,36 +12,35 @@ RSpec.describe 'Projects > Files > Project owner sees a link to create a license
sign_in(project_maintainer)
end
- it 'project maintainer creates a license file from a template' do
+ it 'allows project maintainer to create a license file from a template in Web IDE' do
visit project_path(project)
click_on 'Add LICENSE'
- expect(page).to have_content('New file')
- expect(current_path).to eq(
- project_new_blob_path(project, 'master'))
- expect(find('#file_name').value).to eq('LICENSE')
- expect(page).to have_selector('.license-selector')
+ expect(current_path).to eq("/-/ide/project/#{project.full_path}/edit/master/-/LICENSE")
+
+ expect(page).to have_selector('.qa-file-templates-bar')
select_template('MIT License')
- file_content = first('.file-editor')
- expect(file_content).to have_content('MIT License')
- expect(file_content).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+ expect(ide_editor_value).to have_content('MIT License')
+ expect(ide_editor_value).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+
+ ide_commit
+
+ click_button('Commit')
+
+ expect(current_path).to eq("/-/ide/project/#{project.full_path}/tree/master/-/")
- fill_in :commit_message, with: 'Add a LICENSE file', visible: true
- click_button 'Commit changes'
+ expect(page).to have_content('All changes are committed')
- expect(current_path).to eq(
- project_blob_path(project, 'master/LICENSE'))
- expect(page).to have_content('MIT License')
- expect(page).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
+ license_file = project.repository.blob_at('master', 'LICENSE').data
+ expect(license_file).to have_content('MIT License')
+ expect(license_file).to have_content("Copyright (c) #{Time.now.year} #{project.namespace.human_name}")
end
def select_template(template)
- page.within('.js-license-selector-wrap') do
- click_button 'Apply a template'
- click_link template
- wait_for_requests
- end
+ click_button 'Choose a template...'
+ click_button template
+ wait_for_requests
end
end
diff --git a/spec/features/projects/files/user_browses_lfs_files_spec.rb b/spec/features/projects/files/user_browses_lfs_files_spec.rb
index ecc56b794b2..3be5ab64834 100644
--- a/spec/features/projects/files/user_browses_lfs_files_spec.rb
+++ b/spec/features/projects/files/user_browses_lfs_files_spec.rb
@@ -66,10 +66,30 @@ RSpec.describe 'Projects > Files > User browses LFS files' do
expect(page).to have_content('History')
expect(page).to have_content('Permalink')
expect(page).to have_content('Replace')
+ expect(page).to have_link('Download')
+
expect(page).not_to have_content('Annotate')
expect(page).not_to have_content('Blame')
- expect(page).not_to have_content('Edit')
- expect(page).to have_link('Download')
+
+ expect(page).not_to have_selector(:link_or_button, text: /^Edit$/)
+ expect(page).to have_selector(:link_or_button, 'Edit in Web IDE')
+ end
+ end
+
+ context 'when feature flag :consolidated_edit_button is off' do
+ before do
+ stub_feature_flags(consolidated_edit_button: false)
+
+ click_link('files')
+ click_link('lfs')
+ click_link('lfs_object.iso')
+ end
+
+ it 'does not show single file edit link' do
+ page.within('.content') do
+ expect(page).to have_selector(:link_or_button, 'Web IDE')
+ expect(page).not_to have_selector(:link_or_button, 'Edit')
+ end
end
end
end
diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb
index 39bc139656b..fd83547d064 100644
--- a/spec/features/projects/files/user_creates_files_spec.rb
+++ b/spec/features/projects/files/user_creates_files_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User creates files', :js do
+ include BlobSpecHelpers
+
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
@@ -103,6 +105,8 @@ RSpec.describe 'Projects > Files > User creates files', :js do
end
it 'creates and commit a new file with new lines at the end of file' do
+ set_default_button('edit')
+
find('#editor')
execute_script('monaco.editor.getModels()[0].setValue("Sample\n\n\n")')
fill_in(:file_name, with: 'not_a_file.md')
@@ -113,7 +117,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
expect(current_path).to eq(new_file_path)
- find('.js-edit-blob').click
+ click_link('Edit')
find('#editor')
expect(evaluate_script('monaco.editor.getModels()[0].getValue()')).to eq("Sample\n\n\n")
diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb
index d3e075001c8..c18ff9ddbbc 100644
--- a/spec/features/projects/files/user_edits_files_spec.rb
+++ b/spec/features/projects/files/user_edits_files_spec.rb
@@ -4,6 +4,8 @@ require 'spec_helper'
RSpec.describe 'Projects > Files > User edits files', :js do
include ProjectForksHelper
+ include BlobSpecHelpers
+
let(:project) { create(:project, :repository, name: 'Shop') }
let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
@@ -14,6 +16,10 @@ RSpec.describe 'Projects > Files > User edits files', :js do
sign_in(user)
end
+ after do
+ unset_default_button
+ end
+
shared_examples 'unavailable for an archived project' do
it 'does not show the edit link for an archived project', :js do
project.update!(archived: true)
@@ -39,14 +45,15 @@ RSpec.describe 'Projects > Files > User edits files', :js do
end
it 'inserts the content of a file' do
+ set_default_button('edit')
click_link('.gitignore')
- find('.js-edit-blob').click
+ click_link_or_button('Edit')
find('.file-editor', match: :first)
find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ set_editor_value('*.rbca')
- expect(evaluate_script('monaco.editor.getModels()[0].getValue()')).to eq('*.rbca')
+ expect(editor_value).to eq('*.rbca')
end
it 'does not show the edit link if a file is binary' do
@@ -60,12 +67,13 @@ RSpec.describe 'Projects > Files > User edits files', :js do
end
it 'commits an edited file' do
+ set_default_button('edit')
click_link('.gitignore')
- find('.js-edit-blob').click
+ click_link_or_button('Edit')
find('.file-editor', match: :first)
find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ set_editor_value('*.rbca')
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -77,13 +85,14 @@ RSpec.describe 'Projects > Files > User edits files', :js do
end
it 'commits an edited file to a new branch' do
+ set_default_button('edit')
click_link('.gitignore')
- find('.js-edit-blob').click
+ click_link_or_button('Edit')
find('.file-editor', match: :first)
find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ set_editor_value('*.rbca')
fill_in(:commit_message, with: 'New commit message', visible: true)
fill_in(:branch_name, with: 'new_branch_name', visible: true)
click_button('Commit changes')
@@ -96,12 +105,13 @@ RSpec.describe 'Projects > Files > User edits files', :js do
end
it 'shows the diff of an edited file' do
+ set_default_button('edit')
click_link('.gitignore')
- find('.js-edit-blob').click
+ click_link_or_button('Edit')
find('.file-editor', match: :first)
find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ set_editor_value('*.rbca')
click_link('Preview changes')
expect(page).to have_css('.line_holder.new')
@@ -118,8 +128,8 @@ RSpec.describe 'Projects > Files > User edits files', :js do
end
def expect_fork_prompt
- expect(page).to have_link('Fork')
- expect(page).to have_button('Cancel')
+ expect(page).to have_selector(:link_or_button, 'Fork')
+ expect(page).to have_selector(:link_or_button, 'Cancel')
expect(page).to have_content(
"You're not allowed to edit files in this project directly. "\
"Please fork this project, make your changes there, and submit a merge request."
@@ -134,30 +144,32 @@ RSpec.describe 'Projects > Files > User edits files', :js do
end
it 'inserts the content of a file in a forked project', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
click_link('.gitignore')
- click_button('Edit')
+ click_link_or_button('Edit')
expect_fork_prompt
- click_link('Fork')
+ click_link_or_button('Fork project')
expect_fork_status
find('.file-editor', match: :first)
find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ set_editor_value('*.rbca')
- expect(evaluate_script('monaco.editor.getModels()[0].getValue()')).to eq('*.rbca')
+ expect(editor_value).to eq('*.rbca')
end
it 'opens the Web IDE in a forked project', :sidekiq_might_not_need_inline do
+ set_default_button('webide')
click_link('.gitignore')
- click_button('Web IDE')
+ click_link_or_button('Web IDE')
expect_fork_prompt
- click_link('Fork')
+ click_link_or_button('Fork project')
expect_fork_status
@@ -166,17 +178,17 @@ RSpec.describe 'Projects > Files > User edits files', :js do
end
it 'commits an edited file in a forked project', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
click_link('.gitignore')
- find('.js-edit-blob').click
+ click_link_or_button('Edit')
expect_fork_prompt
-
- click_link('Fork')
+ click_link_or_button('Fork project')
find('.file-editor', match: :first)
find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ set_editor_value('*.rbca')
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -198,14 +210,14 @@ RSpec.describe 'Projects > Files > User edits files', :js do
end
it 'links to the forked project for editing', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
click_link('.gitignore')
- find('.js-edit-blob').click
+ click_link_or_button('Edit')
- expect(page).not_to have_link('Fork')
- expect(page).not_to have_button('Cancel')
+ expect(page).not_to have_link('Fork project')
find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ set_editor_value('*.rbca')
fill_in(:commit_message, with: 'Another commit', visible: true)
click_button('Commit changes')
@@ -224,5 +236,116 @@ RSpec.describe 'Projects > Files > User edits files', :js do
let(:project) { project2 }
end
end
+
+ context 'when feature flag :consolidated_edit_button is off' do
+ before do
+ stub_feature_flags(consolidated_edit_button: false)
+ end
+
+ context 'when a user does not have write access', :js do
+ before do
+ project2.add_reporter(user)
+ visit(project2_tree_path_root_ref)
+ wait_for_requests
+ end
+
+ it 'inserts the content of a file in a forked project', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+
+ expect_fork_prompt
+
+ click_link_or_button('Fork')
+
+ expect_fork_status
+
+ find('.file-editor', match: :first)
+
+ find('#editor')
+ set_editor_value('*.rbca')
+
+ expect(editor_value).to eq('*.rbca')
+ end
+
+ it 'opens the Web IDE in a forked project', :sidekiq_might_not_need_inline do
+ set_default_button('webide')
+ click_link('.gitignore')
+ click_link_or_button('Web IDE')
+
+ expect_fork_prompt
+
+ click_link_or_button('Fork')
+
+ expect_fork_status
+
+ expect(page).to have_css('.ide-sidebar-project-title', text: "#{project2.name} #{user.namespace.full_path}/#{project2.path}")
+ expect(page).to have_css('.ide .multi-file-tab', text: '.gitignore')
+ end
+
+ it 'commits an edited file in a forked project', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+
+ expect_fork_prompt
+
+ click_link_or_button('Fork')
+
+ expect_fork_status
+
+ find('.file-editor', match: :first)
+
+ find('#editor')
+ set_editor_value('*.rbca')
+ fill_in(:commit_message, with: 'New commit message', visible: true)
+ click_button('Commit changes')
+
+ fork = user.fork_of(project2.reload)
+
+ expect(current_path).to eq(project_new_merge_request_path(fork))
+
+ wait_for_requests
+
+ expect(page).to have_content('New commit message')
+ end
+
+ context 'when the user already had a fork of the project', :js do
+ let!(:forked_project) { fork_project(project2, user, namespace: user.namespace, repository: true) }
+
+ before do
+ visit(project2_tree_path_root_ref)
+ wait_for_requests
+ end
+
+ it 'links to the forked project for editing', :sidekiq_might_not_need_inline do
+ set_default_button('edit')
+ click_link('.gitignore')
+ click_link_or_button('Edit')
+
+ expect(page).not_to have_link('Fork')
+
+ find('#editor')
+ set_editor_value('*.rbca')
+ fill_in(:commit_message, with: 'Another commit', visible: true)
+ click_button('Commit changes')
+
+ fork = user.fork_of(project2)
+
+ expect(current_path).to eq(project_new_merge_request_path(fork))
+
+ wait_for_requests
+
+ expect(page).to have_content('Another commit')
+ expect(page).to have_content("From #{forked_project.full_path}")
+ expect(page).to have_content("into #{project2.full_path}")
+ end
+
+ it_behaves_like 'unavailable for an archived project' do
+ let(:project) { project2 }
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb b/spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb
index 8d5e99d7e2b..78fb470d4ea 100644
--- a/spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb
+++ b/spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb
@@ -90,34 +90,5 @@ RSpec.describe 'viewing issues with design references' do
expect(page).not_to have_link(design_ref_b)
end
end
-
- context 'design management is enabled, but the filter is disabled globally' do
- before do
- enable_design_management
- stub_feature_flags(
- Banzai::Filter::DesignReferenceFilter::FEATURE_FLAG => false
- )
- end
-
- it 'processes design tab links successfully, and design references as issue references', :aggregate_failures do
- visit_page_with_design_references
-
- expect(page).to have_text('The designs I mentioned')
- expect(page).to have_link(design_tab_ref)
- expect(page).to have_link(issue_ref)
- expect(page).not_to have_link(design_ref_a)
- expect(page).not_to have_link(design_ref_b)
- end
- end
-
- context 'design management is enabled, and the filter is enabled for the current project' do
- before do
- stub_feature_flags(
- Banzai::Filter::DesignReferenceFilter::FEATURE_FLAG => public_project
- )
- end
-
- it_behaves_like 'successful use of design link references'
- end
end
end
diff --git a/spec/features/projects/issues/viewing_relocated_issues_spec.rb b/spec/features/projects/issues/viewing_relocated_issues_spec.rb
new file mode 100644
index 00000000000..10d5ad1747c
--- /dev/null
+++ b/spec/features/projects/issues/viewing_relocated_issues_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'issues canonical link' do
+ include Spec::Support::Helpers::Features::CanonicalLinkHelpers
+
+ let_it_be(:original_project) { create(:project, :public) }
+ let_it_be(:original_issue) { create(:issue, project: original_project) }
+ let_it_be(:canonical_issue) { create(:issue) }
+ let_it_be(:canonical_url) { issue_url(canonical_issue, Gitlab::Application.routes.default_url_options) }
+
+ it "doesn't show the canonical URL" do
+ visit(issue_path(original_issue))
+
+ expect(page).not_to have_any_canonical_links
+ end
+
+ context 'when the issue was moved' do
+ it 'shows the canonical URL' do
+ original_issue.moved_to = canonical_issue
+ original_issue.save!
+
+ visit(issue_path(original_issue))
+
+ expect(page).to have_canonical_link(canonical_url)
+ end
+ end
+
+ context 'when the issue was duplicated' do
+ it 'shows the canonical URL' do
+ original_issue.duplicated_to = canonical_issue
+ original_issue.save!
+
+ visit(issue_path(original_issue))
+
+ expect(page).to have_canonical_link(canonical_url)
+ end
+ end
+end
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index b935b99642b..9b199157d79 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe 'User browses a job', :js do
wait_for_all_requests
within('.builds-container') do
expect(page).to have_selector(
- ".build-job > a[data-original-title='test - failed - (unknown failure)']")
+ ".build-job > a[title='test - failed - (unknown failure)']")
end
end
end
@@ -55,7 +55,7 @@ RSpec.describe 'User browses a job', :js do
wait_for_all_requests
within('.builds-container') do
expect(page).to have_selector(
- ".build-job > a[data-original-title='test - failed - (unknown failure) (retried)']")
+ ".build-job > a[title='test - failed - (unknown failure) (retried)']")
end
end
end
diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb
index 2ee6bc103e9..d59f8eb4b1d 100644
--- a/spec/features/projects/members/groups_with_access_list_spec.rb
+++ b/spec/features/projects/members/groups_with_access_list_spec.rb
@@ -3,20 +3,23 @@
require 'spec_helper'
RSpec.describe 'Projects > Members > Groups with access list', :js do
- let(:user) { create(:user) }
- let(:group) { create(:group, :public) }
- let(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public) }
+
+ let(:additional_link_attrs) { {} }
+ let!(:group_link) { create(:project_group_link, project: project, group: group, **additional_link_attrs) }
before do
- project.add_maintainer(user)
- @group_link = create(:project_group_link, project: project, group: group)
+ travel_to Time.now.utc.beginning_of_day
+ project.add_maintainer(user)
sign_in(user)
visit project_project_members_path(project)
end
it 'updates group access level' do
- click_button @group_link.human_access
+ click_button group_link.human_access
page.within '.dropdown-menu' do
click_link 'Guest'
@@ -30,20 +33,38 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
end
it 'updates expiry date' do
- tomorrow = Date.today + 3
+ expires_at_field = "member_expires_at_#{group.id}"
+ fill_in expires_at_field, with: 3.days.from_now.to_date
- fill_in "member_expires_at_#{group.id}", with: tomorrow.strftime("%F")
- find('body').click
+ find_field(expires_at_field).native.send_keys :enter
wait_for_requests
page.within(find('li.group_member')) do
- expect(page).to have_content('Expires in')
+ expect(page).to have_content('Expires in 3 days')
+ end
+ end
+
+ context 'when link has expiry date set' do
+ let(:additional_link_attrs) { { expires_at: 3.days.from_now.to_date } }
+
+ it 'clears expiry date' do
+ page.within(find('li.group_member')) do
+ expect(page).to have_content('Expires in 3 days')
+
+ page.within(find('.js-edit-member-form')) do
+ find('.js-clear-input').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_content('Expires in')
+ end
end
end
it 'deletes group link' do
page.within(first('.group_member')) do
- accept_confirm { find('.btn-remove').click }
+ accept_confirm { find('.btn-danger').click }
end
wait_for_requests
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index b32ccb0ccef..36ff461aac2 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe 'Project members list' do
visit_members_page
expect(page).not_to have_selector("#edit_project_member_#{project_member.id}")
- expect(page).not_to have_selector("#project_member_#{project_member.id} .btn-remove")
+ expect(page).to have_no_selector("#project_member_#{project_member.id} .btn-danger")
end
end
diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
index 979bbd57aa3..d69c3f2652c 100644
--- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
@@ -6,43 +6,64 @@ RSpec.describe 'Projects > Members > Maintainer adds member with expiration date
include Select2Helper
include ActiveSupport::Testing::TimeHelpers
- let(:maintainer) { create(:user) }
- let(:project) { create(:project) }
- let!(:new_member) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let(:new_member) { create(:user) }
before do
+ travel_to Time.now.utc.beginning_of_day
+
project.add_maintainer(maintainer)
sign_in(maintainer)
end
it 'expiration date is displayed in the members list' do
- travel_to Time.zone.parse('2016-08-06 08:00') do
- date = 4.days.from_now
- visit project_project_members_path(project)
-
- page.within '.invite-users-form' do
- select2(new_member.id, from: '#user_ids', multiple: true)
- fill_in 'expires_at', with: date.to_s(:medium) + "\n"
- click_on 'Invite'
- end
-
- page.within "#project_member_#{new_member.project_members.first.id}" do
- expect(page).to have_content('Expires in 4 days')
- end
+ visit project_project_members_path(project)
+
+ page.within '.invite-users-form' do
+ select2(new_member.id, from: '#user_ids', multiple: true)
+
+ fill_in 'expires_at', with: 3.days.from_now.to_date
+ find_field('expires_at').native.send_keys :enter
+
+ click_on 'Invite'
+ end
+
+ page.within "#project_member_#{project_member_id}" do
+ expect(page).to have_content('Expires in 3 days')
+ end
+ end
+
+ it 'changes expiration date' do
+ project.team.add_users([new_member.id], :developer, expires_at: Date.today.to_date)
+ visit project_project_members_path(project)
+
+ page.within "#project_member_#{project_member_id}" do
+ fill_in 'Expiration date', with: 3.days.from_now.to_date
+ find_field('Expiration date').native.send_keys :enter
+
+ wait_for_requests
+
+ expect(page).to have_content('Expires in 3 days')
end
end
- it 'change expiration date' do
- travel_to Time.zone.parse('2016-08-06 08:00') do
- date = 3.days.from_now
- project.team.add_users([new_member.id], :developer, expires_at: Date.today.to_s(:medium))
- visit project_project_members_path(project)
-
- page.within "#project_member_#{new_member.project_members.first.id}" do
- find('.js-access-expiration-date').set date.to_s(:medium) + "\n"
- wait_for_requests
- expect(page).to have_content('Expires in 3 days')
- end
+ it 'clears expiration date' do
+ project.team.add_users([new_member.id], :developer, expires_at: 3.days.from_now.to_date)
+ visit project_project_members_path(project)
+
+ page.within "#project_member_#{project_member_id}" do
+ expect(page).to have_content('Expires in 3 days')
+
+ find('.js-clear-input').click
+
+ wait_for_requests
+
+ expect(page).not_to have_content('Expires in')
end
end
+
+ def project_member_id
+ project.members.find_by(user_id: new_member).id
+ end
end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 07f65fe62df..4ff3827b240 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -12,20 +12,10 @@ RSpec.describe 'Project navbar' do
let_it_be(:project) { create(:project, :repository) }
before do
- stub_feature_flags(project_iterations: false)
-
insert_package_nav(_('Operations'))
project.add_maintainer(user)
sign_in(user)
-
- if Gitlab.ee?
- insert_after_sub_nav_item(
- _('Kubernetes'),
- within: _('Operations'),
- new_sub_nav_item_name: _('Feature Flags')
- )
- end
end
it_behaves_like 'verified navigation bar' do
diff --git a/spec/features/projects/pages_spec.rb b/spec/features/projects/pages_spec.rb
index 243579ee2f7..c3eea0195a6 100644
--- a/spec/features/projects/pages_spec.rb
+++ b/spec/features/projects/pages_spec.rb
@@ -336,7 +336,7 @@ RSpec.shared_examples 'pages settings editing' do
expect(page).not_to have_field(:project_pages_https_only)
expect(page).not_to have_content('Force HTTPS (requires valid certificates)')
- expect(page).not_to have_button('Save')
+ expect(page).to have_button('Save')
end
end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index f59dc5dd074..51826d867cd 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -172,10 +172,17 @@ RSpec.describe 'Pipeline', :js do
end
end
- it_behaves_like 'showing user status' do
- let(:user_with_status) { pipeline.user }
+ describe 'pipelines details view' do
+ let!(:status) { create(:user_status, user: pipeline.user, emoji: 'smirk', message: 'Authoring this object') }
- subject { visit project_pipeline_path(project, pipeline) }
+ it 'pipeline header shows the user status and emoji' do
+ visit project_pipeline_path(project, pipeline)
+
+ within '[data-testid="ci-header-content"]' do
+ expect(page).to have_selector("[data-testid='#{status.message}']")
+ expect(page).to have_selector("[data-name='#{status.emoji}']")
+ end
+ end
end
describe 'pipeline graph' do
@@ -400,7 +407,7 @@ RSpec.describe 'Pipeline', :js do
context 'when retrying' do
before do
- find('[data-testid="retryButton"]').click
+ find('[data-testid="retryPipeline"]').click
end
it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
@@ -902,7 +909,7 @@ RSpec.describe 'Pipeline', :js do
context 'when retrying' do
before do
- find('[data-testid="retryButton"]').click
+ find('[data-testid="retryPipeline"]').click
end
it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index a9c196bb84b..3e78dfc3bc7 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe 'Pipelines', :js do
context 'when canceling' do
before do
find('.js-pipelines-cancel-button').click
- find('.js-modal-primary-action').click
+ click_button 'Stop pipeline'
wait_for_requests
end
@@ -407,7 +407,7 @@ RSpec.describe 'Pipelines', :js do
context 'when canceling' do
before do
find('.js-pipelines-cancel-button').click
- find('.js-modal-primary-action').click
+ click_button 'Stop pipeline'
end
it 'indicates that pipeline was canceled', :sidekiq_might_not_need_inline do
diff --git a/spec/features/projects/releases/user_creates_release_spec.rb b/spec/features/projects/releases/user_creates_release_spec.rb
index 5d05a7e4c91..0a5f7cc7edd 100644
--- a/spec/features/projects/releases/user_creates_release_spec.rb
+++ b/spec/features/projects/releases/user_creates_release_spec.rb
@@ -11,14 +11,11 @@ RSpec.describe 'User creates release', :js do
let_it_be(:user) { create(:user) }
let(:new_page_url) { new_project_release_path(project) }
- let(:show_feature_flag) { true }
before do
- stub_feature_flags(release_show_page: show_feature_flag)
-
project.add_developer(user)
- gitlab_sign_in(user)
+ sign_in(user)
visit new_page_url
@@ -75,14 +72,6 @@ RSpec.describe 'User creates release', :js do
expect(page).to have_current_path(project_release_path(project, release))
end
-
- context 'when the release_show_page feature flag is disabled' do
- let(:show_feature_flag) { false }
-
- it 'redirects to the main "Releases" page' do
- expect(page).to have_current_path(project_releases_path(project))
- end
- end
end
context 'when the "Cancel" button is clicked' do
@@ -108,6 +97,24 @@ RSpec.describe 'User creates release', :js do
end
end
+ context 'when the release notes "Preview" tab is clicked' do
+ before do
+ find_field('Release notes').click
+
+ fill_release_notes('**some** _markdown_ [content](https://example.com)')
+
+ click_on 'Preview'
+
+ wait_for_all_requests
+ end
+
+ it 'renders a preview of the release notes markdown' do
+ within('[data-testid="release-notes"]') do
+ expect(page).to have_text('some markdown content')
+ end
+ end
+ end
+
def fill_out_form_and_submit
fill_tag_name(tag_name)
diff --git a/spec/features/projects/releases/user_views_edit_release_spec.rb b/spec/features/projects/releases/user_views_edit_release_spec.rb
index 4ed1be6db6b..9115a135aeb 100644
--- a/spec/features/projects/releases/user_views_edit_release_spec.rb
+++ b/spec/features/projects/releases/user_views_edit_release_spec.rb
@@ -6,14 +6,11 @@ RSpec.describe 'User edits Release', :js do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:release) { create(:release, project: project, name: 'The first release' ) }
let_it_be(:user) { create(:user) }
- let(:show_feature_flag) { true }
before do
- stub_feature_flags(release_show_page: show_feature_flag)
-
project.add_developer(user)
- gitlab_sign_in(user)
+ sign_in(user)
visit edit_project_release_path(project, release)
@@ -42,7 +39,7 @@ RSpec.describe 'User edits Release', :js do
it 'renders the edit Release form' do
expect(page).to have_content('Releases are based on Git tags. We recommend tags that use semantic versioning, for example v1.0, v2.0-pre.')
- expect(find_field('Tag name', { disabled: true }).value).to eq(release.tag)
+ expect(find_field('Tag name', disabled: true).value).to eq(release.tag)
expect(find_field('Release title').value).to eq(release.name)
expect(find_field('Release notes').value).to eq(release.description)
@@ -71,42 +68,24 @@ RSpec.describe 'User edits Release', :js do
expect(release.description).to eq('Updated Release notes')
end
- context 'when the release_show_page feature flag is disabled' do
- let(:show_feature_flag) { false }
-
- it 'redirects to the main Releases page when "Cancel" is clicked' do
- fill_out_form_and_click 'Cancel'
-
- expect(page).to have_current_path(project_releases_path(project))
- end
+ it 'redirects to the previous page when "Cancel" is clicked when the url includes a back_url query parameter' do
+ back_path = project_releases_path(project, params: { page: 2 })
+ visit edit_project_release_path(project, release, params: { back_url: back_path })
- it 'redirects to the main Releases page when "Save changes" is clicked' do
- fill_out_form_and_click 'Save changes'
+ fill_out_form_and_click 'Cancel'
- expect(page).to have_current_path(project_releases_path(project))
- end
+ expect(page).to have_current_path(back_path)
end
- context 'when the release_show_page feature flag is enabled' do
- it 'redirects to the previous page when "Cancel" is clicked when the url includes a back_url query parameter' do
- back_path = project_releases_path(project, params: { page: 2 })
- visit edit_project_release_path(project, release, params: { back_url: back_path })
-
- fill_out_form_and_click 'Cancel'
-
- expect(page).to have_current_path(back_path)
- end
-
- it 'redirects to the main Releases page when "Cancel" is clicked when the url does not include a back_url query parameter' do
- fill_out_form_and_click 'Cancel'
+ it 'redirects to the main Releases page when "Cancel" is clicked when the url does not include a back_url query parameter' do
+ fill_out_form_and_click 'Cancel'
- expect(page).to have_current_path(project_releases_path(project))
- end
+ expect(page).to have_current_path(project_releases_path(project))
+ end
- it 'redirects to the dedicated Release page when "Save changes" is clicked' do
- fill_out_form_and_click 'Save changes'
+ it 'redirects to the dedicated Release page when "Save changes" is clicked' do
+ fill_out_form_and_click 'Save changes'
- expect(page).to have_current_path(project_release_path(project, release))
- end
+ expect(page).to have_current_path(project_release_path(project, release))
end
end
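Reassembled without the diff markers, the back_url behaviour now under test reads roughly as follows (the fill_out_form_and_click helper and the project/release lets come from the surrounding spec):

    it 'redirects to the previous page when "Cancel" is clicked and the url includes a back_url query parameter' do
      back_path = project_releases_path(project, params: { page: 2 })
      # Re-visit the edit page with an explicit back_url so "Cancel" has somewhere to return to.
      visit edit_project_release_path(project, release, params: { back_url: back_path })

      fill_out_form_and_click 'Cancel'

      expect(page).to have_current_path(back_path)
    end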
diff --git a/spec/features/projects/releases/user_views_release_spec.rb b/spec/features/projects/releases/user_views_release_spec.rb
index c82588746a8..186122536ce 100644
--- a/spec/features/projects/releases/user_views_release_spec.rb
+++ b/spec/features/projects/releases/user_views_release_spec.rb
@@ -4,34 +4,57 @@ require 'spec_helper'
RSpec.describe 'User views Release', :js do
let(:project) { create(:project, :repository) }
- let(:release) { create(:release, project: project, name: 'The first release' ) }
let(:user) { create(:user) }
+ let(:graphql_feature_flag) { true }
+
+ let(:release) do
+ create(:release,
+ project: project,
+ name: 'The first release',
+ description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)')
+ end
before do
+ stub_feature_flags(graphql_individual_release_page: graphql_feature_flag)
+
project.add_developer(user)
- gitlab_sign_in(user)
+ sign_in(user)
visit project_release_path(project, release)
end
- it 'renders the breadcrumbs' do
- within('.breadcrumbs') do
- expect(page).to have_content("#{project.creator.name} #{project.name} Releases #{release.name}")
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
- expect(page).to have_link(project.creator.name, href: user_path(project.creator))
- expect(page).to have_link(project.name, href: project_path(project))
- expect(page).to have_link('Releases', href: project_releases_path(project))
- expect(page).to have_link(release.name, href: project_release_path(project, release))
+ shared_examples 'release page' do
+ it 'renders the breadcrumbs' do
+ within('.breadcrumbs') do
+ expect(page).to have_content("#{project.creator.name} #{project.name} Releases #{release.name}")
+
+ expect(page).to have_link(project.creator.name, href: user_path(project.creator))
+ expect(page).to have_link(project.name, href: project_path(project))
+ expect(page).to have_link('Releases', href: project_releases_path(project))
+ expect(page).to have_link(release.name, href: project_release_path(project, release))
+ end
end
- end
- it 'renders the release details' do
- within('.release-block') do
- expect(page).to have_content(release.name)
- expect(page).to have_content(release.tag)
- expect(page).to have_content(release.commit.short_id)
- expect(page).to have_content(release.description)
+ it 'renders the release details' do
+ within('.release-block') do
+ expect(page).to have_content(release.name)
+ expect(page).to have_content(release.tag)
+ expect(page).to have_content(release.commit.short_id)
+ expect(page).to have_content('Lorem ipsum dolor sit amet')
+ end
end
end
+
+ describe 'when the graphql_individual_release_page feature flag is enabled' do
+ it_behaves_like 'release page'
+ end
+
+ describe 'when the graphql_individual_release_page feature flag is disabled' do
+ let(:graphql_feature_flag) { false }
+
+ it_behaves_like 'release page'
+ end
end
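The structure introduced above, one shared example group run under both states of the graphql_individual_release_page flag, condenses to roughly this sketch:

    let(:graphql_feature_flag) { true }

    before do
      # The flag value is driven by the let, so each describe below can flip it.
      stub_feature_flags(graphql_individual_release_page: graphql_feature_flag)
      sign_in(user)
      visit project_release_path(project, release)
    end

    shared_examples 'release page' do
      it 'renders the release details' do
        within('.release-block') do
          expect(page).to have_content(release.name)
        end
      end
    end

    describe 'when the graphql_individual_release_page feature flag is enabled' do
      it_behaves_like 'release page'
    end

    describe 'when the graphql_individual_release_page feature flag is disabled' do
      let(:graphql_feature_flag) { false }

      it_behaves_like 'release page'
    end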
diff --git a/spec/features/projects/releases/user_views_releases_spec.rb b/spec/features/projects/releases/user_views_releases_spec.rb
index 993d3371904..323c57570c3 100644
--- a/spec/features/projects/releases/user_views_releases_spec.rb
+++ b/spec/features/projects/releases/user_views_releases_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'User views releases', :js do
shared_examples 'releases page' do
context('when the user is a maintainer') do
before do
- gitlab_sign_in(maintainer)
+ sign_in(maintainer)
end
it 'sees the release' do
@@ -27,11 +27,23 @@ RSpec.describe 'User views releases', :js do
expect(page).not_to have_content('Upcoming Release')
end
- shared_examples 'asset link tests' do
- context 'when there is a link as an asset' do
- let!(:release_link) { create(:release_link, release: release, url: url ) }
+ context 'when there is a link as an asset' do
+ let!(:release_link) { create(:release_link, release: release, url: url ) }
+ let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
+ let(:direct_asset_link) { Gitlab::Routing.url_helpers.project_release_url(project, release) << release_link.filepath }
+
+ it 'sees the link' do
+ visit project_releases_path(project)
+
+ page.within('.js-assets-list') do
+ expect(page).to have_link release_link.name, href: direct_asset_link
+ expect(page).not_to have_css('[data-testid="external-link-indicator"]')
+ end
+ end
+
+ context 'when there is a link redirect' do
+ let!(:release_link) { create(:release_link, release: release, name: 'linux-amd64 binaries', filepath: '/binaries/linux-amd64', url: url) }
let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
- let(:direct_asset_link) { Gitlab::Routing.url_helpers.project_release_url(project, release) << release_link.filepath }
it 'sees the link' do
visit project_releases_path(project)
@@ -41,51 +53,21 @@ RSpec.describe 'User views releases', :js do
expect(page).not_to have_css('[data-testid="external-link-indicator"]')
end
end
+ end
- context 'when there is a link redirect' do
- let!(:release_link) { create(:release_link, release: release, name: 'linux-amd64 binaries', filepath: '/binaries/linux-amd64', url: url) }
- let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
-
- it 'sees the link' do
- visit project_releases_path(project)
-
- page.within('.js-assets-list') do
- expect(page).to have_link release_link.name, href: direct_asset_link
- expect(page).not_to have_css('[data-testid="external-link-indicator"]')
- end
- end
- end
-
- context 'when url points to external resource' do
- let(:url) { 'http://google.com/download' }
+ context 'when url points to external resource' do
+ let(:url) { 'http://google.com/download' }
- it 'sees that the link is external resource' do
- visit project_releases_path(project)
+ it 'sees that the link is external resource' do
+ visit project_releases_path(project)
- page.within('.js-assets-list') do
- expect(page).to have_css('[data-testid="external-link-indicator"]')
- end
+ page.within('.js-assets-list') do
+ expect(page).to have_css('[data-testid="external-link-indicator"]')
end
end
end
end
- context 'when the release_asset_link_type feature flag is enabled' do
- before do
- stub_feature_flags(release_asset_link_type: true)
- end
-
- it_behaves_like 'asset link tests'
- end
-
- context 'when the release_asset_link_type feature flag is disabled' do
- before do
- stub_feature_flags(release_asset_link_type: false)
- end
-
- it_behaves_like 'asset link tests'
- end
-
context 'with an upcoming release' do
let(:tomorrow) { Time.zone.now + 1.day }
let!(:release) { create(:release, project: project, released_at: tomorrow ) }
@@ -110,7 +92,7 @@ RSpec.describe 'User views releases', :js do
context('when the user is a guest') do
before do
- gitlab_sign_in(guest)
+ sign_in(guest)
end
it 'renders release info except for Git-related data' do
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index 0358acc8dcc..ffc0ecc4966 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe "Projects > Settings > Pipelines settings" do
it 'updates forward_deployment_enabled' do
visit project_settings_ci_cd_path(project)
- checkbox = find_field('project_forward_deployment_enabled')
+ checkbox = find_field('project_ci_cd_settings_attributes_forward_deployment_enabled')
expect(checkbox).to be_checked
checkbox.set(false)
@@ -79,7 +79,7 @@ RSpec.describe "Projects > Settings > Pipelines settings" do
expect(page).to have_button('Save changes', disabled: false)
end
- checkbox = find_field('project_forward_deployment_enabled')
+ checkbox = find_field('project_ci_cd_settings_attributes_forward_deployment_enabled')
expect(checkbox).not_to be_checked
end
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 8e2f97fd6a0..4e1b53ffc87 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -3,27 +3,35 @@
require 'spec_helper'
RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration policy', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, namespace: user.namespace, container_registry_enabled: container_registry_enabled) }
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
+
let(:container_registry_enabled) { true }
+ let(:container_registry_enabled_on_project) { true }
+
+ subject { visit project_settings_ci_cd_path(project) }
before do
+ project.update!(container_registry_enabled: container_registry_enabled_on_project)
+
sign_in(user)
- stub_container_registry_config(enabled: true)
+ stub_container_registry_config(enabled: container_registry_enabled)
stub_feature_flags(new_variables_ui: false)
end
context 'as owner' do
- before do
- visit project_settings_ci_cd_path(project)
- end
-
it 'shows available section' do
+ subject
+
settings_block = find('#js-registry-policies')
expect(settings_block).to have_text 'Cleanup policy for tags'
end
it 'saves cleanup policy submit the form' do
+ subject
+
within '#js-registry-policies' do
within '.card-body' do
select('7 days until tags are automatically removed', from: 'Expiration interval:')
@@ -40,6 +48,8 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
end
it 'does not save cleanup policy submit form with invalid regex' do
+ subject
+
within '#js-registry-policies' do
within '.card-body' do
fill_in('Tags with names matching this regex pattern will expire:', with: '*-production')
@@ -53,25 +63,53 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
end
end
- context 'when registry is disabled' do
- before do
- stub_container_registry_config(enabled: false)
- visit project_settings_ci_cd_path(project)
+ context 'with a project without expiration policy' do
+ where(:application_setting, :feature_flag, :result) do
+ true | true | :available_section
+ true | false | :available_section
+ false | true | :available_section
+ false | false | :disabled_message
end
- it 'does not exists' do
- expect(page).not_to have_selector('#js-registry-policies')
+ with_them do
+ before do
+ project.container_expiration_policy.destroy!
+ stub_feature_flags(container_expiration_policies_historic_entry: false)
+ stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
+ stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
+ end
+
+ it 'displays the expected result' do
+ subject
+
+ within '#js-registry-policies' do
+ case result
+ when :available_section
+ expect(find('.card-header')).to have_content('Tag expiration policy')
+ when :disabled_message
+ expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
+ end
+ end
+ end
end
end
- context 'when container registry is disabled on project' do
+ context 'when registry is disabled' do
let(:container_registry_enabled) { false }
- before do
- visit project_settings_ci_cd_path(project)
+ it 'does not exist' do
+ subject
+
+ expect(page).not_to have_selector('#js-registry-policies')
end
+ end
+
+ context 'when container registry is disabled on project' do
+ let(:container_registry_enabled_on_project) { false }
it 'does not exist' do
+ subject
+
expect(page).not_to have_selector('#js-registry-policies')
end
end
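The registry settings spec now drives the historic-entries matrix with RSpec::Parameterized::TableSyntax; stripped to the pattern, it looks roughly like this:

    using RSpec::Parameterized::TableSyntax

    where(:application_setting, :feature_flag, :result) do
      true  | true  | :available_section
      true  | false | :available_section
      false | true  | :available_section
      false | false | :disabled_message
    end

    with_them do
      before do
        # Each row of the table becomes its own example group with these values bound.
        stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
        stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
      end

      it 'displays the expected result' do
        subject

        within '#js-registry-policies' do
          # `result` selects which assertion applies for this row of the table.
          case result
          when :available_section
            expect(find('.card-header')).to have_content('Tag expiration policy')
          when :disabled_message
            expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
          end
        end
      end
    end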
diff --git a/spec/features/projects/show/user_manages_notifications_spec.rb b/spec/features/projects/show/user_manages_notifications_spec.rb
index 9d9a75c22be..d444ea27d35 100644
--- a/spec/features/projects/show/user_manages_notifications_spec.rb
+++ b/spec/features/projects/show/user_manages_notifications_spec.rb
@@ -18,7 +18,9 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
click_notifications_button
click_link 'On mention'
- wait_for_requests
+ page.within('.notification-dropdown') do
+ expect(page).not_to have_css('.gl-spinner')
+ end
click_notifications_button
expect(find('.update-notification.is-active')).to have_content('On mention')
@@ -30,7 +32,9 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
click_notifications_button
click_link 'Disabled'
- wait_for_requests
+ page.within('.notification-dropdown') do
+ expect(page).not_to have_css('.gl-spinner')
+ end
expect(page).to have_css('.notifications-icon[data-testid="notifications-off-icon"]')
end
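The notification specs swap a blanket wait_for_requests for a scoped negative matcher on the dropdown's spinner, which Capybara can retry on until the request finishes; the pattern is simply:

    # Polling on the absence of the spinner inside the dropdown is more reliable
    # than wait_for_requests, which races with the UI update.
    page.within('.notification-dropdown') do
      expect(page).not_to have_css('.gl-spinner')
    end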
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 81736fefae9..189aa45ff75 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -46,21 +46,21 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons' do
visit project_path(project)
end
- it '"New file" button linked to new file page' do
+ it '"New file" button linked to IDE new file page' do
page.within('.project-buttons') do
- expect(page).to have_link('New file', href: project_new_blob_path(project, project.default_branch || 'master'))
+ expect(page).to have_link('New file', href: presenter.ide_edit_path(project, project.default_branch || 'master'))
end
end
- it '"Add README" button linked to new file populated for a README' do
+ it '"Add README" button linked to IDE new file populated for a README' do
page.within('.project-buttons') do
- expect(page).to have_link('Add README', href: presenter.add_readme_path)
+ expect(page).to have_link('Add README', href: presenter.add_readme_ide_path)
end
end
- it '"Add license" button linked to new file populated for a license' do
+ it '"Add license" button linked to IDE new file populated for a license' do
page.within('.project-buttons') do
- expect(page).to have_link('Add LICENSE', href: presenter.add_license_path)
+ expect(page).to have_link('Add LICENSE', href: presenter.add_license_ide_path)
end
end
@@ -74,9 +74,9 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons' do
visit project_path(project)
end
- it '"New file" button linked to new file page' do
+ it '"New file" button linked to IDE new file page' do
page.within('.project-buttons') do
- expect(page).to have_link('New file', href: project_new_blob_path(project, 'example_branch'))
+ expect(page).to have_link('New file', href: presenter.ide_edit_path(project, 'example_branch'))
end
end
end
@@ -144,7 +144,7 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons' do
expect(project.repository.readme).not_to be_nil
page.within('.project-buttons') do
- expect(page).not_to have_link('Add README', href: presenter.add_readme_path)
+ expect(page).not_to have_link('Add README', href: presenter.add_readme_ide_path)
expect(page).to have_link('README', href: presenter.readme_path)
end
end
@@ -164,7 +164,7 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons' do
end
context 'when the project does not have a README' do
- it 'shows the "Add README" button' do
+ it 'shows the single file editor "Add README" button' do
allow(project.repository).to receive(:readme).and_return(nil)
visit project_path(project)
@@ -226,7 +226,7 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons' do
expect(project.repository.gitlab_ci_yml).to be_nil
page.within('.project-buttons') do
- expect(page).to have_link('Set up CI/CD', href: presenter.add_ci_yml_ide_path)
+ expect(page).to have_link('Set up CI/CD', href: presenter.add_ci_yml_path)
end
end
diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb
index 503246bbdcf..28fe0a0b7e1 100644
--- a/spec/features/projects/snippets/create_snippet_spec.rb
+++ b/spec/features/projects/snippets/create_snippet_spec.rb
@@ -17,115 +17,81 @@ RSpec.describe 'Projects > Snippets > Create Snippet', :js do
let(:file_content) { 'Hello World!' }
let(:md_description) { 'My Snippet **Description**' }
let(:description) { 'My Snippet Description' }
- let(:snippet_title_field) { 'project_snippet_title' }
- shared_examples 'snippet creation' do
- def fill_form
- snippet_fill_in_form(title: title, content: file_content, description: md_description)
- end
-
- it 'shows collapsible description input' do
- collapsed = description_field
+ def fill_form
+ snippet_fill_in_form(title: title, content: file_content, description: md_description)
+ end
- expect(page).not_to have_field(snippet_description_field)
- expect(collapsed).to be_visible
+ before do
+ sign_in(user)
- collapsed.click
+ visit new_project_snippet_path(project)
+ end
- expect(page).to have_field(snippet_description_field)
- expect(collapsed).not_to be_visible
- end
+ it 'shows collapsible description input' do
+ collapsed = snippet_description_field_collapsed
- it 'creates a new snippet' do
- fill_form
- click_button('Create snippet')
- wait_for_requests
+ expect(page).not_to have_field(snippet_description_locator)
+ expect(collapsed).to be_visible
- expect(page).to have_content(title)
- expect(page).to have_content(file_content)
- page.within(snippet_description_view_selector) do
- expect(page).to have_content(description)
- expect(page).to have_selector('strong')
- end
- end
+ collapsed.click
- it 'uploads a file when dragging into textarea' do
- fill_form
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
-
- expect(snippet_description_value).to have_content('banana_sample')
+ expect(page).to have_field(snippet_description_locator)
+ expect(collapsed).not_to be_visible
+ end
- click_button('Create snippet')
- wait_for_requests
+ it 'creates a new snippet' do
+ fill_form
+ click_button('Create snippet')
+ wait_for_requests
- link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
+ expect(page).to have_content(title)
+ expect(page).to have_content(file_content)
+ page.within('.snippet-header .snippet-description') do
+ expect(page).to have_content(description)
+ expect(page).to have_selector('strong')
end
+ end
- context 'when the git operation fails' do
- let(:error) { 'Error creating the snippet' }
-
- before do
- allow_next_instance_of(Snippets::CreateService) do |instance|
- allow(instance).to receive(:create_commit).and_raise(StandardError, error)
- end
+ it 'uploads a file when dragging into textarea' do
+ fill_form
+ dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
- fill_form
+ expect(snippet_description_value).to have_content('banana_sample')
- click_button('Create snippet')
- wait_for_requests
- end
+ click_button('Create snippet')
+ wait_for_requests
- it 'renders the new page and displays the error' do
- expect(page).to have_content(error)
- expect(page).to have_content('New Snippet')
- end
- end
+ link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
+ expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
end
- context 'Vue application' do
- let(:snippet_description_field) { 'snippet-description' }
- let(:snippet_description_view_selector) { '.snippet-header .snippet-description' }
+ context 'when the git operation fails' do
+ let(:error) { 'Error creating the snippet' }
before do
- sign_in(user)
-
- visit new_project_snippet_path(project)
- end
-
- it_behaves_like 'snippet creation'
-
- it 'does not allow submitting the form without title and content' do
- fill_in snippet_title_field, with: title
+ allow_next_instance_of(Snippets::CreateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError, error)
+ end
- expect(page).not_to have_button('Create snippet')
+ fill_form
- snippet_fill_in_form(title: title, content: file_content)
- expect(page).to have_button('Create snippet')
+ click_button('Create snippet')
+ wait_for_requests
end
- end
-
- context 'non-Vue application' do
- let(:snippet_description_field) { 'project_snippet_description' }
- let(:snippet_description_view_selector) { '.snippet-header .description' }
-
- before do
- stub_feature_flags(snippets_vue: false)
- stub_feature_flags(snippets_edit_vue: false)
-
- sign_in(user)
- visit new_project_snippet_path(project)
+ it 'renders the new page and displays the error' do
+ expect(page).to have_content(error)
+ expect(page).to have_content('New Snippet')
end
+ end
- it_behaves_like 'snippet creation'
+ it 'does not allow submitting the form without title and content' do
+ snippet_fill_in_title(title)
- it 'displays validation errors' do
- fill_in snippet_title_field, with: title
- click_button('Create snippet')
- wait_for_requests
+ expect(page).not_to have_button('Create snippet')
- expect(page).to have_selector('#error_explanation')
- end
+ snippet_fill_in_form(title: title, content: file_content)
+ expect(page).to have_button('Create snippet')
end
end
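The snippet-creation failure case keeps the same service-stubbing approach it had before the Vue/non-Vue split was removed; as a standalone sketch:

    context 'when the git operation fails' do
      let(:error) { 'Error creating the snippet' }

      before do
        # Force the service layer to fail so the controller re-renders the form with the error.
        allow_next_instance_of(Snippets::CreateService) do |instance|
          allow(instance).to receive(:create_commit).and_raise(StandardError, error)
        end

        fill_form
        click_button('Create snippet')
        wait_for_requests
      end

      it 'renders the new page and displays the error' do
        expect(page).to have_content(error)
        expect(page).to have_content('New Snippet')
      end
    end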
diff --git a/spec/features/projects/snippets/show_spec.rb b/spec/features/projects/snippets/show_spec.rb
index 8fded3cde80..5937ff75457 100644
--- a/spec/features/projects/snippets/show_spec.rb
+++ b/spec/features/projects/snippets/show_spec.rb
@@ -13,8 +13,6 @@ RSpec.describe 'Projects > Snippets > Project snippet', :js do
let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
before do
- stub_feature_flags(snippets_vue: false)
-
sign_in(user)
end
@@ -28,12 +26,8 @@ RSpec.describe 'Projects > Snippets > Project snippet', :js do
end
end
- it_behaves_like 'showing user status' do
- let(:file_path) { 'files/ruby/popen.rb' }
- let(:user_with_status) { snippet.author }
-
- subject { visit project_snippet_path(project, snippet) }
- end
+ # it_behaves_like 'showing user status' do
+ # This will be handled in https://gitlab.com/gitlab-org/gitlab/-/issues/262394
it_behaves_like 'does not show New Snippet button' do
let(:file_path) { 'files/ruby/popen.rb' }
diff --git a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
index 2784fec3dc1..b37d40c0eed 100644
--- a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe 'Projects > Snippets > User comments on a snippet', :js do
let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
before do
- stub_feature_flags(snippets_vue: false)
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/projects/snippets/user_deletes_snippet_spec.rb b/spec/features/projects/snippets/user_deletes_snippet_spec.rb
index 44fe9834484..6d526e60512 100644
--- a/spec/features/projects/snippets/user_deletes_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_deletes_snippet_spec.rb
@@ -2,13 +2,12 @@
require 'spec_helper'
-RSpec.describe 'Projects > Snippets > User deletes a snippet' do
+RSpec.describe 'Projects > Snippets > User deletes a snippet', :js do
let(:project) { create(:project) }
- let!(:snippet) { create(:project_snippet, project: project, author: user) }
+ let!(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
let(:user) { create(:user) }
before do
- stub_feature_flags(snippets_vue: false)
project.add_maintainer(user)
sign_in(user)
@@ -16,7 +15,11 @@ RSpec.describe 'Projects > Snippets > User deletes a snippet' do
end
it 'deletes a snippet' do
- first(:link, 'Delete').click
+ expect(page).to have_content(snippet.title)
+
+ click_button('Delete')
+ click_button('Delete snippet')
+ wait_for_requests
expect(page).not_to have_content(snippet.title)
end
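The delete flow is now driven through the Vue confirmation modal rather than a bare link, which is why the spec gains :js and a two-step click; roughly:

    it 'deletes a snippet' do
      expect(page).to have_content(snippet.title)

      # The first button opens the confirmation modal, the second confirms the deletion.
      click_button('Delete')
      click_button('Delete snippet')
      wait_for_requests

      expect(page).not_to have_content(snippet.title)
    end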
diff --git a/spec/features/projects/snippets/user_updates_snippet_spec.rb b/spec/features/projects/snippets/user_updates_snippet_spec.rb
index 193eaa9576a..aa498163f52 100644
--- a/spec/features/projects/snippets/user_updates_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_updates_snippet_spec.rb
@@ -9,9 +9,7 @@ RSpec.describe 'Projects > Snippets > User updates a snippet', :js do
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:snippet, reload: true) { create(:project_snippet, :repository, project: project, author: user) }
- let(:snippet_title_field) { 'project_snippet_title' }
-
- def bootstrap_snippet
+ before do
project.add_maintainer(user)
sign_in(user)
@@ -20,64 +18,36 @@ RSpec.describe 'Projects > Snippets > User updates a snippet', :js do
wait_for_all_requests
end
- shared_examples 'snippet update' do
- it 'displays the snippet blob path and content' do
- blob = snippet.blobs.first
-
- aggregate_failures do
- expect(snippet_get_first_blob_path).to eq blob.path
- expect(snippet_get_first_blob_value).to have_content(blob.data.strip)
- end
- end
-
- it 'updates a snippet' do
- fill_in('project_snippet_title', with: 'Snippet new title')
- click_button('Save')
+ it 'displays the snippet blob path and content' do
+ blob = snippet.blobs.first
- expect(page).to have_content('Snippet new title')
- end
-
- context 'when the git operation fails' do
- before do
- allow_next_instance_of(Snippets::UpdateService) do |instance|
- allow(instance).to receive(:create_commit).and_raise(StandardError, 'Error Message')
- end
-
- fill_in(snippet_title_field, with: 'Snippet new title')
- fill_in(snippet_blob_path_field, match: :first, with: 'new_file_name')
-
- click_button('Save')
- end
-
- it 'renders edit page and displays the error' do
- expect(page.find('.flash-container')).to have_content('Error updating the snippet - Error Message')
- expect(page).to have_content('Edit Snippet')
- end
+ aggregate_failures do
+ expect(snippet_get_first_blob_path).to eq blob.path
+ expect(snippet_get_first_blob_value).to have_content(blob.data.strip)
end
end
- context 'Vue application' do
- before do
- bootstrap_snippet
- end
+ it 'updates a snippet' do
+ fill_in('snippet-title', with: 'Snippet new title')
+ click_button('Save')
- it_behaves_like 'snippet update' do
- let(:snippet_blob_path_field) { 'snippet_file_name' }
- let(:snippet_blob_content_selector) { '.file-content' }
- end
+ expect(page).to have_content('Snippet new title')
end
- context 'non-Vue application' do
+ context 'when the git operation fails' do
before do
- stub_feature_flags(snippets_vue: false)
- stub_feature_flags(snippets_edit_vue: false)
+ allow_next_instance_of(Snippets::UpdateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError, 'Error Message')
+ end
- bootstrap_snippet
+ snippet_fill_in_form(title: 'Snippet new title', file_name: 'new_file_name')
+
+ click_button('Save')
end
- it_behaves_like 'snippet update' do
- let(:snippet_blob_path_field) { 'project_snippet_file_name' }
- let(:snippet_blob_content_selector) { '.file-content' }
+ it 'renders edit page and displays the error' do
+ expect(page.find('.flash-container')).to have_content('Error updating the snippet - Error Message')
+ expect(page).to have_content('Edit Snippet')
end
end
end
diff --git a/spec/features/projects/tracings_spec.rb b/spec/features/projects/tracings_spec.rb
new file mode 100644
index 00000000000..c4a4f1382ed
--- /dev/null
+++ b/spec/features/projects/tracings_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Tracings Content Security Policy' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ subject { response_headers['Content-Security-Policy'] }
+
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when there is no global config' do
+ before do
+ expect_next_instance_of(Projects::TracingsController) do |controller|
+ expect(controller).to receive(:current_content_security_policy)
+ .and_return(ActionDispatch::ContentSecurityPolicy.new)
+ end
+ end
+
+ it 'does not add CSP directives' do
+ visit project_tracing_path(project)
+
+ is_expected.to be_blank
+ end
+ end
+
+ context 'when a global CSP config exists' do
+ before do
+ csp = ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.frame_src 'https://global-policy.com'
+ end
+
+ expect_next_instance_of(Projects::TracingsController) do |controller|
+ expect(controller).to receive(:current_content_security_policy).and_return(csp)
+ end
+ end
+
+ context 'when external_url is set' do
+ let!(:project_tracing_setting) { create(:project_tracing_setting, project: project) }
+
+ it 'overwrites frame-src' do
+ visit project_tracing_path(project)
+
+ is_expected.to eq("frame-src https://example.com")
+ end
+ end
+
+ context 'when external_url is not set' do
+ it 'uses global policy' do
+ visit project_tracing_path(project)
+
+ is_expected.to eq("frame-src https://global-policy.com")
+ end
+ end
+ end
+end
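The new tracings spec makes the controller's Content-Security-Policy deterministic by injecting one with expect_next_instance_of; the core of that setup is:

    # Build a policy with a known frame-src, then make the next controller instance
    # return it so the response header can be asserted exactly.
    csp = ActionDispatch::ContentSecurityPolicy.new do |p|
      p.frame_src 'https://global-policy.com'
    end

    expect_next_instance_of(Projects::TracingsController) do |controller|
      expect(controller).to receive(:current_content_security_policy).and_return(csp)
    end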
diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb
index bd2af66710a..ca9e0a23888 100644
--- a/spec/features/projects/tree/tree_show_spec.rb
+++ b/spec/features/projects/tree/tree_show_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe 'Projects tree', :js do
# Check last commit
expect(find('.commit-content').text).to include(message)
- expect(find('.commit-sha-group').text).to eq(short_newrev)
+ expect(find('.js-commit-sha-group').text).to eq(short_newrev)
end
end
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index 50d7b353c46..616c5065c07 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -128,6 +128,59 @@ RSpec.describe 'Projects > User sees sidebar' do
end
end
+ context 'as anonymous' do
+ let(:project) { create(:project, :public) }
+ let!(:issue) { create(:issue, :opened, project: project, author: user) }
+
+ describe 'project landing page' do
+ before do
+ project.project_feature.update!(
+ builds_access_level: ProjectFeature::DISABLED,
+ merge_requests_access_level: ProjectFeature::DISABLED,
+ repository_access_level: ProjectFeature::DISABLED,
+ issues_access_level: ProjectFeature::DISABLED,
+ wiki_access_level: ProjectFeature::DISABLED
+ )
+ end
+
+ it 'does not show the project file list landing page, but the activity' do
+ visit project_path(project)
+
+ expect(page).not_to have_selector '.project-stats'
+ expect(page).not_to have_selector '.project-last-commit'
+ expect(page).not_to have_selector '.project-show-files'
+ expect(page).to have_selector '.project-show-activity'
+ end
+
+ it 'shows the wiki when enabled' do
+ project.project_feature.update!(wiki_access_level: ProjectFeature::ENABLED)
+
+ visit project_path(project)
+
+ expect(page).to have_selector '.project-show-wiki'
+ end
+
+ it 'shows the issues when enabled' do
+ project.project_feature.update!(issues_access_level: ProjectFeature::ENABLED)
+
+ visit project_path(project)
+
+ expect(page).to have_selector '.issues-list'
+ end
+
+ it 'shows the wiki when wiki and issues are enabled' do
+ project.project_feature.update!(
+ issues_access_level: ProjectFeature::ENABLED,
+ wiki_access_level: ProjectFeature::ENABLED
+ )
+
+ visit project_path(project)
+
+ expect(page).to have_selector '.project-show-wiki'
+ end
+ end
+ end
+
context 'as guest' do
let(:guest) { create(:user) }
let!(:issue) { create(:issue, :opened, project: project, author: guest) }
@@ -145,11 +198,11 @@ RSpec.describe 'Projects > User sees sidebar' do
expect(page).to have_content 'Project'
expect(page).to have_content 'Issues'
expect(page).to have_content 'Wiki'
+ expect(page).to have_content 'Operations'
expect(page).not_to have_content 'Repository'
expect(page).not_to have_content 'CI / CD'
expect(page).not_to have_content 'Merge Requests'
- expect(page).not_to have_content 'Operations'
end
end
@@ -194,13 +247,13 @@ RSpec.describe 'Projects > User sees sidebar' do
expect(page).not_to have_selector '.project-stats'
expect(page).not_to have_selector '.project-last-commit'
expect(page).not_to have_selector '.project-show-files'
- expect(page).to have_selector '.project-show-customize_workflow'
+ expect(page).to have_selector '.project-show-activity'
end
- it 'shows the customize workflow when issues and wiki are disabled' do
+ it 'shows the project activity when issues and wiki are disabled' do
visit project_path(project)
- expect(page).to have_selector '.project-show-customize_workflow'
+ expect(page).to have_selector '.project-show-activity'
end
it 'shows the wiki when enabled' do
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
deleted file mode 100644
index 8f2fb9e827c..00000000000
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ /dev/null
@@ -1,168 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Wiki > User previews markdown changes', :js do
- let_it_be(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
- let(:wiki_page) { create(:wiki_page, wiki: project.wiki, title: 'home', content: '[some link](other-page)') }
- let(:wiki_content) do
- <<-HEREDOC
-Some text so key event for [ does not trigger an incorrect replacement.
-[regular link](regular)
-[relative link 1](../relative)
-[relative link 2](./relative)
-[relative link 3](./e/f/relative)
-[spaced link](title with spaces)
- HEREDOC
- end
-
- before do
- project.add_maintainer(user)
-
- sign_in(user)
- end
-
- context "while creating a new wiki page" do
- context "when there are no spaces or hyphens in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a/b/c/d', content: wiki_content)
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context "when there are spaces in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a page/b page/c page/d page', content: wiki_content)
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context "when there are hyphens in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a-page/b-page/c-page/d-page', content: wiki_content)
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
- end
-
- context "while editing a wiki page" do
- context "when there are no spaces or hyphens in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a/b/c/d')
- click_link 'Edit'
-
- fill_in :wiki_content, with: wiki_content
- click_on "Preview"
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context "when there are spaces in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a page/b page/c page/d page')
- click_link 'Edit'
-
- fill_in :wiki_content, with: wiki_content
- click_on "Preview"
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context "when there are hyphens in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a-page/b-page/c-page/d-page')
- click_link 'Edit'
-
- fill_in :wiki_content, with: wiki_content
- click_on "Preview"
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context 'when rendering the preview' do
- it 'renders content with CommonMark' do
- create_wiki_page('a-page/b-page/c-page/common-mark')
- click_link 'Edit'
-
- fill_in :wiki_content, with: "1. one\n - sublist\n"
- click_on "Preview"
-
- # the above generates two separate lists (not embedded) in CommonMark
- expect(page).to have_content("sublist")
- expect(page).not_to have_xpath("//ol//li//ul")
- end
- end
- end
-
- it "does not linkify double brackets inside code blocks as expected" do
- wiki_content = <<-HEREDOC
- `[[do_not_linkify]]`
- ```
- [[also_do_not_linkify]]
- ```
- HEREDOC
-
- create_wiki_page('linkify_test', wiki_content)
-
- expect(page).to have_content("do_not_linkify")
-
- expect(page.html).to include('[[do_not_linkify]]')
- expect(page.html).to include('[[also_do_not_linkify]]')
- end
-
- private
-
- def create_wiki_page(path, content = 'content')
- visit project_wiki_path(project, wiki_page)
-
- click_link 'New page'
-
- fill_in :wiki_title, with: path
- fill_in :wiki_content, with: content
-
- click_button 'Create page'
- end
-end
diff --git a/spec/features/projects/wiki/shortcuts_spec.rb b/spec/features/projects/wiki/shortcuts_spec.rb
deleted file mode 100644
index 170e7afb51f..00000000000
--- a/spec/features/projects/wiki/shortcuts_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Wiki shortcuts', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
- let(:wiki_page) { create(:wiki_page, wiki: project.wiki, title: 'home', content: 'Home page') }
-
- before do
- sign_in(user)
- visit project_wiki_path(project, wiki_page)
- end
-
- it 'Visit edit wiki page using "e" keyboard shortcut' do
- find('body').native.send_key('e')
-
- expect(find('.wiki-page-title')).to have_content('Edit Page')
- end
-end
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
deleted file mode 100644
index eba1b63765a..00000000000
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ /dev/null
@@ -1,360 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-RSpec.describe "User creates wiki page" do
- include WikiHelpers
-
- let(:user) { create(:user) }
- let(:wiki) { ProjectWiki.new(project, user) }
- let(:project) { create(:project) }
-
- before do
- project.add_maintainer(user)
-
- sign_in(user)
- end
-
- context "when wiki is empty" do
- before do |example|
- visit(project_wikis_path(project))
-
- wait_for_svg_to_be_loaded(example)
-
- click_link "Create your first page"
- end
-
- context "in a user namespace" do
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
-
- it "shows validation error message" do
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "")
-
- click_on("Create page")
- end
-
- expect(page).to have_content("The form contains the following error:").and have_content("Content can't be blank")
-
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "[link test](test)")
-
- click_on("Create page")
- end
-
- expect(page).to have_content("Home").and have_content("link test")
-
- click_link("link test")
-
- expect(page).to have_content("Create New Page")
- end
-
- it "shows non-escaped link in the pages list" do
- fill_in(:wiki_title, with: "one/two/three-test")
-
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "wiki content")
-
- click_on("Create page")
- end
-
- expect(current_path).to include("one/two/three-test")
- expect(page).to have_xpath("//a[@href='/#{project.full_path}/-/wikis/one/two/three-test']")
- end
-
- it "has `Create home` as a commit message", :js do
- wait_for_requests
-
- expect(page).to have_field("wiki[message]", with: "Create home")
- end
-
- it "creates a page from the home page" do
- fill_in(:wiki_content, with: "[test](test)\n[GitLab API doc](api)\n[Rake tasks](raketasks)\n# Wiki header\n")
- fill_in(:wiki_message, with: "Adding links to wiki")
-
- page.within(".wiki-form") do
- click_button("Create page")
- end
-
- expect(current_path).to eq(project_wiki_path(project, "home"))
- expect(page).to have_content("test GitLab API doc Rake tasks Wiki header")
- .and have_content("Home")
- .and have_content("Last edited by #{user.name}")
- .and have_header_with_correct_id_and_link(1, "Wiki header", "wiki-header")
-
- click_link("test")
-
- expect(current_path).to eq(project_wiki_path(project, "test"))
-
- page.within(:css, ".nav-text") do
- expect(page).to have_content("Create New Page")
- end
-
- click_link("Home")
-
- expect(current_path).to eq(project_wiki_path(project, "home"))
-
- click_link("GitLab API")
-
- expect(current_path).to eq(project_wiki_path(project, "api"))
-
- page.within(:css, ".nav-text") do
- expect(page).to have_content("Create")
- end
-
- click_link("Home")
-
- expect(current_path).to eq(project_wiki_path(project, "home"))
-
- click_link("Rake tasks")
-
- expect(current_path).to eq(project_wiki_path(project, "raketasks"))
-
- page.within(:css, ".nav-text") do
- expect(page).to have_content("Create")
- end
- end
-
- it "creates ASCII wiki with LaTeX blocks", :js do
- stub_application_setting(plantuml_url: "http://localhost", plantuml_enabled: true)
-
- ascii_content = <<~MD
- :stem: latexmath
-
- [stem]
- ++++
- \\sqrt{4} = 2
- ++++
-
- another part
-
- [latexmath]
- ++++
- \\beta_x \\gamma
- ++++
-
- stem:[2+2] is 4
- MD
-
- find("#wiki_format option[value=asciidoc]").select_option
-
- fill_in(:wiki_content, with: ascii_content)
-
- page.within(".wiki-form") do
- click_button("Create page")
- end
-
- page.within ".md" do
- expect(page).to have_selector(".katex", count: 3).and have_content("2+2 is 4")
- end
- end
-
- it 'creates a wiki page with Org markup', :aggregate_failures do
- org_content = <<~ORG
- * Heading
- ** Subheading
- [[home][Link to Home]]
- ORG
-
- page.within('.wiki-form') do
- find('#wiki_format option[value=org]').select_option
- fill_in(:wiki_content, with: org_content)
- click_button('Create page')
- end
-
- expect(page).to have_selector('h1', text: 'Heading')
- expect(page).to have_selector('h2', text: 'Subheading')
- expect(page).to have_link('Link to Home', href: "/#{project.full_path}/-/wikis/home")
- end
-
- it_behaves_like 'wiki file attachments'
- end
-
- context "in a group namespace", :js do
- let(:project) { create(:project, :wiki_repo, namespace: create(:group, :public)) }
-
- it "has `Create home` as a commit message" do
- wait_for_requests
-
- expect(page).to have_field("wiki[message]", with: "Create home")
- end
-
- it "creates a page from the home page" do
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "My awesome wiki!")
-
- click_button("Create page")
- end
-
- expect(page).to have_content("Home")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
- end
- end
-
- context "when wiki is not empty", :js do
- before do
- create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page')
-
- visit(project_wikis_path(project))
- end
-
- context "in a user namespace" do
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
-
- context "via the `new wiki page` page" do
- it "creates a page with a single word" do
- click_link("New page")
-
- page.within(".wiki-form") do
- fill_in(:wiki_title, with: "foo")
- fill_in(:wiki_content, with: "My awesome wiki!")
- end
-
- # Commit message field should have correct value.
- expect(page).to have_field("wiki[message]", with: "Create foo")
-
- click_button("Create page")
-
- expect(page).to have_content("foo")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
-
- it "creates a page with spaces in the name" do
- click_link("New page")
-
- page.within(".wiki-form") do
- fill_in(:wiki_title, with: "Spaces in the name")
- fill_in(:wiki_content, with: "My awesome wiki!")
- end
-
- # Commit message field should have correct value.
- expect(page).to have_field("wiki[message]", with: "Create Spaces in the name")
-
- click_button("Create page")
-
- expect(page).to have_content("Spaces in the name")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
-
- it "creates a page with hyphens in the name" do
- click_link("New page")
-
- page.within(".wiki-form") do
- fill_in(:wiki_title, with: "hyphens-in-the-name")
- fill_in(:wiki_content, with: "My awesome wiki!")
- end
-
- # Commit message field should have correct value.
- expect(page).to have_field("wiki[message]", with: "Create hyphens in the name")
-
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "My awesome wiki!")
-
- click_button("Create page")
- end
-
- expect(page).to have_content("hyphens in the name")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
- end
-
- it "shows the emoji autocompletion dropdown" do
- click_link("New page")
-
- page.within(".wiki-form") do
- find("#wiki_content").native.send_keys("")
-
- fill_in(:wiki_content, with: ":")
- end
-
- expect(page).to have_selector(".atwho-view")
- end
- end
-
- context "in a group namespace" do
- let(:project) { create(:project, :wiki_repo, namespace: create(:group, :public)) }
-
- context "via the `new wiki page` page" do
- it "creates a page" do
- click_link("New page")
-
- page.within(".wiki-form") do
- fill_in(:wiki_title, with: "foo")
- fill_in(:wiki_content, with: "My awesome wiki!")
- end
-
- # Commit message field should have correct value.
- expect(page).to have_field("wiki[message]", with: "Create foo")
-
- click_button("Create page")
-
- expect(page).to have_content("foo")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
- end
- end
- end
-
- describe 'sidebar feature' do
- context 'when there are some existing pages' do
- before do
- create(:wiki_page, wiki: wiki, title: 'home', content: 'home')
- create(:wiki_page, wiki: wiki, title: 'another', content: 'another')
- end
-
- it 'renders a default sidebar when there is no customized sidebar' do
- visit(project_wikis_path(project))
-
- expect(page).to have_content('another')
- expect(page).not_to have_link('View All Pages')
- end
-
- context 'when there is a customized sidebar' do
- before do
- create(:wiki_page, wiki: wiki, title: '_sidebar', content: 'My customized sidebar')
- end
-
- it 'renders my customized sidebar instead of the default one' do
- visit(project_wikis_path(project))
-
- expect(page).to have_content('My customized sidebar')
- expect(page).not_to have_content('Another')
- end
- end
- end
-
- context 'when there are 15 existing pages' do
- before do
- (1..5).each { |i| create(:wiki_page, wiki: wiki, title: "my page #{i}") }
- (6..10).each { |i| create(:wiki_page, wiki: wiki, title: "parent/my page #{i}") }
- (11..15).each { |i| create(:wiki_page, wiki: wiki, title: "grandparent/parent/my page #{i}") }
- end
-
- it 'shows all pages in the sidebar' do
- visit(project_wikis_path(project))
-
- (1..15).each { |i| expect(page).to have_content("my page #{i}") }
- expect(page).not_to have_link('View All Pages')
- end
-
- context 'when there are more than 15 existing pages' do
- before do
- create(:wiki_page, wiki: wiki, title: 'my page 16')
- end
-
- it 'shows the first 15 pages in the sidebar' do
- visit(project_wikis_path(project))
-
- expect(page).to have_text('my page', count: 15)
- expect(page).to have_link('View All Pages')
- end
- end
- end
- end
-end
diff --git a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb b/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
deleted file mode 100644
index a5d865d581b..00000000000
--- a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User deletes wiki page', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
- let(:wiki_page) { create(:wiki_page, wiki: project.wiki) }
-
- before do
- sign_in(user)
- visit(project_wiki_path(project, wiki_page))
- end
-
- it 'deletes a page' do
- click_on('Edit')
- click_on('Delete')
- find('.modal-footer .btn-danger').click
-
- expect(page).to have_content('Page was successfully deleted')
- end
-end
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
deleted file mode 100644
index fdab63a56b8..00000000000
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ /dev/null
@@ -1,263 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User updates wiki page' do
- include WikiHelpers
-
- let(:user) { create(:user) }
-
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- context 'when wiki is empty' do
- before do |example|
- visit(project_wikis_path(project))
-
- wait_for_svg_to_be_loaded(example)
-
- click_link "Create your first page"
- end
-
- context 'in a user namespace' do
- let(:project) { create(:project, :wiki_repo) }
-
- it 'redirects back to the home edit page' do
- page.within(:css, '.wiki-form .form-actions') do
- click_on('Cancel')
- end
-
- expect(current_path).to eq wiki_path(project.wiki)
- end
-
- it 'updates a page that has a path', :js do
- fill_in(:wiki_title, with: 'one/two/three-test')
-
- page.within '.wiki-form' do
- fill_in(:wiki_content, with: 'wiki content')
- click_on('Create page')
- end
-
- expect(current_path).to include('one/two/three-test')
- expect(find('.wiki-pages')).to have_content('three')
-
- first(:link, text: 'three').click
-
- expect(find('.nav-text')).to have_content('three')
-
- click_on('Edit')
-
- expect(current_path).to include('one/two/three-test')
- expect(page).to have_content('Edit Page')
-
- fill_in('Content', with: 'Updated Wiki Content')
- click_on('Save changes')
-
- expect(page).to have_content('Updated Wiki Content')
- end
-
- it_behaves_like 'wiki file attachments'
- end
- end
-
- context 'when wiki is not empty' do
- let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
- let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, title: 'home', content: 'Home page') }
-
- before do
- visit(project_wikis_path(project))
-
- click_link('Edit')
- end
-
- context 'in a user namespace' do
- let(:project) { create(:project, :wiki_repo) }
-
- it 'updates a page', :js do
- # Commit message field should have correct value.
- expect(page).to have_field('wiki[message]', with: 'Update home')
-
- fill_in(:wiki_content, with: 'My awesome wiki!')
- click_button('Save changes')
-
- expect(page).to have_content('Home')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
- end
-
- it 'updates the commit message as the title is changed', :js do
- fill_in(:wiki_title, with: '& < > \ \ { } &')
-
- expect(page).to have_field('wiki[message]', with: 'Update & < > \ \ { } &')
- end
-
- it 'correctly escapes the commit message entities', :js do
- fill_in(:wiki_title, with: 'Wiki title')
-
- expect(page).to have_field('wiki[message]', with: 'Update Wiki title')
- end
-
- it 'shows a validation error message' do
- fill_in(:wiki_content, with: '')
- click_button('Save changes')
-
- expect(page).to have_selector('.wiki-form')
- expect(page).to have_content('Edit Page')
- expect(page).to have_content('The form contains the following error:')
- expect(page).to have_content("Content can't be blank")
- expect(find('textarea#wiki_content').value).to eq('')
- end
-
- it 'shows the emoji autocompletion dropdown', :js do
- find('#wiki_content').native.send_keys('')
- fill_in(:wiki_content, with: ':')
-
- expect(page).to have_selector('.atwho-view')
- end
-
- it 'shows the error message' do
- wiki_page.update(content: 'Update')
-
- click_button('Save changes')
-
- expect(page).to have_content('Someone edited the page the same time you did.')
- end
-
- it 'updates a page' do
- fill_in('Content', with: 'Updated Wiki Content')
- click_on('Save changes')
-
- expect(page).to have_content('Updated Wiki Content')
- end
-
- it 'cancels editing of a page' do
- page.within(:css, '.wiki-form .form-actions') do
- click_on('Cancel')
- end
-
- expect(current_path).to eq(project_wiki_path(project, wiki_page))
- end
-
- it_behaves_like 'wiki file attachments'
- end
-
- context 'in a group namespace' do
- let(:project) { create(:project, :wiki_repo, namespace: create(:group, :public)) }
-
- it 'updates a page', :js do
- # Commit message field should have correct value.
- expect(page).to have_field('wiki[message]', with: 'Update home')
-
- fill_in(:wiki_content, with: 'My awesome wiki!')
-
- click_button('Save changes')
-
- expect(page).to have_content('Home')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
- end
-
- it_behaves_like 'wiki file attachments'
- end
- end
-
- context 'when the page is in a subdir' do
- let!(:project) { create(:project, :wiki_repo) }
- let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
- let(:page_name) { 'page_name' }
- let(:page_dir) { "foo/bar/#{page_name}" }
- let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, title: page_dir, content: 'Home page') }
-
- before do
- visit(project_wiki_edit_path(project, wiki_page))
- end
-
- it 'moves the page to the root folder' do
- fill_in(:wiki_title, with: "/#{page_name}")
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, page_name))
- end
-
- it 'moves the page to other dir' do
- new_page_dir = "foo1/bar1/#{page_name}"
-
- fill_in(:wiki_title, with: new_page_dir)
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, new_page_dir))
- end
-
- it 'remains in the same place if title has not changed' do
- original_path = project_wiki_path(project, wiki_page)
-
- fill_in(:wiki_title, with: page_name)
-
- click_button('Save changes')
-
- expect(current_path).to eq(original_path)
- end
-
- it 'can be moved to a different dir with a different name' do
- new_page_dir = "foo1/bar1/new_page_name"
-
- fill_in(:wiki_title, with: new_page_dir)
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, new_page_dir))
- end
-
- it 'can be renamed and moved to the root folder' do
- new_name = 'new_page_name'
-
- fill_in(:wiki_title, with: "/#{new_name}")
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, new_name))
- end
-
- it 'squishes the title before creating the page' do
- new_page_dir = " foo1 / bar1 / #{page_name} "
-
- fill_in(:wiki_title, with: new_page_dir)
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, "foo1/bar1/#{page_name}"))
- end
-
- it_behaves_like 'wiki file attachments'
- end
-
- context 'when an existing page exceeds the content size limit' do
- let_it_be(:project) { create(:project, :wiki_repo) }
- let!(:wiki_page) { create(:wiki_page, wiki: project.wiki, content: "one\ntwo\nthree") }
-
- before do
- stub_application_setting(wiki_page_max_content_bytes: 10)
-
- visit wiki_page_path(wiki_page.wiki, wiki_page, action: :edit)
- end
-
- it 'allows changing the title if the content does not change' do
- fill_in 'Title', with: 'new title'
- click_on 'Save changes'
-
- expect(page).to have_content('Wiki was successfully updated.')
- end
-
- it 'shows a validation error when trying to change the content' do
- fill_in 'Content', with: 'new content'
- click_on 'Save changes'
-
- expect(page).to have_content('The form contains the following error:')
- expect(page).to have_content('Content is too long (11 Bytes). The maximum size is 10 Bytes.')
- end
- end
-end
diff --git a/spec/features/projects/wiki/user_views_wiki_empty_spec.rb b/spec/features/projects/wiki/user_views_wiki_empty_spec.rb
index 0af40a2d760..1f460f39267 100644
--- a/spec/features/projects/wiki/user_views_wiki_empty_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_empty_spec.rb
@@ -2,108 +2,86 @@
require 'spec_helper'
-RSpec.describe 'User views empty wiki' do
- let(:user) { create(:user) }
- let(:confluence_link) { 'Enable the Confluence Wiki integration' }
- let(:element) { page.find('.row.empty-state') }
-
- shared_examples 'empty wiki and accessible issues' do
- it 'show "issue tracker" message' do
- visit(project_wikis_path(project))
-
- expect(element).to have_content('This project has no wiki pages')
- expect(element).to have_content('You must be a project member')
- expect(element).to have_content('improve the wiki for this project')
- expect(element).to have_link("issue tracker", href: project_issues_path(project))
- expect(element).to have_link("Suggest wiki improvement", href: new_project_issue_path(project))
- expect(element).to have_no_link(confluence_link)
- end
- end
-
- shared_examples 'empty wiki and non-accessible issues' do
- it 'does not show "issue tracker" message' do
- visit(project_wikis_path(project))
+RSpec.describe 'Project > User views empty wiki' do
+ let_it_be(:user) { create(:user) }
- expect(element).to have_content('This project has no wiki pages')
- expect(element).to have_content('You must be a project member')
- expect(element).to have_no_link('Suggest wiki improvement')
- expect(element).to have_no_link(confluence_link)
- end
- end
+ let(:wiki) { create(:project_wiki, project: project) }
- context 'when user is logged out and issue tracker is public' do
- let(:project) { create(:project, :public, :wiki_repo) }
+ it_behaves_like 'User views empty wiki' do
+ context 'when project is public' do
+ let(:project) { create(:project, :public) }
- it_behaves_like 'empty wiki and accessible issues'
- end
+ it_behaves_like 'empty wiki message', issuable: true
- context 'when user is logged in and not a member' do
- let(:project) { create(:project, :public, :wiki_repo) }
+ context 'when issue tracker is private' do
+ let(:project) { create(:project, :public, :issues_private) }
- before do
- sign_in(user)
- end
+ it_behaves_like 'empty wiki message', issuable: false
+ end
- it_behaves_like 'empty wiki and accessible issues'
- end
+ context 'when issue tracker is disabled' do
+ let(:project) { create(:project, :public, :issues_disabled) }
- context 'when issue tracker is private' do
- let(:project) { create(:project, :public, :wiki_repo, :issues_private) }
+ it_behaves_like 'empty wiki message', issuable: false
+ end
- it_behaves_like 'empty wiki and non-accessible issues'
- end
+ context 'and user is logged in' do
+ before do
+ sign_in(user)
+ end
- context 'when issue tracker is disabled' do
- let(:project) { create(:project, :public, :wiki_repo, :issues_disabled) }
+ context 'and user is not a member' do
+ it_behaves_like 'empty wiki message', issuable: true
+ end
- it_behaves_like 'empty wiki and non-accessible issues'
- end
+ context 'and user is a member' do
+ before do
+ project.add_developer(user)
+ end
- context 'when user is logged in and a member' do
- let(:project) { create(:project, :public) }
-
- before do
- sign_in(user)
- project.add_developer(user)
+ it_behaves_like 'empty wiki message', writable: true, issuable: true
+ end
+ end
end
- it 'shows "create first page" message' do
- visit(project_wikis_path(project))
-
- expect(element).to have_content('your project', count: 2)
+ context 'when project is private' do
+ let(:project) { create(:project, :private) }
- element.click_link 'Create your first page'
+ it_behaves_like 'wiki is not found'
- expect(page).to have_button('Create page')
- end
+ context 'and user is logged in' do
+ before do
+ sign_in(user)
+ end
- it 'does not show the "enable confluence" button' do
- visit(project_wikis_path(project))
+ context 'and user is not a member' do
+ it_behaves_like 'wiki is not found'
+ end
- expect(element).to have_no_link(confluence_link)
- end
- end
+ context 'and user is a member' do
+ before do
+ project.add_developer(user)
+ end
- context 'when user is logged in and an admin' do
- let(:project) { create(:project, :public, :wiki_repo) }
+ it_behaves_like 'empty wiki message', writable: true, issuable: true
+ end
- before do
- sign_in(user)
- project.add_maintainer(user)
- end
-
- it 'shows the "enable confluence" button' do
- visit(project_wikis_path(project))
-
- expect(element).to have_link(confluence_link)
- end
+ context 'and user is a maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
- it 'does not show "enable confluence" button if confluence is already enabled' do
- create(:confluence_service, project: project)
+ it_behaves_like 'empty wiki message', writable: true, issuable: true, confluence: true
- visit(project_wikis_path(project))
+ context 'and Confluence is already enabled' do
+ before do
+ create(:confluence_service, project: project)
+ end
- expect(element).to have_no_link(confluence_link)
+ it_behaves_like 'empty wiki message', writable: true, issuable: true, confluence: false
+ end
+ end
+ end
end
end
end
diff --git a/spec/features/projects/wikis_spec.rb b/spec/features/projects/wikis_spec.rb
new file mode 100644
index 00000000000..1c66ad81145
--- /dev/null
+++ b/spec/features/projects/wikis_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe 'Project wikis' do
+ let_it_be(:user) { create(:user) }
+
+ let(:wiki) { create(:project_wiki, user: user, project: project) }
+ let(:project) { create(:project, namespace: user.namespace, creator: user) }
+
+ it_behaves_like 'User creates wiki page'
+ it_behaves_like 'User deletes wiki page'
+ it_behaves_like 'User previews wiki changes'
+ it_behaves_like 'User updates wiki page'
+ it_behaves_like 'User uses wiki shortcuts'
+ it_behaves_like 'User views AsciiDoc page with includes'
+ it_behaves_like 'User views a wiki page'
+ it_behaves_like 'User views wiki pages'
+ it_behaves_like 'User views wiki sidebar'
+end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 970500985ae..6baeb4ce368 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -6,25 +6,35 @@ RSpec.describe 'Project' do
include ProjectForksHelper
include MobileHelpers
- describe 'creating from template' do
+ describe 'template' do
let(:user) { create(:user) }
- let(:template) { Gitlab::ProjectTemplate.find(:rails) }
before do
sign_in user
visit new_project_path
end
- it "allows creation from templates", :js do
- find('#create-from-template-tab').click
- find("label[for=#{template.name}]").click
- fill_in("project_name", with: template.name)
+ shared_examples 'creates from template' do |template, sub_template_tab = nil|
+ it "is created from template", :js do
+ find('#create-from-template-tab').click
+ find(".project-template #{sub_template_tab}").click if sub_template_tab
+ find("label[for=#{template.name}]").click
+ fill_in("project_name", with: template.name)
- page.within '#content-body' do
- click_button "Create project"
+ page.within '#content-body' do
+ click_button "Create project"
+ end
+
+ expect(page).to have_content template.name
end
+ end
- expect(page).to have_content template.name
+ context 'create with project template' do
+ it_behaves_like 'creates from template', Gitlab::ProjectTemplate.find(:rails)
+ end
+
+ context 'create with sample data template' do
+ it_behaves_like 'creates from template', Gitlab::SampleDataTemplate.find(:basic), '.sample-data-templates-tab'
end
end
@@ -99,6 +109,15 @@ RSpec.describe 'Project' do
expect(page).to have_css('.home-panel-description .is-expanded')
end
end
+
+ context 'page description' do
+ before do
+ project.update_attribute(:description, '**Lorem** _ipsum_ dolor sit [amet](https://example.com)')
+ visit path
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+ end
end
describe 'project topics' do
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index f0707610c3f..3be01595502 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe 'Protected Branches', :js do
let(:admin) { create(:admin) }
let(:project) { create(:project, :repository) }
+ before do
+ stub_feature_flags(deploy_keys_on_protected_branches: false)
+ end
+
context 'logged in as developer' do
before do
project.add_developer(user)
@@ -27,7 +31,7 @@ RSpec.describe 'Protected Branches', :js do
fill_in 'branch-search', with: 'fix'
find('#branch-search').native.send_keys(:enter)
- expect(page).to have_css('.btn-remove.disabled')
+ expect(page).to have_css('.btn-danger.disabled')
end
end
end
@@ -163,4 +167,14 @@ RSpec.describe 'Protected Branches', :js do
include_examples "protected branches > access control > CE"
end
end
+
+ context 'when the users for protected branches feature is off' do
+ before do
+ stub_licensed_features(protected_refs_for_users: false)
+ end
+
+ include_examples 'when the deploy_keys_on_protected_branches FF is turned on' do
+ let(:all_dropdown_sections) { %w(Roles Deploy\ Keys) }
+ end
+ end
end
diff --git a/spec/features/reportable_note/snippets_spec.rb b/spec/features/reportable_note/snippets_spec.rb
index 4d61e5d8285..92bf304ac86 100644
--- a/spec/features/reportable_note/snippets_spec.rb
+++ b/spec/features/reportable_note/snippets_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe 'Reportable note on snippets', :js do
let_it_be(:project) { create(:project) }
before do
- stub_feature_flags(snippets_vue: false)
project.add_maintainer(user)
sign_in(user)
end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 0dff4c28270..6e18de3be7b 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -173,9 +173,9 @@ RSpec.describe 'Runners' do
it 'user enables shared runners' do
visit project_runners_path(project)
- click_on 'Enable shared Runners'
+ click_on 'Enable shared runners'
- expect(page.find('.shared-runners-description')).to have_content('Disable shared Runners')
+ expect(page.find('.shared-runners-description')).to have_content('Disable shared runners')
end
end
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index 227e75088d2..a88043c98ac 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe 'User searches for code' do
expect(page).to have_selector('.results', text: 'application.js')
expect(page).to have_selector('.file-content .code')
expect(page).to have_selector("span.line[lang='javascript']")
+ expect(page).to have_link('application.js', href: /master\/files\/js\/application.js/)
end
context 'when on a project page', :js do
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index cfda25b9ab4..5cbfacf4e48 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -30,6 +30,8 @@ RSpec.describe 'User uses header search field', :js do
before do
find('#search')
find('body').native.send_keys('s')
+
+ wait_for_all_requests
end
it 'shows the category search dropdown' do
@@ -89,9 +91,7 @@ RSpec.describe 'User uses header search field', :js do
context 'when entering text into the search field' do
it 'does not display the category search dropdown' do
- page.within('.search-input-wrap') do
- fill_in('search', with: scope_name.first(4))
- end
+ fill_in_search(scope_name.first(4))
expect(page).not_to have_selector('.dropdown-header', text: /#{scope_name}/i)
end
@@ -105,9 +105,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
end
@@ -140,9 +138,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
expect(page).to have_selector(scoped_search_link('test', group_id: group.id))
@@ -157,9 +153,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
expect(page).not_to have_selector(scoped_search_link('test', group_id: project.namespace_id))
@@ -182,9 +176,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
expect(page).to have_selector(scoped_search_link('test', group_id: group.id))
@@ -208,9 +200,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
expect(page).to have_selector(scoped_search_link('test', group_id: subgroup.id))
diff --git a/spec/features/search/user_uses_search_filters_spec.rb b/spec/features/search/user_uses_search_filters_spec.rb
index f39a1f8fe37..080cced21c3 100644
--- a/spec/features/search/user_uses_search_filters_spec.rb
+++ b/spec/features/search/user_uses_search_filters_spec.rb
@@ -12,12 +12,12 @@ RSpec.describe 'User uses search filters', :js do
project.add_reporter(user)
group.add_owner(user)
sign_in(user)
-
- visit(search_path)
end
context 'when filtering by group' do
it 'shows group projects' do
+ visit search_path
+
find('.js-search-group-dropdown').click
wait_for_requests
@@ -36,10 +36,27 @@ RSpec.describe 'User uses search filters', :js do
expect(page).to have_link(group_project.full_name)
end
end
+
+ context 'when the group filter is set' do
+ before do
+ visit search_path(search: "test", group_id: group.id, project_id: project.id)
+ end
+
+ describe 'clear filter button' do
+ it 'removes Group and Project filters' do
+ link = find('[data-testid="group-filter"] .js-search-clear')
+ params = CGI.parse(URI.parse(link[:href]).query)
+
+ expect(params).not_to include(:group_id, :project_id)
+ end
+ end
+ end
end
context 'when filtering by project' do
it 'shows a project' do
+ visit search_path
+
page.within('.project-filter') do
find('.js-search-project-dropdown').click
@@ -50,5 +67,22 @@ RSpec.describe 'User uses search filters', :js do
expect(find('.js-search-project-dropdown')).to have_content(project.full_name)
end
+
+ context 'when the project filter is set' do
+ before do
+ visit search_path(search: "test", project_id: project.id)
+ end
+
+ let(:query) { { project_id: project.id } }
+
+ describe 'clear filter button' do
+ it 'removes Project filters' do
+ link = find('.project-filter .js-search-clear')
+ params = CGI.parse(URI.parse(link[:href]).query)
+
+ expect(params).not_to include(:project_id)
+ end
+ end
+ end
end
end
diff --git a/spec/features/sentry_js_spec.rb b/spec/features/sentry_js_spec.rb
index 1d277ba7b3c..aa0ad17340a 100644
--- a/spec/features/sentry_js_spec.rb
+++ b/spec/features/sentry_js_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Sentry' do
expect(has_requested_sentry).to eq(false)
end
- it 'loads sentry if sentry is enabled' do
+ xit 'loads sentry if sentry is enabled' do
stub_sentry_settings
visit new_user_session_path
diff --git a/spec/features/snippets/internal_snippet_spec.rb b/spec/features/snippets/internal_snippet_spec.rb
index 3ce297ab22d..2fcd11c2a47 100644
--- a/spec/features/snippets/internal_snippet_spec.rb
+++ b/spec/features/snippets/internal_snippet_spec.rb
@@ -3,11 +3,8 @@
require 'spec_helper'
RSpec.describe 'Internal Snippets', :js do
- let(:internal_snippet) { create(:personal_snippet, :internal) }
-
- before do
- stub_feature_flags(snippets_vue: false)
- end
+ let(:internal_snippet) { create(:personal_snippet, :internal, :repository) }
+ let(:content) { internal_snippet.blobs.first.data.strip! }
describe 'normal user' do
before do
@@ -17,13 +14,13 @@ RSpec.describe 'Internal Snippets', :js do
it 'sees internal snippets' do
visit snippet_path(internal_snippet)
- expect(page).to have_content(internal_snippet.content)
+ expect(page).to have_content(content)
end
it 'sees raw internal snippets' do
visit raw_snippet_path(internal_snippet)
- expect(page).to have_content(internal_snippet.content)
+ expect(page).to have_content(content)
end
end
end
diff --git a/spec/features/snippets/notes_on_personal_snippets_spec.rb b/spec/features/snippets/notes_on_personal_snippets_spec.rb
index e98bb22d3ea..ce9a2d1461e 100644
--- a/spec/features/snippets/notes_on_personal_snippets_spec.rb
+++ b/spec/features/snippets/notes_on_personal_snippets_spec.rb
@@ -18,7 +18,6 @@ RSpec.describe 'Comments on personal snippets', :js do
end
before do
- stub_feature_flags(snippets_vue: false)
sign_in user
visit snippet_path(snippet)
diff --git a/spec/features/snippets/private_snippets_spec.rb b/spec/features/snippets/private_snippets_spec.rb
index 6b45f3485e7..03745c1025a 100644
--- a/spec/features/snippets/private_snippets_spec.rb
+++ b/spec/features/snippets/private_snippets_spec.rb
@@ -4,19 +4,18 @@ require 'spec_helper'
RSpec.describe 'Private Snippets', :js do
let(:user) { create(:user) }
+ let(:private_snippet) { create(:personal_snippet, :repository, :private, author: user) }
+ let(:content) { private_snippet.blobs.first.data.strip! }
before do
- stub_feature_flags(snippets_vue: false)
sign_in(user)
end
it 'Private Snippet renders for creator' do
- private_snippet = create(:personal_snippet, :private, author: user)
-
visit snippet_path(private_snippet)
wait_for_requests
- expect(page).to have_content(private_snippet.content)
+ expect(page).to have_content(content)
expect(page).not_to have_css('.js-embed-btn')
expect(page).not_to have_css('.js-share-btn')
end
diff --git a/spec/features/snippets/public_snippets_spec.rb b/spec/features/snippets/public_snippets_spec.rb
index 4b72b33245d..d2dc85a9614 100644
--- a/spec/features/snippets/public_snippets_spec.rb
+++ b/spec/features/snippets/public_snippets_spec.rb
@@ -3,27 +3,24 @@
require 'spec_helper'
RSpec.describe 'Public Snippets', :js do
- before do
- stub_feature_flags(snippets_vue: false)
- end
+ let(:public_snippet) { create(:personal_snippet, :public, :repository) }
+ let(:content) { public_snippet.blobs.first.data.strip! }
it 'Unauthenticated user should see public snippets' do
- public_snippet = create(:personal_snippet, :public)
+ url = Gitlab::UrlBuilder.build(public_snippet)
visit snippet_path(public_snippet)
wait_for_requests
- expect(page).to have_content(public_snippet.content)
- expect(page).to have_css('.js-embed-btn', visible: false)
- expect(page).to have_css('.js-share-btn', visible: false)
- expect(page.find('.js-snippet-url-area')).to be_readonly
+ expect(page).to have_content(content)
+ click_button('Embed')
+ expect(page).to have_field('Embed', readonly: true, with: "<script src=\"#{url}.js\"></script>")
+ expect(page).to have_field('Share', readonly: true, with: url)
end
it 'Unauthenticated user should see raw public snippets' do
- public_snippet = create(:personal_snippet, :public)
-
visit raw_snippet_path(public_snippet)
- expect(page).to have_content(public_snippet.content)
+ expect(page).to have_content(content)
end
end
diff --git a/spec/features/snippets/show_spec.rb b/spec/features/snippets/show_spec.rb
index 981ed12d540..2103d362f94 100644
--- a/spec/features/snippets/show_spec.rb
+++ b/spec/features/snippets/show_spec.rb
@@ -6,10 +6,6 @@ RSpec.describe 'Snippet', :js do
let_it_be(:user) { create(:user) }
let_it_be(:snippet) { create(:personal_snippet, :public, :repository, author: user) }
- before do
- stub_feature_flags(snippets_vue: false)
- end
-
it_behaves_like 'show and render proper snippet blob' do
let(:anchor) { nil }
@@ -20,12 +16,8 @@ RSpec.describe 'Snippet', :js do
end
end
- it_behaves_like 'showing user status' do
- let(:file_path) { 'files/ruby/popen.rb' }
- let(:user_with_status) { snippet.author }
-
- subject { visit snippet_path(snippet) }
- end
+ # it_behaves_like 'showing user status' do
+ # This will be handled in https://gitlab.com/gitlab-org/gitlab/-/issues/262394
it_behaves_like 'does not show New Snippet button' do
let(:file_path) { 'files/ruby/popen.rb' }
diff --git a/spec/features/snippets/spam_snippets_spec.rb b/spec/features/snippets/spam_snippets_spec.rb
index 1483ba4bf8f..54a56ac962c 100644
--- a/spec/features/snippets/spam_snippets_spec.rb
+++ b/spec/features/snippets/spam_snippets_spec.rb
@@ -2,9 +2,11 @@
require 'spec_helper'
-RSpec.shared_examples_for 'snippet editor' do
+RSpec.describe 'snippet editor with spam', skip: "Will be handled in https://gitlab.com/gitlab-org/gitlab/-/issues/217722" do
include_context 'includes Spam constants'
+ let_it_be(:user) { create(:user) }
+
def description_field
find('.js-description-input').find('input,textarea')
end
@@ -119,24 +121,3 @@ RSpec.shared_examples_for 'snippet editor' do
end
end
end
-
-RSpec.describe 'User creates snippet', :js do
- let_it_be(:user) { create(:user) }
-
- context 'Vue application' do
- before do
- stub_feature_flags(snippets_edit_vue: false)
- end
-
- it_behaves_like "snippet editor"
- end
-
- context 'non-Vue application' do
- before do
- stub_feature_flags(snippets_vue: false)
- stub_feature_flags(snippets_edit_vue: false)
- end
-
- it_behaves_like "snippet editor"
- end
-end
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index eabca028b8c..1e51210c2b8 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -13,163 +13,127 @@ RSpec.describe 'User creates snippet', :js do
let(:md_description) { 'My Snippet **Description**' }
let(:description) { 'My Snippet Description' }
let(:created_snippet) { Snippet.last }
- let(:snippet_title_field) { 'personal_snippet_title' }
+ let(:snippet_title_field) { 'snippet-title' }
- def description_field
- find('.js-description-input').find('input,textarea')
+ before do
+ sign_in(user)
+
+ visit new_snippet_path
end
- shared_examples 'snippet creation' do
- def fill_form
- snippet_fill_in_form(title: title, content: file_content, description: md_description)
- end
+ def fill_form
+ snippet_fill_in_form(title: title, content: file_content, description: md_description)
+ end
- it 'Authenticated user creates a snippet' do
- fill_form
+ it 'Authenticated user creates a snippet' do
+ fill_form
- click_button('Create snippet')
- wait_for_requests
+ click_button('Create snippet')
+ wait_for_requests
- expect(page).to have_content(title)
- page.within(snippet_description_view_selector) do
- expect(page).to have_content(description)
- expect(page).to have_selector('strong')
- end
- expect(page).to have_content(file_content)
+ expect(page).to have_content(title)
+ page.within(snippet_description_view_selector) do
+ expect(page).to have_content(description)
+ expect(page).to have_selector('strong')
end
+ expect(page).to have_content(file_content)
+ end
- it 'uploads a file when dragging into textarea' do
- fill_form
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
-
- expect(snippet_description_value).to have_content('banana_sample')
-
- click_button('Create snippet')
- wait_for_requests
-
- link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
- expect(link).to match(%r{/uploads/-/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
+ it 'uploads a file when dragging into textarea' do
+ fill_form
+ dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
- reqs = inspect_requests { visit("#{link}?ran=#{SecureRandom.base64(20)}") }
- expect(reqs.first.status_code).to eq(200)
- end
+ expect(snippet_description_value).to have_content('banana_sample')
- context 'when the git operation fails' do
- let(:error) { 'Error creating the snippet' }
+ click_button('Create snippet')
+ wait_for_requests
- before do
- allow_next_instance_of(Snippets::CreateService) do |instance|
- allow(instance).to receive(:create_commit).and_raise(StandardError, error)
- end
+ link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
+ expect(link).to match(%r{/uploads/-/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
- fill_form
- click_button('Create snippet')
- wait_for_requests
- end
+ reqs = inspect_requests { visit("#{link}?ran=#{SecureRandom.base64(20)}") }
+ expect(reqs.first.status_code).to eq(200)
+ end
- it 'renders the new page and displays the error' do
- expect(page).to have_content(error)
- expect(page).to have_content('New Snippet')
+ context 'when the git operation fails' do
+ let(:error) { 'Error creating the snippet' }
- action = find('form.snippet-form')['action']
- expect(action).to include("/snippets")
- end
- end
-
- context 'when snippets default visibility level is restricted' do
- before do
- stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE],
- default_snippet_visibility: Gitlab::VisibilityLevel::PRIVATE)
+ before do
+ allow_next_instance_of(Snippets::CreateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError, error)
end
- it 'creates a snippet using the lowest available visibility level as default' do
- visit new_snippet_path
-
- fill_form
-
- click_button('Create snippet')
- wait_for_requests
-
- expect(find('.blob-content')).to have_content(file_content)
- expect(Snippet.last.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
- end
+ fill_form
+ click_button('Create snippet')
+ wait_for_requests
end
- it_behaves_like 'personal snippet with references' do
- let(:container) { snippet_description_view_selector }
- let(:md_description) { references }
+ it 'renders the new page and displays the error' do
+ expect(page).to have_content(error)
+ expect(page).to have_content('New Snippet')
- subject do
- fill_form
- click_button('Create snippet')
-
- wait_for_requests
- end
+ action = find('form.snippet-form')['action']
+ expect(action).to include("/snippets")
end
end
- context 'Vue application' do
- let(:snippet_description_field) { 'snippet-description' }
- let(:snippet_description_view_selector) { '.snippet-header .snippet-description' }
-
+ context 'when snippets default visibility level is restricted' do
before do
- sign_in(user)
-
- visit new_snippet_path
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE],
+ default_snippet_visibility: Gitlab::VisibilityLevel::PRIVATE)
end
- it_behaves_like 'snippet creation'
+ it 'creates a snippet using the lowest available visibility level as default' do
+ visit new_snippet_path
- it 'validation fails for the first time' do
- fill_in snippet_title_field, with: title
+ fill_form
- expect(page).not_to have_button('Create snippet')
+ click_button('Create snippet')
+ wait_for_requests
- snippet_fill_in_form(title: title, content: file_content)
- expect(page).to have_button('Create snippet')
+ expect(find('.blob-content')).to have_content(file_content)
+ expect(Snippet.last.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
end
end
- context 'non-Vue application' do
- let(:snippet_description_field) { 'personal_snippet_description' }
- let(:snippet_description_view_selector) { '.snippet-header .description' }
+ it_behaves_like 'personal snippet with references' do
+ let(:container) { snippet_description_view_selector }
+ let(:md_description) { references }
- before do
- stub_feature_flags(snippets_vue: false)
- stub_feature_flags(snippets_edit_vue: false)
-
- sign_in(user)
+ subject do
+ fill_form
+ click_button('Create snippet')
- visit new_snippet_path
+ wait_for_requests
end
+ end
- it_behaves_like 'snippet creation'
+ it 'validation fails for the first time' do
+ fill_in snippet_title_field, with: title
- it 'validation fails for the first time' do
- fill_in snippet_title_field, with: title
- click_button('Create snippet')
+ expect(page).not_to have_button('Create snippet')
- expect(page).to have_selector('#error_explanation')
- end
+ snippet_fill_in_form(title: title, content: file_content)
+ expect(page).to have_button('Create snippet')
+ end
- it 'previews a snippet with file' do
- # Click placeholder first to expand full description field
- description_field.click
- fill_in snippet_description_field, with: 'My Snippet'
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
- find('.js-md-preview-button').click
+ it 'previews a snippet with file' do
+ # Click placeholder first to expand full description field
+ snippet_fill_in_description('My Snippet')
+ dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
+ find('.js-md-preview-button').click
- page.within('.md-preview-holder') do
- expect(page).to have_content('My Snippet')
+ page.within('.md-preview-holder') do
+ expect(page).to have_content('My Snippet')
- link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
- expect(link).to match(%r{/uploads/-/system/user/#{user.id}/\h{32}/banana_sample\.gif\z})
+ link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
+ expect(link).to match(%r{/uploads/-/system/user/#{user.id}/\h{32}/banana_sample\.gif\z})
- # Adds a cache buster for checking if the image exists as Selenium is now handling the cached requests
- # not anymore as requests when they come straight from memory cache.
- reqs = inspect_requests { visit("#{link}?ran=#{SecureRandom.base64(20)}") }
- expect(reqs.first.status_code).to eq(200)
- end
+ # Adds a cache buster for checking that the image exists, since Selenium now serves
+ # cached assets straight from the memory cache and they no longer register as requests.
+ # accept_confirm is needed because of https://gitlab.com/gitlab-org/gitlab/-/issues/262102
+ reqs = inspect_requests { accept_confirm { visit("#{link}?ran=#{SecureRandom.base64(20)}") } }
+ expect(reqs.first.status_code).to eq(200)
end
end
end
diff --git a/spec/features/snippets/user_deletes_snippet_spec.rb b/spec/features/snippets/user_deletes_snippet_spec.rb
index d7cfc67df13..e896f7eb25b 100644
--- a/spec/features/snippets/user_deletes_snippet_spec.rb
+++ b/spec/features/snippets/user_deletes_snippet_spec.rb
@@ -2,21 +2,23 @@
require 'spec_helper'
-RSpec.describe 'User deletes snippet' do
+RSpec.describe 'User deletes snippet', :js do
let(:user) { create(:user) }
let(:content) { 'puts "test"' }
- let(:snippet) { create(:personal_snippet, :public, content: content, author: user) }
+ let(:snippet) { create(:personal_snippet, :repository, :public, content: content, author: user) }
before do
sign_in(user)
- stub_feature_flags(snippets_vue: false)
-
visit snippet_path(snippet)
end
it 'deletes the snippet' do
- first(:link, 'Delete').click
+ expect(page).to have_content(snippet.title)
+
+ click_button('Delete')
+ click_button('Delete snippet')
+ wait_for_requests
expect(page).not_to have_content(snippet.title)
end
diff --git a/spec/features/snippets/user_edits_snippet_spec.rb b/spec/features/snippets/user_edits_snippet_spec.rb
index 9a83eb58b63..a04c59b53d2 100644
--- a/spec/features/snippets/user_edits_snippet_spec.rb
+++ b/spec/features/snippets/user_edits_snippet_spec.rb
@@ -11,107 +11,77 @@ RSpec.describe 'User edits snippet', :js do
let_it_be(:user) { create(:user) }
let_it_be(:snippet, reload: true) { create(:personal_snippet, :repository, :public, file_name: file_name, content: content, author: user) }
- let(:snippet_title_field) { 'personal_snippet_title' }
+ before do
+ sign_in(user)
- shared_examples 'snippet editing' do
- it 'displays the snippet blob path and content' do
- blob = snippet.blobs.first
-
- aggregate_failures do
- expect(snippet_get_first_blob_path).to eq blob.path
- expect(snippet_get_first_blob_value).to have_content(blob.data.strip)
- end
- end
-
- it 'updates the snippet' do
- fill_in snippet_title_field, with: 'New Snippet Title'
-
- click_button('Save changes')
- wait_for_requests
-
- expect(page).to have_content('New Snippet Title')
- end
-
- it 'updates the snippet with files attached' do
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
- expect(snippet_description_value).to have_content('banana_sample')
+ visit edit_snippet_path(snippet)
+ wait_for_all_requests
+ end
- click_button('Save changes')
- wait_for_requests
+ it 'displays the snippet blob path and content' do
+ blob = snippet.blobs.first
- link = find('a.no-attachment-icon img:not(.lazy)[alt="banana_sample"]')['src']
- expect(link).to match(%r{/uploads/-/system/personal_snippet/#{snippet.id}/\h{32}/banana_sample\.gif\z})
+ aggregate_failures do
+ expect(snippet_get_first_blob_path).to eq blob.path
+ expect(snippet_get_first_blob_value).to have_content(blob.data.strip)
end
+ end
- it 'updates the snippet to make it internal' do
- choose 'Internal'
-
- click_button 'Save changes'
- wait_for_requests
+ it 'updates the snippet' do
+ snippet_fill_in_title('New Snippet Title')
+ expect(page).not_to have_selector('.gl-spinner')
- expect(page).to have_no_selector('[data-testid="lock-icon"]')
- expect(page).to have_selector('[data-testid="shield-icon"]')
- end
+ click_button('Save changes')
+ wait_for_requests
- it 'updates the snippet to make it public' do
- choose 'Public'
+ expect(page).to have_content('New Snippet Title')
+ end
- click_button 'Save changes'
- wait_for_requests
+ it 'updates the snippet with files attached' do
+ dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
+ expect(snippet_description_value).to have_content('banana_sample')
- expect(page).to have_no_selector('[data-testid="lock-icon"]')
- expect(page).to have_selector('[data-testid="earth-icon"]')
- end
+ click_button('Save changes')
+ wait_for_requests
- context 'when the git operation fails' do
- before do
- allow_next_instance_of(Snippets::UpdateService) do |instance|
- allow(instance).to receive(:create_commit).and_raise(StandardError, 'Error Message')
- end
+ link = find('a.no-attachment-icon img:not(.lazy)[alt="banana_sample"]')['src']
+ expect(link).to match(%r{/uploads/-/system/personal_snippet/#{snippet.id}/\h{32}/banana_sample\.gif\z})
+ end
- fill_in snippet_title_field, with: 'New Snippet Title'
- fill_in snippet_blob_path_field, with: 'new_file_name', match: :first
+ it 'updates the snippet to make it internal' do
+ snippet_fill_in_visibility('Internal')
- click_button('Save changes')
- end
+ click_button 'Save changes'
+ wait_for_requests
- it 'renders edit page and displays the error' do
- expect(page.find('.flash-container')).to have_content('Error updating the snippet - Error Message')
- expect(page).to have_content('Edit Snippet')
- end
- end
+ expect(page).to have_no_selector('[data-testid="lock-icon"]')
+ expect(page).to have_selector('[data-testid="shield-icon"]')
end
- context 'Vue application' do
- it_behaves_like 'snippet editing' do
- let(:snippet_blob_path_field) { 'snippet_file_name' }
- let(:snippet_blob_content_selector) { '.file-content' }
- let(:snippet_description_field) { 'snippet-description' }
+ it 'updates the snippet to make it public' do
+ snippet_fill_in_visibility('Public')
- before do
- sign_in(user)
+ click_button 'Save changes'
+ wait_for_requests
- visit edit_snippet_path(snippet)
- wait_for_all_requests
- end
- end
+ expect(page).to have_no_selector('[data-testid="lock-icon"]')
+ expect(page).to have_selector('[data-testid="earth-icon"]')
end
- context 'non-Vue application' do
- it_behaves_like 'snippet editing' do
- let(:snippet_blob_path_field) { 'personal_snippet_file_name' }
- let(:snippet_blob_content_selector) { '.file-content' }
- let(:snippet_description_field) { 'personal_snippet_description' }
+ context 'when the git operation fails' do
+ before do
+ allow_next_instance_of(Snippets::UpdateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError, 'Error Message')
+ end
- before do
- stub_feature_flags(snippets_vue: false)
- stub_feature_flags(snippets_edit_vue: false)
+ snippet_fill_in_form(title: 'New Snippet Title', file_name: 'new_file_name')
- sign_in(user)
+ click_button('Save changes')
+ end
- visit edit_snippet_path(snippet)
- wait_for_all_requests
- end
+ it 'renders edit page and displays the error' do
+ expect(page.find('.flash-container')).to have_content('Error updating the snippet - Error Message')
+ expect(page).to have_content('Edit Snippet')
end
end
end
diff --git a/spec/features/snippets_spec.rb b/spec/features/snippets_spec.rb
index 75309ca3e7c..8cdb4bc3344 100644
--- a/spec/features/snippets_spec.rb
+++ b/spec/features/snippets_spec.rb
@@ -18,11 +18,8 @@ RSpec.describe 'Snippets' do
describe 'rendering engine' do
let_it_be(:snippet) { create(:personal_snippet, :public) }
- let(:snippets_vue_feature_flag_enabled) { true }
before do
- stub_feature_flags(snippets_vue: snippets_vue_feature_flag_enabled)
-
visit snippet_path(snippet)
end
@@ -30,14 +27,5 @@ RSpec.describe 'Snippets' do
expect(page).to have_selector('#js-snippet-view')
expect(page).not_to have_selector('.personal-snippets')
end
-
- context 'when feature flag is disabled' do
- let(:snippets_vue_feature_flag_enabled) { false }
-
- it 'renders HAML application and not Vue' do
- expect(page).not_to have_selector('#js-snippet-view')
- expect(page).to have_selector('.personal-snippets')
- end
- end
end
end
diff --git a/spec/features/static_site_editor_spec.rb b/spec/features/static_site_editor_spec.rb
index b67e47b6ac4..03085917d67 100644
--- a/spec/features/static_site_editor_spec.rb
+++ b/spec/features/static_site_editor_spec.rb
@@ -6,18 +6,71 @@ RSpec.describe 'Static Site Editor' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
+ let(:sse_path) { project_show_sse_path(project, 'master/README.md') }
+
+ before_all do
+ project.add_developer(user)
+ end
+
before do
- project.add_maintainer(user)
sign_in(user)
+ end
+
+ context "when no config file is present" do
+ before do
+ visit sse_path
+ end
- visit project_show_sse_path(project, 'master/README.md')
+ it 'renders SSE page with all generated config values and default config file values' do
+ node = page.find('#static-site-editor')
+
+ # assert generated config values are present
+ expect(node['data-base-url']).to eq("/#{project.full_path}/-/sse/master%2FREADME.md")
+ expect(node['data-branch']).to eq('master')
+ expect(node['data-commit-id']).to match(/\A[0-9a-f]{40}\z/)
+ expect(node['data-is-supported-content']).to eq('true')
+ expect(node['data-merge-requests-illustration-path'])
+ .to match(%r{/assets/illustrations/merge_requests-.*\.svg})
+ expect(node['data-namespace']).to eq(project.namespace.full_path)
+ expect(node['data-project']).to eq(project.path)
+ expect(node['data-project-id']).to eq(project.id.to_s)
+
+ # assert default config file values are present
+ expect(node['data-image-upload-path']).to eq('source/images')
+ expect(node['data-mounts']).to eq('[{"source":"source","target":""}]')
+ expect(node['data-static-site-generator']).to eq('middleman')
+ end
end
- it 'renders Static Site Editor page with generated and file attributes' do
- # assert generated config value is present
- expect(page).to have_css('#static-site-editor[data-branch="master"]')
+ context "when a config file is present" do
+ let(:config_file_yml) do
+ <<~YAML
+ image_upload_path: custom-image-upload-path
+ mounts:
+ - source: source1
+ target: ""
+ - source: source2
+ target: target2
+ static_site_generator: middleman
+ YAML
+ end
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:blob_data_at).and_return(config_file_yml)
+ end
+
+ visit sse_path
+ end
+
+ it 'renders Static Site Editor page values read from config file' do
+ node = page.find('#static-site-editor')
- # assert file config value is present
- expect(page).to have_css('#static-site-editor[data-static-site-generator="middleman"]')
+ # assert user-specified config file values are present
+ expected_mounts = '[{"source":"source1","target":""},{"source":"source2","target":"target2"}]'
+ expect(node['data-image-upload-path']).to eq('custom-image-upload-path')
+ expect(node['data-mounts']).to eq(expected_mounts)
+ expect(node['data-static-site-generator']).to eq('middleman')
+ end
end
end
diff --git a/spec/features/tags/developer_deletes_tag_spec.rb b/spec/features/tags/developer_deletes_tag_spec.rb
index de9296bc08e..7c4c6f54685 100644
--- a/spec/features/tags/developer_deletes_tag_spec.rb
+++ b/spec/features/tags/developer_deletes_tag_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Developer deletes tag' do
+RSpec.describe 'Developer deletes tag', :js do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, :repository, namespace: group) }
@@ -13,11 +13,12 @@ RSpec.describe 'Developer deletes tag' do
visit project_tags_path(project)
end
- context 'from the tags list page', :js do
+ context 'from the tags list page' do
it 'deletes the tag' do
expect(page).to have_content 'v1.1.0'
- delete_tag 'v1.1.0'
+ container = page.find('.content .flex-row', text: 'v1.1.0')
+ delete_tag container
expect(page).not_to have_content 'v1.1.0'
end
@@ -29,15 +30,15 @@ RSpec.describe 'Developer deletes tag' do
expect(current_path).to eq(
project_tag_path(project, 'v1.0.0'))
- click_on 'Delete tag'
+ container = page.find('.nav-controls')
+ delete_tag container
- expect(current_path).to eq(
- project_tags_path(project))
+ expect(current_path).to eq("#{project_tags_path(project)}/")
expect(page).not_to have_content 'v1.0.0'
end
end
- context 'when pre-receive hook fails', :js do
+ context 'when pre-receive hook fails' do
before do
allow_next_instance_of(Gitlab::GitalyClient::OperationService) do |instance|
allow(instance).to receive(:rm_tag)
@@ -46,15 +47,17 @@ RSpec.describe 'Developer deletes tag' do
end
it 'shows the error message' do
- delete_tag 'v1.1.0'
+ container = page.find('.content .flex-row', text: 'v1.1.0')
+ delete_tag container
expect(page).to have_content('Do not delete tags')
end
end
- def delete_tag(tag)
- page.within('.content') do
- accept_confirm { find("li > .row-fixed-content.controls a.btn-remove[href='/#{project.full_path}/-/tags/#{tag}']").click }
- end
+ def delete_tag(container)
+ container.find('.js-remove-tag').click
+
+ page.within('.modal') { click_button('Delete tag') }
+ wait_for_requests
end
end
diff --git a/spec/features/tags/developer_views_tags_spec.rb b/spec/features/tags/developer_views_tags_spec.rb
index 4888611472c..6bae53afe6f 100644
--- a/spec/features/tags/developer_views_tags_spec.rb
+++ b/spec/features/tags/developer_views_tags_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Developer views tags' do
+ include RepoHelpers
+
let(:user) { create(:user) }
let(:group) { create(:group) }
@@ -15,10 +17,13 @@ RSpec.describe 'Developer views tags' do
let(:project) { create(:project_empty_repo, namespace: group) }
before do
- visit project_path(project)
- click_on 'Add README'
- fill_in :commit_message, with: 'Add a README file', visible: true
- click_button 'Commit changes'
+ project.repository.create_file(
+ user,
+ 'README.md',
+ 'Example readme',
+ message: 'Add README',
+ branch_name: 'master')
+
visit project_tags_path(project)
end
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index a9cfe794177..0f8daaf8e15 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Task Lists' do
+RSpec.describe 'Task Lists', :js do
include Warden::Test::Helpers
let_it_be(:project) { create(:project, :public, :repository) }
@@ -38,41 +38,7 @@ RSpec.describe 'Task Lists' do
MARKDOWN
end
- let(:nested_tasks_markdown) do
- <<-EOT.strip_heredoc
- - [ ] Task a
- - [x] Task a.1
- - [ ] Task a.2
- - [ ] Task b
-
- 1. [ ] Task 1
- 1. [ ] Task 1.1
- 1. [x] Task 1.2
- EOT
- end
-
- let(:commented_tasks_markdown) do
- <<-EOT.strip_heredoc
- <!--
- - [ ] a
- -->
-
- - [ ] b
- EOT
- end
-
- let(:summary_no_blank_line_markdown) do
- <<-EOT.strip_heredoc
- <details>
- <summary>No blank line after summary element breaks task list</summary>
- 1. [ ] People Ops: do such and such
- </details>
-
- * [ ] Task 1
- EOT
- end
-
- before(:all) do
+ before_all do
project.add_maintainer(user)
project.add_guest(user2)
end
@@ -86,7 +52,7 @@ RSpec.describe 'Task Lists' do
end
describe 'for Issues' do
- describe 'multiple tasks', :js do
+ describe 'multiple tasks' do
let!(:issue) { create(:issue, description: markdown, author: user, project: project) }
it 'renders' do
@@ -127,7 +93,7 @@ RSpec.describe 'Task Lists' do
end
end
- describe 'single incomplete task', :js do
+ describe 'single incomplete task' do
let!(:issue) { create(:issue, description: singleIncompleteMarkdown, author: user, project: project) }
it 'renders' do
@@ -146,7 +112,7 @@ RSpec.describe 'Task Lists' do
end
end
- describe 'single complete task', :js do
+ describe 'single complete task' do
let!(:issue) { create(:issue, description: singleCompleteMarkdown, author: user, project: project) }
it 'renders' do
@@ -175,7 +141,7 @@ RSpec.describe 'Task Lists' do
project: project, author: user)
end
- it 'renders for note body', :js do
+ it 'renders for note body' do
visit_issue(project, issue)
expect(page).to have_selector('.note ul.task-list', count: 1)
@@ -183,14 +149,14 @@ RSpec.describe 'Task Lists' do
expect(page).to have_selector('.note ul input[checked]', count: 2)
end
- it 'contains the required selectors', :js do
+ it 'contains the required selectors' do
visit_issue(project, issue)
expect(page).to have_selector('.note .js-task-list-container')
expect(page).to have_selector('.note .js-task-list-container .task-list .task-list-item .task-list-item-checkbox')
end
- it 'is only editable by author', :js do
+ it 'is only editable by author' do
visit_issue(project, issue)
expect(page).to have_selector('.js-task-list-container')
@@ -209,7 +175,7 @@ RSpec.describe 'Task Lists' do
project: project, author: user)
end
- it 'renders for note body', :js do
+ it 'renders for note body' do
visit_issue(project, issue)
expect(page).to have_selector('.note ul.task-list', count: 1)
@@ -224,7 +190,7 @@ RSpec.describe 'Task Lists' do
project: project, author: user)
end
- it 'renders for note body', :js do
+ it 'renders for note body' do
visit_issue(project, issue)
expect(page).to have_selector('.note ul.task-list', count: 1)
@@ -240,7 +206,7 @@ RSpec.describe 'Task Lists' do
end
shared_examples 'multiple tasks' do
- it 'renders for description', :js do
+ it 'renders for description' do
visit_merge_request(project, merge)
wait_for_requests
@@ -249,7 +215,7 @@ RSpec.describe 'Task Lists' do
expect(page).to have_selector('ul input[checked]', count: 2)
end
- it 'contains the required selectors', :js do
+ it 'contains the required selectors' do
visit_merge_request(project, merge)
wait_for_requests
@@ -261,7 +227,7 @@ RSpec.describe 'Task Lists' do
expect(page).to have_selector('form.js-issuable-update')
end
- it 'is only editable by author', :js do
+ it 'is only editable by author' do
visit_merge_request(project, merge)
wait_for_requests
@@ -300,7 +266,7 @@ RSpec.describe 'Task Lists' do
describe 'single incomplete task' do
let!(:merge) { create(:merge_request, :simple, description: singleIncompleteMarkdown, author: user, source_project: project) }
- it 'renders for description', :js do
+ it 'renders for description' do
visit_merge_request(project, merge)
wait_for_requests
@@ -319,7 +285,7 @@ RSpec.describe 'Task Lists' do
describe 'single complete task' do
let!(:merge) { create(:merge_request, :simple, description: singleCompleteMarkdown, author: user, source_project: project) }
- it 'renders for description', :js do
+ it 'renders for description' do
visit_merge_request(project, merge)
wait_for_requests
@@ -337,7 +303,17 @@ RSpec.describe 'Task Lists' do
end
describe 'markdown task edge cases' do
- describe 'commented tasks', :js do
+ describe 'commented tasks' do
+ let(:commented_tasks_markdown) do
+ <<-EOT.strip_heredoc
+ <!--
+ - [ ] a
+ -->
+
+ - [ ] b
+ EOT
+ end
+
let!(:issue) { create(:issue, description: commented_tasks_markdown, author: user, project: project) }
it 'renders' do
@@ -360,7 +336,18 @@ RSpec.describe 'Task Lists' do
end
end
- describe 'summary with no blank line', :js do
+ describe 'summary with no blank line' do
+ let(:summary_no_blank_line_markdown) do
+ <<-EOT.strip_heredoc
+ <details>
+ <summary>No blank line after summary element breaks task list</summary>
+ 1. [ ] People Ops: do such and such
+ </details>
+
+ * [ ] Task 1
+ EOT
+ end
+
let!(:issue) { create(:issue, description: summary_no_blank_line_markdown, author: user, project: project) }
it 'renders' do
@@ -382,5 +369,31 @@ RSpec.describe 'Task Lists' do
expect(page).to have_selector('ul input[checked]', count: 1)
end
end
+
+ describe 'markdown starting with new line character' do
+ let(:markdown_starting_with_new_line) do
+ <<-EOT.strip_heredoc
+
+ - [ ] Task 1
+ EOT
+ end
+
+ let(:merge_request) { create(:merge_request, description: markdown_starting_with_new_line, author: user, source_project: project) }
+
+ it 'allows the task to be checked' do
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+
+ expect(page).to have_selector('ul input[checked]', count: 0)
+
+ find('.task-list-item-checkbox').click
+ wait_for_requests
+
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+
+ expect(page).to have_selector('ul input[checked]', count: 1)
+ end
+ end
end
end
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 4be27673adf..6fa805d8c74 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -19,114 +19,132 @@ RSpec.describe 'Triggers', :js do
visit project_settings_ci_cd_path(@project)
end
- describe 'create trigger workflow' do
- it 'prevents adding new trigger with no description' do
- fill_in 'trigger_description', with: ''
- click_button 'Add trigger'
-
- # See if input has error due to empty value
- expect(page.find('form.gl-show-field-errors .gl-field-error')).to be_visible
- end
+ shared_examples 'triggers page' do
+ describe 'create trigger workflow' do
+ it 'prevents adding new trigger with no description' do
+ fill_in 'trigger_description', with: ''
+ click_button 'Add trigger'
+
+ # See if input has error due to empty value
+ expect(page.find('form.gl-show-field-errors .gl-field-error')).to be_visible
+ end
- it 'adds new trigger with description' do
- fill_in 'trigger_description', with: 'trigger desc'
- click_button 'Add trigger'
+ it 'adds new trigger with description' do
+ fill_in 'trigger_description', with: 'trigger desc'
+ click_button 'Add trigger'
- # See if "trigger creation successful" message displayed and description and owner are correct
- expect(page.find('.flash-notice')).to have_content 'Trigger was created successfully.'
- expect(page.find('.triggers-list')).to have_content 'trigger desc'
- expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
+ aggregate_failures 'display creation notice and trigger is created' do
+ expect(page.find('.flash-notice')).to have_content 'Trigger was created successfully.'
+ expect(page.find('.triggers-list')).to have_content 'trigger desc'
+ expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
+ end
+ end
end
- end
-
- describe 'edit trigger workflow' do
- let(:new_trigger_title) { 'new trigger' }
- it 'click on edit trigger opens edit trigger page' do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+ describe 'edit trigger workflow' do
+ let(:new_trigger_title) { 'new trigger' }
- # See if edit page has correct descrption
- find('a[title="Edit"]').send_keys(:return)
- expect(page.find('#trigger_description').value).to have_content 'trigger desc'
- end
+ it 'click on edit trigger opens edit trigger page' do
+ create(:ci_trigger, owner: user, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
- it 'edit trigger and save' do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+ # See if edit page has correct description
+ find('a[title="Edit"]').send_keys(:return)
+ expect(page.find('#trigger_description').value).to have_content 'trigger desc'
+ end
- # See if edit page opens, then fill in new description and save
- find('a[title="Edit"]').send_keys(:return)
- fill_in 'trigger_description', with: new_trigger_title
- click_button 'Save trigger'
+ it 'edit trigger and save' do
+ create(:ci_trigger, owner: user, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
- # See if "trigger updated successfully" message displayed and description and owner are correct
- expect(page.find('.flash-notice')).to have_content 'Trigger was successfully updated.'
- expect(page.find('.triggers-list')).to have_content new_trigger_title
- expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
- end
- end
+ # See if edit page opens, then fill in new description and save
+ find('a[title="Edit"]').send_keys(:return)
+ fill_in 'trigger_description', with: new_trigger_title
+ click_button 'Save trigger'
- describe 'trigger "Revoke" workflow' do
- before do
- create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+ aggregate_failures 'display update notice and trigger is updated' do
+ expect(page.find('.flash-notice')).to have_content 'Trigger was successfully updated.'
+ expect(page.find('.triggers-list')).to have_content new_trigger_title
+ expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
+ end
+ end
end
- it 'button "Revoke" has correct alert' do
- expected_alert = 'By revoking a trigger you will break any processes making use of it. Are you sure?'
- expect(page.find('a.btn-trigger-revoke')['data-confirm']).to eq expected_alert
- end
+ describe 'trigger "Revoke" workflow' do
+ before do
+ create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
+ end
- it 'revoke trigger' do
- # See if "Revoke" on trigger works post trigger creation
- page.accept_confirm do
- find('a.btn-trigger-revoke').send_keys(:return)
+ it 'button "Revoke" has correct alert' do
+ expected_alert = 'By revoking a trigger you will break any processes making use of it. Are you sure?'
+ expect(page.find('[data-testid="trigger_revoke_button"]')['data-confirm']).to eq expected_alert
end
- expect(page.find('.flash-notice')).to have_content 'Trigger removed'
- expect(page).to have_selector('p.settings-message.text-center.gl-mb-3')
- end
- end
+ it 'revoke trigger' do
+ # See if "Revoke" on trigger works post trigger creation
+ page.accept_confirm do
+ find('[data-testid="trigger_revoke_button"]').send_keys(:return)
+ end
- describe 'show triggers workflow' do
- it 'contains trigger description placeholder' do
- expect(page.find('#trigger_description')['placeholder']).to eq 'Trigger description'
+ aggregate_failures 'trigger is removed' do
+ expect(page.find('.flash-notice')).to have_content 'Trigger removed'
+ expect(page).to have_css('[data-testid="no_triggers_content"]')
+ end
+ end
end
- it 'show "invalid" badge for trigger with owner having insufficient permissions' do
- create(:ci_trigger, owner: guest_user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+ describe 'show triggers workflow' do
+ it 'contains trigger description placeholder' do
+ expect(page.find('#trigger_description')['placeholder']).to eq 'Trigger description'
+ end
- expect(page.find('.triggers-list')).to have_content 'invalid'
- expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
- end
+ it 'show "invalid" badge for trigger with owner having insufficient permissions' do
+ create(:ci_trigger, owner: guest_user, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
+
+ aggregate_failures 'has invalid badge and no edit link' do
+ expect(page.find('.triggers-list')).to have_content 'invalid'
+ expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
+ end
+ end
- it 'do not show "Edit" or full token for not owned trigger' do
- # Create trigger with user different from current_user
- create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+ it 'does not show "Edit" or full token for not owned trigger' do
+ # Create trigger with user different from current_user
+ create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
+
+ aggregate_failures 'shows truncated token, no clipboard button and no edit link' do
+ expect(page.find('.triggers-list')).to have_content(@project.triggers.first.token[0..3])
+ expect(page.find('.triggers-list')).not_to have_selector('[data-testid="clipboard-btn"]')
+ expect(page.find('.triggers-list .trigger-owner')).not_to have_content user.name
+ expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
+ end
+ end
- # See if trigger not owned by current_user shows only first few token chars and doesn't have copy-to-clipboard button
- expect(page.find('.triggers-list')).to have_content(@project.triggers.first.token[0..3])
- expect(page.find('.triggers-list')).not_to have_selector('button.btn-clipboard')
+ it 'show "Edit" and full token for owned trigger' do
+ create(:ci_trigger, owner: user, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
- # See if trigger owner name doesn't match with current_user and trigger is non-editable
- expect(page.find('.triggers-list .trigger-owner')).not_to have_content user.name
- expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
+ aggregate_failures 'shows full token, clipboard button and edit link' do
+ expect(page.find('.triggers-list')).to have_content @project.triggers.first.token
+ expect(page.find('.triggers-list')).to have_selector('[data-testid="clipboard-btn"]')
+ expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
+ expect(page.find('.triggers-list')).to have_selector('a[title="Edit"]')
+ end
+ end
end
+ end
- it 'show "Edit" and full token for owned trigger' do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
-
- # See if trigger shows full token and has copy-to-clipboard button
- expect(page.find('.triggers-list')).to have_content @project.triggers.first.token
- expect(page.find('.triggers-list')).to have_selector('button.btn-clipboard')
+ context 'when ci_pipeline_triggers_settings_vue_ui is enabled' do
+ it_behaves_like 'triggers page'
+ end
- # See if trigger owner name matches with current_user and is editable
- expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
- expect(page.find('.triggers-list')).to have_selector('a[title="Edit"]')
+ context 'when ci_pipeline_triggers_settings_vue_ui is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_triggers_settings_vue_ui: false)
end
+
+ it_behaves_like 'triggers page'
end
end
diff --git a/spec/features/users/overview_spec.rb b/spec/features/users/overview_spec.rb
index 549087e5950..67216b04504 100644
--- a/spec/features/users/overview_spec.rb
+++ b/spec/features/users/overview_spec.rb
@@ -21,15 +21,15 @@ RSpec.describe 'Overview tab on a user profile', :js do
sign_in user
end
- describe 'activities section' do
- shared_context 'visit overview tab' do
- before do
- visit user.username
- page.find('.js-overview-tab a').click
- wait_for_requests
- end
+ shared_context 'visit overview tab' do
+ before do
+ visit user.username
+ page.find('.js-overview-tab a').click
+ wait_for_requests
end
+ end
+ describe 'activities section' do
describe 'user has no activities' do
include_context 'visit overview tab'
@@ -84,14 +84,6 @@ RSpec.describe 'Overview tab on a user profile', :js do
end
describe 'projects section' do
- shared_context 'visit overview tab' do
- before do
- visit user.username
- page.find('.js-overview-tab a').click
- wait_for_requests
- end
- end
-
describe 'user has no personal projects' do
include_context 'visit overview tab'
@@ -158,4 +150,52 @@ RSpec.describe 'Overview tab on a user profile', :js do
end
end
end
+
+ describe 'bot user' do
+ let(:bot_user) { create(:user, user_type: :security_bot) }
+
+ shared_context "visit bot's overview tab" do
+ before do
+ visit bot_user.username
+ page.find('.js-overview-tab a').click
+ wait_for_requests
+ end
+ end
+
+ describe 'feature flag enabled' do
+ before do
+ stub_feature_flags(security_auto_fix: true)
+ end
+
+ include_context "visit bot's overview tab"
+
+ it "activity panel's title is 'Bot activity'" do
+ page.within('.activities-block') do
+ expect(page).to have_text('Bot activity')
+ end
+ end
+
+ it 'does not show projects panel' do
+ expect(page).not_to have_selector('.projects-block')
+ end
+ end
+
+ describe 'feature flag disabled' do
+ before do
+ stub_feature_flags(security_auto_fix: false)
+ end
+
+ include_context "visit bot's overview tab"
+
+ it "activity panel's title is not 'Bot activity'" do
+ page.within('.activities-block') do
+ expect(page).not_to have_text('Bot activity')
+ end
+ end
+
+ it 'shows projects panel' do
+ expect(page).to have_selector('.projects-block')
+ end
+ end
+ end
end
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index dd5c2442d00..466b7361da9 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'User page' do
include ExternalAuthorizationServiceHelpers
- let(:user) { create(:user) }
+ let(:user) { create(:user, bio: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
context 'with public profile' do
it 'shows all the tabs' do
@@ -174,4 +174,54 @@ RSpec.describe 'User page' do
end
end
end
+
+ context 'page description' do
+ before do
+ visit(user_path(user))
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+ end
+
+ context 'with a bot user' do
+ let(:user) { create(:user, user_type: :security_bot) }
+
+ describe 'feature flag enabled' do
+ before do
+ stub_feature_flags(security_auto_fix: true)
+ end
+
+ it 'only shows Overview and Activity tabs' do
+ visit(user_path(user))
+
+ page.within '.nav-links' do
+ expect(page).to have_link('Overview')
+ expect(page).to have_link('Activity')
+ expect(page).not_to have_link('Groups')
+ expect(page).not_to have_link('Contributed projects')
+ expect(page).not_to have_link('Personal projects')
+ expect(page).not_to have_link('Snippets')
+ end
+ end
+ end
+
+ describe 'feature flag disabled' do
+ before do
+ stub_feature_flags(security_auto_fix: false)
+ end
+
+ it 'only shows Overview and Activity tabs' do
+ visit(user_path(user))
+
+ page.within '.nav-links' do
+ expect(page).to have_link('Overview')
+ expect(page).to have_link('Activity')
+ expect(page).to have_link('Groups')
+ expect(page).to have_link('Contributed projects')
+ expect(page).to have_link('Personal projects')
+ expect(page).to have_link('Snippets')
+ end
+ end
+ end
+ end
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 5fd0e677cd0..c59121626f0 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -7,6 +7,14 @@ RSpec.shared_examples 'Signup' do
let(:new_user) { build_stubbed(:user) }
+ def fill_in_signup_form
+ fill_in 'new_user_username', with: new_user.username
+ fill_in 'new_user_email', with: new_user.email
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ fill_in 'new_user_password', with: new_user.password
+ end
+
describe 'username validation', :js do
before do
visit new_user_registration_path
@@ -144,20 +152,9 @@ RSpec.shared_examples 'Signup' do
it 'creates the user account and sends a confirmation email' do
visit new_user_registration_path
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
+ fill_in_signup_form
expect { click_button 'Register' }.to change { User.count }.by(1)
-
expect(current_path).to eq users_almost_there_path
expect(page).to have_content('Please check your email to confirm your account')
end
@@ -171,46 +168,14 @@ RSpec.shared_examples 'Signup' do
it 'creates the user account and sends a confirmation email' do
visit new_user_registration_path
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
+ fill_in_signup_form
expect { click_button 'Register' }.to change { User.count }.by(1)
-
expect(current_path).to eq users_sign_up_welcome_path
end
end
end
- context "when sigining up with different cased emails" do
- it "creates the user successfully" do
- visit new_user_registration_path
-
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
- click_button "Register"
-
- expect(current_path).to eq users_sign_up_welcome_path
- end
- end
-
context "when not sending confirmation email" do
before do
stub_application_setting(send_user_confirmation_email: false)
@@ -219,17 +184,7 @@ RSpec.shared_examples 'Signup' do
it 'creates the user account and goes to dashboard' do
visit new_user_registration_path
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
+ fill_in_signup_form
click_button "Register"
expect(current_path).to eq users_sign_up_welcome_path
@@ -239,20 +194,10 @@ RSpec.shared_examples 'Signup' do
context 'with errors' do
it "displays the errors" do
- existing_user = create(:user)
-
+ create(:user, email: new_user.email)
visit new_user_registration_path
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: existing_user.email
- fill_in 'new_user_password', with: new_user.password
+ fill_in_signup_form
click_button "Register"
expect(current_path).to eq user_registration_path
@@ -261,20 +206,10 @@ RSpec.shared_examples 'Signup' do
end
it 'does not redisplay the password' do
- existing_user = create(:user)
-
+ create(:user, email: new_user.email)
visit new_user_registration_path
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: existing_user.email
- fill_in 'new_user_password', with: new_user.password
+ fill_in_signup_form
click_button "Register"
expect(current_path).to eq user_registration_path
@@ -287,45 +222,14 @@ RSpec.shared_examples 'Signup' do
enforce_terms
end
- it 'requires the user to check the checkbox' do
+ it 'renders text that the user confirms terms by clicking register' do
visit new_user_registration_path
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
+ expect(page).to have_content(/By clicking Register, I agree that I have read and accepted the Terms of Use and Privacy Policy/)
+ fill_in_signup_form
click_button 'Register'
- expect(current_path).to eq new_user_session_path
- expect(page).to have_content(/you must accept our terms of service/i)
- end
-
- it 'asks the user to accept terms before going to the dashboard' do
- visit new_user_registration_path
-
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
- check :terms_opt_in
-
- click_button "Register"
-
expect(current_path).to eq users_sign_up_welcome_path
end
end
@@ -353,17 +257,7 @@ RSpec.shared_examples 'Signup' do
it 'prevents from signing up' do
visit new_user_registration_path
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
+ fill_in_signup_form
expect { click_button 'Register' }.not_to change { User.count }
expect(page).to have_content('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.')
@@ -374,17 +268,7 @@ RSpec.shared_examples 'Signup' do
it 'prevents from signing up' do
visit new_user_registration_path
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
+ fill_in_signup_form
expect { click_button 'Register' }.not_to change { User.count }
expect(page).to have_content('That was a bit too quick! Please resubmit.')
@@ -393,36 +277,27 @@ RSpec.shared_examples 'Signup' do
end
it 'redirects to step 2 of the signup process, sets the role and redirects back' do
- new_user = build_stubbed(:user)
visit new_user_registration_path
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
-
- if Gitlab::Experimentation.enabled?(:signup_flow)
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- else
- fill_in 'new_user_name', with: new_user.name
- end
-
- fill_in 'new_user_password', with: new_user.password
+ fill_in_signup_form
click_button 'Register'
+
visit new_project_path
expect(page).to have_current_path(users_sign_up_welcome_path)
select 'Software Developer', from: 'user_role'
click_button 'Get started!'
- new_user = User.find_by_username(new_user.username)
- expect(new_user.software_developer_role?).to be_truthy
- expect(new_user.setup_for_company).to be_nil
+ created_user = User.find_by_username(new_user.username)
+
+ expect(created_user.software_developer_role?).to be_truthy
+ expect(created_user.setup_for_company).to be_nil
expect(page).to have_current_path(new_project_path)
end
end
-RSpec.shared_examples 'Signup name validation' do |field, max_length|
+RSpec.shared_examples 'Signup name validation' do |field, max_length, label|
before do
visit new_user_registration_path
end
@@ -446,10 +321,10 @@ RSpec.shared_examples 'Signup name validation' do |field, max_length|
expect(find('.name')).to have_css '.gl-field-error-outline'
end
- it "shows an error message if the user\'s fullname is longer than #{max_length} characters" do
+ it "shows an error message if the user\'s #{label} is longer than #{max_length} characters" do
fill_in field, with: 'n' * (max_length + 1)
- expect(page).to have_content("Name is too long (maximum is #{max_length} characters).")
+ expect(page).to have_content("#{label} is too long (maximum is #{max_length} characters).")
end
it 'shows an error message if the username contains emojis' do
@@ -467,7 +342,8 @@ RSpec.describe 'With original flow' do
end
it_behaves_like 'Signup'
- it_behaves_like 'Signup name validation', 'new_user_name', 255
+ it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
+ it_behaves_like 'Signup name validation', 'new_user_last_name', 127, 'Last name'
end
RSpec.describe 'With experimental flow' do
@@ -477,30 +353,6 @@ RSpec.describe 'With experimental flow' do
end
it_behaves_like 'Signup'
- it_behaves_like 'Signup name validation', 'new_user_first_name', 127
- it_behaves_like 'Signup name validation', 'new_user_last_name', 127
-
- context 'when terms_opt_in experimental is enabled' do
- include TermsHelper
-
- before do
- enforce_terms
- stub_experiment(signup_flow: true, terms_opt_in: true)
- stub_experiment_for_user(signup_flow: true, terms_opt_in: true)
- end
-
- it 'terms are checked by default' do
- new_user = build_stubbed(:user)
-
- visit new_user_registration_path
- fill_in 'new_user_first_name', with: new_user.first_name
- fill_in 'new_user_last_name', with: new_user.last_name
- fill_in 'new_user_username', with: new_user.username
- fill_in 'new_user_email', with: new_user.email
- fill_in 'new_user_password', with: new_user.password
- click_button 'Register'
-
- expect(current_path).to eq users_sign_up_welcome_path
- end
- end
+ it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
+ it_behaves_like 'Signup name validation', 'new_user_last_name', 127, 'Last name'
end
diff --git a/spec/features/users/terms_spec.rb b/spec/features/users/terms_spec.rb
index 5275845fe5b..7500f2fe59a 100644
--- a/spec/features/users/terms_spec.rb
+++ b/spec/features/users/terms_spec.rb
@@ -26,6 +26,21 @@ RSpec.describe 'Users > Terms' do
expect(page).not_to have_content('Continue')
end
+ context 'when user is a project bot' do
+ let(:project_bot) { create(:user, :project_bot) }
+
+ before do
+ enforce_terms
+ end
+
+ it 'auto accepts the terms' do
+ visit terms_path
+
+ expect(page).not_to have_content('Accept terms')
+ expect(project_bot.terms_accepted?).to be(true)
+ end
+ end
+
context 'when signed in' do
let(:user) { create(:user) }
diff --git a/spec/finders/alert_management/alerts_finder_spec.rb b/spec/finders/alert_management/alerts_finder_spec.rb
index 926446b31d5..e74f3ac68ed 100644
--- a/spec/finders/alert_management/alerts_finder_spec.rb
+++ b/spec/finders/alert_management/alerts_finder_spec.rb
@@ -39,19 +39,19 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
end
context 'status given' do
- let(:params) { { status: AlertManagement::Alert::STATUSES[:resolved] } }
+ let(:params) { { status: :resolved } }
it { is_expected.to match_array(resolved_alert) }
context 'with an array of statuses' do
let(:triggered_alert) { create(:alert_management_alert) }
- let(:params) { { status: [AlertManagement::Alert::STATUSES[:resolved]] } }
+ let(:params) { { status: [:resolved] } }
it { is_expected.to match_array(resolved_alert) }
end
context 'with no alerts of status' do
- let(:params) { { status: AlertManagement::Alert::STATUSES[:acknowledged] } }
+ let(:params) { { status: :acknowledged } }
it { is_expected.to be_empty }
end
@@ -169,12 +169,6 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
end
context 'when sorting by status' do
- let(:statuses) { AlertManagement::Alert::STATUSES }
- let(:triggered) { statuses[:triggered] }
- let(:acknowledged) { statuses[:acknowledged] }
- let(:resolved) { statuses[:resolved] }
- let(:ignored) { statuses[:ignored] }
-
let_it_be(:alert_triggered) { create(:alert_management_alert, project: project) }
let_it_be(:alert_acknowledged) { create(:alert_management_alert, :acknowledged, project: project) }
let_it_be(:alert_resolved) { create(:alert_management_alert, :resolved, project: project) }
@@ -184,7 +178,7 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
let(:params) { { sort: 'status_asc' } }
it 'sorts by status: Ignored > Resolved > Acknowledged > Triggered' do
- expect(execute.map(&:status).uniq).to eq([ignored, resolved, acknowledged, triggered])
+ expect(execute.map(&:status_name).uniq).to eq([:ignored, :resolved, :acknowledged, :triggered])
end
end
@@ -192,64 +186,83 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
let(:params) { { sort: 'status_desc' } }
it 'sorts by status: Triggered > Acknowledged > Resolved > Ignored' do
- expect(execute.map(&:status).uniq).to eq([triggered, acknowledged, resolved, ignored])
+ expect(execute.map(&:status_name).uniq).to eq([:triggered, :acknowledged, :resolved, :ignored])
end
end
end
end
- end
- context 'search query given' do
- let_it_be(:alert) do
- create(:alert_management_alert,
- :with_fingerprint,
- title: 'Title',
- description: 'Desc',
- service: 'Service',
- monitoring_tool: 'Monitor'
- )
- end
+ context 'search query given' do
+ let_it_be(:alert) do
+ create(:alert_management_alert,
+ :with_fingerprint,
+ project: project,
+ title: 'Title',
+ description: 'Desc',
+ service: 'Service',
+ monitoring_tool: 'Monitor'
+ )
+ end
- before do
- alert.project.add_developer(current_user)
- end
+ context 'searching title' do
+ let(:params) { { search: alert.title } }
- subject { described_class.new(current_user, alert.project, params).execute }
+ it { is_expected.to match_array([alert]) }
+ end
- context 'searching title' do
- let(:params) { { search: alert.title } }
+ context 'searching description' do
+ let(:params) { { search: alert.description } }
- it { is_expected.to match_array([alert]) }
- end
+ it { is_expected.to match_array([alert]) }
+ end
- context 'searching description' do
- let(:params) { { search: alert.description } }
+ context 'searching service' do
+ let(:params) { { search: alert.service } }
- it { is_expected.to match_array([alert]) }
- end
+ it { is_expected.to match_array([alert]) }
+ end
- context 'searching service' do
- let(:params) { { search: alert.service } }
+ context 'searching monitoring tool' do
+ let(:params) { { search: alert.monitoring_tool } }
- it { is_expected.to match_array([alert]) }
- end
+ it { is_expected.to match_array([alert]) }
+ end
- context 'searching monitoring tool' do
- let(:params) { { search: alert.monitoring_tool } }
+ context 'searching something else' do
+ let(:params) { { search: alert.fingerprint } }
- it { is_expected.to match_array([alert]) }
- end
+ it { is_expected.to be_empty }
+ end
- context 'searching something else' do
- let(:params) { { search: alert.fingerprint } }
+ context 'empty search' do
+ let(:params) { { search: ' ' } }
- it { is_expected.to be_empty }
+ it { is_expected.not_to include(alert) }
+ end
end
- context 'empty search' do
- let(:params) { { search: ' ' } }
+ context 'assignee username given' do
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, assignees: [assignee]) }
+ let(:params) { { assignee_username: username } }
+
+ context 'with valid assignee_username' do
+ let(:username) { assignee.username }
+
+ it { is_expected.to match_array([alert]) }
+ end
- it { is_expected.to match_array([alert]) }
+ context 'with invalid assignee_username' do
+ let(:username) { 'unknown username' }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with empty assignee_username' do
+ let(:username) { ' ' }
+
+ it { is_expected.not_to include(alert) }
+ end
end
end
end
@@ -261,12 +274,12 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
project.add_developer(current_user)
end
- it { is_expected.to match({ 2 => 1, 3 => 1 }) } # one resolved and one ignored
+ it { is_expected.to match(resolved: 1, ignored: 1) }
context 'when filtering params are included' do
- let(:params) { { status: AlertManagement::Alert::STATUSES[:resolved] } }
+ let(:params) { { status: :resolved } }
- it { is_expected.to match({ 2 => 1 }) } # one resolved
+ it { is_expected.to match(resolved: 1) }
end
end
end
diff --git a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
index 196fde5efe0..65f6dc0ba74 100644
--- a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
+++ b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
end
context 'with unsaved merge request' do
- let(:merge_request) { build(:merge_request) }
+ let(:merge_request) { build(:merge_request, source_project: create(:project, :repository)) }
let!(:pipeline) do
create(:ci_empty_pipeline, project: project,
diff --git a/spec/finders/environment_names_finder_spec.rb b/spec/finders/environment_names_finder_spec.rb
new file mode 100644
index 00000000000..9244e4fb369
--- /dev/null
+++ b/spec/finders/environment_names_finder_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe EnvironmentNamesFinder do
+ describe '#execute' do
+ let!(:group) { create(:group) }
+ let!(:project1) { create(:project, :public, namespace: group) }
+ let!(:project2) { create(:project, :private, namespace: group) }
+ let!(:user) { create(:user) }
+
+ before do
+ create(:environment, name: 'gstg', project: project1)
+ create(:environment, name: 'gprd', project: project1)
+ create(:environment, name: 'gprd', project: project2)
+ create(:environment, name: 'gcny', project: project2)
+ end
+
+ context 'using a group and a group member' do
+ it 'returns environment names for all projects' do
+ group.add_developer(user)
+
+ names = described_class.new(group, user).execute
+
+ expect(names).to eq(%w[gcny gprd gstg])
+ end
+ end
+
+ context 'using a group and a guest' do
+ it 'returns environment names for all public projects' do
+ names = described_class.new(group, user).execute
+
+ expect(names).to eq(%w[gprd gstg])
+ end
+ end
+
+ context 'using a public project and a project member' do
+ it 'returns all the unique environment names' do
+ project1.team.add_developer(user)
+
+ names = described_class.new(project1, user).execute
+
+ expect(names).to eq(%w[gprd gstg])
+ end
+ end
+
+ context 'using a public project and a guest' do
+ it 'returns all the unique environment names' do
+ names = described_class.new(project1, user).execute
+
+ expect(names).to eq(%w[gprd gstg])
+ end
+ end
+
+ context 'using a private project and a guest' do
+ it 'returns all the unique environment names' do
+ names = described_class.new(project2, user).execute
+
+ expect(names).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/finders/group_labels_finder_spec.rb b/spec/finders/group_labels_finder_spec.rb
deleted file mode 100644
index d65a8fb4fed..00000000000
--- a/spec/finders/group_labels_finder_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GroupLabelsFinder, '#execute' do
- let!(:group) { create(:group) }
- let!(:user) { create(:user) }
- let!(:label1) { create(:group_label, title: 'Foo', description: 'Lorem ipsum', group: group) }
- let!(:label2) { create(:group_label, title: 'Bar', description: 'Fusce consequat', group: group) }
-
- it 'returns all group labels sorted by name if no params' do
- result = described_class.new(user, group).execute
-
- expect(result.to_a).to match_array([label2, label1])
- end
-
- it 'returns all group labels sorted by name desc' do
- result = described_class.new(user, group, sort: 'name_desc').execute
-
- expect(result.to_a).to match_array([label2, label1])
- end
-
- it 'returns group labels that match search' do
- result = described_class.new(user, group, search: 'Foo').execute
-
- expect(result.to_a).to match_array([label1])
- end
-
- it 'returns group labels user subscribed to' do
- label2.subscribe(user)
-
- result = described_class.new(user, group, subscribed: 'true').execute
-
- expect(result.to_a).to match_array([label2])
- end
-
- it 'returns second page of labels' do
- result = described_class.new(user, group, page: '2').execute
-
- expect(result.to_a).to match_array([])
- end
-end
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index 48e4c5dadc9..c9e9328794e 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -161,5 +161,61 @@ RSpec.describe GroupsFinder do
end
end
end
+
+ context 'with include parent group descendants' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:parent_group) { create(:group, :public) }
+ let_it_be(:public_subgroup) { create(:group, :public, parent: parent_group) }
+ let_it_be(:internal_sub_subgroup) { create(:group, :internal, parent: public_subgroup) }
+ let_it_be(:private_sub_subgroup) { create(:group, :private, parent: public_subgroup) }
+ let_it_be(:public_sub_subgroup) { create(:group, :public, parent: public_subgroup) }
+ let(:params) { { include_parent_descendants: true, parent: parent_group } }
+
+ context 'with nil parent' do
+ it 'returns all accessible groups' do
+ params[:parent] = nil
+ expect(described_class.new(user, params).execute).to contain_exactly(
+ parent_group,
+ public_subgroup,
+ internal_sub_subgroup,
+ public_sub_subgroup
+ )
+ end
+ end
+
+ context 'without a user' do
+ it 'only returns the group public descendants' do
+ expect(described_class.new(nil, params).execute).to contain_exactly(
+ public_subgroup,
+ public_sub_subgroup
+ )
+ end
+ end
+
+ context 'when a user is present' do
+ it 'returns the group public and internal descendants' do
+ expect(described_class.new(user, params).execute).to contain_exactly(
+ public_subgroup,
+ public_sub_subgroup,
+ internal_sub_subgroup
+ )
+ end
+ end
+
+ context 'when a parent group member is present' do
+ before do
+ parent_group.add_developer(user)
+ end
+
+ it 'returns all group descendants' do
+ expect(described_class.new(user, params).execute).to contain_exactly(
+ public_subgroup,
+ public_sub_subgroup,
+ internal_sub_subgroup,
+ private_sub_subgroup
+ )
+ end
+ end
+ end
end
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index dbf5abe64a5..21bc03011c3 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -843,6 +843,16 @@ RSpec.describe IssuesFinder do
expect(finder.row_count).to be_zero
end
+
+ it 'returns -1 if the query times out' do
+ finder = described_class.new(admin)
+
+ expect_next_instance_of(described_class) do |subfinder|
+ expect(subfinder).to receive(:execute).and_raise(ActiveRecord::QueryCanceled)
+ end
+
+ expect(finder.row_count).to eq(-1)
+ end
end
describe '#with_confidentiality_access_check' do
diff --git a/spec/finders/labels_finder_spec.rb b/spec/finders/labels_finder_spec.rb
index 851b9e64db6..e344591dd5d 100644
--- a/spec/finders/labels_finder_spec.rb
+++ b/spec/finders/labels_finder_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user)
- expect(finder.execute).to eq [group_label_2, group_label_3, project_label_1, group_label_1, project_label_2, project_label_4]
+ expect(finder.execute).to match_array([group_label_2, group_label_3, project_label_1, group_label_1, project_label_2, project_label_4])
end
it 'returns labels available if nil title is supplied' do
@@ -50,7 +50,7 @@ RSpec.describe LabelsFinder do
# params[:title] will return `nil` regardless whether it is specified
finder = described_class.new(user, title: nil)
- expect(finder.execute).to eq [group_label_2, group_label_3, project_label_1, group_label_1, project_label_2, project_label_4]
+ expect(finder.execute).to match_array([group_label_2, group_label_3, project_label_1, group_label_1, project_label_2, project_label_4])
end
end
@@ -60,7 +60,7 @@ RSpec.describe LabelsFinder do
::Projects::UpdateService.new(project_1, user, archived: true).execute
finder = described_class.new(user, **group_params(group_1))
- expect(finder.execute).to eq [group_label_2, group_label_1, project_label_5]
+ expect(finder.execute).to match_array([group_label_2, group_label_1, project_label_5])
end
context 'when only_group_labels is true' do
@@ -69,7 +69,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user, only_group_labels: true, **group_params(group_1))
- expect(finder.execute).to eq [group_label_2, group_label_1]
+ expect(finder.execute).to match_array([group_label_2, group_label_1])
end
end
@@ -86,7 +86,7 @@ RSpec.describe LabelsFinder do
it 'returns group labels' do
finder = described_class.new(user, **group_params(empty_group))
- expect(finder.execute).to eq [empty_group_label_1, empty_group_label_2]
+ expect(finder.execute).to match_array([empty_group_label_1, empty_group_label_2])
end
end
end
@@ -98,7 +98,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user, **group_params(private_subgroup_1), only_group_labels: true, include_ancestor_groups: true)
- expect(finder.execute).to eq [private_group_label_1, private_subgroup_label_1]
+ expect(finder.execute).to match_array([private_group_label_1, private_subgroup_label_1])
end
it 'ignores labels from groups which user can not read' do
@@ -106,7 +106,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user, **group_params(private_subgroup_1), only_group_labels: true, include_ancestor_groups: true)
- expect(finder.execute).to eq [private_subgroup_label_1]
+ expect(finder.execute).to match_array([private_subgroup_label_1])
end
end
@@ -117,7 +117,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user, **group_params(private_group_1), only_group_labels: true, include_descendant_groups: true)
- expect(finder.execute).to eq [private_group_label_1, private_subgroup_label_1]
+ expect(finder.execute).to match_array([private_group_label_1, private_subgroup_label_1])
end
it 'ignores labels from groups which user can not read' do
@@ -125,7 +125,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user, **group_params(private_group_1), only_group_labels: true, include_descendant_groups: true)
- expect(finder.execute).to eq [private_subgroup_label_1]
+ expect(finder.execute).to match_array([private_subgroup_label_1])
end
end
@@ -140,13 +140,13 @@ RSpec.describe LabelsFinder do
shared_examples 'with full visibility' do
it 'returns all projects labels' do
- expect(finder.execute).to eq [group_label_1, limited_visibility_label, visible_label]
+ expect(finder.execute).to match_array([group_label_1, limited_visibility_label, visible_label])
end
end
shared_examples 'with limited visibility' do
it 'returns only authorized projects labels' do
- expect(finder.execute).to eq [group_label_1, visible_label]
+ expect(finder.execute).to match_array([group_label_1, visible_label])
end
end
@@ -249,7 +249,7 @@ RSpec.describe LabelsFinder do
it 'returns labels available for the project' do
finder = described_class.new(user, project_id: project_1.id)
- expect(finder.execute).to eq [group_label_2, project_label_1, group_label_1]
+ expect(finder.execute).to match_array([group_label_2, project_label_1, group_label_1])
end
context 'as an administrator' do
@@ -272,13 +272,13 @@ RSpec.describe LabelsFinder do
it 'returns label with that title' do
finder = described_class.new(user, title: 'Group Label 2')
- expect(finder.execute).to eq [group_label_2]
+ expect(finder.execute).to match_array([group_label_2])
end
it 'returns label with title alias' do
finder = described_class.new(user, name: 'Group Label 2')
- expect(finder.execute).to eq [group_label_2]
+ expect(finder.execute).to match_array([group_label_2])
end
it 'returns no labels if empty title is supplied' do
@@ -304,19 +304,19 @@ RSpec.describe LabelsFinder do
it 'returns labels with a partially matching title' do
finder = described_class.new(user, search: '(group)')
- expect(finder.execute).to eq [group_label_1]
+ expect(finder.execute).to match_array([group_label_1])
end
it 'returns labels with a partially matching description' do
finder = described_class.new(user, search: 'awesome')
- expect(finder.execute).to eq [project_label_1]
+ expect(finder.execute).to match_array([project_label_1])
end
it 'returns labels matching a single character' do
finder = described_class.new(user, search: '(')
- expect(finder.execute).to eq [group_label_1]
+ expect(finder.execute).to match_array([group_label_1])
end
end
@@ -326,7 +326,7 @@ RSpec.describe LabelsFinder do
finder = described_class.new(user, subscribed: 'true')
- expect(finder.execute).to eq [project_label_1]
+ expect(finder.execute).to match_array([project_label_1])
end
end
diff --git a/spec/finders/merge_requests/by_approvals_finder_spec.rb b/spec/finders/merge_requests/by_approvals_finder_spec.rb
new file mode 100644
index 00000000000..0e1856879f1
--- /dev/null
+++ b/spec/finders/merge_requests/by_approvals_finder_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ByApprovalsFinder do
+ let_it_be(:first_user) { create(:user) }
+ let_it_be(:second_user) { create(:user) }
+ let(:third_user) { create(:user) }
+
+ let_it_be(:merge_request_without_approvals) { create(:merge_request) }
+ let_it_be(:merge_request_with_first_user_approval) do
+ create(:merge_request).tap do |mr|
+ create(:approval, merge_request: mr, user: first_user)
+ end
+ end
+ let_it_be(:merge_request_with_both_approvals) do
+ create(:merge_request).tap do |mr|
+ create(:approval, merge_request: mr, user: first_user)
+ create(:approval, merge_request: mr, user: second_user)
+ end
+ end
+
+ def merge_requests(ids: nil, names: [])
+ described_class.new(names, ids).execute(MergeRequest.all)
+ end
+
+ context 'filter by no approvals' do
+ it 'returns merge requests without approvals' do
+ expected_result = [merge_request_without_approvals]
+
+ expect(merge_requests(ids: 'None')).to match_array(expected_result)
+ expect(merge_requests(names: ['None'])).to match_array(expected_result)
+ end
+ end
+
+ context 'filter by any approvals' do
+ it 'returns merge requests approved by at least one user' do
+ expected_result = [merge_request_with_first_user_approval, merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: 'Any')).to match_array(expected_result)
+ expect(merge_requests(names: ['Any'])).to match_array(expected_result)
+ end
+ end
+
+ context 'filter by specific user approval' do
+ it 'returns merge requests approved by specific user' do
+ expected_result = [merge_request_with_first_user_approval, merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: [first_user.id])).to match_array(expected_result)
+ expect(merge_requests(names: [first_user.username])).to match_array(expected_result)
+ end
+ end
+
+ context 'filter by multiple user approval' do
+ it 'returns merge requests approved by both users' do
+ expected_result = [merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: [first_user.id, second_user.id])).to match_array(expected_result)
+ expect(merge_requests(names: [first_user.username, second_user.username])).to match_array(expected_result)
+ end
+
+ context 'limiting max conditional elements' do
+ it 'returns merge requests approved by both users when the filter limit is set to 2' do
+ stub_const('MergeRequests::ByApprovalsFinder::MAX_FILTER_ELEMENTS', 2)
+
+ expected_result = [merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: [first_user.id, second_user.id, third_user.id])).to match_array(expected_result)
+ expect(merge_requests(names: [first_user.username, second_user.username, third_user.username])).to match_array(expected_result)
+ end
+ end
+ end
+
+ context 'with empty params' do
+ it 'returns all merge requests' do
+ expected_result = [merge_request_without_approvals, merge_request_with_first_user_approval, merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: [])).to match_array(expected_result)
+ expect(merge_requests(names: [])).to match_array(expected_result)
+ end
+ end
+end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 4f86323c7c6..68958e37001 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -486,6 +486,83 @@ RSpec.describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(old_merge_request, new_merge_request)
end
end
+
+ context 'filtering by the merge request deployments' do
+ let(:gstg) { create(:environment, project: project4, name: 'gstg') }
+ let(:gprd) { create(:environment, project: project4, name: 'gprd') }
+
+ let(:mr1) do
+ create(
+ :merge_request,
+ :simple,
+ :merged,
+ author: user,
+ source_project: project4,
+ target_project: project4
+ )
+ end
+
+ let(:mr2) do
+ create(
+ :merge_request,
+ :simple,
+ :merged,
+ author: user,
+ source_project: project4,
+ target_project: project4
+ )
+ end
+
+ let(:deploy1) do
+ create(
+ :deployment,
+ :success,
+ deployable: nil,
+ environment: gstg,
+ project: project4,
+ sha: mr1.diff_head_sha,
+ finished_at: Time.utc(2020, 10, 1, 12, 0)
+ )
+ end
+
+ let(:deploy2) do
+ create(
+ :deployment,
+ :success,
+ deployable: nil,
+ environment: gprd,
+ project: project4,
+ sha: mr2.diff_head_sha,
+ finished_at: Time.utc(2020, 10, 2, 15, 0)
+ )
+ end
+
+ before do
+ deploy1.link_merge_requests(MergeRequest.where(id: mr1.id))
+ deploy2.link_merge_requests(MergeRequest.where(id: mr2.id))
+ end
+
+ it 'filters merge requests deployed to a given environment' do
+ mrs = described_class.new(user, environment: 'gstg').execute
+
+ expect(mrs).to eq([mr1])
+ end
+
+ it 'filters merge requests deployed before a given date' do
+ mrs =
+ described_class.new(user, deployed_before: '2020-10-02').execute
+
+ expect(mrs).to eq([mr1])
+ end
+
+ it 'filters merge requests deployed after a given date' do
+ mrs = described_class
+ .new(user, deployed_after: '2020-10-01 12:00')
+ .execute
+
+ expect(mrs).to eq([mr2])
+ end
+ end
end
describe '#row_count', :request_store do
@@ -500,6 +577,16 @@ RSpec.describe MergeRequestsFinder do
expect(finder.row_count).to eq(1)
end
+
+ it 'returns -1 if the query times out' do
+ finder = described_class.new(user)
+
+ expect_next_instance_of(described_class) do |subfinder|
+ expect(subfinder).to receive(:execute).and_raise(ActiveRecord::QueryCanceled)
+ end
+
+ expect(finder.row_count).to eq(-1)
+ end
end
context 'external authorization' do
diff --git a/spec/finders/packages/generic/package_finder_spec.rb b/spec/finders/packages/generic/package_finder_spec.rb
new file mode 100644
index 00000000000..ed34268e7a9
--- /dev/null
+++ b/spec/finders/packages/generic/package_finder_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::Generic::PackageFinder do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package) { create(:generic_package, project: project) }
+
+ describe '#execute!' do
+ subject(:finder) { described_class.new(project) }
+
+ it 'finds package by name and version' do
+ found_package = finder.execute!(package.name, package.version)
+
+ expect(found_package).to eq(package)
+ end
+
+ it 'ignores packages with same name but different version' do
+ create(:generic_package, project: project, name: package.name, version: '3.1.4')
+
+ found_package = finder.execute!(package.name, package.version)
+
+ expect(found_package).to eq(package)
+ end
+
+ it 'raises ActiveRecord::RecordNotFound if package is not found' do
+ expect { finder.execute!(package.name, '3.1.4') }
+ .to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+end
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index 8ae19757c25..2d712bd44ce 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -31,6 +31,10 @@ RSpec.describe ProjectsFinder, :do_not_mock_admin_mode do
let(:use_cte) { true }
let(:finder) { described_class.new(params: params.merge(use_cte: use_cte), current_user: current_user, project_ids_relation: project_ids_relation) }
+ before do
+ stub_feature_flags(project_finder_similarity_sort: false)
+ end
+
subject { finder.execute }
shared_examples 'ProjectFinder#execute examples' do
@@ -304,9 +308,33 @@ RSpec.describe ProjectsFinder, :do_not_mock_admin_mode do
end
describe 'sorting' do
- let(:params) { { sort: 'name_asc' } }
+ context 'when sorting by a field' do
+ let(:params) { { sort: 'name_asc' } }
+
+ it { is_expected.to eq([internal_project, public_project]) }
+ end
- it { is_expected.to eq([internal_project, public_project]) }
+ context 'when sorting by similarity' do
+ let(:params) { { sort: 'similarity', search: 'pro' } }
+
+ let_it_be(:internal_project2) do
+ create(:project, :internal, group: group, name: 'projA', path: 'projA')
+ end
+
+ let_it_be(:internal_project3) do
+ create(:project, :internal, group: group, name: 'projABC', path: 'projABC')
+ end
+
+ let_it_be(:internal_project4) do
+ create(:project, :internal, group: group, name: 'projAB', path: 'projAB')
+ end
+
+ before do
+ stub_feature_flags(project_finder_similarity_sort: true)
+ end
+
+ it { is_expected.to eq([internal_project2, internal_project4, internal_project3]) }
+ end
end
describe 'with admin user' do
diff --git a/spec/finders/releases_finder_spec.rb b/spec/finders/releases_finder_spec.rb
index e8049a9eb81..94b6fe53daa 100644
--- a/spec/finders/releases_finder_spec.rb
+++ b/spec/finders/releases_finder_spec.rb
@@ -77,6 +77,34 @@ RSpec.describe ReleasesFinder do
expect(subject).to eq([v1_1_0, v1_0_0])
end
+ context 'with sorting parameters' do
+ before do
+ v1_1_0.update_attribute(:created_at, 3.days.ago)
+ end
+
+ context 'by default is released_at in descending order' do
+ it { is_expected.to eq([v1_1_0, v1_0_0]) }
+ end
+
+ context 'released_at in ascending order' do
+ let(:params) { { sort: 'asc' } }
+
+ it { is_expected.to eq([v1_0_0, v1_1_0]) }
+ end
+
+ context 'order by created_at in descending order' do
+ let(:params) { { order_by: 'created_at' } }
+
+ it { is_expected.to eq([v1_0_0, v1_1_0]) }
+ end
+
+ context 'order by created_at in ascending order' do
+ let(:params) { { order_by: 'created_at', sort: 'asc' } }
+
+ it { is_expected.to eq([v1_1_0, v1_0_0]) }
+ end
+ end
+
it_behaves_like 'preload'
it_behaves_like 'when tag is nil'
it_behaves_like 'when a tag parameter is passed'
diff --git a/spec/fixtures/api/schemas/entities/group_group_link.json b/spec/fixtures/api/schemas/entities/group_group_link.json
index 4c9aae140d2..bf94bbb3ce4 100644
--- a/spec/fixtures/api/schemas/entities/group_group_link.json
+++ b/spec/fixtures/api/schemas/entities/group_group_link.json
@@ -1,10 +1,20 @@
{
"type": "object",
- "required": ["id", "created_at", "expires_at", "access_level"],
+ "required": [
+ "id",
+ "created_at",
+ "expires_at",
+ "can_update",
+ "can_remove",
+ "access_level",
+ "valid_roles"
+ ],
"properties": {
"id": { "type": "integer" },
"created_at": { "type": "date-time" },
"expires_at": { "type": ["date-time", "null"] },
+ "can_update": { "type": "boolean" },
+ "can_remove": { "type": "boolean" },
"access_level": {
"type": "object",
"required": ["integer_value", "string_value"],
@@ -13,6 +23,7 @@
"string_value": { "type": "string" }
}
},
+ "valid_roles": { "type": "object" },
"shared_with_group": {
"type": "object",
"required": ["id", "name", "full_name", "full_path", "avatar_url", "web_url"],
diff --git a/spec/fixtures/api/schemas/entities/merge_request_basic.json b/spec/fixtures/api/schemas/entities/merge_request_basic.json
index 3c19528d71b..b061176f6a7 100644
--- a/spec/fixtures/api/schemas/entities/merge_request_basic.json
+++ b/spec/fixtures/api/schemas/entities/merge_request_basic.json
@@ -1,6 +1,7 @@
{
"type": "object",
"properties" : {
+ "title": { "type": "string" },
"state": { "type": "string" },
"merge_status": { "type": "string" },
"source_branch_exists": { "type": "boolean" },
diff --git a/spec/fixtures/api/schemas/entities/test_case.json b/spec/fixtures/api/schemas/entities/test_case.json
index 0dd3c5d472f..d731d7eed0a 100644
--- a/spec/fixtures/api/schemas/entities/test_case.json
+++ b/spec/fixtures/api/schemas/entities/test_case.json
@@ -8,6 +8,7 @@
"status": { "type": "string" },
"name": { "type": "string" },
"classname": { "type": "string" },
+ "file": { "type": ["string", "null"] },
"execution_time": { "type": "float" },
"system_output": { "type": ["string", "null"] },
"stack_trace": { "type": ["string", "null"] },
diff --git a/spec/fixtures/api/schemas/entities/trigger.json b/spec/fixtures/api/schemas/entities/trigger.json
new file mode 100644
index 00000000000..5c46142673f
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/trigger.json
@@ -0,0 +1,39 @@
+{
+ "type": "object",
+ "required": [
+ "description",
+ "owner",
+ "last_used",
+ "has_token_exposed",
+ "token",
+ "can_access_project"
+ ],
+ "properties": {
+ "description": {
+ "type": ["string", "null"]
+ },
+ "owner": {
+ "type": "object",
+ "$ref": "user.json"
+ },
+ "last_used": {
+ "type": ["datetime", "null"]
+ },
+ "token": {
+ "type": "string"
+ },
+ "has_token_exposed": {
+ "type": "boolean"
+ },
+ "can_access_project": {
+ "type": "boolean"
+ },
+ "edit_project_trigger_path": {
+ "type": "string"
+ },
+ "project_trigger_path": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/environment.json b/spec/fixtures/api/schemas/environment.json
index d1274bea817..7c49b269994 100644
--- a/spec/fixtures/api/schemas/environment.json
+++ b/spec/fixtures/api/schemas/environment.json
@@ -41,8 +41,11 @@
{ "type": "null" },
{ "$ref": "deployment.json" },
{
- "name": { "type": "string" },
- "build_path": { "type": "string" }
+ "type": "object",
+ "properties" : {
+ "name": { "type": "string" },
+ "build_path": { "type": "string" }
+ }
}
]
},
diff --git a/spec/fixtures/api/schemas/feature_flag.json b/spec/fixtures/api/schemas/feature_flag.json
new file mode 100644
index 00000000000..5f8cedc1132
--- /dev/null
+++ b/spec/fixtures/api/schemas/feature_flag.json
@@ -0,0 +1,23 @@
+{
+ "type": "object",
+ "required" : [
+ "id",
+ "name"
+ ],
+ "properties" : {
+ "id": { "type": "integer" },
+ "iid": { "type": ["integer", "null"] },
+ "version": { "type": "string" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "name": { "type": "string" },
+ "active": { "type": "boolean" },
+ "description": { "type": ["string", "null"] },
+ "edit_path": { "type": ["string", "null"] },
+ "update_path": { "type": ["string", "null"] },
+ "destroy_path": { "type": ["string", "null"] },
+ "scopes": { "type": "array", "items": { "$ref": "feature_flag_scope.json" } },
+ "strategies": { "type": "array", "items": { "$ref": "feature_flag_strategy.json" } }
+ },
+ "additionalProperties": false
+}
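The schema fixtures added above are consumed by the Ruby specs through the response-matching helpers. A minimal usage sketch, assuming the existing match_response_schema matcher and illustrative let-bindings (project, feature_flag) that are not part of this commit:

  # Hypothetical controller spec snippet validating a JSON response
  # against spec/fixtures/api/schemas/feature_flag.json.
  it 'matches the feature flag schema' do
    get :show, params: { namespace_id: project.namespace,
                         project_id: project,
                         iid: feature_flag.iid },
               format: :json

    expect(response).to have_gitlab_http_status(:ok)
    expect(response).to match_response_schema('feature_flag')
  end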
diff --git a/spec/fixtures/api/schemas/feature_flag_scope.json b/spec/fixtures/api/schemas/feature_flag_scope.json
new file mode 100644
index 00000000000..07c5eed532a
--- /dev/null
+++ b/spec/fixtures/api/schemas/feature_flag_scope.json
@@ -0,0 +1,18 @@
+{
+ "type": "object",
+ "required" : [
+ "id",
+ "environment_scope",
+ "active"
+ ],
+ "properties" : {
+ "id": { "type": "integer" },
+ "environment_scope": { "type": "string" },
+ "active": { "type": "boolean" },
+ "percentage": { "type": ["integer", "null"] },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "strategies": { "type": "array", "items": { "$ref": "feature_flag_strategy.json" } }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/feature_flag_strategy.json b/spec/fixtures/api/schemas/feature_flag_strategy.json
new file mode 100644
index 00000000000..5a2777dc8ea
--- /dev/null
+++ b/spec/fixtures/api/schemas/feature_flag_strategy.json
@@ -0,0 +1,13 @@
+{
+ "type": "object",
+ "required": [
+ "name"
+ ],
+ "properties": {
+ "name": { "type": "string" },
+ "parameters": {
+ "type": "object"
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/feature_flags.json b/spec/fixtures/api/schemas/feature_flags.json
new file mode 100644
index 00000000000..fc5e668c8b0
--- /dev/null
+++ b/spec/fixtures/api/schemas/feature_flags.json
@@ -0,0 +1,13 @@
+{
+ "required": ["feature_flags", "count"],
+ "feature_flags": { "type": "array", "items": { "$ref": "feature_flag.json" } },
+ "count": {
+ "type": "object",
+ "properties" : {
+ "all": { "type": "integer" },
+ "enabled": { "type": "integer" },
+ "disabled": { "type": "integer" }
+ },
+ "additionalProperties": false
+ }
+}
diff --git a/spec/fixtures/api/schemas/feature_flags_client_token.json b/spec/fixtures/api/schemas/feature_flags_client_token.json
new file mode 100644
index 00000000000..115db422d12
--- /dev/null
+++ b/spec/fixtures/api/schemas/feature_flags_client_token.json
@@ -0,0 +1,10 @@
+{
+ "type": "object",
+ "required" : [
+ "token"
+ ],
+ "properties" : {
+ "token": { "type": ["string"] }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/group_member.json b/spec/fixtures/api/schemas/group_member.json
index 035c862d229..3425108e46e 100644
--- a/spec/fixtures/api/schemas/group_member.json
+++ b/spec/fixtures/api/schemas/group_member.json
@@ -7,9 +7,9 @@
"access_level",
"requested_at",
"source",
+ "valid_roles",
"can_update",
- "can_remove",
- "can_override"
+ "can_remove"
],
"properties": {
"id": { "type": "integer" },
@@ -18,7 +18,6 @@
"requested_at": { "type": ["date-time", "null"] },
"can_update": { "type": "boolean" },
"can_remove": { "type": "boolean" },
- "can_override": { "type": "boolean" },
"access_level": {
"type": "object",
"required": ["integer_value", "string_value"],
@@ -36,6 +35,7 @@
"web_url": { "type": "string" }
}
},
+ "valid_roles": { "type": "object" },
"created_by": {
"type": "object",
"required": ["name", "web_url"],
@@ -62,7 +62,18 @@
"avatar_url": { "type": ["string", "null"] },
"web_url": { "type": "string" },
"blocked": { "type": "boolean" },
- "two_factor_enabled": { "type": "boolean" }
+ "two_factor_enabled": { "type": "boolean" },
+ "status": {
+ "type": "object",
+ "required": [
+ "emoji",
+ "message_html"
+ ],
+ "properties": {
+ "emoji": { "type": "string" },
+ "message_html": { "type": "string" }
+ }
+ }
}
},
"invite": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flag.json b/spec/fixtures/api/schemas/public_api/v4/feature_flag.json
new file mode 100644
index 00000000000..0f304e9ee73
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flag.json
@@ -0,0 +1,15 @@
+{
+ "type": "object",
+ "required": ["name"],
+ "properties": {
+ "name": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "active": {"type": "boolean" },
+ "version": { "type": "string" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "scopes": { "type": "array", "items": { "$ref": "feature_flag_scope.json" } },
+ "strategies": { "type": "array", "items": { "$ref": "operations/strategy.json" } }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json b/spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json
new file mode 100644
index 00000000000..a11ae5705cc
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json
@@ -0,0 +1,22 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required" : [
+ "name",
+ "id",
+ "environment_scope",
+ "active"
+ ],
+ "properties" : {
+ "name": { "type": "string" },
+ "id": { "type": "integer" },
+ "environment_scope": { "type": "string" },
+ "active": { "type": "boolean" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "strategies": { "type": "array", "items": { "$ref": "feature_flag_strategy.json" } }
+ },
+ "additionalProperties": false
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json b/spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json
new file mode 100644
index 00000000000..18402af482e
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json
@@ -0,0 +1,17 @@
+{
+ "type": "object",
+ "required": [
+ "id",
+ "environment_scope",
+ "active"
+ ],
+ "properties": {
+ "id": { "type": "integer" },
+ "environment_scope": { "type": "string" },
+ "active": { "type": "boolean" },
+ "created_at": { "type": "date" },
+ "updated_at": { "type": "date" },
+ "strategies": { "type": "array", "items": { "$ref": "feature_flag_strategy.json" } }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flag_scopes.json b/spec/fixtures/api/schemas/public_api/v4/feature_flag_scopes.json
new file mode 100644
index 00000000000..b1a7021db8b
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flag_scopes.json
@@ -0,0 +1,9 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "$ref": "./feature_flag_scope.json"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flag_strategy.json b/spec/fixtures/api/schemas/public_api/v4/feature_flag_strategy.json
new file mode 100644
index 00000000000..5a2777dc8ea
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flag_strategy.json
@@ -0,0 +1,13 @@
+{
+ "type": "object",
+ "required": [
+ "name"
+ ],
+ "properties": {
+ "name": { "type": "string" },
+ "parameters": {
+ "type": "object"
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flags.json b/spec/fixtures/api/schemas/public_api/v4/feature_flags.json
new file mode 100644
index 00000000000..c19df0443d9
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flags.json
@@ -0,0 +1,9 @@
+{
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "$ref": "./feature_flag.json"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/operations/scope.json b/spec/fixtures/api/schemas/public_api/v4/operations/scope.json
new file mode 100644
index 00000000000..e2b6d1ad6f1
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/operations/scope.json
@@ -0,0 +1,9 @@
+{
+ "type": "object",
+ "required": ["environment_scope"],
+ "properties": {
+ "id": { "type": "integer" },
+ "environment_scope": { "type": "string" }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/operations/strategy.json b/spec/fixtures/api/schemas/public_api/v4/operations/strategy.json
new file mode 100644
index 00000000000..f572b1a4f9b
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/operations/strategy.json
@@ -0,0 +1,14 @@
+{
+ "type": "object",
+ "required": [
+ "name",
+ "parameters"
+ ],
+ "properties": {
+ "id": { "type": "integer" },
+ "name": { "type": "string" },
+ "parameters": { "type": "object" },
+ "scopes": { "type": "array", "items": { "$ref": "scope.json" } }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/package.json b/spec/fixtures/api/schemas/public_api/v4/packages/package.json
index 757e5fd26b6..08909efd10c 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/package.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/package.json
@@ -11,6 +11,9 @@
"name": {
"type": "string"
},
+ "conan_package_name": {
+ "type": "string"
+ },
"version": {
"type": "string"
},
diff --git a/spec/fixtures/api/schemas/registry/repository.json b/spec/fixtures/api/schemas/registry/repository.json
index 1f84e787b19..18d2c68ac2f 100644
--- a/spec/fixtures/api/schemas/registry/repository.json
+++ b/spec/fixtures/api/schemas/registry/repository.json
@@ -20,6 +20,9 @@
"created_at": {
"type": "date-time"
},
+ "cleanup_policy_started_at": {
+ "type": "date-time"
+ },
"tags_path": {
"type": "string"
},
diff --git a/spec/fixtures/api/schemas/unleash/unleash.json b/spec/fixtures/api/schemas/unleash/unleash.json
new file mode 100644
index 00000000000..6eaf316bb11
--- /dev/null
+++ b/spec/fixtures/api/schemas/unleash/unleash.json
@@ -0,0 +1,20 @@
+{
+ "additionalProperties": false,
+ "properties": {
+ "features": {
+ "items": {
+ "$ref": "unleash_feature.json"
+ },
+ "minItems": 0,
+ "type": "array"
+ },
+ "version": {
+ "type": "integer"
+ }
+ },
+ "required": [
+ "version",
+ "features"
+ ],
+ "type": "object"
+}
diff --git a/spec/fixtures/api/schemas/unleash/unleash_feature.json b/spec/fixtures/api/schemas/unleash/unleash_feature.json
new file mode 100644
index 00000000000..71d375a5371
--- /dev/null
+++ b/spec/fixtures/api/schemas/unleash/unleash_feature.json
@@ -0,0 +1,27 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "enabled",
+ "strategies"
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "enabled": {
+ "type": "boolean"
+ },
+ "description": {
+ "type": "string"
+ },
+ "strategies": {
+ "items": {
+ "$ref": "unleash_strategy.json"
+ },
+ "minItems": 1,
+ "type": "array"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/unleash/unleash_strategy.json b/spec/fixtures/api/schemas/unleash/unleash_strategy.json
new file mode 100644
index 00000000000..7b48038ad15
--- /dev/null
+++ b/spec/fixtures/api/schemas/unleash/unleash_strategy.json
@@ -0,0 +1,24 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name"
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "parameters": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "groupId": {
+ "type": "string"
+ },
+ "percentage": {
+ "type": "integer"
+ }
+ }
+ }
+ }
+}
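The unleash/*.json schemas above describe the payload the feature flags API serves to Unleash clients. A hedged request-spec sketch, where the endpoint path, params, and client_token are illustrative assumptions rather than values taken from this commit:

  # Illustrative only: validates the response body against
  # spec/fixtures/api/schemas/unleash/unleash.json (the matcher resolves
  # the path relative to the schemas directory).
  it 'returns a payload matching the Unleash schema' do
    get api("/feature_flags/unleash/#{project.id}/features"),
        params: { instance_id: client_token, app_name: 'production' }

    expect(response).to have_gitlab_http_status(:ok)
    expect(response).to match_response_schema('unleash/unleash')
  end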
diff --git a/spec/fixtures/invalid_manifest.xml b/spec/fixtures/invalid_manifest.xml
new file mode 100644
index 00000000000..5357329784c
--- /dev/null
+++ b/spec/fixtures/invalid_manifest.xml
@@ -0,0 +1,4 @@
+<manifest>
+ <remote review="invalid-url" />
+ <project name="platform/build"/>
+</manifest>
diff --git a/spec/fixtures/lib/backup/design_repo.bundle b/spec/fixtures/lib/backup/design_repo.bundle
new file mode 100644
index 00000000000..3ed4ad6ab8b
--- /dev/null
+++ b/spec/fixtures/lib/backup/design_repo.bundle
Binary files differ
diff --git a/spec/fixtures/lib/backup/personal_snippet_repo.bundle b/spec/fixtures/lib/backup/personal_snippet_repo.bundle
new file mode 100644
index 00000000000..452cf6a19fe
--- /dev/null
+++ b/spec/fixtures/lib/backup/personal_snippet_repo.bundle
Binary files differ
diff --git a/spec/fixtures/lib/backup/project_repo.bundle b/spec/fixtures/lib/backup/project_repo.bundle
new file mode 100644
index 00000000000..44d4fc56d51
--- /dev/null
+++ b/spec/fixtures/lib/backup/project_repo.bundle
Binary files differ
diff --git a/spec/fixtures/lib/backup/project_snippet_repo.bundle b/spec/fixtures/lib/backup/project_snippet_repo.bundle
new file mode 100644
index 00000000000..c05f8ec9495
--- /dev/null
+++ b/spec/fixtures/lib/backup/project_snippet_repo.bundle
Binary files differ
diff --git a/spec/fixtures/lib/backup/wiki_repo.bundle b/spec/fixtures/lib/backup/wiki_repo.bundle
new file mode 100644
index 00000000000..bcc08dcbe8e
--- /dev/null
+++ b/spec/fixtures/lib/backup/wiki_repo.bundle
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project.json b/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project.json
new file mode 100644
index 00000000000..12136c6df3b
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project.json
@@ -0,0 +1 @@
+{"description":"Nisi et repellendus ut enim quo accusamus vel magnam.","import_type":"gitlab_project","creator_id":2147483547,"visibility_level":10,"archived":false,"hooks":[]}
diff --git a/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/issues.ndjson b/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/issues.ndjson
new file mode 100644
index 00000000000..efe0d34bcb1
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/issues.ndjson
@@ -0,0 +1,10 @@
+{"id":40,"title":"Voluptatem","author_id":22,"project_id":5,"created_at":"2016-06-14T15:02:08.340Z","updated_at":"2016-06-14T15:02:47.967Z","position":0,"branch_name":null,"description":"Aliquam enim illo et possimus.","state":"opened","iid":10,"updated_by_id":null,"confidential":false,"due_date":"2020-08-07","moved_to_id":null,"test_ee_field":"test","issue_assignees":[{"user_id":1,"issue_id":40},{"user_id":15,"issue_id":40},{"user_id":16,"issue_id":40},{"user_id":16,"issue_id":40},{"user_id":6,"issue_id":40}],"award_emoji":[{"id":1,"name":"musical_keyboard","user_id":1,"awardable_type":"Issue","awardable_id":40,"created_at":"2020-01-07T11:55:22.234Z","updated_at":"2020-01-07T11:55:22.234Z"}],"zoom_meetings":[{"id":1,"project_id":5,"issue_id":40,"url":"https://zoom.us/j/123456789","issue_status":1,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z"}],"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]},"label_links":[{"id":2,"label_id":2,"target_id":40,"target_type":"Issue","created_at":"2016-07-22T08:57:02.840Z","updated_at":"2016-07-22T08:57:02.840Z","label":{"id":2,"title":"test2","color":"#428bca","project_id":8,"created_at":"2016-07-22T08:55:44.161Z","updated_at":"2016-07-22T08:55:44.161Z","template":false,"description":"","type":"ProjectLabel"}},{"id":3,"label_id":3,"target_id":40,"target_type":"Issue","created_at":"2016-07-22T08:57:02.841Z","updated_at":"2016-07-22T08:57:02.841Z","label":{"id":3,"title":"test3","color":"#428bca","group_id":8,"created_at":"2016-07-22T08:55:44.161Z","updated_at":"2016-07-22T08:55:44.161Z","template":false,"description":"","project_id":null,"type":"GroupLabel","priorities":[{"id":1,"project_id":5,"label_id":1,"priority":1,"created_at":"2016-10-18T09:35:43.338Z","updated_at":"2016-10-18T09:35:43.338Z"}]}}],"notes":[{"id":351,"note":"Quo reprehenderit aliquam qui dicta impedit cupiditate eligendi.","note_html":"<p>something else entirely</p>","cached_markdown_version":917504,"noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:47.770Z","updated_at":"2016-06-14T15:02:47.770Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":40,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[],"award_emoji":[{"id":1,"name":"clapper","user_id":1,"awardable_type":"Note","awardable_id":351,"created_at":"2020-01-07T11:55:22.234Z","updated_at":"2020-01-07T11:55:22.234Z"}]},{"id":352,"note":"Est reprehenderit quas aut aspernatur autem recusandae voluptatem.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:47.795Z","updated_at":"2016-06-14T15:02:47.795Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":40,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":353,"note":"Perspiciatis suscipit voluptates in eius nihil.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:47.823Z","updated_at":"2016-06-14T15:02:47.823Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":40,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 
0"},"events":[]},{"id":354,"note":"Aut vel voluptas corrupti nisi provident laboriosam magnam aut.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:47.850Z","updated_at":"2016-06-14T15:02:47.850Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":40,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":355,"note":"Officia dolore consequatur in saepe cum magni.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:47.876Z","updated_at":"2016-06-14T15:02:47.876Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":40,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":356,"note":"Cum ipsum rem voluptas eaque et ea.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:47.908Z","updated_at":"2016-06-14T15:02:47.908Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":40,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":357,"note":"Recusandae excepturi asperiores suscipit autem nostrum.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:47.937Z","updated_at":"2016-06-14T15:02:47.937Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":40,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":358,"note":"Et hic est id similique et non nesciunt voluptate.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:47.965Z","updated_at":"2016-06-14T15:02:47.965Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":40,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"resource_label_events":[{"id":244,"action":"remove","issue_id":40,"merge_request_id":null,"label_id":2,"user_id":1,"created_at":"2018-08-28T08:24:00.494Z","label":{"id":2,"title":"test2","color":"#428bca","project_id":8,"created_at":"2016-07-22T08:55:44.161Z","updated_at":"2016-07-22T08:55:44.161Z","template":false,"description":"","type":"ProjectLabel"}}],"sentry_issue":{"id":1,"issue_id":40,"sentry_issue_identifier":1234567891}}
+{"id":39,"title":"Issue without assignees","author_id":22,"project_id":5,"created_at":"2016-06-14T15:02:08.233Z","updated_at":"2016-06-14T15:02:48.194Z","position":0,"branch_name":null,"description":"Voluptate vel reprehenderit facilis omnis voluptas magnam tenetur.","state":"opened","iid":9,"updated_by_id":null,"confidential":false,"due_date":"2020-08-14","moved_to_id":null,"issue_assignees":[],"milestone":{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":null,"created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]},"notes":[{"id":359,"note":"Quo eius velit quia et id quam.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:48.009Z","updated_at":"2016-06-14T15:02:48.009Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":360,"note":"Nulla commodi ratione cumque id autem.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:48.032Z","updated_at":"2016-06-14T15:02:48.032Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":361,"note":"Illum non ea sed dolores corrupti.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:48.056Z","updated_at":"2016-06-14T15:02:48.056Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":362,"note":"Facere dolores ipsum dolorum maiores omnis occaecati ab.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:48.082Z","updated_at":"2016-06-14T15:02:48.082Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":363,"note":"Quod laudantium similique sint aut est ducimus.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:48.113Z","updated_at":"2016-06-14T15:02:48.113Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":364,"note":"Aut omnis eos esse incidunt vero reiciendis.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:48.139Z","updated_at":"2016-06-14T15:02:48.139Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":365,"note":"Beatae dolore et doloremque asperiores sunt.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:48.162Z","updated_at":"2016-06-14T15:02:48.162Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":366,"note":"Doloribus ipsam ex delectus rerum libero recusandae modi 
repellendus.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:48.192Z","updated_at":"2016-06-14T15:02:48.192Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":39,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":38,"title":"Quasi adipisci non cupiditate dolorem quo qui earum sed.","author_id":6,"project_id":5,"created_at":"2016-06-14T15:02:08.154Z","updated_at":"2016-06-14T15:02:48.614Z","position":0,"branch_name":null,"description":"Ea recusandae neque autem tempora.","state":"closed","iid":8,"updated_by_id":null,"confidential":false,"due_date":"2020-08-21","moved_to_id":null,"label_links":[{"id":99,"label_id":2,"target_id":38,"target_type":"Issue","created_at":"2016-07-22T08:57:02.840Z","updated_at":"2016-07-22T08:57:02.840Z","label":{"id":2,"title":"test2","color":"#428bca","project_id":8,"created_at":"2016-07-22T08:55:44.161Z","updated_at":"2016-07-22T08:55:44.161Z","template":false,"description":"","type":"ProjectLabel"}}],"notes":[{"id":367,"note":"Accusantium fugiat et eaque quisquam esse corporis.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:48.235Z","updated_at":"2016-06-14T15:02:48.235Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":38,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":368,"note":"Ea labore eum nam qui laboriosam.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:48.261Z","updated_at":"2016-06-14T15:02:48.261Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":38,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":369,"note":"Accusantium quis sed molestiae et.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:48.294Z","updated_at":"2016-06-14T15:02:48.294Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":38,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":370,"note":"Corporis numquam a voluptatem pariatur asperiores dolorem delectus autem.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:48.523Z","updated_at":"2016-06-14T15:02:48.523Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":38,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":371,"note":"Ea accusantium maxime voluptas rerum.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:48.546Z","updated_at":"2016-06-14T15:02:48.546Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":38,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":372,"note":"Pariatur iusto et et excepturi similique ipsam eum.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:48.569Z","updated_at":"2016-06-14T15:02:48.569Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":38,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":373,"note":"Aliquam et culpa officia iste eius.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:48.591Z","updated_at":"2016-06-14T15:02:48.591Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":38,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":374,"note":"Ab id velit id unde 
laborum.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:48.613Z","updated_at":"2016-06-14T15:02:48.613Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":38,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":37,"title":"Cupiditate quo aut ducimus minima molestiae vero numquam possimus.","author_id":20,"project_id":5,"created_at":"2016-06-14T15:02:08.051Z","updated_at":"2016-06-14T15:02:48.854Z","position":0,"branch_name":null,"description":"Maiores architecto quos in dolorem.","state":"opened","iid":7,"updated_by_id":null,"confidential":false,"due_date":null,"moved_to_id":null,"notes":[{"id":375,"note":"Quasi fugit qui sed eligendi aut quia.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:48.647Z","updated_at":"2016-06-14T15:02:48.647Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":37,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":376,"note":"Esse nesciunt voluptatem ex vero est consequatur.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:48.674Z","updated_at":"2016-06-14T15:02:48.674Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":37,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":377,"note":"Similique qui quas non aut et velit sequi in.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:48.696Z","updated_at":"2016-06-14T15:02:48.696Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":37,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":378,"note":"Eveniet ut cupiditate repellendus numquam in esse eius.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:48.720Z","updated_at":"2016-06-14T15:02:48.720Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":37,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":379,"note":"Velit est dolorem adipisci rerum sed iure.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:48.755Z","updated_at":"2016-06-14T15:02:48.755Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":37,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":380,"note":"Voluptatem ullam ab ut illo ut quo.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:48.793Z","updated_at":"2016-06-14T15:02:48.793Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":37,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":381,"note":"Voluptatem impedit beatae quasi ipsa earum consectetur.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:48.823Z","updated_at":"2016-06-14T15:02:48.823Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":37,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":382,"note":"Nihil officiis eaque incidunt sunt voluptatum excepturi.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:48.852Z","updated_at":"2016-06-14T15:02:48.852Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":37,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":36,"title":"Necessitatibus dolor est enim quia rem suscipit quidem voluptas ullam.","author_id":16,"project_id":5,"created_at":"2016-06-14T15:02:07.958Z","updated_at":"2016-06-14T15:02:49.044Z","position":0,"branch_name":null,"description":"Ut aut ut et tenetur velit aut id modi.","state":"opened","iid":6,"updated_by_id":null,"confidential":false,"due_date":null,"moved_to_id":null,"notes":[{"id":383,"note":"Excepturi deleniti sunt rerum nesciunt vero fugiat possimus.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:48.885Z","updated_at":"2016-06-14T15:02:48.885Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":384,"note":"Et est nemo sed nam sed.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:48.910Z","updated_at":"2016-06-14T15:02:48.910Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":385,"note":"Animi mollitia nulla facere amet aut quaerat.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:48.934Z","updated_at":"2016-06-14T15:02:48.934Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":386,"note":"Excepturi id voluptas ut odio officiis omnis.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:48.955Z","updated_at":"2016-06-14T15:02:48.955Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":387,"note":"Molestiae labore officiis magni et eligendi quasi maxime.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:48.978Z","updated_at":"2016-06-14T15:02:48.978Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":388,"note":"Officia tenetur praesentium rem nam non.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:49.001Z","updated_at":"2016-06-14T15:02:49.001Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":389,"note":"Et et et molestiae reprehenderit.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:49.022Z","updated_at":"2016-06-14T15:02:49.022Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":390,"note":"Aperiam in consequatur est sunt cum quia.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:49.043Z","updated_at":"2016-06-14T15:02:49.043Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":36,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":35,"title":"Repellat praesentium deserunt maxime incidunt harum porro qui.","author_id":20,"project_id":5,"created_at":"2016-06-14T15:02:07.832Z","updated_at":"2016-06-14T15:02:49.226Z","position":0,"branch_name":null,"description":"Dicta nisi nihil non ipsa velit.","state":"closed","iid":5,"updated_by_id":null,"confidential":false,"due_date":null,"moved_to_id":null,"notes":[{"id":391,"note":"Qui magnam et assumenda quod id dicta necessitatibus.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:49.075Z","updated_at":"2016-06-14T15:02:49.075Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":35,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":392,"note":"Consectetur deserunt possimus dolor est odio.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:49.095Z","updated_at":"2016-06-14T15:02:49.095Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":35,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":393,"note":"Labore nisi quo cumque voluptas consequatur aut qui.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:49.117Z","updated_at":"2016-06-14T15:02:49.117Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":35,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":394,"note":"Et totam facilis voluptas et enim.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:49.138Z","updated_at":"2016-06-14T15:02:49.138Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":35,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":395,"note":"Ratione sint pariatur sed omnis eligendi quo libero exercitationem.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:49.160Z","updated_at":"2016-06-14T15:02:49.160Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":35,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":396,"note":"Iure hic autem id voluptatem.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:49.182Z","updated_at":"2016-06-14T15:02:49.182Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":35,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":397,"note":"Excepturi eum laboriosam delectus repellendus odio nisi et voluptatem.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:49.205Z","updated_at":"2016-06-14T15:02:49.205Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":35,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":398,"note":"Ut quis ex soluta consequatur et blanditiis.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:49.225Z","updated_at":"2016-06-14T15:02:49.225Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":35,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":34,"title":"Ullam expedita deserunt libero consequatur quia dolor harum perferendis facere quidem.","author_id":1,"project_id":5,"created_at":"2016-06-14T15:02:07.717Z","updated_at":"2016-06-14T15:02:49.416Z","position":0,"branch_name":null,"description":"Ut et explicabo vel voluptatem consequuntur ut sed.","state":"closed","iid":4,"updated_by_id":null,"confidential":false,"due_date":null,"moved_to_id":null,"notes":[{"id":399,"note":"Dolor iste tempora tenetur non vitae maiores voluptatibus.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:49.256Z","updated_at":"2016-06-14T15:02:49.256Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":34,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":400,"note":"Aut sit quidem qui adipisci maxime excepturi iusto.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:49.284Z","updated_at":"2016-06-14T15:02:49.284Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":34,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":401,"note":"Et a necessitatibus autem quidem animi sunt voluptatum rerum.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:49.305Z","updated_at":"2016-06-14T15:02:49.305Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":34,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":402,"note":"Esse laboriosam quo voluptatem quis molestiae.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:49.328Z","updated_at":"2016-06-14T15:02:49.328Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":34,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":403,"note":"Nemo magnam distinctio est ut voluptate ea.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:49.350Z","updated_at":"2016-06-14T15:02:49.350Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":34,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":404,"note":"Omnis sed rerum neque rerum quae quam nulla officiis.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:49.372Z","updated_at":"2016-06-14T15:02:49.372Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":34,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":405,"note":"Quo soluta dolorem vitae ad consequatur qui aut dicta.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:49.394Z","updated_at":"2016-06-14T15:02:49.394Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":34,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":406,"note":"Magni minus est aut aut totam ut.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:49.414Z","updated_at":"2016-06-14T15:02:49.414Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":34,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":33,"title":"Numquam accusamus eos iste exercitationem magni non inventore.","author_id":26,"project_id":5,"created_at":"2016-06-14T15:02:07.611Z","updated_at":"2016-06-14T15:02:49.661Z","position":0,"branch_name":null,"description":"Non asperiores velit accusantium voluptate.","state":"closed","iid":3,"updated_by_id":null,"confidential":false,"due_date":null,"moved_to_id":null,"notes":[{"id":407,"note":"Quod ea et possimus architecto.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:49.450Z","updated_at":"2016-06-14T15:02:49.450Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":33,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":408,"note":"Reiciendis est et unde perferendis dicta ut praesentium quasi.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:49.503Z","updated_at":"2016-06-14T15:02:49.503Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":33,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":409,"note":"Magni quia odio blanditiis pariatur voluptas.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:49.527Z","updated_at":"2016-06-14T15:02:49.527Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":33,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":410,"note":"Enim quam ut et et et.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:49.551Z","updated_at":"2016-06-14T15:02:49.551Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":33,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":411,"note":"Fugit voluptatem ratione maxime expedita.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:49.578Z","updated_at":"2016-06-14T15:02:49.578Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":33,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":412,"note":"Voluptatem enim aut ipsa et et ducimus.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:49.604Z","updated_at":"2016-06-14T15:02:49.604Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":33,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":413,"note":"Quia repellat fugiat consectetur quidem.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:49.631Z","updated_at":"2016-06-14T15:02:49.631Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":33,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":414,"note":"Corporis ipsum et ea necessitatibus quod assumenda repudiandae quam.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:49.659Z","updated_at":"2016-06-14T15:02:49.659Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":33,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":32,"title":"Necessitatibus magnam qui at velit consequatur perspiciatis.","author_id":15,"project_id":5,"created_at":"2016-06-14T15:02:07.431Z","updated_at":"2016-06-14T15:02:49.884Z","position":0,"branch_name":null,"description":"Molestiae corporis magnam et fugit aliquid nulla quia.","state":"closed","iid":2,"updated_by_id":null,"confidential":false,"due_date":null,"moved_to_id":null,"notes":[{"id":415,"note":"Nemo consequatur sed blanditiis qui id iure dolores.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:49.694Z","updated_at":"2016-06-14T15:02:49.694Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":32,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":416,"note":"Voluptas ab accusantium dicta in.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:49.718Z","updated_at":"2016-06-14T15:02:49.718Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":32,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":417,"note":"Esse odit qui a et eum ducimus.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:49.741Z","updated_at":"2016-06-14T15:02:49.741Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":32,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":418,"note":"Sequi dolor doloribus ratione placeat repellendus.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:49.767Z","updated_at":"2016-06-14T15:02:49.767Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":32,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":419,"note":"Quae aspernatur rem est similique.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:49.796Z","updated_at":"2016-06-14T15:02:49.796Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":32,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":420,"note":"Voluptate omnis et id rerum non nesciunt laudantium assumenda.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:49.825Z","updated_at":"2016-06-14T15:02:49.825Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":32,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":421,"note":"Quia enim ab et eligendi.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:49.853Z","updated_at":"2016-06-14T15:02:49.853Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":32,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":422,"note":"In fugiat rerum voluptas quas officia.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:49.881Z","updated_at":"2016-06-14T15:02:49.881Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":32,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
+{"id":31,"title":"issue_with_timelogs","author_id":16,"project_id":5,"created_at":"2016-06-14T15:02:07.280Z","updated_at":"2016-06-14T15:02:50.134Z","position":0,"branch_name":null,"description":"Quod ad architecto qui est sed quia.","state":"closed","iid":1,"updated_by_id":null,"confidential":false,"due_date":null,"moved_to_id":null,"timelogs":[{"id":1,"time_spent":72000,"user_id":1,"created_at":"2019-12-27T09:15:22.302Z","updated_at":"2019-12-27T09:15:22.302Z","spent_at":"2019-12-27T00:00:00.000Z"}],"notes":[{"id":423,"note":"A mollitia qui iste consequatur eaque iure omnis sunt.","noteable_type":"Issue","author_id":26,"created_at":"2016-06-14T15:02:49.933Z","updated_at":"2016-06-14T15:02:49.933Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":31,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":424,"note":"Eveniet est et blanditiis sequi alias.","noteable_type":"Issue","author_id":25,"created_at":"2016-06-14T15:02:49.965Z","updated_at":"2016-06-14T15:02:49.965Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":31,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":425,"note":"Commodi tempore voluptas doloremque est.","noteable_type":"Issue","author_id":22,"created_at":"2016-06-14T15:02:49.996Z","updated_at":"2016-06-14T15:02:49.996Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":31,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":426,"note":"Quo libero impedit odio debitis rerum aspernatur.","noteable_type":"Issue","author_id":20,"created_at":"2016-06-14T15:02:50.024Z","updated_at":"2016-06-14T15:02:50.024Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":31,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":427,"note":"Dolorem voluptatem qui labore deserunt.","noteable_type":"Issue","author_id":16,"created_at":"2016-06-14T15:02:50.049Z","updated_at":"2016-06-14T15:02:50.049Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":31,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":428,"note":"Est blanditiis laboriosam enim ipsam.","noteable_type":"Issue","author_id":15,"created_at":"2016-06-14T15:02:50.077Z","updated_at":"2016-06-14T15:02:50.077Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":31,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":429,"note":"Et in voluptatem animi dolorem eos.","noteable_type":"Issue","author_id":6,"created_at":"2016-06-14T15:02:50.107Z","updated_at":"2016-06-14T15:02:50.107Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":31,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":430,"note":"Unde culpa voluptate qui sint quos.","noteable_type":"Issue","author_id":1,"created_at":"2016-06-14T15:02:50.132Z","updated_at":"2016-06-14T15:02:50.132Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":31,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}]}
diff --git a/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/labels.ndjson b/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/labels.ndjson
new file mode 100644
index 00000000000..c36b6970e83
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/labels.ndjson
@@ -0,0 +1,2 @@
+{"id":2,"title":"test2","color":"#428bca","project_id":8,"created_at":"2016-07-22T08:55:44.161Z","updated_at":"2016-07-22T08:55:44.161Z","template":false,"description":"","type":"ProjectLabel","priorities":[]}
+{"id":3,"title":"test3","color":"#428bca","group_id":8,"created_at":"2016-07-22T08:55:44.161Z","updated_at":"2016-07-22T08:55:44.161Z","template":false,"description":"","project_id":null,"type":"GroupLabel","priorities":[{"id":1,"project_id":5,"label_id":1,"priority":1,"created_at":"2016-10-18T09:35:43.338Z","updated_at":"2016-10-18T09:35:43.338Z"}]}
diff --git a/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/milestones.ndjson b/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/milestones.ndjson
new file mode 100644
index 00000000000..ebb8203ece3
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/import_export/sample_data/tree/project/milestones.ndjson
@@ -0,0 +1,3 @@
+{"id":1,"title":"test milestone","project_id":8,"description":"test milestone","due_date":"2020-08-07","created_at":"2016-06-14T15:02:04.415Z","updated_at":"2016-06-14T15:02:04.415Z","state":"active","iid":1,"events":[{"id":487,"target_type":"Milestone","target_id":1,"project_id":46,"created_at":"2016-06-14T15:02:04.418Z","updated_at":"2016-06-14T15:02:04.418Z","action":1,"author_id":18}]}
+{"id":20,"title":"v4.0","project_id":5,"description":"Totam quam laborum id magnam natus eaque aspernatur.","due_date":"2020-08-14","created_at":"2016-06-14T15:02:04.590Z","updated_at":"2016-06-14T15:02:04.590Z","state":"active","iid":5,"events":[{"id":240,"target_type":"Milestone","target_id":20,"project_id":36,"created_at":"2016-06-14T15:02:04.593Z","updated_at":"2016-06-14T15:02:04.593Z","action":1,"author_id":1},{"id":60,"target_type":"Milestone","target_id":20,"project_id":5,"created_at":"2016-06-14T15:02:04.593Z","updated_at":"2016-06-14T15:02:04.593Z","action":1,"author_id":20}]}
+{"id":19,"title":"v3.0","project_id":5,"description":"Rerum at autem exercitationem ea voluptates harum quam placeat.","due_date":"2020-08-21","created_at":"2016-06-14T15:02:04.583Z","updated_at":"2016-06-14T15:02:04.583Z","state":"active","iid":4,"events":[{"id":241,"target_type":"Milestone","target_id":19,"project_id":36,"created_at":"2016-06-14T15:02:04.585Z","updated_at":"2016-06-14T15:02:04.585Z","action":1,"author_id":1},{"id":59,"target_type":"Milestone","target_id":19,"project_id":5,"created_at":"2016-06-14T15:02:04.585Z","updated_at":"2016-06-14T15:02:04.585Z","action":1,"author_id":25}]}
diff --git a/spec/fixtures/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb b/spec/fixtures/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb
new file mode 100644
index 00000000000..c6cac69265a
--- /dev/null
+++ b/spec/fixtures/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb
@@ -0,0 +1 @@
+empty
diff --git a/spec/fixtures/packages/generic/myfile.tar.gz b/spec/fixtures/packages/generic/myfile.tar.gz
new file mode 100644
index 00000000000..c71b1fef23d
--- /dev/null
+++ b/spec/fixtures/packages/generic/myfile.tar.gz
Binary files differ
diff --git a/spec/frontend/alert_handler_spec.js b/spec/frontend/alert_handler_spec.js
index ba2f4f24aa5..0cee28112a8 100644
--- a/spec/frontend/alert_handler_spec.js
+++ b/spec/frontend/alert_handler_spec.js
@@ -2,18 +2,26 @@ import { setHTMLFixture } from 'helpers/fixtures';
import initAlertHandler from '~/alert_handler';
describe('Alert Handler', () => {
- const ALERT_SELECTOR = 'gl-alert';
- const CLOSE_SELECTOR = 'gl-alert-dismiss';
- const ALERT_HTML = `<div class="${ALERT_SELECTOR}"><button class="${CLOSE_SELECTOR}">Dismiss</button></div>`;
+ const ALERT_CLASS = 'gl-alert';
+ const BANNER_CLASS = 'gl-banner';
+ const DISMISS_CLASS = 'gl-alert-dismiss';
+ const DISMISS_LABEL = 'Dismiss';
- const findFirstAlert = () => document.querySelector(`.${ALERT_SELECTOR}`);
- const findAllAlerts = () => document.querySelectorAll(`.${ALERT_SELECTOR}`);
- const findFirstCloseButton = () => document.querySelector(`.${CLOSE_SELECTOR}`);
+ const generateHtml = parentClass =>
+ `<div class="${parentClass}">
+ <button aria-label="${DISMISS_LABEL}">Dismiss</button>
+ </div>`;
+
+ const findFirstAlert = () => document.querySelector(`.${ALERT_CLASS}`);
+ const findFirstBanner = () => document.querySelector(`.${BANNER_CLASS}`);
+ const findAllAlerts = () => document.querySelectorAll(`.${ALERT_CLASS}`);
+ const findFirstDismissButton = () => document.querySelector(`[aria-label="${DISMISS_LABEL}"]`);
+ const findFirstDismissButtonByClass = () => document.querySelector(`.${DISMISS_CLASS}`);
describe('initAlertHandler', () => {
describe('with one alert', () => {
beforeEach(() => {
- setHTMLFixture(ALERT_HTML);
+ setHTMLFixture(generateHtml(ALERT_CLASS));
initAlertHandler();
});
@@ -22,14 +30,14 @@ describe('Alert Handler', () => {
});
it('should dismiss the alert on click', () => {
- findFirstCloseButton().click();
+ findFirstDismissButton().click();
expect(findFirstAlert()).not.toExist();
});
});
describe('with two alerts', () => {
beforeEach(() => {
- setHTMLFixture(ALERT_HTML + ALERT_HTML);
+ setHTMLFixture(generateHtml(ALERT_CLASS) + generateHtml(ALERT_CLASS));
initAlertHandler();
});
@@ -38,9 +46,46 @@ describe('Alert Handler', () => {
});
it('should dismiss only one alert on click', () => {
- findFirstCloseButton().click();
+ findFirstDismissButton().click();
expect(findAllAlerts()).toHaveLength(1);
});
});
+
+ describe('with a dismissible banner', () => {
+ beforeEach(() => {
+ setHTMLFixture(generateHtml(BANNER_CLASS));
+ initAlertHandler();
+ });
+
+ it('should render the banner', () => {
+ expect(findFirstBanner()).toExist();
+ });
+
+ it('should dismiss the banner on click', () => {
+ findFirstDismissButton().click();
+ expect(findFirstBanner()).not.toExist();
+ });
+ });
+
+ // Dismiss buttons *should* have the correct aria labels, but some of them won't
+ // because legacy code isn't always a11y compliant.
+ // This tests that the fallback for the incorrectly labelled buttons works.
+ describe('with a mislabelled dismiss button', () => {
+ beforeEach(() => {
+ setHTMLFixture(`<div class="${ALERT_CLASS}">
+ <button class="${DISMISS_CLASS}">Dismiss</button>
+ </div>`);
+ initAlertHandler();
+ });
+
+    it('should render the alert', () => {
+ expect(findFirstAlert()).toExist();
+ });
+
+    it('should dismiss the alert on click', () => {
+ findFirstDismissButtonByClass().click();
+ expect(findFirstAlert()).not.toExist();
+ });
+ });
});
});
diff --git a/spec/frontend/alert_management/components/alert_details_spec.js b/spec/frontend/alert_management/components/alert_details_spec.js
index 8aa26dbca3b..f3ebdfc5cc2 100644
--- a/spec/frontend/alert_management/components/alert_details_spec.js
+++ b/spec/frontend/alert_management/components/alert_details_spec.js
@@ -1,54 +1,76 @@
-import { mount, shallowMount } from '@vue/test-utils';
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import AlertDetails from '~/alert_management/components/alert_details.vue';
-import createIssueMutation from '~/alert_management/graphql/mutations/create_issue_from_alert.mutation.graphql';
-import { joinPaths } from '~/lib/utils/url_utility';
+import AlertSummaryRow from '~/alert_management/components/alert_summary_row.vue';
import {
- trackAlertsDetailsViewsOptions,
ALERTS_SEVERITY_LABELS,
+ trackAlertsDetailsViewsOptions,
} from '~/alert_management/constants';
+import createIssueMutation from '~/alert_management/graphql/mutations/create_issue_from_alert.mutation.graphql';
+import { joinPaths } from '~/lib/utils/url_utility';
import Tracking from '~/tracking';
+import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
import mockAlerts from '../mocks/alerts.json';
const mockAlert = mockAlerts[0];
+const environmentName = 'Production';
+const environmentPath = '/fake/path';
describe('AlertDetails', () => {
- let wrapper;
+ let environmentData = {
+ name: environmentName,
+ path: environmentPath,
+ };
+ let glFeatures = { exposeEnvironmentPathInAlertDetails: false };
let mock;
+ let wrapper;
const projectPath = 'root/alerts';
const projectIssuesPath = 'root/alerts/-/issues';
const projectId = '1';
const $router = { replace: jest.fn() };
function mountComponent({ data, loading = false, mountMethod = shallowMount, stubs = {} } = {}) {
- wrapper = mountMethod(AlertDetails, {
- provide: {
- alertId: 'alertId',
- projectPath,
- projectIssuesPath,
- projectId,
- },
- data() {
- return { alert: { ...mockAlert }, sidebarStatus: false, ...data };
- },
- mocks: {
- $apollo: {
- mutate: jest.fn(),
- queries: {
+ wrapper = extendedWrapper(
+ mountMethod(AlertDetails, {
+ provide: {
+ alertId: 'alertId',
+ projectPath,
+ projectIssuesPath,
+ projectId,
+ glFeatures,
+ },
+ data() {
+ return {
alert: {
- loading,
+ ...mockAlert,
+ environment: environmentData,
+ },
+ sidebarStatus: false,
+ ...data,
+ };
+ },
+ mocks: {
+ $apollo: {
+ mutate: jest.fn(),
+ queries: {
+ alert: {
+ loading,
+ },
+ sidebarStatus: {},
},
- sidebarStatus: {},
},
+ $router,
+ $route: { params: {} },
},
- $router,
- $route: { params: {} },
- },
- stubs,
- });
+ stubs: {
+ ...stubs,
+ AlertSummaryRow,
+ },
+ }),
+ );
}
beforeEach(() => {
@@ -62,9 +84,11 @@ describe('AlertDetails', () => {
mock.restore();
});
- const findCreateIncidentBtn = () => wrapper.find('[data-testid="createIncidentBtn"]');
- const findViewIncidentBtn = () => wrapper.find('[data-testid="viewIncidentBtn"]');
- const findIncidentCreationAlert = () => wrapper.find('[data-testid="incidentCreationError"]');
+ const findCreateIncidentBtn = () => wrapper.findByTestId('createIncidentBtn');
+ const findViewIncidentBtn = () => wrapper.findByTestId('viewIncidentBtn');
+ const findIncidentCreationAlert = () => wrapper.findByTestId('incidentCreationError');
+ const findEnvironmentName = () => wrapper.findByTestId('environmentName');
+ const findEnvironmentPath = () => wrapper.findByTestId('environmentPath');
const findDetailsTable = () => wrapper.find(AlertDetailsTable);
describe('Alert details', () => {
@@ -74,7 +98,7 @@ describe('AlertDetails', () => {
});
it('shows an empty state', () => {
- expect(wrapper.find('[data-testid="alertDetailsTabs"]').exists()).toBe(false);
+ expect(wrapper.findByTestId('alertDetailsTabs').exists()).toBe(false);
});
});
@@ -84,28 +108,26 @@ describe('AlertDetails', () => {
});
it('renders a tab with overview information', () => {
- expect(wrapper.find('[data-testid="overview"]').exists()).toBe(true);
+ expect(wrapper.findByTestId('overview').exists()).toBe(true);
});
it('renders a tab with an activity feed', () => {
- expect(wrapper.find('[data-testid="activity"]').exists()).toBe(true);
+ expect(wrapper.findByTestId('activity').exists()).toBe(true);
});
it('renders severity', () => {
- expect(wrapper.find('[data-testid="severity"]').text()).toBe(
+ expect(wrapper.findByTestId('severity').text()).toBe(
ALERTS_SEVERITY_LABELS[mockAlert.severity],
);
});
it('renders a title', () => {
- expect(wrapper.find('[data-testid="title"]').text()).toBe(mockAlert.title);
+ expect(wrapper.findByTestId('title').text()).toBe(mockAlert.title);
});
it('renders a start time', () => {
- expect(wrapper.find('[data-testid="startTimeItem"]').exists()).toBe(true);
- expect(wrapper.find('[data-testid="startTimeItem"]').props().time).toBe(
- mockAlert.startedAt,
- );
+ expect(wrapper.findByTestId('startTimeItem').exists()).toBe(true);
+ expect(wrapper.findByTestId('startTimeItem').props('time')).toBe(mockAlert.startedAt);
});
});
@@ -126,15 +148,47 @@ describe('AlertDetails', () => {
});
it(`${field} is ${isShown ? 'displayed' : 'hidden'} correctly`, () => {
+ const element = wrapper.findByTestId(field);
if (isShown) {
- expect(wrapper.find(`[data-testid="${field}"]`).text()).toBe(data.toString());
+ expect(element.text()).toContain(data.toString());
} else {
- expect(wrapper.find(`[data-testid="${field}"]`).exists()).toBe(false);
+ expect(wrapper.findByTestId(field).exists()).toBe(false);
}
});
});
});
+ describe('environment fields', () => {
+ describe('when exposeEnvironmentPathInAlertDetails is disabled', () => {
+ beforeEach(mountComponent);
+
+ it('should not show the environment', () => {
+ expect(findEnvironmentName().exists()).toBe(false);
+ expect(findEnvironmentPath().exists()).toBe(false);
+ });
+ });
+
+ describe('when exposeEnvironmentPathInAlertDetails is enabled', () => {
+ beforeEach(() => {
+ glFeatures = { exposeEnvironmentPathInAlertDetails: true };
+ mountComponent();
+ });
+
+ it('should show the environment name with link to path', () => {
+ expect(findEnvironmentName().exists()).toBe(false);
+ expect(findEnvironmentPath().text()).toBe(environmentName);
+ expect(findEnvironmentPath().attributes('href')).toBe(environmentPath);
+ });
+
+ it('should only show the environment name if the path is not provided', () => {
+ environmentData = { name: environmentName, path: null };
+ mountComponent();
+ expect(findEnvironmentPath().exists()).toBe(false);
+ expect(findEnvironmentName().text()).toBe(environmentName);
+ });
+ });
+ });
+
describe('Create incident from alert', () => {
it('should display "View incident" button that links the incident page when incident exists', () => {
const issueIid = '3';
@@ -222,7 +276,7 @@ describe('AlertDetails', () => {
mountComponent({
data: { errored: true, sidebarErrorMessage: '<span data-testid="htmlError" />' },
});
- expect(wrapper.find('[data-testid="htmlError"]').exists()).toBe(true);
+ expect(wrapper.findByTestId('htmlError').exists()).toBe(true);
});
it('does not display an error when dismissed', () => {
@@ -232,7 +286,7 @@ describe('AlertDetails', () => {
});
describe('header', () => {
- const findHeader = () => wrapper.find('[data-testid="alert-header"]');
+ const findHeader = () => wrapper.findByTestId('alert-header');
const stubs = { TimeAgoTooltip: { template: '<span>now</span>' } };
describe('individual header fields', () => {
diff --git a/spec/frontend/alert_management/components/alert_management_empty_state_spec.js b/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
index 6712282503d..ddb102339cc 100644
--- a/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
@@ -1,25 +1,17 @@
import { shallowMount } from '@vue/test-utils';
import { GlEmptyState } from '@gitlab/ui';
import AlertManagementEmptyState from '~/alert_management/components/alert_management_empty_state.vue';
+import defaultProvideValues from '../mocks/alerts_provide_config.json';
describe('AlertManagementEmptyState', () => {
let wrapper;
- function mountComponent({
- props = {
- alertManagementEnabled: false,
- userCanEnableAlertManagement: false,
- },
- stubs = {},
- } = {}) {
+ function mountComponent({ provide = {} } = {}) {
wrapper = shallowMount(AlertManagementEmptyState, {
- propsData: {
- enableAlertManagementPath: '/link',
- alertsHelpUrl: '/link',
- emptyAlertSvgPath: 'illustration/path',
- ...props,
+ provide: {
+ ...defaultProvideValues,
+ ...provide,
},
- stubs,
});
}
@@ -42,7 +34,7 @@ describe('AlertManagementEmptyState', () => {
it('shows OpsGenie integration state when OpsGenie mvc is true', () => {
mountComponent({
- props: {
+ provide: {
alertManagementEnabled: false,
userCanEnableAlertManagement: false,
opsgenieMvcEnabled: true,
diff --git a/spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js b/spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js
index c36107c28ce..1d79b10a796 100644
--- a/spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js
@@ -1,33 +1,18 @@
import { shallowMount } from '@vue/test-utils';
import AlertManagementList from '~/alert_management/components/alert_management_list_wrapper.vue';
-import { trackAlertListViewsOptions } from '~/alert_management/constants';
-import mockAlerts from '../mocks/alerts.json';
-import Tracking from '~/tracking';
+import AlertManagementEmptyState from '~/alert_management/components/alert_management_empty_state.vue';
+import AlertManagementTable from '~/alert_management/components/alert_management_table.vue';
+import defaultProvideValues from '../mocks/alerts_provide_config.json';
describe('AlertManagementList', () => {
let wrapper;
- function mountComponent({
- props = {
- alertManagementEnabled: false,
- userCanEnableAlertManagement: false,
- },
- data = {},
- stubs = {},
- } = {}) {
+ function mountComponent({ provide = {} } = {}) {
wrapper = shallowMount(AlertManagementList, {
- propsData: {
- projectPath: 'gitlab-org/gitlab',
- enableAlertManagementPath: '/link',
- alertsHelpUrl: '/link',
- populatingAlertsHelpUrl: '/help/help-page.md#populating-alert-data',
- emptyAlertSvgPath: 'illustration/path',
- ...props,
+ provide: {
+ ...defaultProvideValues,
+ ...provide,
},
- data() {
- return data;
- },
- stubs,
});
}
@@ -41,18 +26,21 @@ describe('AlertManagementList', () => {
}
});
- describe('Snowplow tracking', () => {
- beforeEach(() => {
- jest.spyOn(Tracking, 'event');
+ describe('Alert List Wrapper', () => {
+ it('should show the empty state when alerts are not enabled', () => {
+ expect(wrapper.find(AlertManagementEmptyState).exists()).toBe(true);
+ expect(wrapper.find(AlertManagementTable).exists()).toBe(false);
+ });
+
+ it('should show the alerts table when alerts are enabled', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts } },
+ provide: {
+ alertManagementEnabled: true,
+ },
});
- });
- it('should track alert list page views', () => {
- const { category, action } = trackAlertListViewsOptions;
- expect(Tracking.event).toHaveBeenCalledWith(category, action);
+ expect(wrapper.find(AlertManagementEmptyState).exists()).toBe(false);
+ expect(wrapper.find(AlertManagementTable).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/alert_management/components/alert_management_table_spec.js b/spec/frontend/alert_management/components/alert_management_table_spec.js
index bcad415eb19..f7a629142f9 100644
--- a/spec/frontend/alert_management/components/alert_management_table_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_table_spec.js
@@ -1,26 +1,13 @@
import { mount } from '@vue/test-utils';
-import {
- GlTable,
- GlAlert,
- GlLoadingIcon,
- GlDeprecatedDropdown,
- GlDeprecatedDropdownItem,
- GlIcon,
- GlTabs,
- GlTab,
- GlBadge,
- GlPagination,
- GlSearchBoxByType,
- GlAvatar,
-} from '@gitlab/ui';
-import waitForPromises from 'helpers/wait_for_promises';
+import { GlTable, GlAlert, GlLoadingIcon, GlDropdown, GlIcon, GlAvatar } from '@gitlab/ui';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
import { visitUrl } from '~/lib/utils/url_utility';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import AlertManagementTable from '~/alert_management/components/alert_management_table.vue';
-import { ALERTS_STATUS_TABS, trackAlertStatusUpdateOptions } from '~/alert_management/constants';
-import updateAlertStatus from '~/alert_management/graphql/mutations/update_alert_status.mutation.graphql';
+import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import mockAlerts from '../mocks/alerts.json';
-import Tracking from '~/tracking';
+import defaultProvideValues from '../mocks/alerts_provide_config.json';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn().mockName('visitUrlMock'),
@@ -29,26 +16,21 @@ jest.mock('~/lib/utils/url_utility', () => ({
describe('AlertManagementTable', () => {
let wrapper;
+ let mock;
const findAlertsTable = () => wrapper.find(GlTable);
const findAlerts = () => wrapper.findAll('table tbody tr');
const findAlert = () => wrapper.find(GlAlert);
const findLoader = () => wrapper.find(GlLoadingIcon);
- const findStatusDropdown = () => wrapper.find(GlDeprecatedDropdown);
- const findStatusFilterTabs = () => wrapper.findAll(GlTab);
- const findStatusTabs = () => wrapper.find(GlTabs);
- const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
+ const findStatusDropdown = () => wrapper.find(GlDropdown);
const findDateFields = () => wrapper.findAll(TimeAgo);
- const findFirstStatusOption = () => findStatusDropdown().find(GlDeprecatedDropdownItem);
- const findPagination = () => wrapper.find(GlPagination);
- const findSearch = () => wrapper.find(GlSearchBoxByType);
+ const findSearch = () => wrapper.find(FilteredSearchBar);
const findSeverityColumnHeader = () =>
wrapper.find('[data-testid="alert-management-severity-sort"]');
const findFirstIDField = () => wrapper.findAll('[data-testid="idField"]').at(0);
const findAssignees = () => wrapper.findAll('[data-testid="assigneesField"]');
const findSeverityFields = () => wrapper.findAll('[data-testid="severityField"]');
const findIssueFields = () => wrapper.findAll('[data-testid="issueField"]');
- const findAlertError = () => wrapper.find('[data-testid="alert-error"]');
const alertsCount = {
open: 24,
triggered: 20,
@@ -56,26 +38,14 @@ describe('AlertManagementTable', () => {
resolved: 11,
all: 26,
};
- const selectFirstStatusOption = () => {
- findFirstStatusOption().vm.$emit('click');
- return waitForPromises();
- };
-
- function mountComponent({
- props = {
- alertManagementEnabled: false,
- userCanEnableAlertManagement: false,
- },
- data = {},
- loading = false,
- stubs = {},
- } = {}) {
+ function mountComponent({ provide = {}, data = {}, loading = false, stubs = {} } = {}) {
wrapper = mount(AlertManagementTable, {
- propsData: {
- projectPath: 'gitlab-org/gitlab',
- populatingAlertsHelpUrl: '/help/help-page.md#populating-alert-data',
- ...props,
+ provide: {
+ ...defaultProvideValues,
+ alertManagementEnabled: true,
+ userCanEnableAlertManagement: true,
+ ...provide,
},
data() {
return data;
@@ -95,41 +65,21 @@ describe('AlertManagementTable', () => {
});
}
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
afterEach(() => {
if (wrapper) {
wrapper.destroy();
wrapper = null;
}
- });
-
- describe('Status Filter Tabs', () => {
- beforeEach(() => {
- mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: mockAlerts, alertsCount },
- loading: false,
- stubs: {
- GlTab: true,
- },
- });
- });
-
- it('should display filter tabs with alerts count badge for each status', () => {
- const tabs = findStatusFilterTabs().wrappers;
- const badges = findStatusFilterBadge();
-
- tabs.forEach((tab, i) => {
- const status = ALERTS_STATUS_TABS[i].status.toLowerCase();
- expect(tab.text()).toContain(ALERTS_STATUS_TABS[i].title);
- expect(badges.at(i).text()).toContain(alertsCount[status]);
- });
- });
+ mock.restore();
});
describe('Alerts table', () => {
it('loading state', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
data: { alerts: {}, alertsCount: null },
loading: true,
});
@@ -144,8 +94,7 @@ describe('AlertManagementTable', () => {
it('error state', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { errors: ['error'] }, alertsCount: null, hasError: true },
+ data: { alerts: { errors: ['error'] }, alertsCount: null, errored: true },
loading: false,
});
expect(findAlertsTable().exists()).toBe(true);
@@ -161,10 +110,17 @@ describe('AlertManagementTable', () => {
it('empty state', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: [], pageInfo: {} }, alertsCount: { all: 0 }, hasError: false },
+ data: {
+ alerts: { list: [], pageInfo: {} },
+ alertsCount: { all: 0 },
+ errored: false,
+ isErrorAlertDismissed: false,
+ searchTerm: '',
+ assigneeUsername: '',
+ },
loading: false,
});
+
expect(findAlertsTable().exists()).toBe(true);
expect(findAlertsTable().text()).toContain('No alerts to display');
expect(findLoader().exists()).toBe(false);
@@ -178,8 +134,7 @@ describe('AlertManagementTable', () => {
it('has data state', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
expect(findLoader().exists()).toBe(false);
@@ -194,8 +149,7 @@ describe('AlertManagementTable', () => {
it('displays the alert ID and title formatted correctly', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
@@ -205,8 +159,7 @@ describe('AlertManagementTable', () => {
it('displays status dropdown', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
expect(findStatusDropdown().exists()).toBe(true);
@@ -214,8 +167,7 @@ describe('AlertManagementTable', () => {
it('does not display a dropdown status header', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
expect(
@@ -225,27 +177,25 @@ describe('AlertManagementTable', () => {
).toBe(false);
});
- it('shows correct severity icons', () => {
+ it('shows correct severity icons', async () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlTable).exists()).toBe(true);
- expect(
- findAlertsTable()
- .find(GlIcon)
- .classes('icon-critical'),
- ).toBe(true);
- });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlTable).exists()).toBe(true);
+ expect(
+ findAlertsTable()
+ .find(GlIcon)
+ .classes('icon-critical'),
+ ).toBe(true);
});
it('renders severity text', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
@@ -258,8 +208,7 @@ describe('AlertManagementTable', () => {
it('renders Unassigned when no assignee(s) present', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
@@ -272,8 +221,7 @@ describe('AlertManagementTable', () => {
it('renders user avatar when assignee present', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
@@ -290,22 +238,39 @@ describe('AlertManagementTable', () => {
it('navigates to the detail page when alert row is clicked', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
+ expect(visitUrl).not.toHaveBeenCalled();
+
findAlerts()
.at(0)
.trigger('click');
- expect(visitUrl).toHaveBeenCalledWith('/1527542/details');
+ expect(visitUrl).toHaveBeenCalledWith('/1527542/details', false);
+ });
+
+  it('navigates to the detail page in a new tab when alert row is clicked with the metaKey', () => {
+ mountComponent({
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ loading: false,
+ });
+
+ expect(visitUrl).not.toHaveBeenCalled();
+
+ findAlerts()
+ .at(0)
+ .trigger('click', {
+ metaKey: true,
+ });
+
+ expect(visitUrl).toHaveBeenCalledWith('/1527542/details', true);
});
describe('alert issue links', () => {
beforeEach(() => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
});
@@ -335,7 +300,6 @@ describe('AlertManagementTable', () => {
describe('handle date fields', () => {
it('should display time ago dates when values provided', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
data: {
alerts: {
list: [
@@ -349,7 +313,7 @@ describe('AlertManagementTable', () => {
],
},
alertsCount,
- hasError: false,
+ errored: false,
},
loading: false,
});
@@ -358,7 +322,6 @@ describe('AlertManagementTable', () => {
it('should not display time ago dates when values not provided', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
data: {
alerts: [
{
@@ -369,7 +332,7 @@ describe('AlertManagementTable', () => {
},
],
alertsCount,
- hasError: false,
+ errored: false,
},
loading: false,
});
@@ -383,8 +346,7 @@ describe('AlertManagementTable', () => {
it('should highlight the row when alert is new', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: [newAlert] }, alertsCount, hasError: false },
+ data: { alerts: { list: [newAlert] }, alertsCount, errored: false },
loading: false,
});
@@ -397,8 +359,7 @@ describe('AlertManagementTable', () => {
it('should not highlight the row when alert is not new', () => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: [oldAlert] }, alertsCount, hasError: false },
+ data: { alerts: { list: [oldAlert] }, alertsCount, errored: false },
loading: false,
});
@@ -415,10 +376,9 @@ describe('AlertManagementTable', () => {
describe('sorting the alert list by column', () => {
beforeEach(() => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
data: {
alerts: { list: mockAlerts },
- hasError: false,
+ errored: false,
sort: 'STARTED_AT_DESC',
alertsCount,
},
@@ -438,184 +398,10 @@ describe('AlertManagementTable', () => {
});
});
- describe('updating the alert status', () => {
- const iid = '1527542';
- const mockUpdatedMutationResult = {
- data: {
- updateAlertStatus: {
- errors: [],
- alert: {
- iid,
- status: 'acknowledged',
- },
- },
- },
- };
-
- beforeEach(() => {
- mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
- loading: false,
- });
- });
-
- it('calls `$apollo.mutate` with `updateAlertStatus` mutation and variables containing `iid`, `status`, & `projectPath`', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockUpdatedMutationResult);
- findFirstStatusOption().vm.$emit('click');
-
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: updateAlertStatus,
- variables: {
- iid,
- status: 'TRIGGERED',
- projectPath: 'gitlab-org/gitlab',
- },
- });
- });
-
- describe('when a request fails', () => {
- beforeEach(() => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockReturnValue(Promise.reject(new Error()));
- });
-
- it('shows an error', async () => {
- await selectFirstStatusOption();
-
- expect(findAlertError().text()).toContain(
- 'There was an error while updating the status of the alert.',
- );
- });
-
- it('shows an error when triggered a second time', async () => {
- await selectFirstStatusOption();
-
- wrapper.find(GlAlert).vm.$emit('dismiss');
-
- await wrapper.vm.$nextTick();
-
- // Assert that the error has been dismissed in the setup
- expect(findAlertError().exists()).toBe(false);
-
- await selectFirstStatusOption();
-
- expect(findAlertError().exists()).toBe(true);
- });
- });
-
- it('shows an error when response includes HTML errors', async () => {
- const mockUpdatedMutationErrorResult = {
- data: {
- updateAlertStatus: {
- errors: ['<span data-testid="htmlError" />'],
- alert: {
- iid,
- status: 'acknowledged',
- },
- },
- },
- };
-
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockUpdatedMutationErrorResult);
-
- await selectFirstStatusOption();
-
- expect(findAlertError().exists()).toBe(true);
- expect(
- findAlertError()
- .find('[data-testid="htmlError"]')
- .exists(),
- ).toBe(true);
- });
- });
-
- describe('Snowplow tracking', () => {
- beforeEach(() => {
- jest.spyOn(Tracking, 'event');
- mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount },
- loading: false,
- });
- });
-
- it('should track alert status updates', () => {
- Tracking.event.mockClear();
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({});
- findFirstStatusOption().vm.$emit('click');
- const status = findFirstStatusOption().text();
- setImmediate(() => {
- const { category, action, label } = trackAlertStatusUpdateOptions;
- expect(Tracking.event).toHaveBeenCalledWith(category, action, { label, property: status });
- });
- });
- });
-
- describe('Pagination', () => {
- beforeEach(() => {
- mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts, pageInfo: {} }, alertsCount, hasError: false },
- loading: false,
- });
- });
-
- it('does NOT show pagination control when list is smaller than default page size', () => {
- findStatusTabs().vm.$emit('input', 3);
- return wrapper.vm.$nextTick(() => {
- expect(findPagination().exists()).toBe(false);
- });
- });
-
- it('shows pagination control when list is larger than default page size', () => {
- findStatusTabs().vm.$emit('input', 0);
- return wrapper.vm.$nextTick(() => {
- expect(findPagination().exists()).toBe(true);
- });
- });
-
- describe('prevPage', () => {
- it('returns prevPage number', () => {
- findPagination().vm.$emit('input', 3);
-
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.prevPage).toBe(2);
- });
- });
-
- it('returns 0 when it is the first page', () => {
- findPagination().vm.$emit('input', 1);
-
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.prevPage).toBe(0);
- });
- });
- });
-
- describe('nextPage', () => {
- it('returns nextPage number', () => {
- findPagination().vm.$emit('input', 1);
-
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.nextPage).toBe(2);
- });
- });
-
- it('returns `null` when currentPage is already last page', () => {
- findStatusTabs().vm.$emit('input', 3);
- findPagination().vm.$emit('input', 1);
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.nextPage).toBeNull();
- });
- });
- });
- });
-
describe('Search', () => {
beforeEach(() => {
mountComponent({
- props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
loading: false,
});
});
@@ -623,13 +409,5 @@ describe('AlertManagementTable', () => {
it('renders the search component', () => {
expect(findSearch().exists()).toBe(true);
});
-
- it('sets the `searchTerm` graphql variable', () => {
- const SEARCH_TERM = 'Simple Alert';
-
- findSearch().vm.$emit('input', SEARCH_TERM);
-
- expect(wrapper.vm.$data.searchTerm).toBe(SEARCH_TERM);
- });
});
});
diff --git a/spec/frontend/alert_management/components/alert_status_spec.js b/spec/frontend/alert_management/components/alert_status_spec.js
new file mode 100644
index 00000000000..f5916b8b265
--- /dev/null
+++ b/spec/frontend/alert_management/components/alert_status_spec.js
@@ -0,0 +1,151 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+import { trackAlertStatusUpdateOptions } from '~/alert_management/constants';
+import AlertManagementStatus from '~/alert_management/components/alert_status.vue';
+import updateAlertStatusMutation from '~/alert_management/graphql/mutations/update_alert_status.mutation.graphql';
+import Tracking from '~/tracking';
+import mockAlerts from '../mocks/alerts.json';
+
+const mockAlert = mockAlerts[0];
+
+describe('AlertManagementStatus', () => {
+ let wrapper;
+ const findStatusDropdown = () => wrapper.find(GlDropdown);
+ const findFirstStatusOption = () => findStatusDropdown().find(GlDropdownItem);
+
+ const selectFirstStatusOption = () => {
+ findFirstStatusOption().vm.$emit('click');
+
+ return waitForPromises();
+ };
+
+ function mountComponent({ props = {}, loading = false, stubs = {} } = {}) {
+ wrapper = shallowMount(AlertManagementStatus, {
+ propsData: {
+ alert: { ...mockAlert },
+ projectPath: 'gitlab-org/gitlab',
+ isSidebar: false,
+ ...props,
+ },
+ mocks: {
+ $apollo: {
+ mutate: jest.fn(),
+ queries: {
+ alert: {
+ loading,
+ },
+ },
+ },
+ },
+ stubs,
+ });
+ }
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ describe('updating the alert status', () => {
+ const iid = '1527542';
+ const mockUpdatedMutationResult = {
+ data: {
+ updateAlertStatus: {
+ errors: [],
+ alert: {
+ iid,
+ status: 'acknowledged',
+ },
+ },
+ },
+ };
+
+ beforeEach(() => {
+ mountComponent({});
+ });
+
+ it('calls `$apollo.mutate` with `updateAlertStatus` mutation and variables containing `iid`, `status`, & `projectPath`', () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockUpdatedMutationResult);
+ findFirstStatusOption().vm.$emit('click');
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: updateAlertStatusMutation,
+ variables: {
+ iid,
+ status: 'TRIGGERED',
+ projectPath: 'gitlab-org/gitlab',
+ },
+ });
+ });
+
+ describe('when a request fails', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockReturnValue(Promise.reject(new Error()));
+ });
+
+ it('emits an error', async () => {
+ await selectFirstStatusOption();
+
+ expect(wrapper.emitted('alert-error')[0]).toEqual([
+ 'There was an error while updating the status of the alert. Please try again.',
+ ]);
+ });
+
+ it('emits an error when triggered a second time', async () => {
+ await selectFirstStatusOption();
+ await wrapper.vm.$nextTick();
+ await selectFirstStatusOption();
+ // Should emit two errors [0,1]
+ expect(wrapper.emitted('alert-error').length > 1).toBe(true);
+ });
+ });
+
+ it('shows an error when response includes HTML errors', async () => {
+ const mockUpdatedMutationErrorResult = {
+ data: {
+ updateAlertStatus: {
+ errors: ['<span data-testid="htmlError" />'],
+ alert: {
+ iid,
+ status: 'acknowledged',
+ },
+ },
+ },
+ };
+
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockUpdatedMutationErrorResult);
+
+ await selectFirstStatusOption();
+
+ expect(wrapper.emitted('alert-error').length > 0).toBe(true);
+ expect(wrapper.emitted('alert-error')[0]).toEqual([
+ 'There was an error while updating the status of the alert. <span data-testid="htmlError" />',
+ ]);
+ });
+ });
+
+ describe('Snowplow tracking', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ mountComponent({});
+ });
+
+ it('should track alert status updates', () => {
+ Tracking.event.mockClear();
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({});
+ findFirstStatusOption().vm.$emit('click');
+ const status = findFirstStatusOption().text();
+ setImmediate(() => {
+ const { category, action, label } = trackAlertStatusUpdateOptions;
+ expect(Tracking.event).toHaveBeenCalledWith(category, action, { label, property: status });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/alert_management/components/alert_summary_row_spec.js b/spec/frontend/alert_management/components/alert_summary_row_spec.js
new file mode 100644
index 00000000000..47c715c089a
--- /dev/null
+++ b/spec/frontend/alert_management/components/alert_summary_row_spec.js
@@ -0,0 +1,40 @@
+import { shallowMount } from '@vue/test-utils';
+import AlertSummaryRow from '~/alert_management/components/alert_summary_row.vue';
+
+const label = 'a label';
+const value = 'a value';
+
+describe('AlertSummaryRow', () => {
+ let wrapper;
+
+ function mountComponent({ mountMethod = shallowMount, props, defaultSlot } = {}) {
+ wrapper = mountMethod(AlertSummaryRow, {
+ propsData: props,
+ scopedSlots: {
+ default: defaultSlot,
+ },
+ });
+ }
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ describe('Alert Summary Row', () => {
+ beforeEach(() => {
+ mountComponent({
+ props: {
+ label,
+ },
+ defaultSlot: `<span class="value">${value}</span>`,
+ });
+ });
+
+ it('should display a label and a value', () => {
+ expect(wrapper.text()).toBe(`${label} ${value}`);
+ });
+ });
+});
diff --git a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js b/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
index 4c9db02eff4..1d87301aac9 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
+++ b/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import { GlDeprecatedDropdownItem } from '@gitlab/ui';
+import { GlDropdownItem } from '@gitlab/ui';
import SidebarAssignee from '~/alert_management/components/sidebar/sidebar_assignee.vue';
import SidebarAssignees from '~/alert_management/components/sidebar/sidebar_assignees.vue';
import AlertSetAssignees from '~/alert_management/graphql/mutations/alert_set_assignees.mutation.graphql';
@@ -106,7 +106,7 @@ describe('Alert Details Sidebar Assignees', () => {
it('renders an unassigned option', async () => {
wrapper.setData({ isDropdownSearching: false });
await wrapper.vm.$nextTick();
- expect(wrapper.find(GlDeprecatedDropdownItem).text()).toBe('Unassigned');
+ expect(wrapper.find(GlDropdownItem).text()).toBe('Unassigned');
});
it('calls `$apollo.mutate` with `AlertSetAssignees` mutation and variables containing `iid`, `assigneeUsernames`, & `projectPath`', async () => {
diff --git a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js b/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
index a8fe40687e1..bef4a341985 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
+++ b/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
-import { GlDeprecatedDropdown, GlDeprecatedDropdownItem, GlLoadingIcon } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
import { trackAlertStatusUpdateOptions } from '~/alert_management/constants';
import AlertSidebarStatus from '~/alert_management/components/sidebar/sidebar_status.vue';
-import updateAlertStatus from '~/alert_management/graphql/mutations/update_alert_status.mutation.graphql';
+import updateAlertStatusMutation from '~/alert_management/graphql/mutations/update_alert_status.mutation.graphql';
import Tracking from '~/tracking';
import mockAlerts from '../../mocks/alerts.json';
@@ -10,9 +10,10 @@ const mockAlert = mockAlerts[0];
describe('Alert Details Sidebar Status', () => {
let wrapper;
- const findStatusDropdown = () => wrapper.find(GlDeprecatedDropdown);
- const findStatusDropdownItem = () => wrapper.find(GlDeprecatedDropdownItem);
+ const findStatusDropdown = () => wrapper.find(GlDropdown);
+ const findStatusDropdownItem = () => wrapper.find(GlDropdownItem);
const findStatusLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findStatusDropdownHeader = () => wrapper.find('[data-testid="dropdown-header"]');
function mountComponent({ data, sidebarCollapsed = true, loading = false, stubs = {} } = {}) {
wrapper = mount(AlertSidebarStatus, {
@@ -56,11 +57,7 @@ describe('Alert Details Sidebar Status', () => {
});
it('displays the dropdown status header', () => {
- expect(
- findStatusDropdown()
- .find('.dropdown-title')
- .exists(),
- ).toBe(true);
+ expect(findStatusDropdownHeader().exists()).toBe(true);
});
describe('updating the alert status', () => {
@@ -88,7 +85,7 @@ describe('Alert Details Sidebar Status', () => {
findStatusDropdownItem().vm.$emit('click');
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: updateAlertStatus,
+ mutation: updateAlertStatusMutation,
variables: {
iid: '1527542',
status: 'TRIGGERED',
diff --git a/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js b/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js
index 8dd663e55d9..65cfc600d76 100644
--- a/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js
+++ b/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
import SystemNote from '~/alert_management/components/system_notes/system_note.vue';
import mockAlerts from '../../mocks/alerts.json';
@@ -19,6 +20,7 @@ describe('Alert Details System Note', () => {
afterEach(() => {
if (wrapper) {
wrapper.destroy();
+ wrapper = null;
}
});
@@ -29,10 +31,10 @@ describe('Alert Details System Note', () => {
it('renders the correct system note', () => {
const noteId = wrapper.find('.note-wrapper').attributes('id');
- const iconRoute = wrapper.find('use').attributes('href');
+ const iconName = wrapper.find(GlIcon).attributes('name');
expect(noteId).toBe('note_1628');
- expect(iconRoute.includes('user')).toBe(true);
+ expect(iconName).toBe(mockAlert.notes.nodes[0].systemNoteIconName);
});
});
});
diff --git a/spec/frontend/alert_management/mocks/alerts_provide_config.json b/spec/frontend/alert_management/mocks/alerts_provide_config.json
new file mode 100644
index 00000000000..af543e641bc
--- /dev/null
+++ b/spec/frontend/alert_management/mocks/alerts_provide_config.json
@@ -0,0 +1,13 @@
+{
+ "textQuery": "foo",
+ "authorUsernameQuery": "root",
+ "assigneeUsernameQuery": "root",
+ "projectPath": "gitlab-org/gitlab",
+ "enableAlertManagementPath": "/link",
+ "populatingAlertsHelpUrl": "/link",
+ "emptyAlertSvgPath": "/link",
+ "alertManagementEnabled": false,
+ "userCanEnableAlertManagement": false,
+ "opsgenieMvcTargetUrl": "/link",
+ "opsgenieMvcEnabled": false
+}
\ No newline at end of file
diff --git a/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap b/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
index 16e92bf505a..5800b160efe 100644
--- a/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
+++ b/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
@@ -2,48 +2,48 @@
exports[`AlertsSettingsForm with default values renders the initial template 1`] = `
"<div>
- <!---->
- <div data-testid=\\"alert-settings-description\\" class=\\"gl-mt-5\\">
- <p>
- <gl-sprintf-stub message=\\"You must provide this URL and authorization key to authorize an external service to send alerts to GitLab. You can provide this URL and key to multiple services. After configuring an external service, alerts from your service will display on the GitLab %{linkStart}Alerts%{linkEnd} page.\\"></gl-sprintf-stub>
- </p>
- <p>
- <gl-sprintf-stub message=\\"Review your external service's documentation to learn where to provide this information to your external service, and the %{linkStart}GitLab documentation%{linkEnd} to learn more about configuring your endpoint.\\"></gl-sprintf-stub>
- </p>
- </div>
+ <integrations-list-stub integrations=\\"[object Object],[object Object]\\"></integrations-list-stub>
<gl-form-stub>
- <gl-form-group-stub label=\\"Integrations\\" label-for=\\"integrations\\" label-class=\\"label-bold\\">
- <gl-form-select-stub options=\\"[object Object],[object Object],[object Object]\\" data-testid=\\"alert-settings-select\\" value=\\"generic\\"></gl-form-select-stub> <span class=\\"gl-text-gray-200\\"><gl-sprintf-stub message=\\"Learn more about our %{linkStart}upcoming integrations%{linkEnd}\\"></gl-sprintf-stub></span>
+ <h5 class=\\"gl-font-lg gl-my-5\\">Add new integrations</h5>
+ <!---->
+ <div data-testid=\\"alert-settings-description\\">
+ <p>
+ <gl-sprintf-stub message=\\"You must provide this URL and authorization key to authorize an external service to send alerts to GitLab. You can provide this URL and key to multiple services. After configuring an external service, alerts from your service will display on the GitLab %{linkStart}Alerts%{linkEnd} page.\\"></gl-sprintf-stub>
+ </p>
+ <p>
+ <gl-sprintf-stub message=\\"Review your external service's documentation to learn where to provide this information to your external service, and the %{linkStart}GitLab documentation%{linkEnd} to learn more about configuring your endpoint.\\"></gl-sprintf-stub>
+ </p>
+ </div>
+ <gl-form-group-stub label-for=\\"integration-type\\" label=\\"Integration\\">
+ <gl-form-select-stub id=\\"integration-type\\" options=\\"[object Object],[object Object],[object Object]\\" data-testid=\\"alert-settings-select\\" value=\\"generic\\"></gl-form-select-stub> <span class=\\"gl-text-gray-500\\"><gl-sprintf-stub message=\\"Learn more about our improvements for %{linkStart}integrations%{linkEnd}\\"></gl-sprintf-stub></span>
</gl-form-group-stub>
- <gl-form-group-stub label=\\"Active\\" label-for=\\"activated\\" label-class=\\"label-bold\\">
+ <gl-form-group-stub label=\\"Active\\" label-for=\\"activated\\">
<toggle-button-stub id=\\"activated\\"></toggle-button-stub>
</gl-form-group-stub>
<!---->
- <gl-form-group-stub label=\\"Webhook URL\\" label-for=\\"url\\" label-class=\\"label-bold\\">
- <gl-form-input-group-stub value=\\"/alerts/notify.json\\" predefinedoptions=\\"[object Object]\\" id=\\"url\\" readonly=\\"\\"></gl-form-input-group-stub> <span class=\\"gl-text-gray-200\\">
+ <gl-form-group-stub label=\\"Webhook URL\\" label-for=\\"url\\">
+ <gl-form-input-group-stub value=\\"/alerts/notify.json\\" predefinedoptions=\\"[object Object]\\" id=\\"url\\" readonly=\\"\\"></gl-form-input-group-stub> <span class=\\"gl-text-gray-500\\">
</span>
</gl-form-group-stub>
- <gl-form-group-stub label=\\"Authorization key\\" label-for=\\"authorization-key\\" label-class=\\"label-bold\\">
+ <gl-form-group-stub label=\\"Authorization key\\" label-for=\\"authorization-key\\">
<gl-form-input-group-stub value=\\"abcedfg123\\" predefinedoptions=\\"[object Object]\\" id=\\"authorization-key\\" readonly=\\"\\" class=\\"gl-mb-2\\"></gl-form-input-group-stub>
- <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\" class=\\"gl-mt-3\\" role=\\"button\\" tabindex=\\"0\\">Reset key</gl-button-stub>
+ <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" disabled=\\"true\\" class=\\"gl-mt-3\\" role=\\"button\\" tabindex=\\"0\\">Reset key</gl-button-stub>
<gl-modal-stub modalid=\\"authKeyModal\\" titletag=\\"h4\\" modalclass=\\"\\" size=\\"md\\" title=\\"Reset key\\" ok-title=\\"Reset key\\" ok-variant=\\"danger\\">
Resetting the authorization key for this project will require updating the authorization key in every alert source it is enabled in.
</gl-modal-stub>
</gl-form-group-stub>
- <gl-form-group-stub label=\\"Alert test payload\\" label-for=\\"alert-json\\" label-class=\\"label-bold\\">
+ <gl-form-group-stub label=\\"Alert test payload\\" label-for=\\"alert-json\\">
<gl-form-textarea-stub noresize=\\"true\\" id=\\"alert-json\\" disabled=\\"true\\" state=\\"true\\" placeholder=\\"Enter test alert JSON....\\" rows=\\"6\\" max-rows=\\"10\\"></gl-form-textarea-stub>
</gl-form-group-stub>
- <div class=\\"gl-display-flex gl-justify-content-end\\">
- <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">Test alert payload</gl-button-stub>
- </div>
+ <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" disabled=\\"true\\">Test alert payload</gl-button-stub>
<div class=\\"footer-block row-content-block gl-display-flex gl-justify-content-space-between\\">
- <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">
- Cancel
- </gl-button-stub>
- <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">
+ <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" disabled=\\"true\\">
Save changes
</gl-button-stub>
+ <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" disabled=\\"true\\">
+ Cancel
+ </gl-button-stub>
</div>
</gl-form-stub>
</div>"
diff --git a/spec/frontend/alert_settings/alert_settings_form_spec.js b/spec/frontend/alert_settings/alert_settings_form_spec.js
index 87a631bda56..6e1ea31ed6a 100644
--- a/spec/frontend/alert_settings/alert_settings_form_spec.js
+++ b/spec/frontend/alert_settings/alert_settings_form_spec.js
@@ -1,9 +1,12 @@
-import axios from 'axios';
-import MockAdapter from 'axios-mock-adapter';
import { shallowMount } from '@vue/test-utils';
import { GlModal, GlAlert } from '@gitlab/ui';
import AlertsSettingsForm from '~/alerts_settings/components/alerts_settings_form.vue';
+import IntegrationsList from '~/alerts_settings/components/alerts_integrations_list.vue';
import ToggleButton from '~/vue_shared/components/toggle_button.vue';
+import { i18n } from '~/alerts_settings/constants';
+import service from '~/alerts_settings/services';
+
+jest.mock('~/alerts_settings/services');
const PROMETHEUS_URL = '/prometheus/alerts/notify.json';
const GENERIC_URL = '/alerts/notify.json';
@@ -13,7 +16,6 @@ const ACTIVATED = false;
describe('AlertsSettingsForm', () => {
let wrapper;
- let mockAxios;
const createComponent = ({ methods } = {}, data) => {
wrapper = shallowMount(AlertsSettingsForm, {
@@ -53,7 +55,6 @@ describe('AlertsSettingsForm', () => {
const findApiUrl = () => wrapper.find('#api-url');
beforeEach(() => {
- mockAxios = new MockAdapter(axios);
setFixtures(`
<div>
<span class="js-service-active-status fa fa-circle" data-value="true"></span>
@@ -63,7 +64,6 @@ describe('AlertsSettingsForm', () => {
afterEach(() => {
wrapper.destroy();
- mockAxios.restore();
});
describe('with default values', () => {
@@ -76,6 +76,11 @@ describe('AlertsSettingsForm', () => {
});
});
+ it('renders alerts integrations list', () => {
+ createComponent();
+ expect(wrapper.find(IntegrationsList).exists()).toBe(true);
+ });
+
describe('reset key', () => {
it('triggers resetKey method', () => {
const resetKey = jest.fn();
@@ -99,8 +104,7 @@ describe('AlertsSettingsForm', () => {
});
it('shows an alert message on error', () => {
- const formPath = 'some/path';
- mockAxios.onPut(formPath).replyOnce(404);
+ service.updateGenericKey.mockRejectedValueOnce({});
createComponent();
@@ -122,8 +126,7 @@ describe('AlertsSettingsForm', () => {
describe('error is encountered', () => {
it('restores previous value', () => {
- const formPath = 'some/path';
- mockAxios.onPut(formPath).replyOnce(500);
+ service.updateGenericKey.mockRejectedValueOnce({});
createComponent();
return wrapper.vm.resetKey().then(() => {
expect(wrapper.find(ToggleButton).props('value')).toBe(false);
@@ -193,18 +196,34 @@ describe('AlertsSettingsForm', () => {
});
describe('alert service is toggled', () => {
- it('should show a error alert if failed', () => {
- const formPath = 'some/path';
+ describe('error handling', () => {
const toggleService = true;
- mockAxios.onPut(formPath).replyOnce(422, {
- errors: 'Error message to display',
- });
- createComponent();
+ it('should show generic error', async () => {
+ service.updateGenericActive.mockRejectedValueOnce({});
+
+ createComponent();
- return wrapper.vm.toggleActivated(toggleService).then(() => {
+ await wrapper.vm.toggleActivated(toggleService);
expect(wrapper.vm.active).toBe(false);
expect(wrapper.find(GlAlert).attributes('variant')).toBe('danger');
+ expect(wrapper.find(GlAlert).text()).toBe(i18n.errorMsg);
+ });
+
+ it('should show first field specific error when available', async () => {
+ const err1 = "can't be blank";
+ const err2 = 'is not a valid URL';
+ const key = 'api_url';
+ service.updateGenericActive.mockRejectedValueOnce({
+ response: { data: { errors: { [key]: [err1, err2] } } },
+ });
+
+ createComponent();
+
+ await wrapper.vm.toggleActivated(toggleService);
+
+ expect(wrapper.find(GlAlert).text()).toContain(i18n.errorMsg);
+ expect(wrapper.find(GlAlert).text()).toContain(`${key} ${err1}`);
});
});
});
diff --git a/spec/frontend/alert_settings/alerts_integrations_list_spec.js b/spec/frontend/alert_settings/alerts_integrations_list_spec.js
new file mode 100644
index 00000000000..6fc9901db2a
--- /dev/null
+++ b/spec/frontend/alert_settings/alerts_integrations_list_spec.js
@@ -0,0 +1,89 @@
+import { GlTable, GlIcon } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import Tracking from '~/tracking';
+import AlertIntegrationsList, {
+ i18n,
+} from '~/alerts_settings/components/alerts_integrations_list.vue';
+import { trackAlertIntergrationsViewsOptions } from '~/alerts_settings/constants';
+
+const mockIntegrations = [
+ {
+ activated: true,
+ name: 'Integration 1',
+ type: 'HTTP endpoint',
+ },
+ {
+ activated: false,
+ name: 'Integration 2',
+ type: 'HTTP endpoint',
+ },
+];
+
+describe('AlertIntegrationsList', () => {
+ let wrapper;
+
+ function mountComponent(propsData = {}) {
+ wrapper = mount(AlertIntegrationsList, {
+ propsData: {
+ integrations: mockIntegrations,
+ ...propsData,
+ },
+ stubs: {
+ GlIcon: true,
+ },
+ });
+ }
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ const findTableComponent = () => wrapper.find(GlTable);
+  const findStatusCell = () => wrapper.findAll('[data-testid="integration-activated-status"]');
+
+ it('renders a table', () => {
+ expect(findTableComponent().exists()).toBe(true);
+ });
+
+ it('renders an empty state when no integrations provided', () => {
+ mountComponent({ integrations: [] });
+ expect(findTableComponent().text()).toContain(i18n.emptyState);
+ });
+
+ describe('integration status', () => {
+ it('enabled', () => {
+      const cell = findStatusCell().at(0);
+ const activatedIcon = cell.find(GlIcon);
+ expect(cell.text()).toBe(i18n.status.enabled.name);
+ expect(activatedIcon.attributes('name')).toBe('check-circle-filled');
+ expect(activatedIcon.attributes('title')).toBe(i18n.status.enabled.tooltip);
+ });
+
+ it('disabled', () => {
+      const cell = findStatusCell().at(1);
+ const notActivatedIcon = cell.find(GlIcon);
+ expect(cell.text()).toBe(i18n.status.disabled.name);
+ expect(notActivatedIcon.attributes('name')).toBe('warning-solid');
+ expect(notActivatedIcon.attributes('title')).toBe(i18n.status.disabled.tooltip);
+ });
+ });
+
+ describe('Snowplow tracking', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ mountComponent();
+ });
+
+    it('should track alert integrations list views', () => {
+ const { category, action } = trackAlertIntergrationsViewsOptions;
+ expect(Tracking.event).toHaveBeenCalledWith(category, action);
+ });
+ });
+});
diff --git a/spec/frontend/analytics/instance_statistics/apollo_mock_data.js b/spec/frontend/analytics/instance_statistics/apollo_mock_data.js
new file mode 100644
index 00000000000..2e4eaf3fc96
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/apollo_mock_data.js
@@ -0,0 +1,30 @@
+const defaultPageInfo = { hasPreviousPage: false, startCursor: null, endCursor: null };
+
+export function getApolloResponse(options = {}) {
+ const {
+ pipelinesTotal = [],
+ pipelinesSucceeded = [],
+ pipelinesFailed = [],
+ pipelinesCanceled = [],
+ pipelinesSkipped = [],
+ hasNextPage = false,
+ } = options;
+ return {
+ data: {
+ pipelinesTotal: { pageInfo: { ...defaultPageInfo, hasNextPage }, nodes: pipelinesTotal },
+ pipelinesSucceeded: {
+ pageInfo: { ...defaultPageInfo, hasNextPage },
+ nodes: pipelinesSucceeded,
+ },
+ pipelinesFailed: { pageInfo: { ...defaultPageInfo, hasNextPage }, nodes: pipelinesFailed },
+ pipelinesCanceled: {
+ pageInfo: { ...defaultPageInfo, hasNextPage },
+ nodes: pipelinesCanceled,
+ },
+ pipelinesSkipped: {
+ pageInfo: { ...defaultPageInfo, hasNextPage },
+ nodes: pipelinesSkipped,
+ },
+ },
+ };
+}
diff --git a/spec/frontend/analytics/instance_statistics/components/__snapshots__/pipelines_chart_spec.js.snap b/spec/frontend/analytics/instance_statistics/components/__snapshots__/pipelines_chart_spec.js.snap
new file mode 100644
index 00000000000..0b3b685a9f2
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/components/__snapshots__/pipelines_chart_spec.js.snap
@@ -0,0 +1,161 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`PipelinesChart when fetching more data when the fetchMore query returns data passes the data to the line chart 1`] = `
+Array [
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 21,
+ ],
+ Array [
+ "2020-07-01",
+ 10,
+ ],
+ Array [
+ "2020-08-01",
+ 5,
+ ],
+ ],
+ "name": "Total",
+ },
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 21,
+ ],
+ Array [
+ "2020-07-01",
+ 10,
+ ],
+ Array [
+ "2020-08-01",
+ 5,
+ ],
+ ],
+ "name": "Succeeded",
+ },
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 22,
+ ],
+ Array [
+ "2020-07-01",
+ 41,
+ ],
+ Array [
+ "2020-08-01",
+ 5,
+ ],
+ ],
+ "name": "Failed",
+ },
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 21,
+ ],
+ Array [
+ "2020-07-01",
+ 10,
+ ],
+ Array [
+ "2020-08-01",
+ 5,
+ ],
+ ],
+ "name": "Canceled",
+ },
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 21,
+ ],
+ Array [
+ "2020-07-01",
+ 10,
+ ],
+ Array [
+ "2020-08-01",
+ 5,
+ ],
+ ],
+ "name": "Skipped",
+ },
+]
+`;
+
+exports[`PipelinesChart with data passes the data to the line chart 1`] = `
+Array [
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 22,
+ ],
+ Array [
+ "2020-07-01",
+ 41,
+ ],
+ ],
+ "name": "Total",
+ },
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 21,
+ ],
+ Array [
+ "2020-07-01",
+ 10,
+ ],
+ ],
+ "name": "Succeeded",
+ },
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 21,
+ ],
+ Array [
+ "2020-07-01",
+ 10,
+ ],
+ ],
+ "name": "Failed",
+ },
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 22,
+ ],
+ Array [
+ "2020-07-01",
+ 41,
+ ],
+ ],
+ "name": "Canceled",
+ },
+ Object {
+ "data": Array [
+ Array [
+ "2020-06-01",
+ 22,
+ ],
+ Array [
+ "2020-07-01",
+ 41,
+ ],
+ ],
+ "name": "Skipped",
+ },
+]
+`;
diff --git a/spec/frontend/analytics/instance_statistics/components/app_spec.js b/spec/frontend/analytics/instance_statistics/components/app_spec.js
new file mode 100644
index 00000000000..df13c9f82a9
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/components/app_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+import InstanceStatisticsApp from '~/analytics/instance_statistics/components/app.vue';
+import InstanceCounts from '~/analytics/instance_statistics/components//instance_counts.vue';
+import PipelinesChart from '~/analytics/instance_statistics/components/pipelines_chart.vue';
+import UsersChart from '~/analytics/instance_statistics/components/users_chart.vue';
+
+describe('InstanceStatisticsApp', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(InstanceStatisticsApp);
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('displays the instance counts component', () => {
+ expect(wrapper.find(InstanceCounts).exists()).toBe(true);
+ });
+
+ it('displays the pipelines chart component', () => {
+ expect(wrapper.find(PipelinesChart).exists()).toBe(true);
+ });
+
+ it('displays the users chart component', () => {
+ expect(wrapper.find(UsersChart).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js b/spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js
new file mode 100644
index 00000000000..12b5e14b9c4
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js
@@ -0,0 +1,54 @@
+import { shallowMount } from '@vue/test-utils';
+import InstanceCounts from '~/analytics/instance_statistics/components/instance_counts.vue';
+import MetricCard from '~/analytics/shared/components/metric_card.vue';
+import { mockInstanceCounts } from '../mock_data';
+
+describe('InstanceCounts', () => {
+ let wrapper;
+
+ const createComponent = ({ loading = false, data = {} } = {}) => {
+ const $apollo = {
+ queries: {
+ counts: {
+ loading,
+ },
+ },
+ };
+
+ wrapper = shallowMount(InstanceCounts, {
+ mocks: { $apollo },
+ data() {
+ return {
+ ...data,
+ };
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findMetricCard = () => wrapper.find(MetricCard);
+
+ describe('while loading', () => {
+ beforeEach(() => {
+ createComponent({ loading: true });
+ });
+
+ it('displays the metric card with isLoading=true', () => {
+ expect(findMetricCard().props('isLoading')).toBe(true);
+ });
+ });
+
+ describe('with data', () => {
+ beforeEach(() => {
+ createComponent({ data: { counts: mockInstanceCounts } });
+ });
+
+ it('passes the counts data to the metric card', () => {
+ expect(findMetricCard().props('metrics')).toEqual(mockInstanceCounts);
+ });
+ });
+});
diff --git a/spec/frontend/analytics/instance_statistics/components/pipelines_chart_spec.js b/spec/frontend/analytics/instance_statistics/components/pipelines_chart_spec.js
new file mode 100644
index 00000000000..a06d66f783e
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/components/pipelines_chart_spec.js
@@ -0,0 +1,189 @@
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlLineChart } from '@gitlab/ui/dist/charts';
+import { GlAlert } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import PipelinesChart from '~/analytics/instance_statistics/components/pipelines_chart.vue';
+import pipelinesStatsQuery from '~/analytics/instance_statistics/graphql/queries/pipeline_stats.query.graphql';
+import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue';
+import { mockCountsData1, mockCountsData2 } from '../mock_data';
+import { getApolloResponse } from '../apollo_mock_data';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('PipelinesChart', () => {
+ let wrapper;
+ let queryHandler;
+
+ const createApolloProvider = pipelineStatsHandler => {
+ return createMockApollo([[pipelinesStatsQuery, pipelineStatsHandler]]);
+ };
+
+ const createComponent = apolloProvider => {
+ return shallowMount(PipelinesChart, {
+ localVue,
+ apolloProvider,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findLoader = () => wrapper.find(ChartSkeletonLoader);
+ const findChart = () => wrapper.find(GlLineChart);
+ const findAlert = () => wrapper.find(GlAlert);
+
+ describe('while loading', () => {
+ beforeEach(() => {
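+ // A promise that never resolves keeps the Apollo query in its loading state.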
+ queryHandler = jest.fn().mockReturnValue(new Promise(() => {}));
+ const apolloProvider = createApolloProvider(queryHandler);
+ wrapper = createComponent(apolloProvider);
+ });
+
+ it('requests data', () => {
+ expect(queryHandler).toBeCalledTimes(1);
+ });
+
+ it('displays the skeleton loader', () => {
+ expect(findLoader().exists()).toBe(true);
+ });
+
+ it('hides the chart', () => {
+ expect(findChart().exists()).toBe(false);
+ });
+
+ it('does not show an error', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('without data', () => {
+ beforeEach(() => {
+ const emptyResponse = getApolloResponse();
+ queryHandler = jest.fn().mockResolvedValue(emptyResponse);
+ const apolloProvider = createApolloProvider(queryHandler);
+ wrapper = createComponent(apolloProvider);
+ });
+
+ it('renders a no data message', () => {
+ expect(findAlert().text()).toBe('There is no data available.');
+ });
+
+ it('hides the skeleton loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('hides the chart', () => {
+ expect(findChart().exists()).toBe(false);
+ });
+ });
+
+ describe('with data', () => {
+ beforeEach(() => {
+ const response = getApolloResponse({
+ pipelinesTotal: mockCountsData1,
+ pipelinesSucceeded: mockCountsData2,
+ pipelinesFailed: mockCountsData2,
+ pipelinesCanceled: mockCountsData1,
+ pipelinesSkipped: mockCountsData1,
+ });
+ queryHandler = jest.fn().mockResolvedValue(response);
+ const apolloProvider = createApolloProvider(queryHandler);
+ wrapper = createComponent(apolloProvider);
+ });
+
+ it('requests data', () => {
+ expect(queryHandler).toBeCalledTimes(1);
+ });
+
+ it('hides the skeleton loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('renders the chart', () => {
+ expect(findChart().exists()).toBe(true);
+ });
+
+ it('passes the data to the line chart', () => {
+ expect(findChart().props('data')).toMatchSnapshot();
+ });
+
+ it('does not show an error', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('when fetching more data', () => {
+ const recordedAt = '2020-08-01';
+ describe('when the fetchMore query returns data', () => {
+ beforeEach(async () => {
+ const newData = { recordedAt, count: 5 };
+ const firstResponse = getApolloResponse({
+ pipelinesTotal: mockCountsData2,
+ pipelinesSucceeded: mockCountsData2,
+ pipelinesFailed: mockCountsData1,
+ pipelinesCanceled: mockCountsData2,
+ pipelinesSkipped: mockCountsData2,
+ hasNextPage: true,
+ });
+ const secondResponse = getApolloResponse({
+ pipelinesTotal: [newData],
+ pipelinesSucceeded: [newData],
+ pipelinesFailed: [newData],
+ pipelinesCanceled: [newData],
+ pipelinesSkipped: [newData],
+ hasNextPage: false,
+ });
+ queryHandler = jest
+ .fn()
+ .mockResolvedValueOnce(firstResponse)
+ .mockResolvedValueOnce(secondResponse);
+ const apolloProvider = createApolloProvider(queryHandler);
+ wrapper = createComponent(apolloProvider);
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('requests data twice', () => {
+ expect(queryHandler).toBeCalledTimes(2);
+ });
+
+ it('passes the data to the line chart', () => {
+ expect(findChart().props('data')).toMatchSnapshot();
+ });
+ });
+
+ describe('when the fetchMore query throws an error', () => {
+ beforeEach(async () => {
+ const response = getApolloResponse({
+ pipelinesTotal: mockCountsData2,
+ pipelinesSucceeded: mockCountsData2,
+ pipelinesFailed: mockCountsData1,
+ pipelinesCanceled: mockCountsData2,
+ pipelinesSkipped: mockCountsData2,
+ hasNextPage: true,
+ });
+ queryHandler = jest.fn().mockResolvedValue(response);
+ const apolloProvider = createApolloProvider(queryHandler);
+ wrapper = createComponent(apolloProvider);
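+ // Force fetchMore to reject so the component falls back to its error alert.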
+ jest
+ .spyOn(wrapper.vm.$apollo.queries.pipelineStats, 'fetchMore')
+ .mockImplementation(jest.fn().mockRejectedValue());
+ await wrapper.vm.$nextTick();
+ });
+
+ it('calls fetchMore', () => {
+ expect(wrapper.vm.$apollo.queries.pipelineStats.fetchMore).toHaveBeenCalledTimes(1);
+ });
+
+ it('shows an error message', () => {
+ expect(findAlert().text()).toBe(
+ 'Could not load the pipelines chart. Please refresh the page to try again.',
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js b/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js
new file mode 100644
index 00000000000..7509c1e6626
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/components/users_chart_spec.js
@@ -0,0 +1,200 @@
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlAreaChart } from '@gitlab/ui/dist/charts';
+import { GlAlert } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import { useFakeDate } from 'helpers/fake_date';
+import UsersChart from '~/analytics/instance_statistics/components/users_chart.vue';
+import ChartSkeletonLoader from '~/vue_shared/components/resizable_chart/skeleton_loader.vue';
+import usersQuery from '~/analytics/instance_statistics/graphql/queries/users.query.graphql';
+import { mockCountsData2, roundedSortedCountsMonthlyChartData2, mockPageInfo } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('UsersChart', () => {
+ let wrapper;
+ let queryHandler;
+
+ const mockApolloResponse = ({ loading = false, hasNextPage = false, users }) => ({
+ data: {
+ users: {
+ pageInfo: { ...mockPageInfo, hasNextPage },
+ nodes: users,
+ loading,
+ },
+ },
+ });
+
+ const mockQueryResponse = ({ users, loading = false, hasNextPage = false }) => {
+ const apolloQueryResponse = mockApolloResponse({ loading, hasNextPage, users });
+ if (loading) {
+ return jest.fn().mockReturnValue(new Promise(() => {}));
+ }
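+ // Simulate pagination: the first call reports another page, the second returns the final batch.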
+ if (hasNextPage) {
+ return jest
+ .fn()
+ .mockResolvedValueOnce(apolloQueryResponse)
+ .mockResolvedValueOnce(
+ mockApolloResponse({
+ loading,
+ hasNextPage: false,
+ users: [{ recordedAt: '2020-07-21', count: 5 }],
+ }),
+ );
+ }
+ return jest.fn().mockResolvedValue(apolloQueryResponse);
+ };
+
+ const createComponent = ({
+ loadingError = false,
+ loading = false,
+ users = [],
+ hasNextPage = false,
+ } = {}) => {
+ queryHandler = mockQueryResponse({ users, loading, hasNextPage });
+
+ return shallowMount(UsersChart, {
+ propsData: {
+ startDate: useFakeDate(2020, 9, 26),
+ endDate: useFakeDate(2020, 10, 1),
+ totalDataPoints: mockCountsData2.length,
+ },
+ localVue,
+ apolloProvider: createMockApollo([[usersQuery, queryHandler]]),
+ data() {
+ return { loadingError };
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findLoader = () => wrapper.find(ChartSkeletonLoader);
+ const findAlert = () => wrapper.find(GlAlert);
+ const findChart = () => wrapper.find(GlAreaChart);
+
+ describe('while loading', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ loading: true });
+ });
+
+ it('displays the skeleton loader', () => {
+ expect(findLoader().exists()).toBe(true);
+ });
+
+ it('hides the chart', () => {
+ expect(findChart().exists()).toBe(false);
+ });
+ });
+
+ describe('without data', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({ users: [] });
+ await wrapper.vm.$nextTick();
+ });
+
+ it('renders a no data message', () => {
+ expect(findAlert().text()).toBe('There is no data available.');
+ });
+
+ it('hides the skeleton loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('hides the chart', () => {
+ expect(findChart().exists()).toBe(false);
+ });
+ });
+
+ describe('with data', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({ users: mockCountsData2 });
+ await wrapper.vm.$nextTick();
+ });
+
+ it('hides the skeleton loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('renders the chart', () => {
+ expect(findChart().exists()).toBe(true);
+ });
+
+ it('passes the data to the area chart', () => {
+ expect(findChart().props('data')).toEqual([
+ { data: roundedSortedCountsMonthlyChartData2, name: 'Total users' },
+ ]);
+ });
+ });
+
+ describe('with errors', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({ loadingError: true });
+ await wrapper.vm.$nextTick();
+ });
+
+ it('renders an error message', () => {
+ expect(findAlert().text()).toBe(
+ 'Could not load the user chart. Please refresh the page to try again.',
+ );
+ });
+
+ it('hides the skeleton loader', () => {
+ expect(findLoader().exists()).toBe(false);
+ });
+
+ it('hides the chart', () => {
+ expect(findChart().exists()).toBe(false);
+ });
+ });
+
+ describe('when fetching more data', () => {
+ describe('when the fetchMore query returns data', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({
+ users: mockCountsData2,
+ hasNextPage: true,
+ });
+
+ jest.spyOn(wrapper.vm.$apollo.queries.users, 'fetchMore');
+ await wrapper.vm.$nextTick();
+ });
+
+ it('requests data twice', () => {
+ expect(queryHandler).toBeCalledTimes(2);
+ });
+
+ it('calls fetchMore', () => {
+ expect(wrapper.vm.$apollo.queries.users.fetchMore).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('when the fetchMore query throws an error', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ users: mockCountsData2,
+ hasNextPage: true,
+ });
+
+ jest
+ .spyOn(wrapper.vm.$apollo.queries.users, 'fetchMore')
+ .mockImplementation(jest.fn().mockRejectedValue());
+ return wrapper.vm.$nextTick();
+ });
+
+ it('calls fetchMore', () => {
+ expect(wrapper.vm.$apollo.queries.users.fetchMore).toHaveBeenCalledTimes(1);
+ });
+
+ it('renders an error message', () => {
+ expect(findAlert().text()).toBe(
+ 'Could not load the user chart. Please refresh the page to try again.',
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/analytics/instance_statistics/mock_data.js b/spec/frontend/analytics/instance_statistics/mock_data.js
new file mode 100644
index 00000000000..b737db4c55f
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/mock_data.js
@@ -0,0 +1,42 @@
+export const mockInstanceCounts = [
+ { key: 'projects', value: 10, label: 'Projects' },
+ { key: 'groups', value: 20, label: 'Group' },
+];
+
+export const mockCountsData1 = [
+ { recordedAt: '2020-07-23', count: 52 },
+ { recordedAt: '2020-07-22', count: 40 },
+ { recordedAt: '2020-07-21', count: 31 },
+ { recordedAt: '2020-06-14', count: 23 },
+ { recordedAt: '2020-06-12', count: 20 },
+];
+
+export const countsMonthlyChartData1 = [
+ ['2020-07-01', 41], // average of 2020-07-x items
+ ['2020-06-01', 21.5], // average of 2020-06-x items
+];
+
+export const mockCountsData2 = [
+ { recordedAt: '2020-07-28', count: 10 },
+ { recordedAt: '2020-07-27', count: 9 },
+ { recordedAt: '2020-06-26', count: 14 },
+ { recordedAt: '2020-06-25', count: 23 },
+ { recordedAt: '2020-06-24', count: 25 },
+];
+
+export const countsMonthlyChartData2 = [
+ ['2020-07-01', 9.5], // average of 2020-07-x items
+ ['2020-06-01', 20.666666666666668], // average of 2020-06-x items
+];
+
+export const roundedSortedCountsMonthlyChartData2 = [
+ ['2020-06-01', 21], // average of 2020-06-x items
+ ['2020-07-01', 10], // average of 2020-07-x items
+];
+
+export const mockPageInfo = {
+ hasNextPage: false,
+ hasPreviousPage: false,
+ startCursor: null,
+ endCursor: null,
+};
diff --git a/spec/frontend/analytics/instance_statistics/utils_spec.js b/spec/frontend/analytics/instance_statistics/utils_spec.js
new file mode 100644
index 00000000000..d480238419b
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/utils_spec.js
@@ -0,0 +1,84 @@
+import {
+ getAverageByMonth,
+ extractValues,
+ sortByDate,
+} from '~/analytics/instance_statistics/utils';
+import {
+ mockCountsData1,
+ mockCountsData2,
+ countsMonthlyChartData1,
+ countsMonthlyChartData2,
+} from './mock_data';
+
+describe('getAverageByMonth', () => {
+ it('collects data into average by months', () => {
+ expect(getAverageByMonth(mockCountsData1)).toStrictEqual(countsMonthlyChartData1);
+ expect(getAverageByMonth(mockCountsData2)).toStrictEqual(countsMonthlyChartData2);
+ });
+
+ it('transforms a data point to the first of the month', () => {
+ const item = mockCountsData1[0];
+ const firstOfTheMonth = item.recordedAt.replace(/-[0-9]{2}$/, '-01');
+ expect(getAverageByMonth([item])).toStrictEqual([[firstOfTheMonth, item.count]]);
+ });
+
+ it('uses sane defaults', () => {
+ expect(getAverageByMonth()).toStrictEqual([]);
+ });
+
+ it('errors when passing null', () => {
+ expect(() => {
+ getAverageByMonth(null);
+ }).toThrow();
+ });
+
+ describe('when shouldRound = true', () => {
+ const options = { shouldRound: true };
+
+ it('rounds the averages', () => {
+ const roundedData1 = countsMonthlyChartData1.map(([date, avg]) => [date, Math.round(avg)]);
+ const roundedData2 = countsMonthlyChartData2.map(([date, avg]) => [date, Math.round(avg)]);
+ expect(getAverageByMonth(mockCountsData1, options)).toStrictEqual(roundedData1);
+ expect(getAverageByMonth(mockCountsData2, options)).toStrictEqual(roundedData2);
+ });
+ });
+});
+
+describe('extractValues', () => {
+ it('extracts only requested values', () => {
+ const data = { fooBar: { baz: 'quis' }, ignored: 'ignored' };
+ expect(extractValues(data, ['fooBar'], 'foo', 'baz')).toEqual({ bazBar: 'quis' });
+ });
+
+ it('is able to extract multiple values', () => {
+ const data = {
+ fooBar: { baz: 'quis' },
+ fooBaz: { baz: 'quis' },
+ fooQuis: { baz: 'quis' },
+ };
+ expect(extractValues(data, ['fooBar', 'fooBaz', 'fooQuis'], 'foo', 'baz')).toEqual({
+ bazBar: 'quis',
+ bazBaz: 'quis',
+ bazQuis: 'quis',
+ });
+ });
+
+ it('returns empty data set when keys are not found', () => {
+ const data = { foo: { baz: 'quis' }, ignored: 'ignored' };
+ expect(extractValues(data, ['fooBar'], 'foo', 'baz')).toEqual({});
+ });
+
+ it('returns empty data when params are missing', () => {
+ expect(extractValues()).toEqual({});
+ });
+});
+
+describe('sortByDate', () => {
+ it('sorts the array by date', () => {
+ expect(sortByDate(mockCountsData1)).toStrictEqual([...mockCountsData1].reverse());
+ });
+
+ it('does not modify the original array', () => {
+ expect(sortByDate(countsMonthlyChartData1)).not.toBe(countsMonthlyChartData1);
+ });
+});
diff --git a/spec/frontend/analytics/shared/components/metric_card_spec.js b/spec/frontend/analytics/shared/components/metric_card_spec.js
new file mode 100644
index 00000000000..e89d499ed9b
--- /dev/null
+++ b/spec/frontend/analytics/shared/components/metric_card_spec.js
@@ -0,0 +1,129 @@
+import { mount } from '@vue/test-utils';
+import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import MetricCard from '~/analytics/shared/components/metric_card.vue';
+
+const metrics = [
+ { key: 'first_metric', value: 10, label: 'First metric', unit: 'days', link: 'some_link' },
+ { key: 'second_metric', value: 20, label: 'Yet another metric' },
+ { key: 'third_metric', value: null, label: 'Null metric without value', unit: 'parsecs' },
+ { key: 'fourth_metric', value: '-', label: 'Metric without value', unit: 'parsecs' },
+];
+
+const defaultProps = {
+ title: 'My fancy title',
+ isLoading: false,
+ metrics,
+};
+
+describe('MetricCard', () => {
+ let wrapper;
+
+ const factory = (props = defaultProps) => {
+ wrapper = mount(MetricCard, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findTitle = () => wrapper.find({ ref: 'title' });
+ const findLoadingIndicator = () => wrapper.find(GlSkeletonLoading);
+ const findMetricsWrapper = () => wrapper.find({ ref: 'metricsWrapper' });
+ const findMetricItem = () => wrapper.findAll({ ref: 'metricItem' });
+ const findTooltip = () => wrapper.find('[data-testid="tooltip"]');
+
+ describe('template', () => {
+ it('renders the title', () => {
+ factory();
+
+ expect(findTitle().text()).toContain('My fancy title');
+ });
+
+ describe('when isLoading is true', () => {
+ beforeEach(() => {
+ factory({ isLoading: true });
+ });
+
+ it('displays a loading indicator', () => {
+ expect(findLoadingIndicator().exists()).toBe(true);
+ });
+
+ it('does not display the metrics container', () => {
+ expect(findMetricsWrapper().exists()).toBe(false);
+ });
+ });
+
+ describe('when isLoading is false', () => {
+ beforeEach(() => {
+ factory({ isLoading: false });
+ });
+
+ it('does not display a loading indicator', () => {
+ expect(findLoadingIndicator().exists()).toBe(false);
+ });
+
+ it('displays the metrics container', () => {
+ expect(findMetricsWrapper().exists()).toBe(true);
+ });
+
+ it('renders an item for each metric', () => {
+ expect(findMetricItem()).toHaveLength(metrics.length);
+ });
+
+ describe('with tooltip text', () => {
+ const tooltipText = 'This is a tooltip';
+ const tooltipMetric = {
+ key: 'fifth_metric',
+ value: '-',
+ label: 'Metric with tooltip',
+ unit: 'parsecs',
+ tooltipText,
+ };
+
+ beforeEach(() => {
+ factory({
+ isLoading: false,
+ metrics: [tooltipMetric],
+ });
+ });
+
+ it('will render a tooltip', () => {
+ const tt = getBinding(findTooltip().element, 'gl-tooltip');
+ expect(tt.value.title).toEqual(tooltipText);
+ });
+ });
+
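+ // The unit column keeps its leading space so the concatenated value and unit (e.g. "10 days") match the rendered text.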
+ describe.each`
+ columnIndex | label | value | unit | link
+ ${0} | ${'First metric'} | ${10} | ${' days'} | ${'some_link'}
+ ${1} | ${'Yet another metric'} | ${20} | ${''} | ${null}
+ ${2} | ${'Null metric without value'} | ${'-'} | ${''} | ${null}
+ ${3} | ${'Metric without value'} | ${'-'} | ${''} | ${null}
+ `('metric columns', ({ columnIndex, label, value, unit, link }) => {
+ it(`renders ${value}${unit} ${label} with URL ${link}`, () => {
+ const allMetricItems = findMetricItem();
+ const metricItem = allMetricItems.at(columnIndex);
+ const text = metricItem.text();
+
+ expect(text).toContain(`${value}${unit}`);
+ expect(text).toContain(label);
+
+ if (link) {
+ expect(metricItem.find('a').attributes('href')).toBe(link);
+ } else {
+ expect(metricItem.find('a').exists()).toBe(false);
+ }
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 3ae0d06162d..9924525929b 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -421,6 +421,25 @@ describe('Api', () => {
});
});
+ describe('addProjectIssueAsTodo', () => {
+ it('adds issue ID as a todo', () => {
+ const projectId = 1;
+ const issueIid = 11;
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/1/issues/11/todo`;
+ mock.onPost(expectedUrl).reply(200, {
+ id: 112,
+ project: {
+ id: 1,
+ },
+ });
+
+ return Api.addProjectIssueAsTodo(projectId, issueIid).then(({ data }) => {
+ expect(data.id).toBe(112);
+ expect(data.project.id).toBe(projectId);
+ });
+ });
+ });
+
describe('newLabel', () => {
it('creates a new label', done => {
const namespace = 'some namespace';
@@ -672,6 +691,27 @@ describe('Api', () => {
});
});
+ describe('pipelineJobs', () => {
+ it('fetches the jobs for a given pipeline', done => {
+ const projectId = 123;
+ const pipelineId = 456;
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/pipelines/${pipelineId}/jobs`;
+ const payload = [
+ {
+ name: 'test',
+ },
+ ];
+ mock.onGet(expectedUrl).reply(httpStatus.OK, payload);
+
+ Api.pipelineJobs(projectId, pipelineId)
+ .then(({ data }) => {
+ expect(data).toEqual(payload);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('createBranch', () => {
it('creates new branch', done => {
const ref = 'master';
@@ -1152,4 +1192,44 @@ describe('Api', () => {
});
});
});
+
+ describe('trackRedisHllUserEvent', () => {
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/usage_data/increment_unique_users`;
+
+ const event = 'dummy_event';
+ const postData = { event };
+ const headers = {
+ 'Content-Type': 'application/json',
+ };
+
+ describe('when usage data increment unique users is called with feature flag disabled', () => {
+ beforeEach(() => {
+ gon.features = { ...gon.features, usageDataApi: false };
+ });
+
+ it('returns null', () => {
+ jest.spyOn(axios, 'post');
+ mock.onPost(expectedUrl).replyOnce(httpStatus.OK, true);
+
+ expect(axios.post).toHaveBeenCalledTimes(0);
+ expect(Api.trackRedisHllUserEvent(event)).toEqual(null);
+ });
+ });
+
+ describe('when usage data increment unique users is called', () => {
+ beforeEach(() => {
+ gon.features = { ...gon.features, usageDataApi: true };
+ });
+
+ it('resolves the Promise', () => {
+ jest.spyOn(axios, 'post');
+ mock.onPost(expectedUrl, { event }).replyOnce(httpStatus.OK, true);
+
+ return Api.trackRedisHllUserEvent(event).then(({ data }) => {
+ expect(data).toEqual(true);
+ expect(axios.post).toHaveBeenCalledWith(expectedUrl, postData, { headers });
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/awards_handler_spec.js b/spec/frontend/awards_handler_spec.js
index f0ed18248f0..7fd6a9e7b87 100644
--- a/spec/frontend/awards_handler_spec.js
+++ b/spec/frontend/awards_handler_spec.js
@@ -309,6 +309,30 @@ describe('AwardsHandler', () => {
expect($('[data-name=alien]').is(':visible')).toBe(true);
expect($('.js-emoji-menu-search').val()).toBe('');
});
+
+ it('should fuzzy filter the emoji', async () => {
+ await openAndWaitForEmojiMenu();
+
+ awardsHandler.searchEmojis('sgls');
+
+ expect($('[data-name=angel]').is(':visible')).toBe(false);
+ expect($('[data-name=anger]').is(':visible')).toBe(false);
+ expect($('[data-name=sunglasses]').is(':visible')).toBe(true);
+ });
+
+ it('should filter by emoji description', async () => {
+ await openAndWaitForEmojiMenu();
+
+ awardsHandler.searchEmojis('baby');
+ expect($('[data-name=angel]').is(':visible')).toBe(true);
+ });
+
+ it('should filter by emoji unicode value', async () => {
+ await openAndWaitForEmojiMenu();
+
+ awardsHandler.searchEmojis('👼');
+ expect($('[data-name=angel]').is(':visible')).toBe(true);
+ });
});
describe('emoji menu', () => {
diff --git a/spec/frontend/badges/components/badge_settings_spec.js b/spec/frontend/badges/components/badge_settings_spec.js
index b6a86746598..769be7cb1bd 100644
--- a/spec/frontend/badges/components/badge_settings_spec.js
+++ b/spec/frontend/badges/components/badge_settings_spec.js
@@ -1,117 +1,71 @@
-import Vue from 'vue';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
import store from '~/badges/store';
import BadgeSettings from '~/badges/components/badge_settings.vue';
+import BadgeList from '~/badges/components/badge_list.vue';
+import BadgeListRow from '~/badges/components/badge_list_row.vue';
import { createDummyBadge } from '../dummy_badge';
-describe('BadgeSettings component', () => {
- const Component = Vue.extend(BadgeSettings);
- let vm;
+const localVue = createLocalVue();
+localVue.use(Vuex);
- beforeEach(() => {
- setFixtures(`
- <div id="dummy-element"></div>
- <button
- id="dummy-modal-button"
- type="button"
- data-toggle="modal"
- data-target="#delete-badge-modal"
- >Show modal</button>
- `);
+describe('BadgeSettings component', () => {
+ let wrapper;
+ const badge = createDummyBadge();
- // Can be removed once GlLoadingIcon no longer throws a warning
- jest.spyOn(global.console, 'warn').mockImplementation(() => jest.fn());
+ const createComponent = (isEditing = false) => {
+ store.state.badges = [badge];
+ store.state.kind = 'project';
+ store.state.isEditing = isEditing;
- vm = mountComponentWithStore(Component, {
- el: '#dummy-element',
+ wrapper = shallowMount(BadgeSettings, {
store,
+ localVue,
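+ // Render the real list components instead of auto-stubs so their visibility can be asserted.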
+ stubs: {
+ 'badge-list': BadgeList,
+ 'badge-list-row': BadgeListRow,
+ },
});
+ };
+
+ beforeEach(() => {
+ createComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- it('displays modal if button is clicked', done => {
- const badge = createDummyBadge();
- store.state.badgeInModal = badge;
- const modal = vm.$el.querySelector('#delete-badge-modal');
- const button = document.getElementById('dummy-modal-button');
+ it('displays modal if button for deleting a badge is clicked', async () => {
+ const button = wrapper.find('[data-testid="delete-badge"]');
- button.click();
+ button.vm.$emit('click');
+ await wrapper.vm.$nextTick();
- Vue.nextTick()
- .then(() => {
- expect(modal.innerText).toMatch('Delete badge?');
- const badgeElement = modal.querySelector('img.project-badge');
- expect(badgeElement).not.toBe(null);
- expect(badgeElement.getAttribute('src')).toBe(badge.renderedImageUrl);
- })
- .then(done)
- .catch(done.fail);
+ const modal = wrapper.find(GlModal);
+ expect(modal.isVisible()).toBe(true);
});
it('displays a form to add a badge', () => {
- const form = vm.$el.querySelector('form:nth-of-type(2)');
-
- expect(form).not.toBe(null);
- const button = form.querySelector('.btn-success');
-
- expect(button).not.toBe(null);
- expect(button).toHaveText(/Add badge/);
+ expect(wrapper.find('[data-testid="add-new-badge"]').isVisible()).toBe(true);
});
it('displays badge list', () => {
- const badgeListElement = vm.$el.querySelector('.card');
-
- expect(badgeListElement).not.toBe(null);
- expect(badgeListElement).toBeVisible();
- expect(badgeListElement.innerText).toMatch('Your badges');
+ expect(wrapper.find(BadgeList).isVisible()).toBe(true);
});
describe('when editing', () => {
- beforeEach(done => {
- store.state.isEditing = true;
-
- Vue.nextTick()
- .then(done)
- .catch(done.fail);
+ beforeEach(() => {
+ createComponent(true);
});
it('displays a form to edit a badge', () => {
- const form = vm.$el.querySelector('form:nth-of-type(1)');
-
- expect(form).not.toBe(null);
- const cancelButton = form.querySelector('[data-testid="cancelEditing"]');
-
- expect(cancelButton).not.toBe(null);
- expect(cancelButton).toHaveText(/Cancel/);
- const submitButton = form.querySelector('[data-testid="saveEditing"]');
-
- expect(submitButton).not.toBe(null);
- expect(submitButton).toHaveText(/Save changes/);
+ expect(wrapper.find('[data-testid="edit-badge"]').isVisible()).toBe(true);
});
it('displays no badge list', () => {
- const badgeListElement = vm.$el.querySelector('.card');
-
- expect(badgeListElement).toBeHidden();
- });
- });
-
- describe('methods', () => {
- describe('onSubmitModal', () => {
- it('triggers ', () => {
- jest.spyOn(vm, 'deleteBadge').mockImplementation(() => Promise.resolve());
- const modal = vm.$el.querySelector('#delete-badge-modal');
- const deleteButton = modal.querySelector('.btn-danger');
-
- deleteButton.click();
-
- const badge = store.state.badgeInModal;
-
- expect(vm.deleteBadge).toHaveBeenCalledWith(badge);
- });
+ expect(wrapper.find(BadgeList).isVisible()).toBe(false);
});
});
});
diff --git a/spec/frontend/batch_comments/components/preview_item_spec.js b/spec/frontend/batch_comments/components/preview_item_spec.js
index 2b63ece28ba..8ddad3dacfe 100644
--- a/spec/frontend/batch_comments/components/preview_item_spec.js
+++ b/spec/frontend/batch_comments/components/preview_item_spec.js
@@ -43,22 +43,6 @@ describe('Batch comments draft preview item component', () => {
);
});
- it('adds is last class', () => {
- createComponent(true);
-
- expect(vm.$el.classList).toContain('is-last');
- });
-
- it('scrolls to draft on click', () => {
- createComponent();
-
- jest.spyOn(vm.$store, 'dispatch').mockImplementation();
-
- vm.$el.click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith('batchComments/scrollToDraft', vm.draft);
- });
-
describe('for file', () => {
it('renders file path', () => {
createComponent(false, { file_path: 'index.js', file_hash: 'abc', position: {} });
diff --git a/spec/frontend/batch_comments/components/publish_button_spec.js b/spec/frontend/batch_comments/components/publish_button_spec.js
index 4362f62c7f8..4032713150c 100644
--- a/spec/frontend/batch_comments/components/publish_button_spec.js
+++ b/spec/frontend/batch_comments/components/publish_button_spec.js
@@ -29,17 +29,6 @@ describe('Batch comments publish button component', () => {
expect(vm.$store.dispatch).toHaveBeenCalledWith('batchComments/publishReview', undefined);
});
- it('dispatches toggleReviewDropdown when shouldPublish is false on click', () => {
- vm.shouldPublish = false;
-
- vm.$el.click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith(
- 'batchComments/toggleReviewDropdown',
- undefined,
- );
- });
-
it('sets loading when isPublishing is true', done => {
vm.$store.state.batchComments.isPublishing = true;
diff --git a/spec/frontend/batch_comments/components/publish_dropdown_spec.js b/spec/frontend/batch_comments/components/publish_dropdown_spec.js
index fb3c532174d..f235867f002 100644
--- a/spec/frontend/batch_comments/components/publish_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/publish_dropdown_spec.js
@@ -1,96 +1,39 @@
-import Vue from 'vue';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import PreviewDropdown from '~/batch_comments/components/preview_dropdown.vue';
import { createStore } from '~/mr_notes/stores';
import '~/behaviors/markdown/render_gfm';
import { createDraft } from '../mock_data';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('Batch comments publish dropdown component', () => {
- let vm;
- let Component;
+ let wrapper;
- function createComponent(extendStore = () => {}) {
+ function createComponent() {
const store = createStore();
store.state.batchComments.drafts.push(createDraft(), { ...createDraft(), id: 2 });
- extendStore(store);
-
- vm = mountComponentWithStore(Component, { store });
+ wrapper = shallowMount(PreviewDropdown, {
+ store,
+ });
}
- beforeAll(() => {
- Component = Vue.extend(PreviewDropdown);
- });
-
afterEach(() => {
- vm.$destroy();
- });
-
- it('toggles dropdown when clicking button', done => {
- createComponent();
-
- jest.spyOn(vm.$store, 'dispatch');
-
- vm.$el.querySelector('.review-preview-dropdown-toggle').click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith(
- 'batchComments/toggleReviewDropdown',
- expect.anything(),
- );
-
- setImmediate(() => {
- expect(vm.$el.classList).toContain('show');
-
- done();
- });
- });
-
- it('toggles dropdown when clicking body', () => {
- createComponent();
-
- vm.$store.state.batchComments.showPreviewDropdown = true;
-
- jest.spyOn(vm.$store, 'dispatch').mockImplementation();
-
- document.body.click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith(
- 'batchComments/toggleReviewDropdown',
- undefined,
- );
+ wrapper.destroy();
});
it('renders list of drafts', () => {
- createComponent(store => {
- Object.assign(store.state.notes, {
- isNotesFetched: true,
- });
- });
-
- expect(vm.$el.querySelectorAll('.dropdown-content li').length).toBe(2);
- });
-
- it('adds is-last class to last item', () => {
- createComponent(store => {
- Object.assign(store.state.notes, {
- isNotesFetched: true,
- });
- });
-
- expect(vm.$el.querySelectorAll('.dropdown-content li')[1].querySelector('.is-last')).not.toBe(
- null,
- );
- });
-
- it('renders draft count in dropdown title', () => {
createComponent();
- expect(vm.$el.querySelector('.dropdown-title').textContent).toContain('2 pending comments');
+ expect(wrapper.findAll(GlDropdownItem).length).toBe(2);
});
- it('renders publish button in footer', () => {
+ it('renders draft count in dropdown title', () => {
createComponent();
- expect(vm.$el.querySelector('.dropdown-footer .js-publish-draft-button')).not.toBe(null);
+ expect(wrapper.find(GlDropdown).props('headerText')).toEqual('2 pending comments');
});
});
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
index a6942115649..e66f36aa3a2 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
@@ -199,42 +199,6 @@ describe('Batch comments store actions', () => {
});
});
- describe('discardReview', () => {
- it('commits mutations', done => {
- const getters = {
- getNotesData: { draftsDiscardPath: TEST_HOST },
- };
- const commit = jest.fn();
- mock.onAny().reply(200);
-
- actions
- .discardReview({ getters, commit })
- .then(() => {
- expect(commit.mock.calls[0]).toEqual(['REQUEST_DISCARD_REVIEW']);
- expect(commit.mock.calls[1]).toEqual(['RECEIVE_DISCARD_REVIEW_SUCCESS']);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('commits error mutations', done => {
- const getters = {
- getNotesData: { draftsDiscardPath: TEST_HOST },
- };
- const commit = jest.fn();
- mock.onAny().reply(500);
-
- actions
- .discardReview({ getters, commit })
- .then(() => {
- expect(commit.mock.calls[0]).toEqual(['REQUEST_DISCARD_REVIEW']);
- expect(commit.mock.calls[1]).toEqual(['RECEIVE_DISCARD_REVIEW_ERROR']);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
describe('updateDraft', () => {
let getters;
@@ -284,56 +248,6 @@ describe('Batch comments store actions', () => {
});
});
- describe('toggleReviewDropdown', () => {
- it('dispatches openReviewDropdown', done => {
- testAction(
- actions.toggleReviewDropdown,
- null,
- { showPreviewDropdown: false },
- [],
- [{ type: 'openReviewDropdown' }],
- done,
- );
- });
-
- it('dispatches closeReviewDropdown when showPreviewDropdown is true', done => {
- testAction(
- actions.toggleReviewDropdown,
- null,
- { showPreviewDropdown: true },
- [],
- [{ type: 'closeReviewDropdown' }],
- done,
- );
- });
- });
-
- describe('openReviewDropdown', () => {
- it('commits OPEN_REVIEW_DROPDOWN', done => {
- testAction(
- actions.openReviewDropdown,
- null,
- null,
- [{ type: 'OPEN_REVIEW_DROPDOWN' }],
- [],
- done,
- );
- });
- });
-
- describe('closeReviewDropdown', () => {
- it('commits CLOSE_REVIEW_DROPDOWN', done => {
- testAction(
- actions.closeReviewDropdown,
- null,
- null,
- [{ type: 'CLOSE_REVIEW_DROPDOWN' }],
- [],
- done,
- );
- });
- });
-
describe('expandAllDiscussions', () => {
it('dispatches expandDiscussion for all drafts', done => {
const state = {
@@ -383,9 +297,7 @@ describe('Batch comments store actions', () => {
actions.scrollToDraft({ dispatch, rootGetters }, draft);
- expect(dispatch.mock.calls[0]).toEqual(['closeReviewDropdown']);
-
- expect(dispatch.mock.calls[1]).toEqual([
+ expect(dispatch.mock.calls[0]).toEqual([
'expandDiscussion',
{ discussionId: '1' },
{ root: true },
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js
index a86726269ef..1406f66fd10 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js
@@ -89,42 +89,6 @@ describe('Batch comments mutations', () => {
});
});
- describe(types.REQUEST_DISCARD_REVIEW, () => {
- it('sets isDiscarding to true', () => {
- mutations[types.REQUEST_DISCARD_REVIEW](state);
-
- expect(state.isDiscarding).toBe(true);
- });
- });
-
- describe(types.RECEIVE_DISCARD_REVIEW_SUCCESS, () => {
- it('emptys drafts array', () => {
- state.drafts.push('test');
-
- mutations[types.RECEIVE_DISCARD_REVIEW_SUCCESS](state);
-
- expect(state.drafts).toEqual([]);
- });
-
- it('sets isDiscarding to false', () => {
- state.isDiscarding = true;
-
- mutations[types.RECEIVE_DISCARD_REVIEW_SUCCESS](state);
-
- expect(state.isDiscarding).toBe(false);
- });
- });
-
- describe(types.RECEIVE_DISCARD_REVIEW_ERROR, () => {
- it('updates isDiscarding to false', () => {
- state.isDiscarding = true;
-
- mutations[types.RECEIVE_DISCARD_REVIEW_ERROR](state);
-
- expect(state.isDiscarding).toBe(false);
- });
- });
-
describe(types.RECEIVE_DRAFT_UPDATE_SUCCESS, () => {
it('updates draft in store', () => {
state.drafts.push({ id: 1 });
@@ -140,20 +104,4 @@ describe('Batch comments mutations', () => {
]);
});
});
-
- describe(types.OPEN_REVIEW_DROPDOWN, () => {
- it('sets showPreviewDropdown to true', () => {
- mutations[types.OPEN_REVIEW_DROPDOWN](state);
-
- expect(state.showPreviewDropdown).toBe(true);
- });
- });
-
- describe(types.CLOSE_REVIEW_DROPDOWN, () => {
- it('sets showPreviewDropdown to false', () => {
- mutations[types.CLOSE_REVIEW_DROPDOWN](state);
-
- expect(state.showPreviewDropdown).toBe(false);
- });
- });
});
diff --git a/spec/frontend/behaviors/load_startup_css_spec.js b/spec/frontend/behaviors/load_startup_css_spec.js
new file mode 100644
index 00000000000..81222ac5aaa
--- /dev/null
+++ b/spec/frontend/behaviors/load_startup_css_spec.js
@@ -0,0 +1,44 @@
+import { setHTMLFixture } from 'helpers/fixtures';
+import { loadStartupCSS } from '~/behaviors/load_startup_css';
+
+describe('behaviors/load_startup_css', () => {
+ let loadListener;
+
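+ // Record every <link> element that receives a 'load' event so the test can assert which ones fire.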
+ const setupListeners = () => {
+ document
+ .querySelectorAll('link')
+ .forEach(x => x.addEventListener('load', () => loadListener(x)));
+ };
+
+ beforeEach(() => {
+ loadListener = jest.fn();
+
+ setHTMLFixture(`
+ <meta charset="utf-8" />
+ <link media="print" src="./lorem-print.css" />
+ <link media="print" src="./ipsum-print.css" />
+ <link media="all" src="./dolar-all.css" />
+ `);
+
+ setupListeners();
+
+ loadStartupCSS();
+ });
+
+ it('does nothing at first', () => {
+ expect(loadListener).not.toHaveBeenCalled();
+ });
+
+ describe('on window load', () => {
+ beforeEach(() => {
+ window.dispatchEvent(new Event('load'));
+ });
+
+ it('dispatches load to the print links', () => {
+ expect(loadListener.mock.calls.map(([el]) => el.getAttribute('src'))).toEqual([
+ './lorem-print.css',
+ './ipsum-print.css',
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/behaviors/shortcuts/keybindings_spec.js b/spec/frontend/behaviors/shortcuts/keybindings_spec.js
new file mode 100644
index 00000000000..23fea79f828
--- /dev/null
+++ b/spec/frontend/behaviors/shortcuts/keybindings_spec.js
@@ -0,0 +1,66 @@
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+
+describe('~/behaviors/shortcuts/keybindings.js', () => {
+ let keysFor;
+ let TOGGLE_PERFORMANCE_BAR;
+ let LOCAL_STORAGE_KEY;
+
+ beforeAll(() => {
+ useLocalStorageSpy();
+ });
+
+ const setupCustomizations = async customizationsAsString => {
+ localStorage.clear();
+
+ if (customizationsAsString) {
+ localStorage.setItem(LOCAL_STORAGE_KEY, customizationsAsString);
+ }
+
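+ // Reset the module registry and re-import keybindings.js so it re-reads
+ // the customizations from localStorage.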
+ jest.resetModules();
+ ({ keysFor, TOGGLE_PERFORMANCE_BAR, LOCAL_STORAGE_KEY } = await import(
+ '~/behaviors/shortcuts/keybindings'
+ ));
+ };
+
+ describe('when a command has not been customized', () => {
+ beforeEach(async () => {
+ await setupCustomizations('{}');
+ });
+
+ it('returns the default keybinding for the command', () => {
+ expect(keysFor(TOGGLE_PERFORMANCE_BAR)).toEqual(['p b']);
+ });
+ });
+
+ describe('when a command has been customized', () => {
+ const customization = ['p b a r'];
+
+ beforeEach(async () => {
+ await setupCustomizations(JSON.stringify({ [TOGGLE_PERFORMANCE_BAR]: customization }));
+ });
+
+ it('returns the customized keybinding for the command', () => {
+ expect(keysFor(TOGGLE_PERFORMANCE_BAR)).toEqual(customization);
+ });
+ });
+
+ describe("when the localStorage entry isn't valid JSON", () => {
+ beforeEach(async () => {
+ await setupCustomizations('{');
+ });
+
+ it('returns the default keybinding for the command', () => {
+ expect(keysFor(TOGGLE_PERFORMANCE_BAR)).toEqual(['p b']);
+ });
+ });
+
+ describe('when localStorage does not contain a customizations entry', () => {
+ beforeEach(async () => {
+ await setupCustomizations();
+ });
+
+ it('returns the default keybinding for the command', () => {
+ expect(keysFor(TOGGLE_PERFORMANCE_BAR)).toEqual(['p b']);
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
index 0f5b3cd3f5e..53815820bbe 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
@@ -27,8 +27,10 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
</small>
<clipboard-button-stub
+ category="tertiary"
cssclass="btn-clipboard btn-transparent lh-100 position-static"
gfm="\`foo/bar/dummy.md\`"
+ size="medium"
text="foo/bar/dummy.md"
title="Copy file path"
tooltipplacement="top"
diff --git a/spec/frontend/blob/pipeline_tour_success_modal_spec.js b/spec/frontend/blob/pipeline_tour_success_modal_spec.js
index 50db1675e13..a02c968c4b5 100644
--- a/spec/frontend/blob/pipeline_tour_success_modal_spec.js
+++ b/spec/frontend/blob/pipeline_tour_success_modal_spec.js
@@ -16,6 +16,7 @@ describe('PipelineTourSuccessModal', () => {
stubs: {
GlModal,
GlSprintf,
+ 'gl-emoji': '<img/>',
},
});
};
@@ -66,9 +67,11 @@ describe('PipelineTourSuccessModal', () => {
it('has expected structure', () => {
const modal = wrapper.find(GlModal);
const sprintf = modal.find(GlSprintf);
+ const emoji = modal.find('img');
- expect(modal.attributes('title')).toContain("That's it, well done!");
+ expect(wrapper.text()).toContain("That's it, well done!");
expect(sprintf.exists()).toBe(true);
+ expect(emoji.exists()).toBe(true);
});
it('renders the link for codeQualityLink', () => {
diff --git a/spec/frontend/blob/sketch/index_spec.js b/spec/frontend/blob/sketch/index_spec.js
index f5e9da21b2a..cd12d5e17a8 100644
--- a/spec/frontend/blob/sketch/index_spec.js
+++ b/spec/frontend/blob/sketch/index_spec.js
@@ -8,13 +8,6 @@ describe('Sketch viewer', () => {
beforeEach(() => {
loadFixtures('static/sketch_viewer.html');
- window.URL = {
- createObjectURL: jest.fn(() => 'http://foo/bar'),
- };
- });
-
- afterEach(() => {
- window.URL = {};
});
describe('with error message', () => {
diff --git a/spec/frontend/blob/suggest_web_ide_ci/web_ide_alert_spec.js b/spec/frontend/blob/suggest_web_ide_ci/web_ide_alert_spec.js
deleted file mode 100644
index 8dc71f99010..00000000000
--- a/spec/frontend/blob/suggest_web_ide_ci/web_ide_alert_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'helpers/wait_for_promises';
-import { shallowMount } from '@vue/test-utils';
-import { GlButton, GlAlert } from '@gitlab/ui';
-import axios from '~/lib/utils/axios_utils';
-import WebIdeAlert from '~/blob/suggest_web_ide_ci/components/web_ide_alert.vue';
-
-const dismissEndpoint = '/-/user_callouts';
-const featureId = 'web_ide_alert_dismissed';
-const editPath = 'edit/master/-/.gitlab-ci.yml';
-
-describe('WebIdeAlert', () => {
- let wrapper;
- let mock;
-
- const findButton = () => wrapper.find(GlButton);
- const findAlert = () => wrapper.find(GlAlert);
- const dismissAlert = alertWrapper => alertWrapper.vm.$emit('dismiss');
- const getPostPayload = () => JSON.parse(mock.history.post[0].data);
-
- const createComponent = () => {
- wrapper = shallowMount(WebIdeAlert, {
- propsData: {
- dismissEndpoint,
- featureId,
- editPath,
- },
- });
- };
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
-
- mock.onPost(dismissEndpoint).reply(200);
-
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
-
- mock.restore();
- });
-
- describe('with defaults', () => {
- it('displays alert correctly', () => {
- expect(findAlert().exists()).toBe(true);
- });
-
- it('web ide button link has correct path', () => {
- expect(findButton().attributes('href')).toBe(editPath);
- });
-
- it('dismisses alert correctly', async () => {
- const alertWrapper = findAlert();
-
- dismissAlert(alertWrapper);
-
- await waitForPromises();
-
- expect(alertWrapper.exists()).toBe(false);
- expect(mock.history.post).toHaveLength(1);
- expect(getPostPayload()).toEqual({ feature_name: featureId });
- });
- });
-});
diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js
index 97ac42a10bf..69ec22b1f94 100644
--- a/spec/frontend/blob/viewer/index_spec.js
+++ b/spec/frontend/blob/viewer/index_spec.js
@@ -2,6 +2,7 @@
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
+import { setTestTimeout } from 'helpers/timeout';
import BlobViewer from '~/blob/viewer/index';
import axios from '~/lib/utils/axios_utils';
@@ -13,26 +14,22 @@ describe('Blob viewer', () => {
tooltip: jest.fn(),
};
- preloadFixtures('snippets/show.html');
+ setTestTimeout(2000);
+
+ preloadFixtures('blob/show_readme.html');
beforeEach(() => {
$.fn.extend(jQueryMock);
mock = new MockAdapter(axios);
- loadFixtures('snippets/show.html');
+ loadFixtures('blob/show_readme.html');
$('#modal-upload-blob').remove();
- blob = new BlobViewer();
-
- mock.onGet('http://test.host/-/snippets/1.json?viewer=rich').reply(200, {
- html: '<div>testing</div>',
- });
-
- mock.onGet('http://test.host/-/snippets/1.json?viewer=simple').reply(200, {
+ mock.onGet(/blob\/master\/README\.md/).reply(200, {
html: '<div>testing</div>',
});
- jest.spyOn(axios, 'get');
+ blob = new BlobViewer();
});
afterEach(() => {
@@ -71,12 +68,11 @@ describe('Blob viewer', () => {
});
it('doesnt reload file if already loaded', () => {
- const asyncClick = () =>
- new Promise(resolve => {
- document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
+ const asyncClick = async () => {
+ document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
- setImmediate(resolve);
- });
+ await axios.waitForAll();
+ };
return asyncClick()
.then(() => asyncClick())
@@ -163,17 +159,30 @@ describe('Blob viewer', () => {
expect(simpleBtn.blur).toHaveBeenCalled();
});
- it('sends AJAX request when switching to simple view', () => {
- blob.switchToViewer('simple');
-
- expect(axios.get).toHaveBeenCalled();
+ it('makes request for initial view', () => {
+ expect(mock.history).toMatchObject({
+ get: [{ url: expect.stringMatching(/README\.md\?.*viewer=rich/) }],
+ });
});
- it('does not send AJAX request when switching to rich view', () => {
- blob.switchToViewer('simple');
- blob.switchToViewer('rich');
+ describe.each`
+ views
+ ${['simple']}
+ ${['simple', 'rich']}
+ `('when view switches to $views', ({ views }) => {
+ beforeEach(async () => {
+ views.forEach(view => blob.switchToViewer(view));
+ await axios.waitForAll();
+ });
- expect(axios.get.mock.calls.length).toBe(1);
+ it('sends 1 AJAX request for new view', async () => {
+ expect(mock.history).toMatchObject({
+ get: [
+ { url: expect.stringMatching(/README\.md\?.*viewer=rich/) },
+ { url: expect.stringMatching(/README\.md\?.*viewer=simple/) },
+ ],
+ });
+ });
});
});
});
diff --git a/spec/frontend/blob_edit/blob_bundle_spec.js b/spec/frontend/blob_edit/blob_bundle_spec.js
index a105b62586b..eecc54be35b 100644
--- a/spec/frontend/blob_edit/blob_bundle_spec.js
+++ b/spec/frontend/blob_edit/blob_bundle_spec.js
@@ -1,10 +1,25 @@
import $ from 'jquery';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import blobBundle from '~/blob_edit/blob_bundle';
+import EditorLite from '~/blob_edit/edit_blob';
+
jest.mock('~/blob_edit/edit_blob');
describe('BlobBundle', () => {
+ it('does not load EditorLite by default', () => {
+ blobBundle();
+ expect(EditorLite).not.toHaveBeenCalled();
+ });
+
+ it('loads EditorLite for the edit screen', async () => {
+ setFixtures(`<div class="js-edit-blob-form"></div>`);
+ blobBundle();
+ await waitForPromises();
+ expect(EditorLite).toHaveBeenCalled();
+ });
+
describe('No Suggest Popover', () => {
beforeEach(() => {
setFixtures(`
diff --git a/spec/frontend/blob_edit/edit_blob_spec.js b/spec/frontend/blob_edit/edit_blob_spec.js
index 8f92e8498b9..ac8b916e448 100644
--- a/spec/frontend/blob_edit/edit_blob_spec.js
+++ b/spec/frontend/blob_edit/edit_blob_spec.js
@@ -1,3 +1,4 @@
+import waitForPromises from 'helpers/wait_for_promises';
import EditBlob from '~/blob_edit/edit_blob';
import EditorLite from '~/editor/editor_lite';
import MarkdownExtension from '~/editor/editor_markdown_ext';
@@ -7,7 +8,12 @@ jest.mock('~/editor/editor_lite');
jest.mock('~/editor/editor_markdown_ext');
describe('Blob Editing', () => {
- const mockInstance = 'foo';
+ const useMock = jest.fn();
+ const mockInstance = {
+ use: useMock,
+ getValue: jest.fn(),
+ focus: jest.fn(),
+ };
beforeEach(() => {
setFixtures(
`<div class="js-edit-blob-form"><div id="file_path"></div><div id="editor"></div><input id="file-content"></div>`,
@@ -15,36 +21,33 @@ describe('Blob Editing', () => {
jest.spyOn(EditorLite.prototype, 'createInstance').mockReturnValue(mockInstance);
});
- const initEditor = (isMarkdown = false) => {
+ const editorInst = isMarkdown => {
return new EditBlob({
isMarkdown,
- monacoEnabled: true,
});
};
+ const initEditor = async (isMarkdown = false) => {
+ editorInst(isMarkdown);
+ await waitForPromises();
+ };
+
it('loads FileTemplateExtension by default', async () => {
await initEditor();
- expect(EditorLite.prototype.use).toHaveBeenCalledWith(
- expect.arrayContaining([FileTemplateExtension]),
- mockInstance,
- );
+ expect(useMock).toHaveBeenCalledWith(FileTemplateExtension);
});
describe('Markdown', () => {
it('does not load MarkdownExtension by default', async () => {
await initEditor();
- expect(EditorLite.prototype.use).not.toHaveBeenCalledWith(
- expect.arrayContaining([MarkdownExtension]),
- mockInstance,
- );
+ expect(useMock).not.toHaveBeenCalledWith(MarkdownExtension);
});
it('loads MarkdownExtension only for the markdown files', async () => {
await initEditor(true);
- expect(EditorLite.prototype.use).toHaveBeenCalledWith(
- [MarkdownExtension, FileTemplateExtension],
- mockInstance,
- );
+ expect(useMock).toHaveBeenCalledTimes(2);
+ expect(useMock).toHaveBeenNthCalledWith(1, FileTemplateExtension);
+ expect(useMock).toHaveBeenNthCalledWith(2, MarkdownExtension);
});
});
});
diff --git a/spec/frontend/boards/board_blank_state_spec.js b/spec/frontend/boards/board_blank_state_spec.js
deleted file mode 100644
index 3ffdda52f58..00000000000
--- a/spec/frontend/boards/board_blank_state_spec.js
+++ /dev/null
@@ -1,95 +0,0 @@
-import Vue from 'vue';
-import boardsStore from '~/boards/stores/boards_store';
-import BoardBlankState from '~/boards/components/board_blank_state.vue';
-
-describe('Boards blank state', () => {
- let vm;
- let fail = false;
-
- beforeEach(done => {
- const Comp = Vue.extend(BoardBlankState);
-
- boardsStore.create();
-
- jest.spyOn(boardsStore, 'addList').mockImplementation();
- jest.spyOn(boardsStore, 'removeList').mockImplementation();
- jest.spyOn(boardsStore, 'generateDefaultLists').mockImplementation(
- () =>
- new Promise((resolve, reject) => {
- if (fail) {
- reject();
- } else {
- resolve({
- data: [
- {
- id: 1,
- title: 'To Do',
- label: { id: 1 },
- },
- {
- id: 2,
- title: 'Doing',
- label: { id: 2 },
- },
- ],
- });
- }
- }),
- );
-
- vm = new Comp();
-
- setImmediate(() => {
- vm.$mount();
- done();
- });
- });
-
- it('renders pre-defined labels', () => {
- expect(vm.$el.querySelectorAll('.board-blank-state-list li').length).toBe(2);
-
- expect(vm.$el.querySelectorAll('.board-blank-state-list li')[0].textContent.trim()).toEqual(
- 'To Do',
- );
-
- expect(vm.$el.querySelectorAll('.board-blank-state-list li')[1].textContent.trim()).toEqual(
- 'Doing',
- );
- });
-
- it('clears blank state', done => {
- vm.$el.querySelector('.btn-default').click();
-
- setImmediate(() => {
- expect(boardsStore.welcomeIsHidden()).toBeTruthy();
-
- done();
- });
- });
-
- it('creates pre-defined labels', done => {
- vm.$el.querySelector('.btn-success').click();
-
- setImmediate(() => {
- expect(boardsStore.addList).toHaveBeenCalledTimes(2);
- expect(boardsStore.addList).toHaveBeenCalledWith(expect.objectContaining({ title: 'To Do' }));
-
- expect(boardsStore.addList).toHaveBeenCalledWith(expect.objectContaining({ title: 'Doing' }));
-
- done();
- });
- });
-
- it('resets the store if request fails', done => {
- fail = true;
-
- vm.$el.querySelector('.btn-success').click();
-
- setImmediate(() => {
- expect(boardsStore.welcomeIsHidden()).toBeFalsy();
- expect(boardsStore.removeList).toHaveBeenCalledWith(undefined, 'label');
-
- done();
- });
- });
-});
diff --git a/spec/frontend/boards/board_list_new_spec.js b/spec/frontend/boards/board_list_new_spec.js
new file mode 100644
index 00000000000..163611c2197
--- /dev/null
+++ b/spec/frontend/boards/board_list_new_spec.js
@@ -0,0 +1,234 @@
+/* global List */
+/* global ListIssue */
+
+import Vuex from 'vuex';
+import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
+import { createLocalVue, mount } from '@vue/test-utils';
+import eventHub from '~/boards/eventhub';
+import BoardList from '~/boards/components/board_list_new.vue';
+import BoardCard from '~/boards/components/board_card.vue';
+import '~/boards/models/issue';
+import '~/boards/models/list';
+import { listObj, mockIssuesByListId, issues } from './mock_data';
+import defaultState from '~/boards/stores/state';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const actions = {
+ fetchIssuesForList: jest.fn(),
+};
+
+const createStore = (state = defaultState) => {
+ return new Vuex.Store({
+ state,
+ actions,
+ });
+};
+
+const createComponent = ({
+ listIssueProps = {},
+ componentProps = {},
+ listProps = {},
+ state = {},
+} = {}) => {
+ const store = createStore({
+ issuesByListId: mockIssuesByListId,
+ issues,
+ pageInfoByListId: {
+ 'gid://gitlab/List/1': { hasNextPage: true },
+ 'gid://gitlab/List/2': {},
+ },
+ listsFlags: {
+ 'gid://gitlab/List/1': {},
+ 'gid://gitlab/List/2': {},
+ },
+ ...state,
+ });
+
+ const list = new List({
+ ...listObj,
+ id: 'gid://gitlab/List/1',
+ ...listProps,
+ doNotFetchIssues: true,
+ });
+ const issue = new ListIssue({
+ title: 'Testing',
+ id: 1,
+ iid: 1,
+ confidential: false,
+ labels: [],
+ assignees: [],
+ ...listIssueProps,
+ });
+ if (!Object.prototype.hasOwnProperty.call(listProps, 'issuesSize')) {
+ list.issuesSize = 1;
+ }
+
+ const component = mount(BoardList, {
+ localVue,
+ propsData: {
+ disabled: false,
+ list,
+ issues: [issue],
+ ...componentProps,
+ },
+ store,
+ provide: {
+ groupId: null,
+ rootPath: '/',
+ },
+ });
+
+ return component;
+};
+
+describe('Board list component', () => {
+ let wrapper;
+ useFakeRequestAnimationFrame();
+
+ describe('When Expanded', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders component', () => {
+ expect(wrapper.find('.board-list-component').exists()).toBe(true);
+ });
+
+ it('renders loading icon', () => {
+ wrapper = createComponent({
+ state: { listsFlags: { 'gid://gitlab/List/1': { isLoading: true } } },
+ });
+
+      expect(wrapper.find('[data-testid="board_list_loading"]').exists()).toBe(true);
+ });
+
+ it('renders issues', () => {
+ expect(wrapper.findAll(BoardCard).length).toBe(1);
+ });
+
+ it('sets data attribute with issue id', () => {
+ expect(wrapper.find('.board-card').attributes('data-issue-id')).toBe('1');
+ });
+
+ it('shows new issue form', async () => {
+ wrapper.vm.toggleForm();
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-new-issue-form').exists()).toBe(true);
+ });
+
+ it('shows new issue form after eventhub event', async () => {
+ eventHub.$emit(`toggle-issue-form-${wrapper.vm.list.id}`);
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-new-issue-form').exists()).toBe(true);
+ });
+
+ it('does not show new issue form for closed list', () => {
+ wrapper.setProps({ list: { type: 'closed' } });
+ wrapper.vm.toggleForm();
+
+ expect(wrapper.find('.board-new-issue-form').exists()).toBe(false);
+ });
+
+ it('shows count list item', async () => {
+ wrapper.vm.showCount = true;
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-list-count').exists()).toBe(true);
+
+ expect(wrapper.find('.board-list-count').text()).toBe('Showing all issues');
+ });
+
+ it('sets data attribute with invalid id', async () => {
+ wrapper.vm.showCount = true;
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-list-count').attributes('data-issue-id')).toBe('-1');
+ });
+
+ it('shows how many more issues to load', async () => {
+ wrapper.vm.showCount = true;
+ wrapper.setProps({ list: { issuesSize: 20 } });
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-list-count').text()).toBe('Showing 1 of 20 issues');
+ });
+ });
+
+ describe('load more issues', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ listProps: { issuesSize: 25 },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('loads more issues after scrolling', () => {
+ wrapper.vm.$refs.list.dispatchEvent(new Event('scroll'));
+
+ expect(actions.fetchIssuesForList).toHaveBeenCalled();
+ });
+
+ it('does not load issues if already loading', () => {
+ wrapper.vm.$refs.list.dispatchEvent(new Event('scroll'));
+ wrapper.vm.$refs.list.dispatchEvent(new Event('scroll'));
+
+ expect(actions.fetchIssuesForList).toHaveBeenCalledTimes(1);
+ });
+
+ it('shows loading more spinner', async () => {
+ wrapper.vm.showCount = true;
+ wrapper.vm.list.loadingMore = true;
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.board-list-count .gl-spinner').exists()).toBe(true);
+ });
+ });
+
+ describe('max issue count warning', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ listProps: { issuesSize: 50 },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when issue count exceeds max issue count', () => {
+ it('sets background to bg-danger-100', async () => {
+ wrapper.setProps({ list: { issuesSize: 4, maxIssueCount: 3 } });
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.bg-danger-100').exists()).toBe(true);
+ });
+ });
+
+ describe('when list issue count does NOT exceed list max issue count', () => {
+      it('does not set background to bg-danger-100', () => {
+ wrapper.setProps({ list: { issuesSize: 2, maxIssueCount: 3 } });
+
+ expect(wrapper.find('.bg-danger-100').exists()).toBe(false);
+ });
+ });
+
+ describe('when list max issue count is 0', () => {
+      it('does not set background to bg-danger-100', () => {
+ wrapper.setProps({ list: { maxIssueCount: 0 } });
+
+ expect(wrapper.find('.bg-danger-100').exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index 88883ae61d4..0fe3c88f518 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -44,7 +44,6 @@ const createComponent = ({ done, listIssueProps = {}, componentProps = {}, listP
disabled: false,
list,
issues: list.issues,
- loading: false,
...componentProps,
},
provide: {
@@ -94,7 +93,7 @@ describe('Board list component', () => {
});
it('renders loading icon', () => {
- component.loading = true;
+ component.list.loading = true;
return Vue.nextTick().then(() => {
expect(component.$el.querySelector('.board-list-loading')).not.toBeNull();
diff --git a/spec/frontend/boards/boards_store_spec.js b/spec/frontend/boards/boards_store_spec.js
index 41971137b95..e7c1cf79fdc 100644
--- a/spec/frontend/boards/boards_store_spec.js
+++ b/spec/frontend/boards/boards_store_spec.js
@@ -1,7 +1,7 @@
import AxiosMockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
-import boardsStore from '~/boards/stores/boards_store';
+import boardsStore, { gqlClient } from '~/boards/stores/boards_store';
import eventHub from '~/boards/eventhub';
import { listObj, listObjDuplicate } from './mock_data';
@@ -503,11 +503,15 @@ describe('boardsStore', () => {
beforeEach(() => {
requestSpy = jest.fn();
axiosMock.onPut(url).replyOnce(config => requestSpy(config));
+ jest.spyOn(gqlClient, 'mutate').mockReturnValue(Promise.resolve({}));
});
it('makes a request to update the board', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
- const expectedResponse = expect.objectContaining({ data: dummyResponse });
+ const expectedResponse = [
+ expect.objectContaining({ data: dummyResponse }),
+ expect.objectContaining({}),
+ ];
return expect(
boardsStore.createBoard({
@@ -555,11 +559,12 @@ describe('boardsStore', () => {
beforeEach(() => {
requestSpy = jest.fn();
axiosMock.onPost(url).replyOnce(config => requestSpy(config));
+ jest.spyOn(gqlClient, 'mutate').mockReturnValue(Promise.resolve({}));
});
it('makes a request to create a new board', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
- const expectedResponse = expect.objectContaining({ data: dummyResponse });
+ const expectedResponse = dummyResponse;
return expect(boardsStore.createBoard(board))
.resolves.toEqual(expectedResponse)
@@ -740,14 +745,6 @@ describe('boardsStore', () => {
expect(boardsStore.shouldAddBlankState()).toBe(true);
});
- it('adds the blank state', () => {
- boardsStore.addBlankState();
-
- const list = boardsStore.findList('type', 'blank', 'blank');
-
- expect(list).toBeDefined();
- });
-
it('removes list from state', () => {
boardsStore.addList(listObj);
diff --git a/spec/frontend/boards/components/board_configuration_options_spec.js b/spec/frontend/boards/components/board_configuration_options_spec.js
new file mode 100644
index 00000000000..e9a1cb6a4e8
--- /dev/null
+++ b/spec/frontend/boards/components/board_configuration_options_spec.js
@@ -0,0 +1,59 @@
+import { shallowMount } from '@vue/test-utils';
+import BoardConfigurationOptions from '~/boards/components/board_configuration_options.vue';
+
+describe('BoardConfigurationOptions', () => {
+ let wrapper;
+ const board = { hide_backlog_list: false, hide_closed_list: false };
+
+ const defaultProps = {
+ currentBoard: board,
+ board,
+ isNewForm: false,
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMount(BoardConfigurationOptions, {
+ propsData: { ...defaultProps },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const backlogListCheckbox = el => el.find('[data-testid="backlog-list-checkbox"]');
+ const closedListCheckbox = el => el.find('[data-testid="closed-list-checkbox"]');
+
+ const checkboxAssert = (backlogCheckbox, closedCheckbox) => {
+ expect(backlogListCheckbox(wrapper).attributes('checked')).toEqual(
+ backlogCheckbox ? undefined : 'true',
+ );
+ expect(closedListCheckbox(wrapper).attributes('checked')).toEqual(
+ closedCheckbox ? undefined : 'true',
+ );
+ };
+
+ it.each`
+ backlogCheckboxValue | closedCheckboxValue
+ ${true} | ${true}
+ ${true} | ${false}
+ ${false} | ${true}
+ ${false} | ${false}
+ `(
+    'renders two checkboxes when one is $backlogCheckboxValue and the other is $closedCheckboxValue',
+ async ({ backlogCheckboxValue, closedCheckboxValue }) => {
+ await wrapper.setData({
+ hideBacklogList: backlogCheckboxValue,
+ hideClosedList: closedCheckboxValue,
+ });
+
+ return wrapper.vm.$nextTick().then(() => {
+ checkboxAssert(backlogCheckboxValue, closedCheckboxValue);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index df117d06cdf..09e38001e2e 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -23,9 +23,6 @@ describe('BoardContent', () => {
return new Vuex.Store({
getters,
state,
- actions: {
- fetchIssuesForAllLists: () => {},
- },
});
};
diff --git a/spec/frontend/boards/components/sidebar/board_editable_item_spec.js b/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
index 1dbcbd06407..d7df2ff1563 100644
--- a/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
@@ -96,12 +96,34 @@ describe('boards sidebar remove issue', () => {
expect(findExpanded().isVisible()).toBe(false);
});
- it('emits changed event', async () => {
+ it('emits close event', async () => {
document.body.click();
await wrapper.vm.$nextTick();
- expect(wrapper.emitted().changed[1][0]).toBe(false);
+ expect(wrapper.emitted().close.length).toBe(1);
});
});
+
+  it('emits open when edit button is clicked and edit is initialized to false', async () => {
+ createComponent({ canUpdate: true });
+
+ findEditButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted().open.length).toBe(1);
+ });
+
+  it('does not emit events when collapsing with false `emitEvent`', async () => {
+ createComponent({ canUpdate: true });
+
+ findEditButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ wrapper.vm.collapse({ emitEvent: false });
+
+ expect(wrapper.emitted().close).toBeUndefined();
+ });
});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
new file mode 100644
index 00000000000..da000d21f6a
--- /dev/null
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
@@ -0,0 +1,143 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLabel } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
+import { labels as TEST_LABELS, mockIssue as TEST_ISSUE } from 'jest/boards/mock_data';
+import BoardSidebarLabelsSelect from '~/boards/components/sidebar/board_sidebar_labels_select.vue';
+import BoardEditableItem from '~/boards/components/sidebar/board_editable_item.vue';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import { createStore } from '~/boards/stores';
+import createFlash from '~/flash';
+
+jest.mock('~/flash');
+
+const TEST_LABELS_PAYLOAD = TEST_LABELS.map(label => ({ ...label, set: true }));
+const TEST_LABELS_TITLES = TEST_LABELS.map(label => label.title);
+
+describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
+ let wrapper;
+ let store;
+
+ afterEach(() => {
+ wrapper.destroy();
+ store = null;
+ wrapper = null;
+ });
+
+ const createWrapper = ({ labels = [] } = {}) => {
+ store = createStore();
+ store.state.issues = { [TEST_ISSUE.id]: { ...TEST_ISSUE, labels } };
+ store.state.activeId = TEST_ISSUE.id;
+
+ wrapper = shallowMount(BoardSidebarLabelsSelect, {
+ store,
+ provide: {
+ canUpdate: true,
+ labelsFetchPath: TEST_HOST,
+ labelsManagePath: TEST_HOST,
+ labelsFilterBasePath: TEST_HOST,
+ },
+ stubs: {
+ 'board-editable-item': BoardEditableItem,
+ 'labels-select': '<div></div>',
+ },
+ });
+ };
+
+ const findLabelsSelect = () => wrapper.find({ ref: 'labelsSelect' });
+ const findLabelsTitles = () => wrapper.findAll(GlLabel).wrappers.map(item => item.props('title'));
+ const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]');
+
+ it('renders "None" when no labels are selected', () => {
+ createWrapper();
+
+ expect(findCollapsed().text()).toBe('None');
+ });
+
+ it('renders labels when set', () => {
+ createWrapper({ labels: TEST_LABELS });
+
+ expect(findLabelsTitles()).toEqual(TEST_LABELS_TITLES);
+ });
+
+ describe('when labels are submitted', () => {
+ beforeEach(async () => {
+ createWrapper();
+
+ jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => TEST_LABELS);
+ findLabelsSelect().vm.$emit('updateSelectedLabels', TEST_LABELS_PAYLOAD);
+ store.state.issues[TEST_ISSUE.id].labels = TEST_LABELS;
+ await wrapper.vm.$nextTick();
+ });
+
+ it('collapses sidebar and renders labels', () => {
+ expect(findCollapsed().isVisible()).toBe(true);
+ expect(findLabelsTitles()).toEqual(TEST_LABELS_TITLES);
+ });
+
+ it('commits change to the server', () => {
+ expect(wrapper.vm.setActiveIssueLabels).toHaveBeenCalledWith({
+ addLabelIds: TEST_LABELS.map(label => label.id),
+ projectPath: 'gitlab-org/test-subgroup/gitlab-test',
+ removeLabelIds: [],
+ });
+ });
+ });
+
+ describe('when labels are updated over existing labels', () => {
+ const testLabelsPayload = [{ id: 5, set: true }, { id: 7, set: true }];
+ const expectedLabels = [{ id: 5 }, { id: 7 }];
+
+ beforeEach(async () => {
+ createWrapper({ labels: TEST_LABELS });
+
+ jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => expectedLabels);
+ findLabelsSelect().vm.$emit('updateSelectedLabels', testLabelsPayload);
+ await wrapper.vm.$nextTick();
+ });
+
+ it('commits change to the server', () => {
+ expect(wrapper.vm.setActiveIssueLabels).toHaveBeenCalledWith({
+ addLabelIds: [5, 7],
+ removeLabelIds: [6],
+ projectPath: 'gitlab-org/test-subgroup/gitlab-test',
+ });
+ });
+ });
+
+ describe('when removing individual labels', () => {
+ const testLabel = TEST_LABELS[0];
+
+ beforeEach(async () => {
+ createWrapper({ labels: [testLabel] });
+
+ jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => {});
+ });
+
+ it('commits change to the server', () => {
+ wrapper.find(GlLabel).vm.$emit('close', testLabel);
+
+ expect(wrapper.vm.setActiveIssueLabels).toHaveBeenCalledWith({
+ removeLabelIds: [getIdFromGraphQLId(testLabel.id)],
+ projectPath: 'gitlab-org/test-subgroup/gitlab-test',
+ });
+ });
+ });
+
+ describe('when the mutation fails', () => {
+ beforeEach(async () => {
+ createWrapper({ labels: TEST_LABELS });
+
+ jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => {
+ throw new Error(['failed mutation']);
+ });
+ findLabelsSelect().vm.$emit('updateSelectedLabels', [{ id: '?' }]);
+ await wrapper.vm.$nextTick();
+ });
+
+    it('collapses sidebar and renders former issue labels', () => {
+ expect(findCollapsed().isVisible()).toBe(true);
+ expect(findLabelsTitles()).toEqual(TEST_LABELS_TITLES);
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 5776332c499..50c0a85fc70 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -108,13 +108,19 @@ const assignees = [
},
];
-const labels = [
+export const labels = [
{
id: 'gid://gitlab/GroupLabel/5',
title: 'Cosync',
color: '#34ebec',
description: null,
},
+ {
+ id: 'gid://gitlab/GroupLabel/6',
+ title: 'Brock',
+ color: '#e082b6',
+ description: null,
+ },
];
export const rawIssue = {
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index bdbcd435708..78e70161121 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -6,12 +6,14 @@ import {
mockIssueWithModel,
mockIssue2WithModel,
rawIssue,
+ mockIssues,
+ labels,
} from '../mock_data';
import actions, { gqlClient } from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
import { inactiveId, ListType } from '~/boards/constants';
import issueMoveListMutation from '~/boards/queries/issue_move_list.mutation.graphql';
-import { fullBoardId } from '~/boards/boards_util';
+import { fullBoardId, formatListIssues, formatBoardLists } from '~/boards/boards_util';
const expectNotImplemented = action => {
it('is not implemented', () => {
@@ -76,6 +78,80 @@ describe('setActiveId', () => {
});
});
+describe('fetchLists', () => {
+ const state = {
+ endpoints: {
+ fullPath: 'gitlab-org',
+ boardId: 1,
+ },
+ filterParams: {},
+ boardType: 'group',
+ };
+
+ let queryResponse = {
+ data: {
+ group: {
+ board: {
+ hideBacklogList: true,
+ lists: {
+ nodes: [mockLists[1]],
+ },
+ },
+ },
+ },
+ };
+
+ const formattedLists = formatBoardLists(queryResponse.data.group.board.lists);
+
+  it('should commit mutation RECEIVE_BOARD_LISTS_SUCCESS on success', done => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ testAction(
+ actions.fetchLists,
+ {},
+ state,
+ [
+ {
+ type: types.RECEIVE_BOARD_LISTS_SUCCESS,
+ payload: formattedLists,
+ },
+ ],
+ [{ type: 'showWelcomeList' }],
+ done,
+ );
+ });
+
+  it('dispatches createList action when backlog list does not exist and is not hidden', done => {
+ queryResponse = {
+ data: {
+ group: {
+ board: {
+ hideBacklogList: false,
+ lists: {
+ nodes: [mockLists[1]],
+ },
+ },
+ },
+ },
+ };
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ testAction(
+ actions.fetchLists,
+ {},
+ state,
+ [
+ {
+ type: types.RECEIVE_BOARD_LISTS_SUCCESS,
+ payload: formattedLists,
+ },
+ ],
+ [{ type: 'createList', payload: { backlog: true } }, { type: 'showWelcomeList' }],
+ done,
+ );
+ });
+});
+
describe('showWelcomeList', () => {
it('should dispatch addList action', done => {
const state = {
@@ -176,16 +252,26 @@ describe('createList', () => {
describe('moveList', () => {
it('should commit MOVE_LIST mutation and dispatch updateList action', done => {
+ const initialBoardListsState = {
+ 'gid://gitlab/List/1': mockListsWithModel[0],
+ 'gid://gitlab/List/2': mockListsWithModel[1],
+ };
+
const state = {
endpoints: { fullPath: 'gitlab-org', boardId: '1' },
boardType: 'group',
disabled: false,
- boardLists: mockListsWithModel,
+ boardLists: initialBoardListsState,
};
testAction(
actions.moveList,
- { listId: 'gid://gitlab/List/1', newIndex: 1, adjustmentValue: 1 },
+ {
+ listId: 'gid://gitlab/List/1',
+ replacedListId: 'gid://gitlab/List/2',
+ newIndex: 1,
+ adjustmentValue: 1,
+ },
state,
[
{
@@ -196,7 +282,11 @@ describe('moveList', () => {
[
{
type: 'updateList',
- payload: { listId: 'gid://gitlab/List/1', position: 0, backupList: mockListsWithModel },
+ payload: {
+ listId: 'gid://gitlab/List/1',
+ position: 0,
+ backupList: initialBoardListsState,
+ },
},
],
done,
@@ -237,6 +327,99 @@ describe('deleteList', () => {
expectNotImplemented(actions.deleteList);
});
+describe('fetchIssuesForList', () => {
+ const listId = mockLists[0].id;
+
+ const state = {
+ endpoints: {
+ fullPath: 'gitlab-org',
+ boardId: 1,
+ },
+ filterParams: {},
+ boardType: 'group',
+ };
+
+ const mockIssuesNodes = mockIssues.map(issue => ({ node: issue }));
+
+ const pageInfo = {
+ endCursor: '',
+ hasNextPage: false,
+ };
+
+ const queryResponse = {
+ data: {
+ group: {
+ board: {
+ lists: {
+ nodes: [
+ {
+ id: listId,
+ issues: {
+ edges: mockIssuesNodes,
+ pageInfo,
+ },
+ },
+ ],
+ },
+ },
+ },
+ },
+ };
+
+ const formattedIssues = formatListIssues(queryResponse.data.group.board.lists);
+
+ const listPageInfo = {
+ [listId]: pageInfo,
+ };
+
+ it('should commit mutations REQUEST_ISSUES_FOR_LIST and RECEIVE_ISSUES_FOR_LIST_SUCCESS on success', done => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ testAction(
+ actions.fetchIssuesForList,
+ { listId },
+ state,
+ [
+ {
+ type: types.REQUEST_ISSUES_FOR_LIST,
+ payload: { listId, fetchNext: false },
+ },
+ {
+ type: types.RECEIVE_ISSUES_FOR_LIST_SUCCESS,
+ payload: { listIssues: formattedIssues, listPageInfo, listId },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('should commit mutations REQUEST_ISSUES_FOR_LIST and RECEIVE_ISSUES_FOR_LIST_FAILURE on failure', done => {
+    jest.spyOn(gqlClient, 'query').mockRejectedValue();
+
+ testAction(
+ actions.fetchIssuesForList,
+ { listId },
+ state,
+ [
+ {
+ type: types.REQUEST_ISSUES_FOR_LIST,
+ payload: { listId, fetchNext: false },
+ },
+ { type: types.RECEIVE_ISSUES_FOR_LIST_FAILURE, payload: listId },
+ ],
+ [],
+ done,
+ );
+ });
+});
+
+describe('resetIssues', () => {
+ it('commits RESET_ISSUES mutation', () => {
+ return testAction(actions.resetIssues, {}, {}, [{ type: types.RESET_ISSUES }], []);
+ });
+});
+
describe('moveIssue', () => {
const listIssues = {
'gid://gitlab/List/1': [436, 437],
@@ -418,6 +601,51 @@ describe('addListIssueFailure', () => {
});
});
+describe('setActiveIssueLabels', () => {
+ const state = { issues: { [mockIssue.id]: mockIssue } };
+ const getters = { getActiveIssue: mockIssue };
+ const testLabelIds = labels.map(label => label.id);
+ const input = {
+ addLabelIds: testLabelIds,
+ removeLabelIds: [],
+ projectPath: 'h/b',
+ };
+
+ it('should assign labels on success', done => {
+ jest
+ .spyOn(gqlClient, 'mutate')
+ .mockResolvedValue({ data: { updateIssue: { issue: { labels: { nodes: labels } } } } });
+
+ const payload = {
+ issueId: getters.getActiveIssue.id,
+ prop: 'labels',
+ value: labels,
+ };
+
+ testAction(
+ actions.setActiveIssueLabels,
+ input,
+ { ...state, ...getters },
+ [
+ {
+ type: types.UPDATE_ISSUE_BY_ID,
+ payload,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('throws error if fails', async () => {
+ jest
+ .spyOn(gqlClient, 'mutate')
+ .mockResolvedValue({ data: { updateIssue: { errors: ['failed mutation'] } } });
+
+ await expect(actions.setActiveIssueLabels({ getters }, input)).rejects.toThrow(Error);
+ });
+});
+
describe('fetchBacklog', () => {
expectNotImplemented(actions.fetchBacklog);
});
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
index 288143a0f21..b987080abab 100644
--- a/spec/frontend/boards/stores/getters_spec.js
+++ b/spec/frontend/boards/stores/getters_spec.js
@@ -1,6 +1,13 @@
import getters from '~/boards/stores/getters';
import { inactiveId } from '~/boards/constants';
-import { mockIssue, mockIssue2, mockIssues, mockIssuesByListId, issues } from '../mock_data';
+import {
+ mockIssue,
+ mockIssue2,
+ mockIssues,
+ mockIssuesByListId,
+ issues,
+ mockListsWithModel,
+} from '../mock_data';
describe('Boards - Getters', () => {
describe('getLabelToggleState', () => {
@@ -130,4 +137,25 @@ describe('Boards - Getters', () => {
);
});
});
+
+ const boardsState = {
+ boardLists: {
+ 'gid://gitlab/List/1': mockListsWithModel[0],
+ 'gid://gitlab/List/2': mockListsWithModel[1],
+ },
+ };
+
+ describe('getListByLabelId', () => {
+ it('returns list for a given label id', () => {
+ expect(getters.getListByLabelId(boardsState)('gid://gitlab/GroupLabel/121')).toEqual(
+ mockListsWithModel[1],
+ );
+ });
+ });
+
+ describe('getListByTitle', () => {
+ it('returns list for a given list title', () => {
+ expect(getters.getListByTitle(boardsState)('To Do')).toEqual(mockListsWithModel[1]);
+ });
+ });
});
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index a13a99a507e..6e53f184bb3 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -2,8 +2,6 @@ import mutations from '~/boards/stores/mutations';
import * as types from '~/boards/stores/mutation_types';
import defaultState from '~/boards/stores/state';
import {
- listObj,
- listObjDuplicate,
mockListsWithModel,
mockLists,
rawIssue,
@@ -22,6 +20,11 @@ const expectNotImplemented = action => {
describe('Board Store Mutations', () => {
let state;
+ const initialBoardListsState = {
+ 'gid://gitlab/List/1': mockListsWithModel[0],
+ 'gid://gitlab/List/2': mockListsWithModel[1],
+ };
+
beforeEach(() => {
state = defaultState();
});
@@ -56,11 +59,19 @@ describe('Board Store Mutations', () => {
describe('RECEIVE_BOARD_LISTS_SUCCESS', () => {
it('Should set boardLists to state', () => {
- const lists = [listObj, listObjDuplicate];
+ mutations[types.RECEIVE_BOARD_LISTS_SUCCESS](state, initialBoardListsState);
+
+ expect(state.boardLists).toEqual(initialBoardListsState);
+ });
+ });
- mutations[types.RECEIVE_BOARD_LISTS_SUCCESS](state, lists);
+ describe('RECEIVE_BOARD_LISTS_FAILURE', () => {
+ it('Should set error in state', () => {
+ mutations[types.RECEIVE_BOARD_LISTS_FAILURE](state);
- expect(state.boardLists).toEqual(lists);
+ expect(state.error).toEqual(
+ 'An error occurred while fetching the board lists. Please reload the page.',
+ );
});
});
@@ -95,7 +106,13 @@ describe('Board Store Mutations', () => {
});
describe('RECEIVE_ADD_LIST_SUCCESS', () => {
- expectNotImplemented(mutations.RECEIVE_ADD_LIST_SUCCESS);
+ it('adds list to boardLists state', () => {
+ mutations.RECEIVE_ADD_LIST_SUCCESS(state, mockListsWithModel[0]);
+
+ expect(state.boardLists).toEqual({
+ [mockListsWithModel[0].id]: mockListsWithModel[0],
+ });
+ });
});
describe('RECEIVE_ADD_LIST_ERROR', () => {
@@ -106,7 +123,7 @@ describe('Board Store Mutations', () => {
it('updates boardLists state with reordered lists', () => {
state = {
...state,
- boardLists: mockListsWithModel,
+ boardLists: initialBoardListsState,
};
mutations.MOVE_LIST(state, {
@@ -114,7 +131,10 @@ describe('Board Store Mutations', () => {
listAtNewIndex: mockListsWithModel[1],
});
- expect(state.boardLists).toEqual([mockListsWithModel[1], mockListsWithModel[0]]);
+ expect(state.boardLists).toEqual({
+ 'gid://gitlab/List/2': mockListsWithModel[1],
+ 'gid://gitlab/List/1': mockListsWithModel[0],
+ });
});
});
@@ -122,13 +142,16 @@ describe('Board Store Mutations', () => {
it('updates boardLists state with previous order and sets error message', () => {
state = {
...state,
- boardLists: [mockListsWithModel[1], mockListsWithModel[0]],
+ boardLists: {
+ 'gid://gitlab/List/2': mockListsWithModel[1],
+ 'gid://gitlab/List/1': mockListsWithModel[0],
+ },
error: undefined,
};
- mutations.UPDATE_LIST_FAILURE(state, mockListsWithModel);
+ mutations.UPDATE_LIST_FAILURE(state, initialBoardListsState);
- expect(state.boardLists).toEqual(mockListsWithModel);
+ expect(state.boardLists).toEqual(initialBoardListsState);
expect(state.error).toEqual('An error occurred while updating the list. Please try again.');
});
});
@@ -145,6 +168,23 @@ describe('Board Store Mutations', () => {
expectNotImplemented(mutations.RECEIVE_REMOVE_LIST_ERROR);
});
+ describe('RESET_ISSUES', () => {
+ it('should remove issues from issuesByListId state', () => {
+ const issuesByListId = {
+ 'gid://gitlab/List/1': [mockIssue.id],
+ };
+
+ state = {
+ ...state,
+ issuesByListId,
+ };
+
+ mutations[types.RESET_ISSUES](state);
+
+ expect(state.issuesByListId).toEqual({ 'gid://gitlab/List/1': [] });
+ });
+ });
+
describe('RECEIVE_ISSUES_FOR_LIST_SUCCESS', () => {
it('updates issuesByListId and issues on state', () => {
const listIssues = {
@@ -156,14 +196,23 @@ describe('Board Store Mutations', () => {
state = {
...state,
- isLoadingIssues: true,
- issuesByListId: {},
+ issuesByListId: {
+ 'gid://gitlab/List/1': [],
+ },
issues: {},
- boardLists: mockListsWithModel,
+ boardLists: initialBoardListsState,
+ };
+
+ const listPageInfo = {
+ 'gid://gitlab/List/1': {
+ endCursor: '',
+ hasNextPage: false,
+ },
};
mutations.RECEIVE_ISSUES_FOR_LIST_SUCCESS(state, {
listIssues: { listData: listIssues, issues },
+ listPageInfo,
listId: 'gid://gitlab/List/1',
});
@@ -172,21 +221,11 @@ describe('Board Store Mutations', () => {
});
});
- describe('REQUEST_ISSUES_FOR_ALL_LISTS', () => {
- it('sets isLoadingIssues to true', () => {
- expect(state.isLoadingIssues).toBe(false);
-
- mutations.REQUEST_ISSUES_FOR_ALL_LISTS(state);
-
- expect(state.isLoadingIssues).toBe(true);
- });
- });
-
describe('RECEIVE_ISSUES_FOR_LIST_FAILURE', () => {
it('sets error message', () => {
state = {
...state,
- boardLists: mockListsWithModel,
+ boardLists: initialBoardListsState,
error: undefined,
};
@@ -200,51 +239,10 @@ describe('Board Store Mutations', () => {
});
});
- describe('RECEIVE_ISSUES_FOR_ALL_LISTS_SUCCESS', () => {
- it('sets isLoadingIssues to false and updates issuesByListId object', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [mockIssue.id],
- };
- const issues = {
- '1': mockIssue,
- };
-
- state = {
- ...state,
- isLoadingIssues: true,
- issuesByListId: {},
- issues: {},
- };
-
- mutations.RECEIVE_ISSUES_FOR_ALL_LISTS_SUCCESS(state, { listData: listIssues, issues });
-
- expect(state.isLoadingIssues).toBe(false);
- expect(state.issuesByListId).toEqual(listIssues);
- expect(state.issues).toEqual(issues);
- });
- });
-
describe('REQUEST_ADD_ISSUE', () => {
expectNotImplemented(mutations.REQUEST_ADD_ISSUE);
});
- describe('RECEIVE_ISSUES_FOR_ALL_LISTS_FAILURE', () => {
- it('sets isLoadingIssues to false and sets error message', () => {
- state = {
- ...state,
- isLoadingIssues: true,
- error: undefined,
- };
-
- mutations.RECEIVE_ISSUES_FOR_ALL_LISTS_FAILURE(state);
-
- expect(state.isLoadingIssues).toBe(false);
- expect(state.error).toEqual(
- 'An error occurred while fetching the board issues. Please reload the page.',
- );
- });
- });
-
describe('UPDATE_ISSUE_BY_ID', () => {
const issueId = '1';
const prop = 'id';
@@ -254,7 +252,6 @@ describe('Board Store Mutations', () => {
beforeEach(() => {
state = {
...state,
- isLoadingIssues: true,
error: undefined,
issues: {
...issue,
@@ -310,7 +307,7 @@ describe('Board Store Mutations', () => {
state = {
...state,
issuesByListId: listIssues,
- boardLists: mockListsWithModel,
+ boardLists: initialBoardListsState,
issues,
};
@@ -358,6 +355,7 @@ describe('Board Store Mutations', () => {
state = {
...state,
issuesByListId: listIssues,
+ boardLists: initialBoardListsState,
};
mutations.MOVE_ISSUE_FAILURE(state, {
@@ -425,6 +423,7 @@ describe('Board Store Mutations', () => {
...state,
issuesByListId: listIssues,
issues,
+ boardLists: initialBoardListsState,
};
mutations.ADD_ISSUE_TO_LIST_FAILURE(state, { list: mockLists[0], issue: mockIssue2 });
diff --git a/spec/frontend/ci_lint/components/ci_lint_results_spec.js b/spec/frontend/ci_lint/components/ci_lint_results_spec.js
new file mode 100644
index 00000000000..37575a988c5
--- /dev/null
+++ b/spec/frontend/ci_lint/components/ci_lint_results_spec.js
@@ -0,0 +1,114 @@
+import { shallowMount, mount } from '@vue/test-utils';
+import { GlTable } from '@gitlab/ui';
+import CiLintResults from '~/ci_lint/components/ci_lint_results.vue';
+import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
+import { mockJobs, mockErrors, mockWarnings } from '../mock_data';
+
+describe('CI Lint Results', () => {
+ let wrapper;
+
+ const createComponent = (props = {}, mountFn = shallowMount) => {
+ wrapper = mountFn(CiLintResults, {
+ propsData: {
+ valid: true,
+ jobs: mockJobs,
+ errors: [],
+ warnings: [],
+ dryRun: false,
+ ...props,
+ },
+ });
+ };
+
+ const findTable = () => wrapper.find(GlTable);
+ const findByTestId = selector => () => wrapper.find(`[data-testid="ci-lint-${selector}"]`);
+ const findAllByTestId = selector => () => wrapper.findAll(`[data-testid="ci-lint-${selector}"]`);
+ const findErrors = findByTestId('errors');
+ const findWarnings = findByTestId('warnings');
+ const findStatus = findByTestId('status');
+ const findOnlyExcept = findByTestId('only-except');
+ const findLintParameters = findAllByTestId('parameter');
+ const findBeforeScripts = findAllByTestId('before-script');
+ const findScripts = findAllByTestId('script');
+ const findAfterScripts = findAllByTestId('after-script');
+ const filterEmptyScripts = property => mockJobs.filter(job => job[property].length !== 0);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('Invalid results', () => {
+ beforeEach(() => {
+ createComponent({ valid: false, errors: mockErrors, warnings: mockWarnings }, mount);
+ });
+
+ it('does not display the table', () => {
+ expect(findTable().exists()).toBe(false);
+ });
+
+ it('displays the invalid status', () => {
+ expect(findStatus().text()).toBe(`Status: ${wrapper.vm.$options.incorrect.text}`);
+ expect(findStatus().props('variant')).toBe(wrapper.vm.$options.incorrect.variant);
+ });
+
+ it('displays the error message', () => {
+ const [expectedError] = mockErrors;
+
+ expect(findErrors().text()).toBe(expectedError);
+ });
+
+ it('displays the warning message', () => {
+ const [expectedWarning] = mockWarnings;
+
+ expect(findWarnings().exists()).toBe(true);
+ expect(findWarnings().text()).toContain(expectedWarning);
+ });
+ });
+
+ describe('Valid results', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays table', () => {
+ expect(findTable().exists()).toBe(true);
+ });
+
+ it('displays the valid status', () => {
+ expect(findStatus().text()).toBe(wrapper.vm.$options.correct.text);
+ expect(findStatus().props('variant')).toBe(wrapper.vm.$options.correct.variant);
+ });
+
+    it('does not display only/except values with dry run', () => {
+ expect(findOnlyExcept().exists()).toBe(false);
+ });
+ });
+
+ describe('Lint results', () => {
+ beforeEach(() => {
+ createComponent({}, mount);
+ });
+
+ it('formats parameter value', () => {
+ findLintParameters().wrappers.forEach((job, index) => {
+ const { stage } = mockJobs[index];
+ const { name } = mockJobs[index];
+
+ expect(job.text()).toBe(`${capitalizeFirstCharacter(stage)} Job - ${name}`);
+ });
+ });
+
+ it('only shows before scripts when data is present', () => {
+ expect(findBeforeScripts()).toHaveLength(filterEmptyScripts('beforeScript').length);
+ });
+
+ it('only shows script when data is present', () => {
+ expect(findScripts()).toHaveLength(filterEmptyScripts('script').length);
+ });
+
+ it('only shows after script when data is present', () => {
+ expect(findAfterScripts()).toHaveLength(filterEmptyScripts('afterScript').length);
+ });
+ });
+});
diff --git a/spec/frontend/ci_lint/components/ci_lint_spec.js b/spec/frontend/ci_lint/components/ci_lint_spec.js
new file mode 100644
index 00000000000..e617cca499d
--- /dev/null
+++ b/spec/frontend/ci_lint/components/ci_lint_spec.js
@@ -0,0 +1,77 @@
+import { shallowMount } from '@vue/test-utils';
+import EditorLite from '~/vue_shared/components/editor_lite.vue';
+import CiLint from '~/ci_lint/components/ci_lint.vue';
+import lintCIMutation from '~/ci_lint/graphql/mutations/lint_ci.mutation.graphql';
+
+describe('CI Lint', () => {
+ let wrapper;
+
+ const endpoint = '/namespace/project/-/ci/lint';
+ const content =
+ "test_job:\n stage: build\n script: echo 'Building'\n only:\n - web\n - chat\n - pushes\n allow_failure: true ";
+
+ const createComponent = () => {
+ wrapper = shallowMount(CiLint, {
+ data() {
+ return {
+ content,
+ };
+ },
+ propsData: {
+ endpoint,
+ helpPagePath: '/help/ci/lint#pipeline-simulation',
+ },
+ mocks: {
+ $apollo: {
+ mutate: jest.fn(),
+ },
+ },
+ });
+ };
+
+ const findEditor = () => wrapper.find(EditorLite);
+ const findValidateBtn = () => wrapper.find('[data-testid="ci-lint-validate"]');
+ const findClearBtn = () => wrapper.find('[data-testid="ci-lint-clear"]');
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays the editor', () => {
+ expect(findEditor().exists()).toBe(true);
+ });
+
+ it('validate action calls mutation correctly', () => {
+ findValidateBtn().vm.$emit('click');
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: lintCIMutation,
+ variables: { content, dry: false, endpoint },
+ });
+ });
+
+ it('validate action calls mutation with dry run', async () => {
+ const dryRunEnabled = true;
+
+ await wrapper.setData({ dryRun: dryRunEnabled });
+
+ findValidateBtn().vm.$emit('click');
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: lintCIMutation,
+ variables: { content, dry: dryRunEnabled, endpoint },
+ });
+ });
+
+ it('content is cleared on clear action', async () => {
+ expect(findEditor().props('value')).toBe(content);
+
+ await findClearBtn().vm.$emit('click');
+
+ expect(findEditor().props('value')).toBe('');
+ });
+});
diff --git a/spec/frontend/ci_lint/components/ci_lint_warnings_spec.js b/spec/frontend/ci_lint/components/ci_lint_warnings_spec.js
new file mode 100644
index 00000000000..6e0a4881e14
--- /dev/null
+++ b/spec/frontend/ci_lint/components/ci_lint_warnings_spec.js
@@ -0,0 +1,54 @@
+import { mount } from '@vue/test-utils';
+import { GlAlert, GlSprintf } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
+import CiLintWarnings from '~/ci_lint/components/ci_lint_warnings.vue';
+
+const warnings = ['warning 1', 'warning 2', 'warning 3'];
+
+describe('CI lint warnings', () => {
+ let wrapper;
+
+ const createComponent = (limit = 25) => {
+ wrapper = mount(CiLintWarnings, {
+ propsData: {
+ warnings,
+ maxWarnings: limit,
+ },
+ });
+ };
+
+ const findWarningAlert = () => wrapper.find(GlAlert);
+ const findWarnings = () => wrapper.findAll('[data-testid="ci-lint-warning"]');
+ const findWarningMessage = () => trimText(wrapper.find(GlSprintf).text());
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('displays the warning alert', () => {
+ createComponent();
+
+ expect(findWarningAlert().exists()).toBe(true);
+ });
+
+ it('displays all the warnings', () => {
+ createComponent();
+
+ expect(findWarnings()).toHaveLength(warnings.length);
+ });
+
+ it('shows the correct message when the limit is not passed', () => {
+ createComponent();
+
+ expect(findWarningMessage()).toBe(`${warnings.length} warnings found:`);
+ });
+
+ it('shows the correct message when the limit is passed', () => {
+ const limit = 2;
+
+ createComponent(limit);
+
+ expect(findWarningMessage()).toBe(`${warnings.length} warnings found: showing first ${limit}`);
+ });
+});
diff --git a/spec/frontend/ci_lint/mock_data.js b/spec/frontend/ci_lint/mock_data.js
new file mode 100644
index 00000000000..cf7d69dcad3
--- /dev/null
+++ b/spec/frontend/ci_lint/mock_data.js
@@ -0,0 +1,49 @@
+export const mockJobs = [
+ {
+ name: 'job_1',
+ stage: 'build',
+ beforeScript: [],
+ script: ["echo 'Building'"],
+ afterScript: [],
+ tagList: [],
+ environment: null,
+ when: 'on_success',
+ allowFailure: true,
+ only: { refs: ['web', 'chat', 'pushes'] },
+ except: null,
+ },
+ {
+ name: 'multi_project_job',
+ stage: 'test',
+ beforeScript: [],
+ script: [],
+ afterScript: [],
+ tagList: [],
+ environment: null,
+ when: 'on_success',
+ allowFailure: false,
+ only: { refs: ['branches', 'tags'] },
+ except: null,
+ },
+ {
+ name: 'job_2',
+ stage: 'test',
+ beforeScript: ["echo 'before script'"],
+ script: ["echo 'script'"],
+ afterScript: ["echo 'after script"],
+ tagList: [],
+ environment: null,
+ when: 'on_success',
+ allowFailure: false,
+ only: { refs: ['branches@gitlab-org/gitlab'] },
+ except: { refs: ['master@gitlab-org/gitlab', '/^release/.*$/@gitlab-org/gitlab'] },
+ },
+];
+
+export const mockErrors = [
+ '"job_1 job: chosen stage does not exist; available stages are .pre, build, test, deploy, .post"',
+];
+
+export const mockWarnings = [
+ '"jobs:multi_project_job may allow multiple pipelines to run for a single action due to `rules:when` clause with no `workflow:rules` - read more: https://docs.gitlab.com/ee/ci/troubleshooting.html#pipeline-warnings"',
+];
diff --git a/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js b/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
new file mode 100644
index 00000000000..e07afb5d736
--- /dev/null
+++ b/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
@@ -0,0 +1,102 @@
+import { mount } from '@vue/test-utils';
+import { GlTable, GlBadge } from '@gitlab/ui';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+import TriggersList from '~/ci_settings_pipeline_triggers/components/triggers_list.vue';
+import { triggers } from '../mock_data';
+
+describe('TriggersList', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(TriggersList, {
+ propsData: { triggers, ...props },
+ });
+ };
+
+ const findTable = () => wrapper.find(GlTable);
+ const findHeaderAt = i => wrapper.findAll('thead th').at(i);
+ const findRows = () => wrapper.findAll('tbody tr');
+ const findRowAt = i => findRows().at(i);
+ const findCell = (i, col) =>
+ findRowAt(i)
+ .findAll('td')
+ .at(col);
+ const findClipboardBtn = i => findCell(i, 0).find(ClipboardButton);
+ const findInvalidBadge = i => findCell(i, 0).find(GlBadge);
+ const findEditBtn = i => findRowAt(i).find('[data-testid="edit-btn"]');
+ const findRevokeBtn = i => findRowAt(i).find('[data-testid="trigger_revoke_button"]');
+
+ beforeEach(() => {
+ createComponent();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays a table with expected headers', () => {
+ const headers = ['Token', 'Description', 'Owner', 'Last Used', ''];
+ headers.forEach((header, i) => {
+ expect(findHeaderAt(i).text()).toBe(header);
+ });
+ });
+
+ it('displays a table with rows', () => {
+ expect(findRows()).toHaveLength(triggers.length);
+
+ const [trigger] = triggers;
+
+ expect(findCell(0, 0).text()).toBe(trigger.token);
+ expect(findCell(0, 1).text()).toBe(trigger.description);
+ expect(findCell(0, 2).text()).toContain(trigger.owner.name);
+ });
+
+ it('displays a "copy to cliboard" button for exposed tokens', () => {
+ expect(findClipboardBtn(0).exists()).toBe(true);
+ expect(findClipboardBtn(0).props('text')).toBe(triggers[0].token);
+
+ expect(findClipboardBtn(1).exists()).toBe(false);
+ });
+
+ it('displays an "invalid" label for tokens without access', () => {
+ expect(findInvalidBadge(0).exists()).toBe(false);
+
+ expect(findInvalidBadge(1).exists()).toBe(true);
+ });
+
+ it('displays a time ago label when last used', () => {
+ expect(findCell(0, 3).text()).toBe('Never');
+
+ expect(
+ findCell(1, 3)
+ .find(TimeAgoTooltip)
+ .props('time'),
+ ).toBe(triggers[1].lastUsed);
+ });
+
+  it('displays actions in rows', () => {
+ const [data] = triggers;
+
+ expect(findEditBtn(0).attributes('href')).toBe(data.editProjectTriggerPath);
+
+ expect(findRevokeBtn(0).attributes('href')).toBe(data.projectTriggerPath);
+ expect(findRevokeBtn(0).attributes('data-method')).toBe('delete');
+ expect(findRevokeBtn(0).attributes('data-confirm')).toBeTruthy();
+ });
+
+ describe('when there are no triggers set', () => {
+ beforeEach(() => {
+ createComponent({ triggers: [] });
+ });
+
+ it('does not display a table', () => {
+ expect(findTable().exists()).toBe(false);
+ });
+
+ it('displays a message', () => {
+ expect(wrapper.text()).toBe(
+ 'No triggers have been created yet. Add one using the form above.',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/ci_settings_pipeline_triggers/mock_data.js b/spec/frontend/ci_settings_pipeline_triggers/mock_data.js
new file mode 100644
index 00000000000..6813e941e03
--- /dev/null
+++ b/spec/frontend/ci_settings_pipeline_triggers/mock_data.js
@@ -0,0 +1,30 @@
+export const triggers = [
+ {
+ hasTokenExposed: true,
+ token: '0000',
+ description: 'My trigger',
+ owner: {
+ name: 'My User',
+ username: 'user1',
+ path: '/user1',
+ },
+ lastUsed: null,
+ canAccessProject: true,
+ editProjectTriggerPath: '/triggers/1/edit',
+ projectTriggerPath: '/trigger/1',
+ },
+ {
+ hasTokenExposed: false,
+ token: '1111',
+ description: "Anothe user's trigger",
+ owner: {
+ name: 'Someone else',
+ username: 'user2',
+ path: '/user2',
+ },
+ lastUsed: '2020-09-10T08:26:47.410Z',
+ canAccessProject: false,
+ editProjectTriggerPath: '/triggers/1/edit',
+ projectTriggerPath: '/trigger/1',
+ },
+];
diff --git a/spec/frontend/ci_variable_list/components/ci_enviroments_dropdown_spec.js b/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
index 7785d436834..7bcd558c60f 100644
--- a/spec/frontend/ci_variable_list/components/ci_enviroments_dropdown_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlDeprecatedDropdownItem, GlIcon } from '@gitlab/ui';
+import { mount, createLocalVue } from '@vue/test-utils';
+import { GlDropdownItem, GlIcon } from '@gitlab/ui';
import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
const localVue = createLocalVue();
@@ -17,7 +17,7 @@ describe('Ci environments dropdown', () => {
},
});
- wrapper = shallowMount(CiEnvironmentsDropdown, {
+ wrapper = mount(CiEnvironmentsDropdown, {
store,
localVue,
propsData: {
@@ -26,21 +26,21 @@ describe('Ci environments dropdown', () => {
});
};
- const findAllDropdownItems = () => wrapper.findAll(GlDeprecatedDropdownItem);
- const findDropdownItemByIndex = index => wrapper.findAll(GlDeprecatedDropdownItem).at(index);
- const findActiveIconByIndex = index => wrapper.findAll(GlIcon).at(index);
+ const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findDropdownItemByIndex = index => wrapper.findAll(GlDropdownItem).at(index);
+ const findActiveIconByIndex = index => findDropdownItemByIndex(index).find(GlIcon);
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
- describe('No enviroments found', () => {
+ describe('No environments found', () => {
beforeEach(() => {
createComponent('stable');
});
- it('renders create button with search term if enviroments do not contain search term', () => {
+ it('renders create button with search term if environments do not contain search term', () => {
expect(findAllDropdownItems()).toHaveLength(2);
expect(findDropdownItemByIndex(1).text()).toBe('Create wildcard: stable');
});
@@ -55,27 +55,31 @@ describe('Ci environments dropdown', () => {
createComponent('');
});
- it('renders all enviroments when search term is empty', () => {
+ it('renders all environments when search term is empty', () => {
expect(findAllDropdownItems()).toHaveLength(3);
expect(findDropdownItemByIndex(0).text()).toBe('dev');
expect(findDropdownItemByIndex(1).text()).toBe('prod');
expect(findDropdownItemByIndex(2).text()).toBe('staging');
});
+
+    it('should not display active checkmark on the inactive environment', () => {
+ expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true);
+ });
});
- describe('Enviroments found', () => {
+ describe('Environments found', () => {
beforeEach(() => {
createComponent('prod');
});
- it('renders only the enviroment searched for', () => {
+ it('renders only the environment searched for', () => {
expect(findAllDropdownItems()).toHaveLength(1);
expect(findDropdownItemByIndex(0).text()).toBe('prod');
});
it('should not display create button', () => {
- const enviroments = findAllDropdownItems().filter(env => env.text().startsWith('Create'));
- expect(enviroments).toHaveLength(0);
+ const environments = findAllDropdownItems().filter(env => env.text().startsWith('Create'));
+ expect(environments).toHaveLength(0);
expect(findAllDropdownItems()).toHaveLength(1);
});
@@ -84,7 +88,7 @@ describe('Ci environments dropdown', () => {
});
it('should display active checkmark if active', () => {
- expect(findActiveIconByIndex(0).classes('invisible')).toBe(false);
+ expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(false);
});
describe('Custom events', () => {
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index ab32fb12058..5c2d096418d 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
-import { GlButton, GlFormCombobox } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { AWS_ACCESS_KEY_ID } from '~/ci_variable_list/constants';
import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
import createStore from '~/ci_variable_list/store';
@@ -18,7 +18,6 @@ describe('Ci variable modal', () => {
store = createStore();
wrapper = method(CiVariableModal, {
attachToDocument: true,
- provide: { glFeatures: { ciKeyAutocomplete: true } },
stubs: {
GlModal: ModalStub,
},
@@ -42,27 +41,6 @@ describe('Ci variable modal', () => {
wrapper.destroy();
});
- describe('Feature flag', () => {
- describe('when off', () => {
- beforeEach(() => {
- createComponent(shallowMount, { provide: { glFeatures: { ciKeyAutocomplete: false } } });
- });
-
- it('does not render the autocomplete dropdown', () => {
- expect(wrapper.find(GlFormCombobox).exists()).toBe(false);
- });
- });
-
- describe('when on', () => {
- beforeEach(() => {
- createComponent(shallowMount);
- });
- it('renders the autocomplete dropdown', () => {
- expect(wrapper.find(GlFormCombobox).exists()).toBe(true);
- });
- });
- });
-
describe('Basic interactions', () => {
beforeEach(() => {
createComponent(shallowMount);
diff --git a/spec/frontend/ci_variable_list/store/getters_spec.js b/spec/frontend/ci_variable_list/store/getters_spec.js
index 7ad96545652..92f22b18763 100644
--- a/spec/frontend/ci_variable_list/store/getters_spec.js
+++ b/spec/frontend/ci_variable_list/store/getters_spec.js
@@ -3,7 +3,7 @@ import mockData from '../services/mock_data';
describe('Ci variable getters', () => {
describe('joinedEnvironments', () => {
- it('should join fetched enviroments with variable environment scopes', () => {
+ it('should join fetched environments with variable environment scopes', () => {
const state = {
environments: ['All (default)', 'staging', 'deployment', 'prod'],
variables: mockData.mockVariableScopes,
diff --git a/spec/frontend/ci_variable_list/store/mutations_spec.js b/spec/frontend/ci_variable_list/store/mutations_spec.js
index 663b3486a17..a333fb7d8f9 100644
--- a/spec/frontend/ci_variable_list/store/mutations_spec.js
+++ b/spec/frontend/ci_variable_list/store/mutations_spec.js
@@ -73,7 +73,7 @@ describe('CI variable list mutations', () => {
});
describe('ADD_WILD_CARD_SCOPE', () => {
- it('should add wild card scope to enviroments array and sort', () => {
+ it('should add wild card scope to environments array and sort', () => {
stateCopy.environments = ['dev', 'staging'];
mutations[types.ADD_WILD_CARD_SCOPE](stateCopy, 'production');
diff --git a/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap
index 3328ec724fd..b6e89281fef 100644
--- a/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap
@@ -94,7 +94,7 @@ exports[`Applications Prometheus application shows the correct description 1`] =
Prometheus is an open-source monitoring system with
<a
class="gl-link"
- href="https://docs.gitlab.com/ce/user/project/integrations/prometheus.html"
+ href="https://docs.gitlab.com/ee/user/project/integrations/prometheus.html"
rel="noopener noreferrer"
target="_blank"
>
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index 93b757e008a..15eeadcc8b8 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -5,14 +5,17 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
class="gl-display-flex gl-justify-content-end"
>
<div
- class="dropdown b-dropdown gl-dropdown btn-group"
+ class="dropdown b-dropdown gl-new-dropdown btn-group"
+ menu-class="dropdown-menu-large"
>
<button
- class="btn btn-danger"
+ class="btn btn-danger btn-md gl-button split-content-button"
type="button"
>
+ <!---->
+
<span
- class="gl-dropdown-toggle-text"
+ class="gl-new-dropdown-button-text"
>
Remove integration and resources
</span>
@@ -22,7 +25,7 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
<button
aria-expanded="false"
aria-haspopup="true"
- class="btn dropdown-toggle btn-danger dropdown-toggle-split"
+ class="btn dropdown-toggle btn-danger btn-md gl-button gl-dropdown-toggle dropdown-toggle-split"
type="button"
>
<span
@@ -32,29 +35,58 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
</span>
</button>
<ul
- class="dropdown-menu dropdown-menu-selectable dropdown-menu-large"
+ class="dropdown-menu dropdown-menu-large"
role="menu"
tabindex="-1"
>
+ <!---->
+
<li
+ class="gl-new-dropdown-item"
role="presentation"
>
<button
- class="dropdown-item is-active"
+ class="dropdown-item"
role="menuitem"
type="button"
>
- <strong>
- Remove integration and resources
- </strong>
+ <svg
+ class="gl-icon s16 gl-new-dropdown-item-check-icon"
+ data-testid="mobile-issue-close-icon"
+ >
+ <use
+ href="#mobile-issue-close"
+ />
+ </svg>
+
+ <!---->
- <div>
- Deletes all GitLab resources attached to this cluster during removal
+ <!---->
+
+ <div
+ class="gl-new-dropdown-item-text-wrapper"
+ >
+ <p
+ class="gl-new-dropdown-item-text-primary"
+ >
+ <strong>
+ Remove integration and resources
+ </strong>
+
+ <div>
+ Deletes all GitLab resources attached to this cluster during removal
+ </div>
+ </p>
+
+ <!---->
</div>
+
+ <!---->
</button>
</li>
<li
+ class="gl-new-dropdown-divider"
role="presentation"
>
<hr
@@ -64,6 +96,7 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
/>
</li>
<li
+ class="gl-new-dropdown-item"
role="presentation"
>
<button
@@ -71,13 +104,38 @@ exports[`Remove cluster confirmation modal renders splitbutton with modal includ
role="menuitem"
type="button"
>
- <strong>
- Remove integration
- </strong>
+ <svg
+ class="gl-icon s16 gl-new-dropdown-item-check-icon gl-visibility-hidden"
+ data-testid="mobile-issue-close-icon"
+ >
+ <use
+ href="#mobile-issue-close"
+ />
+ </svg>
+
+ <!---->
- <div>
- Removes cluster from project but keeps associated resources
+ <!---->
+
+ <div
+ class="gl-new-dropdown-item-text-wrapper"
+ >
+ <p
+ class="gl-new-dropdown-item-text-primary"
+ >
+ <strong>
+ Remove integration
+ </strong>
+
+ <div>
+ Removes cluster from project but keeps associated resources
+ </div>
+ </p>
+
+ <!---->
</div>
+
+ <!---->
</button>
</li>
diff --git a/spec/frontend/clusters/components/fluentd_output_settings_spec.js b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
index c263679a45c..25db8785edc 100644
--- a/spec/frontend/clusters/components/fluentd_output_settings_spec.js
+++ b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlAlert, GlDeprecatedDropdown, GlFormCheckbox } from '@gitlab/ui';
+import { GlAlert, GlDropdown, GlFormCheckbox } from '@gitlab/ui';
import FluentdOutputSettings from '~/clusters/components/fluentd_output_settings.vue';
import { APPLICATION_STATUS, FLUENTD } from '~/clusters/constants';
import eventHub from '~/clusters/event_hub';
@@ -36,7 +36,7 @@ describe('FluentdOutputSettings', () => {
};
const findSaveButton = () => wrapper.find({ ref: 'saveBtn' });
const findCancelButton = () => wrapper.find({ ref: 'cancelBtn' });
- const findProtocolDropdown = () => wrapper.find(GlDeprecatedDropdown);
+ const findProtocolDropdown = () => wrapper.find(GlDropdown);
const findCheckbox = name =>
wrapper.findAll(GlFormCheckbox).wrappers.find(x => x.text() === name);
const findHost = () => wrapper.find('#fluentd-host');
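Illustrative sketch (not part of the patch above): the GlDeprecatedDropdown -> GlDropdown migration repeated in these specs only touches the @gitlab/ui import and the find() target, since the tests locate the dropdown by component rather than by CSS class. The host component below is hypothetical and exists only to make the sketch self-contained.

import { shallowMount } from '@vue/test-utils';
import { GlDropdown } from '@gitlab/ui';

// Hypothetical host component; a render function avoids needing the runtime template compiler.
const DummySettings = {
  render(h) {
    return h(GlDropdown, { props: { text: 'Protocol' } });
  },
};

describe('GlDropdown migration sketch', () => {
  it('finds the dropdown by component instead of by class', () => {
    const wrapper = shallowMount(DummySettings);
    expect(wrapper.find(GlDropdown).exists()).toBe(true);
  });
});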
diff --git a/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js b/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
index 3a9a608b2e2..1f07a0b7908 100644
--- a/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
+++ b/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlAlert, GlToggle, GlDeprecatedDropdown } from '@gitlab/ui';
+import { GlAlert, GlToggle, GlDropdown } from '@gitlab/ui';
import IngressModsecuritySettings from '~/clusters/components/ingress_modsecurity_settings.vue';
import { APPLICATION_STATUS, INGRESS } from '~/clusters/constants';
import eventHub from '~/clusters/event_hub';
@@ -28,10 +28,12 @@ describe('IngressModsecuritySettings', () => {
});
};
- const findSaveButton = () => wrapper.find('.btn-success');
- const findCancelButton = () => wrapper.find('[variant="secondary"]');
+ const findSaveButton = () =>
+ wrapper.find('[data-qa-selector="save_ingress_modsecurity_settings"]');
+ const findCancelButton = () =>
+ wrapper.find('[data-qa-selector="cancel_ingress_modsecurity_settings"]');
const findModSecurityToggle = () => wrapper.find(GlToggle);
- const findModSecurityDropdown = () => wrapper.find(GlDeprecatedDropdown);
+ const findModSecurityDropdown = () => wrapper.find(GlDropdown);
describe('when ingress is installed', () => {
beforeEach(() => {
diff --git a/spec/frontend/clusters/components/knative_domain_editor_spec.js b/spec/frontend/clusters/components/knative_domain_editor_spec.js
index 11ebe1b5d61..b7f76211fd6 100644
--- a/spec/frontend/clusters/components/knative_domain_editor_spec.js
+++ b/spec/frontend/clusters/components/knative_domain_editor_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedDropdownItem, GlButton } from '@gitlab/ui';
+import { GlDropdownItem, GlButton } from '@gitlab/ui';
import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
import { APPLICATION_STATUS } from '~/clusters/constants';
@@ -112,7 +112,7 @@ describe('KnativeDomainEditor', () => {
createComponent({ knative: { ...knative, availableDomains: [newDomain] } });
jest.spyOn(wrapper.vm, 'selectDomain');
- wrapper.find(GlDeprecatedDropdownItem).vm.$emit('click');
+ wrapper.find(GlDropdownItem).vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.selectDomain).toHaveBeenCalledWith(newDomain);
diff --git a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
index 57c538d2650..3e5f8de8e7b 100644
--- a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
+++ b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedDropdownItem, GlIcon } from '@gitlab/ui';
+import { GlDropdownItem, GlIcon } from '@gitlab/ui';
import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
describe('CrossplaneProviderStack component', () => {
@@ -37,7 +37,7 @@ describe('CrossplaneProviderStack component', () => {
createComponent({ crossplane });
});
- const findDropdownElements = () => wrapper.findAll(GlDeprecatedDropdownItem);
+ const findDropdownElements = () => wrapper.findAll(GlDropdownItem);
const findFirstDropdownElement = () => findDropdownElements().at(0);
afterEach(() => {
diff --git a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js
index 628c35ae839..d61f79071d5 100644
--- a/spec/frontend/clusters_list/components/clusters_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_spec.js
@@ -6,7 +6,7 @@ import {
GlDeprecatedSkeletonLoading as GlSkeletonLoading,
GlTable,
} from '@gitlab/ui';
-import * as Sentry from '@sentry/browser';
+import * as Sentry from '~/sentry/wrapper';
import axios from '~/lib/utils/axios_utils';
import Clusters from '~/clusters_list/components/clusters.vue';
import ClusterStore from '~/clusters_list/store';
@@ -164,18 +164,18 @@ describe('Clusters', () => {
});
it.each`
- nodeSize | lineNumber
- ${'Unknown'} | ${0}
- ${'1'} | ${1}
- ${'2'} | ${2}
- ${'1'} | ${3}
- ${'1'} | ${4}
- ${'Unknown'} | ${5}
- `('renders node size for each cluster', ({ nodeSize, lineNumber }) => {
+ nodeText | lineNumber
+ ${'Unable to Authenticate'} | ${0}
+ ${'1'} | ${1}
+ ${'2'} | ${2}
+ ${'1'} | ${3}
+ ${'1'} | ${4}
+ ${'Unknown Error'} | ${5}
+ `('renders node size for each cluster', ({ nodeText, lineNumber }) => {
const sizes = findTable().findAll('td:nth-child(3)');
const size = sizes.at(lineNumber);
- expect(size.text()).toBe(nodeSize);
+ expect(size.text()).toContain(nodeText);
expect(size.find(GlSkeletonLoading).exists()).toBe(false);
});
});
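Illustrative, self-contained sketch (not from the patch) of the it.each tagged-template form used above: each table row becomes one test case, and switching the assertion to toContain lets a cell carry extra markup, such as the new error help text, around the expected value.

describe('it.each table sketch', () => {
  // Hypothetical cell contents standing in for the rendered table column.
  const cells = ['Unable to Authenticate ?', '1', '2'];

  it.each`
    nodeText                    | lineNumber
    ${'Unable to Authenticate'} | ${0}
    ${'1'}                      | ${1}
    ${'2'}                      | ${2}
  `('cell $lineNumber contains $nodeText', ({ nodeText, lineNumber }) => {
    expect(cells[lineNumber]).toContain(nodeText);
  });
});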
diff --git a/spec/frontend/clusters_list/components/node_error_help_text_spec.js b/spec/frontend/clusters_list/components/node_error_help_text_spec.js
new file mode 100644
index 00000000000..4d157b3a8ab
--- /dev/null
+++ b/spec/frontend/clusters_list/components/node_error_help_text_spec.js
@@ -0,0 +1,33 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlPopover } from '@gitlab/ui';
+import NodeErrorHelpText from '~/clusters_list/components/node_error_help_text.vue';
+
+describe('NodeErrorHelpText', () => {
+ let wrapper;
+
+ const createWrapper = propsData => {
+ wrapper = shallowMount(NodeErrorHelpText, { propsData, stubs: { GlPopover } });
+ return wrapper.vm.$nextTick();
+ };
+
+ const findPopover = () => wrapper.find(GlPopover);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ errorType | wrapperText | popoverText
+ ${'authentication_error'} | ${'Unable to Authenticate'} | ${'GitLab failed to authenticate'}
+ ${'connection_error'} | ${'Unable to Connect'} | ${'GitLab failed to connect to the cluster'}
+ ${'http_error'} | ${'Unable to Connect'} | ${'There was an HTTP error when connecting to your cluster'}
+ ${'default'} | ${'Unknown Error'} | ${'An unknown error occurred while attempting to connect to Kubernetes.'}
+ ${'unknown_error_type'} | ${'Unknown Error'} | ${'An unknown error occurred while attempting to connect to Kubernetes.'}
+ ${null} | ${'Unknown Error'} | ${'An unknown error occurred while attempting to connect to Kubernetes.'}
+ `('displays error text', ({ errorType, wrapperText, popoverText }) => {
+ return createWrapper({ errorType, popoverId: 'id' }).then(() => {
+ expect(wrapper.text()).toContain(wrapperText);
+ expect(findPopover().text()).toContain(popoverText);
+ });
+ });
+});
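A note on the stubs option in the new spec above (sketch, not from the patch): passing the real GlPopover as the stub value opts that one component out of shallowMount's automatic stubbing, which is why findPopover().text() can assert on the rendered popover copy while the rest of the tree stays shallow.

// Renders GlPopover for real while every other child component remains stubbed.
wrapper = shallowMount(NodeErrorHelpText, { propsData, stubs: { GlPopover } });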
diff --git a/spec/frontend/clusters_list/mock_data.js b/spec/frontend/clusters_list/mock_data.js
index 48af3b91c94..ed32655d10e 100644
--- a/spec/frontend/clusters_list/mock_data.js
+++ b/spec/frontend/clusters_list/mock_data.js
@@ -6,6 +6,11 @@ export const clusterList = [
provider_type: 'gcp',
status: 'creating',
nodes: null,
+ kubernetes_errors: {
+ connection_error: 'authentication_error',
+ node_connection_error: 'connection_error',
+ metrics_connection_error: 'http_error',
+ },
},
{
name: 'My Cluster 2',
@@ -19,6 +24,7 @@ export const clusterList = [
usage: { cpu: '246155922n', memory: '1255212Ki' },
},
],
+ kubernetes_errors: {},
},
{
name: 'My Cluster 3',
@@ -36,6 +42,7 @@ export const clusterList = [
usage: { cpu: '307051934n', memory: '1379136Ki' },
},
],
+ kubernetes_errors: {},
},
{
name: 'My Cluster 4',
@@ -48,6 +55,7 @@ export const clusterList = [
usage: { cpu: '1missingCpuUnit', memory: '1missingMemoryUnit' },
},
],
+ kubernetes_errors: {},
},
{
name: 'My Cluster 5',
@@ -59,12 +67,14 @@ export const clusterList = [
status: { allocatable: { cpu: '1missingCpuUnit', memory: '1missingMemoryUnit' } },
},
],
+ kubernetes_errors: {},
},
{
name: 'My Cluster 6',
environment_scope: '*',
cluster_type: 'project_type',
status: 'cleanup_ongoing',
+ kubernetes_errors: {},
},
];
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
index 053128a179a..3d4e07d00eb 100644
--- a/spec/frontend/clusters_list/store/actions_spec.js
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import * as Sentry from '@sentry/browser';
+import * as Sentry from '~/sentry/wrapper';
import Poll from '~/lib/utils/poll';
import { deprecatedCreateFlash as flashError } from '~/flash';
import axios from '~/lib/utils/axios_utils';
diff --git a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
index 745a163951a..62b751ec59b 100644
--- a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
+++ b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
@@ -56,6 +56,7 @@ exports[`Code navigation popover component renders popover 1`] = `
class="popover-body border-top"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="w-100"
data-testid="go-to-definition-btn"
diff --git a/spec/frontend/collapsed_sidebar_todo_spec.js b/spec/frontend/collapsed_sidebar_todo_spec.js
index b1a304fabcd..86f4c450c05 100644
--- a/spec/frontend/collapsed_sidebar_todo_spec.js
+++ b/spec/frontend/collapsed_sidebar_todo_spec.js
@@ -59,7 +59,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
it('sets default tooltip title', () => {
expect(
document.querySelector('.js-issuable-todo.sidebar-collapsed-icon').getAttribute('title'),
- ).toBe('Add a To Do');
+ ).toBe('Add a to do');
});
it('toggle todo state', done => {
@@ -125,7 +125,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
expect(
document.querySelector('.issuable-sidebar-header .js-issuable-todo').textContent.trim(),
- ).toBe('Add a To Do');
+ ).toBe('Add a to do');
})
.then(done)
.catch(done.fail);
@@ -164,7 +164,7 @@ describe('Issuable right sidebar collapsed todo toggle', () => {
document
.querySelector('.js-issuable-todo.sidebar-collapsed-icon')
.getAttribute('aria-label'),
- ).toBe('Add a To Do');
+ ).toBe('Add a to do');
})
.then(done)
.catch(done.fail);
diff --git a/spec/frontend/commit/commit_pipeline_status_component_spec.js b/spec/frontend/commit/commit_pipeline_status_component_spec.js
index 1086985eec0..625024ee61f 100644
--- a/spec/frontend/commit/commit_pipeline_status_component_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_component_spec.js
@@ -142,7 +142,7 @@ describe('Commit pipeline status component', () => {
});
it('renders CI icon', () => {
- expect(findCiIcon().attributes('data-original-title')).toEqual('Pipeline: pending');
+ expect(findCiIcon().attributes('title')).toEqual('Pipeline: pending');
expect(findCiIcon().props('status')).toEqual(mockCiStatus);
});
});
@@ -161,7 +161,7 @@ describe('Commit pipeline status component', () => {
});
it('renders not found CI icon', () => {
- expect(findCiIcon().attributes('data-original-title')).toEqual('Pipeline: not found');
+ expect(findCiIcon().attributes('title')).toEqual('Pipeline: not found');
expect(findCiIcon().props('status')).toEqual({
text: 'not found',
icon: 'status_notfound',
diff --git a/spec/frontend/commit/pipelines/pipelines_spec.js b/spec/frontend/commit/pipelines/pipelines_spec.js
index fdf3c2e85f3..a196b66daa0 100644
--- a/spec/frontend/commit/pipelines/pipelines_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_spec.js
@@ -21,6 +21,10 @@ describe('Pipelines table in Commits and Merge requests', () => {
preloadFixtures(jsonFixtureName);
+ const findRunPipelineBtn = () => vm.$el.querySelector('[data-testid="run_pipeline_button"]');
+ const findRunPipelineBtnMobile = () =>
+ vm.$el.querySelector('[data-testid="run_pipeline_button_mobile"]');
+
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -131,7 +135,8 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm = mountComponent(PipelinesTable, { ...props });
setImmediate(() => {
- expect(vm.$el.querySelector('.js-run-mr-pipeline')).not.toBeNull();
+ expect(findRunPipelineBtn()).not.toBeNull();
+ expect(findRunPipelineBtnMobile()).not.toBeNull();
done();
});
});
@@ -147,7 +152,8 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm = mountComponent(PipelinesTable, { ...props });
setImmediate(() => {
- expect(vm.$el.querySelector('.js-run-mr-pipeline')).toBeNull();
+ expect(findRunPipelineBtn()).toBeNull();
+ expect(findRunPipelineBtnMobile()).toBeNull();
done();
});
});
@@ -157,7 +163,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
const findModal = () =>
document.querySelector('#create-pipeline-for-fork-merge-request-modal');
- beforeEach(() => {
+ beforeEach(done => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
@@ -168,23 +174,46 @@ describe('Pipelines table in Commits and Merge requests', () => {
projectId: '5',
mergeRequestId: 3,
});
- });
- it('updates the loading state', done => {
jest.spyOn(Api, 'postMergeRequestPipeline').mockReturnValue(Promise.resolve());
setImmediate(() => {
- vm.$el.querySelector('.js-run-mr-pipeline').click();
+ done();
+ });
+ });
- vm.$nextTick(() => {
- expect(findModal()).toBeNull();
- expect(vm.state.isRunningMergeRequestPipeline).toBe(true);
+ it('on desktop, shows a loading button', done => {
+ findRunPipelineBtn().click();
- setImmediate(() => {
- expect(vm.state.isRunningMergeRequestPipeline).toBe(false);
+ vm.$nextTick(() => {
+ expect(findModal()).toBeNull();
- done();
- });
+ expect(findRunPipelineBtn().disabled).toBe(true);
+ expect(findRunPipelineBtn().querySelector('.gl-spinner')).not.toBeNull();
+
+ setImmediate(() => {
+ expect(findRunPipelineBtn().disabled).toBe(false);
+ expect(findRunPipelineBtn().querySelector('.gl-spinner')).toBeNull();
+
+ done();
+ });
+ });
+ });
+
+ it('on mobile, shows a loading button', done => {
+ findRunPipelineBtnMobile().click();
+
+ vm.$nextTick(() => {
+ expect(findModal()).toBeNull();
+
+ expect(findModal()).toBeNull();
+ expect(findRunPipelineBtn().querySelector('.gl-spinner')).not.toBeNull();
+
+ setImmediate(() => {
+ expect(findRunPipelineBtn().disabled).toBe(false);
+ expect(findRunPipelineBtn().querySelector('.gl-spinner')).toBeNull();
+
+ done();
});
});
});
@@ -194,7 +223,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
const findModal = () =>
document.querySelector('#create-pipeline-for-fork-merge-request-modal');
- beforeEach(() => {
+ beforeEach(done => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
@@ -207,18 +236,29 @@ describe('Pipelines table in Commits and Merge requests', () => {
sourceProjectFullPath: 'test/parent-project',
targetProjectFullPath: 'test/fork-project',
});
- });
- it('shows a security warning modal', done => {
jest.spyOn(Api, 'postMergeRequestPipeline').mockReturnValue(Promise.resolve());
setImmediate(() => {
- vm.$el.querySelector('.js-run-mr-pipeline').click();
+ done();
+ });
+ });
- vm.$nextTick(() => {
- expect(findModal()).not.toBeNull();
- done();
- });
+ it('on desktop, shows a security warning modal', done => {
+ findRunPipelineBtn().click();
+
+ vm.$nextTick(() => {
+ expect(findModal()).not.toBeNull();
+ done();
+ });
+ });
+
+ it('on mobile, shows a security warning modal', done => {
+ findRunPipelineBtnMobile().click();
+
+ vm.$nextTick(() => {
+ expect(findModal()).not.toBeNull();
+ done();
});
});
});
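Illustrative, self-contained sketch (not from the patch) of the data-testid finder helpers introduced above: querying by a dedicated test attribute keeps the specs stable when presentation classes such as .js-run-mr-pipeline change. The fixture markup is hypothetical.

describe('data-testid finder sketch', () => {
  const root = document.createElement('div');
  root.innerHTML = '<button data-testid="run_pipeline_button">Run pipeline</button>';

  const findRunPipelineBtn = () => root.querySelector('[data-testid="run_pipeline_button"]');

  it('locates the button by its test id', () => {
    expect(findRunPipelineBtn()).not.toBeNull();
    expect(findRunPipelineBtn().textContent).toContain('Run pipeline');
  });
});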
diff --git a/spec/frontend/confidential_merge_request/components/dropdown_spec.js b/spec/frontend/confidential_merge_request/components/dropdown_spec.js
index 3e95cd6c0d7..401948e24e4 100644
--- a/spec/frontend/confidential_merge_request/components/dropdown_spec.js
+++ b/spec/frontend/confidential_merge_request/components/dropdown_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { GlDeprecatedDropdownItem } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import Dropdown from '~/confidential_merge_request/components/dropdown.vue';
let vm;
@@ -30,27 +30,18 @@ describe('Confidential merge request project dropdown component', () => {
},
]);
- expect(vm.findAll(GlDeprecatedDropdownItem).length).toBe(2);
+ expect(vm.findAll(GlDropdownItem).length).toBe(2);
});
- it('renders selected project icon', () => {
- factory([
- {
- id: 1,
- name: 'test',
- },
- {
- id: 2,
- name: 'test 2',
- },
- ]);
+ it('shows lock icon', () => {
+ factory();
+
+ expect(vm.find(GlDropdown).props('icon')).toBe('lock');
+ });
+
+ it('has dropdown text', () => {
+ factory();
- expect(vm.find('.js-active-project-check').classes()).not.toContain('icon');
- expect(
- vm
- .findAll('.js-active-project-check')
- .at(1)
- .classes(),
- ).toContain('icon');
+ expect(vm.find(GlDropdown).props('text')).toBe('Select private project');
});
});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js b/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js
index 4bf3ac430f5..e0913fe2e88 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js
@@ -12,6 +12,7 @@ describe('CreateEksCluster', () => {
let vm;
let state;
const gitlabManagedClusterHelpPath = 'gitlab-managed-cluster-help-path';
+ const namespacePerEnvironmentHelpPath = 'namespace-per-environment-help-path';
const accountAndExternalIdsHelpPath = 'account-and-external-id-help-path';
const createRoleArnHelpPath = 'role-arn-help-path';
const kubernetesIntegrationHelpPath = 'kubernetes-integration';
@@ -26,6 +27,7 @@ describe('CreateEksCluster', () => {
vm = shallowMount(CreateEksCluster, {
propsData: {
gitlabManagedClusterHelpPath,
+ namespacePerEnvironmentHelpPath,
accountAndExternalIdsHelpPath,
createRoleArnHelpPath,
externalLinkIcon,
@@ -53,6 +55,12 @@ describe('CreateEksCluster', () => {
);
});
+ it('help url for namespace per environment cluster documentation', () => {
+ expect(vm.find(EksClusterConfigurationForm).props('namespacePerEnvironmentHelpPath')).toBe(
+ namespacePerEnvironmentHelpPath,
+ );
+ });
+
it('help url for gitlab managed cluster documentation', () => {
expect(vm.find(EksClusterConfigurationForm).props('kubernetesIntegrationHelpPath')).toBe(
kubernetesIntegrationHelpPath,
diff --git a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
index d7dd7072f67..2600415fc9f 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
@@ -169,6 +169,7 @@ describe('EksClusterConfigurationForm', () => {
store,
propsData: {
gitlabManagedClusterHelpPath: '',
+ namespacePerEnvironmentHelpPath: '',
kubernetesIntegrationHelpPath: '',
externalLinkIcon: '',
},
diff --git a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
index ed753888790..f929216689a 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
@@ -14,6 +14,7 @@ import {
SET_ROLE,
SET_SECURITY_GROUP,
SET_GITLAB_MANAGED_CLUSTER,
+ SET_NAMESPACE_PER_ENVIRONMENT,
SET_INSTANCE_TYPE,
SET_NODE_COUNT,
REQUEST_CREATE_ROLE,
@@ -40,6 +41,7 @@ describe('EKS Cluster Store Actions', () => {
let instanceType;
let nodeCount;
let gitlabManagedCluster;
+ let namespacePerEnvironment;
let mock;
let state;
let newClusterUrl;
@@ -57,6 +59,7 @@ describe('EKS Cluster Store Actions', () => {
instanceType = 'small-1';
nodeCount = '5';
gitlabManagedCluster = true;
+ namespacePerEnvironment = true;
newClusterUrl = '/clusters/1';
@@ -76,19 +79,20 @@ describe('EKS Cluster Store Actions', () => {
});
it.each`
- action | mutation | payload | payloadDescription
- ${'setClusterName'} | ${SET_CLUSTER_NAME} | ${{ clusterName }} | ${'cluster name'}
- ${'setEnvironmentScope'} | ${SET_ENVIRONMENT_SCOPE} | ${{ environmentScope }} | ${'environment scope'}
- ${'setKubernetesVersion'} | ${SET_KUBERNETES_VERSION} | ${{ kubernetesVersion }} | ${'kubernetes version'}
- ${'setRole'} | ${SET_ROLE} | ${{ role }} | ${'role'}
- ${'setRegion'} | ${SET_REGION} | ${{ region }} | ${'region'}
- ${'setKeyPair'} | ${SET_KEY_PAIR} | ${{ keyPair }} | ${'key pair'}
- ${'setVpc'} | ${SET_VPC} | ${{ vpc }} | ${'vpc'}
- ${'setSubnet'} | ${SET_SUBNET} | ${{ subnet }} | ${'subnet'}
- ${'setSecurityGroup'} | ${SET_SECURITY_GROUP} | ${{ securityGroup }} | ${'securityGroup'}
- ${'setInstanceType'} | ${SET_INSTANCE_TYPE} | ${{ instanceType }} | ${'instance type'}
- ${'setNodeCount'} | ${SET_NODE_COUNT} | ${{ nodeCount }} | ${'node count'}
- ${'setGitlabManagedCluster'} | ${SET_GITLAB_MANAGED_CLUSTER} | ${gitlabManagedCluster} | ${'gitlab managed cluster'}
+ action | mutation | payload | payloadDescription
+ ${'setClusterName'} | ${SET_CLUSTER_NAME} | ${{ clusterName }} | ${'cluster name'}
+ ${'setEnvironmentScope'} | ${SET_ENVIRONMENT_SCOPE} | ${{ environmentScope }} | ${'environment scope'}
+ ${'setKubernetesVersion'} | ${SET_KUBERNETES_VERSION} | ${{ kubernetesVersion }} | ${'kubernetes version'}
+ ${'setRole'} | ${SET_ROLE} | ${{ role }} | ${'role'}
+ ${'setRegion'} | ${SET_REGION} | ${{ region }} | ${'region'}
+ ${'setKeyPair'} | ${SET_KEY_PAIR} | ${{ keyPair }} | ${'key pair'}
+ ${'setVpc'} | ${SET_VPC} | ${{ vpc }} | ${'vpc'}
+ ${'setSubnet'} | ${SET_SUBNET} | ${{ subnet }} | ${'subnet'}
+ ${'setSecurityGroup'} | ${SET_SECURITY_GROUP} | ${{ securityGroup }} | ${'securityGroup'}
+ ${'setInstanceType'} | ${SET_INSTANCE_TYPE} | ${{ instanceType }} | ${'instance type'}
+ ${'setNodeCount'} | ${SET_NODE_COUNT} | ${{ nodeCount }} | ${'node count'}
+ ${'setGitlabManagedCluster'} | ${SET_GITLAB_MANAGED_CLUSTER} | ${gitlabManagedCluster} | ${'gitlab managed cluster'}
+ ${'setNamespacePerEnvironment'} | ${SET_NAMESPACE_PER_ENVIRONMENT} | ${namespacePerEnvironment} | ${'namespace per environment'}
`(`$action commits $mutation with $payloadDescription payload`, data => {
const { action, mutation, payload } = data;
@@ -179,6 +183,7 @@ describe('EKS Cluster Store Actions', () => {
name: clusterName,
environment_scope: environmentScope,
managed: gitlabManagedCluster,
+ namespace_per_environment: namespacePerEnvironment,
provider_aws_attributes: {
kubernetes_version: kubernetesVersion,
region,
@@ -204,6 +209,7 @@ describe('EKS Cluster Store Actions', () => {
selectedInstanceType: instanceType,
nodeCount,
gitlabManagedCluster,
+ namespacePerEnvironment,
});
});
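Illustrative, self-contained sketch (not from the patch) of the action-to-mutation table driven above: a Vuex action is just a function receiving { commit }, so each row can be verified with a jest.fn() commit spy. The action body and constant below are hypothetical stand-ins.

const SET_NAMESPACE_PER_ENVIRONMENT = 'SET_NAMESPACE_PER_ENVIRONMENT';
const setNamespacePerEnvironment = ({ commit }, payload) =>
  commit(SET_NAMESPACE_PER_ENVIRONMENT, payload);

it.each`
  action                        | mutation                         | payload
  ${setNamespacePerEnvironment} | ${SET_NAMESPACE_PER_ENVIRONMENT} | ${true}
`('$mutation is committed with its payload', ({ action, mutation, payload }) => {
  const commit = jest.fn();
  action({ commit }, payload);
  expect(commit).toHaveBeenCalledWith(mutation, payload);
});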
diff --git a/spec/frontend/cycle_analytics/stage_nav_item_spec.js b/spec/frontend/cycle_analytics/stage_nav_item_spec.js
index 1fe80d3b1ce..d577d0b602a 100644
--- a/spec/frontend/cycle_analytics/stage_nav_item_spec.js
+++ b/spec/frontend/cycle_analytics/stage_nav_item_spec.js
@@ -118,7 +118,7 @@ describe('StageNavItem', () => {
expect(wrapper.find('.stage-median').text()).toBe('Not available');
});
it('does not render options menu', () => {
- expect(wrapper.find('.more-actions-toggle').exists()).toBe(false);
+ expect(wrapper.find('[data-testid="more-actions-toggle"]').exists()).toBe(false);
});
});
@@ -135,7 +135,7 @@ describe('StageNavItem', () => {
});
it('does not render options menu', () => {
- expect(wrapper.find('.more-actions-toggle').exists()).toBe(false);
+ expect(wrapper.find('[data-testid="more-actions-toggle"]').exists()).toBe(false);
});
it('can not edit the stage', () => {
diff --git a/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js b/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
index 99cb864ce34..7c1a4ff1085 100644
--- a/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
+++ b/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlDeprecatedDropdownItem, GlDropdown } from '@gitlab/ui';
+import { GlDropdownItem, GlDropdown } from '@gitlab/ui';
import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown.vue';
import createStore from '~/deploy_freeze/store';
@@ -29,8 +29,8 @@ describe('Deploy freeze timezone dropdown', () => {
wrapper.setData({ searchTerm });
};
- const findAllDropdownItems = () => wrapper.findAll(GlDeprecatedDropdownItem);
- const findDropdownItemByIndex = index => wrapper.findAll(GlDeprecatedDropdownItem).at(index);
+ const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findDropdownItemByIndex = index => wrapper.findAll(GlDropdownItem).at(index);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap b/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap
index 62a0f675cff..ed8ed3254ba 100644
--- a/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap
+++ b/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap
@@ -1,23 +1,9 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Design note pin component should match the snapshot of note when repositioning 1`] = `
-<button
- aria-label="Comment form position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 btn-transparent comment-indicator"
- style="left: 10px; top: 10px; cursor: move;"
- type="button"
->
- <gl-icon-stub
- name="image-comment-dark"
- size="24"
- />
-</button>
-`;
-
exports[`Design note pin component should match the snapshot of note with index 1`] = `
<button
aria-label="Comment '1' position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 js-image-badge badge badge-pill"
+ class="gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-font-lg gl-outline-0! js-image-badge badge badge-pill"
style="left: 10px; top: 10px;"
type="button"
>
@@ -30,7 +16,7 @@ exports[`Design note pin component should match the snapshot of note with index
exports[`Design note pin component should match the snapshot of note without index 1`] = `
<button
aria-label="Comment form position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 btn-transparent comment-indicator"
+ class="gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-font-lg gl-outline-0! btn-transparent comment-indicator gl-p-0"
style="left: 10px; top: 10px;"
type="button"
>
diff --git a/spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap b/spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap
index 189962c5b2e..560533891c9 100644
--- a/spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap
+++ b/spec/frontend/design_management/components/__snapshots__/design_presentation_spec.js.snap
@@ -2,10 +2,10 @@
exports[`Design management design presentation component currentCommentForm is equal to current annotation position when isAnnotating is true 1`] = `
<div
- class="h-100 w-100 p-3 overflow-auto position-relative"
+ class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
>
<div
- class="h-100 w-100 d-flex align-items-center position-relative"
+ class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
>
<design-image-stub
image="test.jpg"
@@ -25,10 +25,10 @@ exports[`Design management design presentation component currentCommentForm is e
exports[`Design management design presentation component currentCommentForm is null when isAnnotating is false 1`] = `
<div
- class="h-100 w-100 p-3 overflow-auto position-relative"
+ class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
>
<div
- class="h-100 w-100 d-flex align-items-center position-relative"
+ class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
>
<design-image-stub
image="test.jpg"
@@ -47,10 +47,10 @@ exports[`Design management design presentation component currentCommentForm is n
exports[`Design management design presentation component currentCommentForm is null when isAnnotating is true but annotation position is falsey 1`] = `
<div
- class="h-100 w-100 p-3 overflow-auto position-relative"
+ class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
>
<div
- class="h-100 w-100 d-flex align-items-center position-relative"
+ class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
>
<design-image-stub
image="test.jpg"
@@ -69,10 +69,10 @@ exports[`Design management design presentation component currentCommentForm is n
exports[`Design management design presentation component renders empty state when no image provided 1`] = `
<div
- class="h-100 w-100 p-3 overflow-auto position-relative"
+ class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
>
<div
- class="h-100 w-100 d-flex align-items-center position-relative"
+ class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
>
<!---->
@@ -83,10 +83,10 @@ exports[`Design management design presentation component renders empty state whe
exports[`Design management design presentation component renders image and overlay when image provided 1`] = `
<div
- class="h-100 w-100 p-3 overflow-auto position-relative"
+ class="gl-h-full gl-w-full gl-p-5 overflow-auto gl-relative"
>
<div
- class="h-100 w-100 d-flex align-items-center position-relative"
+ class="gl-h-full gl-w-full gl-display-flex gl-align-items-center gl-relative"
>
<design-image-stub
image="test.jpg"
diff --git a/spec/frontend/design_management/components/__snapshots__/design_scaler_spec.js.snap b/spec/frontend/design_management/components/__snapshots__/design_scaler_spec.js.snap
index cb4575cbd11..0679b485f77 100644
--- a/spec/frontend/design_management/components/__snapshots__/design_scaler_spec.js.snap
+++ b/spec/frontend/design_management/components/__snapshots__/design_scaler_spec.js.snap
@@ -10,7 +10,7 @@ exports[`Design management design scaler component minus and reset buttons are d
disabled="disabled"
>
<span
- class="d-flex-center gl-icon s16"
+ class="gl-display-flex gl-justify-content-center gl-align-items-center gl-icon s16"
>
@@ -48,7 +48,7 @@ exports[`Design management design scaler component minus and reset buttons are e
class="btn"
>
<span
- class="d-flex-center gl-icon s16"
+ class="gl-display-flex gl-justify-content-center gl-align-items-center gl-icon s16"
>
@@ -85,7 +85,7 @@ exports[`Design management design scaler component plus button is disabled when
class="btn"
>
<span
- class="d-flex-center gl-icon s16"
+ class="gl-display-flex gl-justify-content-center gl-align-items-center gl-icon s16"
>
diff --git a/spec/frontend/design_management/components/__snapshots__/image_spec.js.snap b/spec/frontend/design_management/components/__snapshots__/image_spec.js.snap
index acaa62b11eb..7cffd3cf3e8 100644
--- a/spec/frontend/design_management/components/__snapshots__/image_spec.js.snap
+++ b/spec/frontend/design_management/components/__snapshots__/image_spec.js.snap
@@ -2,7 +2,7 @@
exports[`Design management large image component renders image 1`] = `
<div
- class="m-auto js-design-image"
+ class="gl-mx-auto gl-my-auto js-design-image"
>
<!---->
@@ -16,7 +16,7 @@ exports[`Design management large image component renders image 1`] = `
exports[`Design management large image component renders loading state 1`] = `
<div
- class="m-auto js-design-image"
+ class="gl-mx-auto gl-my-auto js-design-image"
isloading="true"
>
<!---->
@@ -31,7 +31,7 @@ exports[`Design management large image component renders loading state 1`] = `
exports[`Design management large image component renders media broken icon on error 1`] = `
<gl-icon-stub
- class="text-secondary-100"
+ class="gl-text-gray-200"
name="media-broken"
size="48"
/>
@@ -39,7 +39,7 @@ exports[`Design management large image component renders media broken icon on er
exports[`Design management large image component sets correct classes and styles if imageStyle is set 1`] = `
<div
- class="m-auto js-design-image"
+ class="gl-mx-auto gl-my-auto js-design-image"
>
<!---->
@@ -54,7 +54,7 @@ exports[`Design management large image component sets correct classes and styles
exports[`Design management large image component zoom sets image style when zoomed 1`] = `
<div
- class="m-auto js-design-image"
+ class="gl-mx-auto gl-my-auto js-design-image"
>
<!---->
diff --git a/spec/frontend/design_management/components/design_note_pin_spec.js b/spec/frontend/design_management/components/design_note_pin_spec.js
index 4e045b58a35..a6219923aca 100644
--- a/spec/frontend/design_management/components/design_note_pin_spec.js
+++ b/spec/frontend/design_management/components/design_note_pin_spec.js
@@ -29,21 +29,4 @@ describe('Design note pin component', () => {
createComponent({ label: 1 });
expect(wrapper.element).toMatchSnapshot();
});
-
- it('should match the snapshot of note when repositioning', () => {
- createComponent({ repositioning: true });
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('pinStyle', () => {
- it('sets cursor to `move` when repositioning = true', () => {
- createComponent({ repositioning: true });
- expect(wrapper.vm.pinStyle.cursor).toBe('move');
- });
-
- it('does not set cursor when repositioning = false', () => {
- createComponent();
- expect(wrapper.vm.pinStyle.cursor).toBe(undefined);
- });
- });
});
diff --git a/spec/frontend/design_management/components/design_overlay_spec.js b/spec/frontend/design_management/components/design_overlay_spec.js
index 673a09320e5..4ef067e3f5e 100644
--- a/spec/frontend/design_management/components/design_overlay_spec.js
+++ b/spec/frontend/design_management/components/design_overlay_spec.js
@@ -202,7 +202,7 @@ describe('Design overlay component', () => {
{ x: position.x, y: position.y },
{ x: 20, y: 20 },
).then(() => {
- expect(findFirstBadge().attributes().style).toBe('left: 20px; top: 20px; cursor: move;');
+ expect(findFirstBadge().attributes().style).toBe('left: 20px; top: 20px;');
});
});
@@ -300,9 +300,7 @@ describe('Design overlay component', () => {
{ x: position.x, y: position.y },
{ x: 20, y: 20 },
).then(() => {
- expect(findCommentBadge().attributes().style).toBe(
- 'left: 20px; top: 20px; cursor: move;',
- );
+ expect(findCommentBadge().attributes().style).toBe('left: 20px; top: 20px;');
});
});
diff --git a/spec/frontend/design_management/components/design_sidebar_spec.js b/spec/frontend/design_management/components/design_sidebar_spec.js
index 700faa8a70f..60266883fcd 100644
--- a/spec/frontend/design_management/components/design_sidebar_spec.js
+++ b/spec/frontend/design_management/components/design_sidebar_spec.js
@@ -43,7 +43,7 @@ describe('Design management design sidebar component', () => {
const findNewDiscussionDisclaimer = () =>
wrapper.find('[data-testid="new-discussion-disclaimer"]');
- function createComponent(props = {}, { enableTodoButton } = {}) {
+ function createComponent(props = {}) {
wrapper = shallowMount(DesignSidebar, {
propsData: {
design,
@@ -58,9 +58,6 @@ describe('Design management design sidebar component', () => {
},
},
stubs: { GlPopover },
- provide: {
- glFeatures: { designManagementTodoButton: enableTodoButton },
- },
});
}
@@ -80,6 +77,12 @@ describe('Design management design sidebar component', () => {
expect(findParticipants().props('participants')).toHaveLength(1);
});
+ it('renders To-Do button', () => {
+ createComponent();
+
+ expect(wrapper.find(DesignTodoButton).exists()).toBe(true);
+ });
+
describe('when has no discussions', () => {
beforeEach(() => {
createComponent({
@@ -245,23 +248,4 @@ describe('Design management design sidebar component', () => {
expect(Cookies.set).toHaveBeenCalledWith(cookieKey, 'true', { expires: 365 * 10 });
});
});
-
- it('does not render To-Do button by default', () => {
- createComponent();
- expect(wrapper.find(DesignTodoButton).exists()).toBe(false);
- });
-
- describe('when `design_management_todo_button` feature flag is enabled', () => {
- beforeEach(() => {
- createComponent({}, { enableTodoButton: true });
- });
-
- it('renders sidebar root element with no top padding', () => {
- expect(wrapper.classes()).toContain('gl-pt-0');
- });
-
- it('renders To-Do button', () => {
- expect(wrapper.find(DesignTodoButton).exists()).toBe(true);
- });
- });
});
diff --git a/spec/frontend/design_management/components/design_todo_button_spec.js b/spec/frontend/design_management/components/design_todo_button_spec.js
index 451c23f0fea..9ebc6ca26a2 100644
--- a/spec/frontend/design_management/components/design_todo_button_spec.js
+++ b/spec/frontend/design_management/components/design_todo_button_spec.js
@@ -111,7 +111,7 @@ describe('Design management design todo button', () => {
});
it('renders correct button text', () => {
- expect(wrapper.text()).toBe('Add a To-Do');
+ expect(wrapper.text()).toBe('Add a To Do');
});
describe('when clicked', () => {
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index 822df1f6472..de276bd300b 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -24,6 +24,7 @@ exports[`Design management list item component with notes renders item with mult
<img
alt="test"
class="gl-display-block gl-mx-auto gl-max-w-full mh-100 design-img"
+ data-qa-filename="test"
data-qa-selector="design_image"
src=""
/>
@@ -94,6 +95,7 @@ exports[`Design management list item component with notes renders item with sing
<img
alt="test"
class="gl-display-block gl-mx-auto gl-max-w-full mh-100 design-img"
+ data-qa-filename="test"
data-qa-selector="design_image"
src=""
/>
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
index a7d6145285c..5eb86d4f9cb 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
@@ -4,15 +4,16 @@ exports[`Design management pagination component hides components when designs ar
exports[`Design management pagination component renders navigation buttons 1`] = `
<div
- class="d-flex align-items-center"
+ class="gl-display-flex gl-align-items-center"
>
0 of 2
<gl-button-group-stub
- class="ml-3 mr-3"
+ class="gl-mx-5"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-previous-design"
disabled="true"
@@ -23,6 +24,7 @@ exports[`Design management pagination component renders navigation buttons 1`] =
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-next-design"
icon="angle-right"
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
index b286a74ebb8..723ac0491a7 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
@@ -19,16 +19,16 @@ exports[`Design management toolbar component renders design and updated data 1`]
</a>
<div
- class="overflow-hidden d-flex align-items-center"
+ class="gl-overflow-hidden gl-display-flex gl-align-items-center"
>
<h2
- class="m-0 str-truncated-100 gl-font-base"
+ class="gl-m-0 str-truncated-100 gl-font-base"
>
test.jpg
</h2>
<small
- class="text-secondary"
+ class="gl-text-gray-500"
>
Updated 1 hour ago by Test Name
</small>
@@ -36,11 +36,12 @@ exports[`Design management toolbar component renders design and updated data 1`]
</div>
<design-navigation-stub
- class="ml-auto flex-shrink-0"
+ class="gl-ml-auto gl-flex-shrink-0"
id="1"
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
href="/-/designs/306/7f747adcd4693afadbe968d7ba7d983349b9012d"
icon="download"
diff --git a/spec/frontend/design_management/components/toolbar/design_navigation_spec.js b/spec/frontend/design_management/components/toolbar/design_navigation_spec.js
index 1c6588a9628..1d9b9c002f9 100644
--- a/spec/frontend/design_management/components/toolbar/design_navigation_spec.js
+++ b/spec/frontend/design_management/components/toolbar/design_navigation_spec.js
@@ -43,7 +43,7 @@ describe('Design management pagination component', () => {
it('renders navigation buttons', () => {
wrapper.setData({
- designs: [{ id: '1' }, { id: '2' }],
+ designCollection: { designs: [{ id: '1' }, { id: '2' }] },
});
return wrapper.vm.$nextTick().then(() => {
@@ -54,7 +54,7 @@ describe('Design management pagination component', () => {
describe('keyboard buttons navigation', () => {
beforeEach(() => {
wrapper.setData({
- designs: [{ filename: '1' }, { filename: '2' }, { filename: '3' }],
+ designCollection: { designs: [{ filename: '1' }, { filename: '2' }, { filename: '3' }] },
});
});
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
index 3d7939df28e..eaa7460ae15 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
@@ -5,6 +5,7 @@ exports[`Design management upload button component renders inverted upload desig
isinverted="true"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
size="small"
@@ -30,6 +31,7 @@ exports[`Design management upload button component renders inverted upload desig
exports[`Design management upload button component renders loading icon 1`] = `
<div>
<gl-button-stub
+ buttontextclasses=""
category="primary"
disabled="true"
icon=""
@@ -62,6 +64,7 @@ exports[`Design management upload button component renders loading icon 1`] = `
exports[`Design management upload button component renders upload design button 1`] = `
<div>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
size="small"
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap
index 9284099b40d..1ca5360fa59 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap
@@ -2,10 +2,10 @@
exports[`Design management dropzone component when dragging renders correct template when drag event contains files 1`] = `
<div
- class="w-100 position-relative"
+ class="gl-w-full gl-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
@@ -39,11 +39,11 @@ exports[`Design management dropzone component when dragging renders correct temp
name="design-dropzone-fade"
>
<div
- class="card design-dropzone-border design-dropzone-overlay w-100 h-100 position-absolute d-flex-center p-3 bg-white"
+ class="card design-dropzone-border design-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
style=""
>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style="display: none;"
>
<h3
@@ -58,7 +58,7 @@ exports[`Design management dropzone component when dragging renders correct temp
</div>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style=""
>
<h3
@@ -78,10 +78,10 @@ exports[`Design management dropzone component when dragging renders correct temp
exports[`Design management dropzone component when dragging renders correct template when drag event contains files and text 1`] = `
<div
- class="w-100 position-relative"
+ class="gl-w-full gl-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
@@ -115,11 +115,11 @@ exports[`Design management dropzone component when dragging renders correct temp
name="design-dropzone-fade"
>
<div
- class="card design-dropzone-border design-dropzone-overlay w-100 h-100 position-absolute d-flex-center p-3 bg-white"
+ class="card design-dropzone-border design-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
style=""
>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style="display: none;"
>
<h3
@@ -134,7 +134,7 @@ exports[`Design management dropzone component when dragging renders correct temp
</div>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style=""
>
<h3
@@ -154,10 +154,10 @@ exports[`Design management dropzone component when dragging renders correct temp
exports[`Design management dropzone component when dragging renders correct template when drag event contains text 1`] = `
<div
- class="w-100 position-relative"
+ class="gl-w-full gl-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
@@ -191,11 +191,11 @@ exports[`Design management dropzone component when dragging renders correct temp
name="design-dropzone-fade"
>
<div
- class="card design-dropzone-border design-dropzone-overlay w-100 h-100 position-absolute d-flex-center p-3 bg-white"
+ class="card design-dropzone-border design-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
style=""
>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
>
<h3
class=""
@@ -209,7 +209,7 @@ exports[`Design management dropzone component when dragging renders correct temp
</div>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style="display: none;"
>
<h3
@@ -229,10 +229,10 @@ exports[`Design management dropzone component when dragging renders correct temp
exports[`Design management dropzone component when dragging renders correct template when drag event is empty 1`] = `
<div
- class="w-100 position-relative"
+ class="gl-w-full gl-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
@@ -266,11 +266,11 @@ exports[`Design management dropzone component when dragging renders correct temp
name="design-dropzone-fade"
>
<div
- class="card design-dropzone-border design-dropzone-overlay w-100 h-100 position-absolute d-flex-center p-3 bg-white"
+ class="card design-dropzone-border design-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
style=""
>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
>
<h3
class=""
@@ -284,7 +284,7 @@ exports[`Design management dropzone component when dragging renders correct temp
</div>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style="display: none;"
>
<h3
@@ -304,10 +304,10 @@ exports[`Design management dropzone component when dragging renders correct temp
exports[`Design management dropzone component when dragging renders correct template when dragging stops 1`] = `
<div
- class="w-100 position-relative"
+ class="gl-w-full gl-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
@@ -341,11 +341,11 @@ exports[`Design management dropzone component when dragging renders correct temp
name="design-dropzone-fade"
>
<div
- class="card design-dropzone-border design-dropzone-overlay w-100 h-100 position-absolute d-flex-center p-3 bg-white"
+ class="card design-dropzone-border design-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
style="display: none;"
>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
>
<h3
class=""
@@ -359,7 +359,7 @@ exports[`Design management dropzone component when dragging renders correct temp
</div>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style="display: none;"
>
<h3
@@ -379,10 +379,10 @@ exports[`Design management dropzone component when dragging renders correct temp
exports[`Design management dropzone component when no slot provided renders default dropzone card 1`] = `
<div
- class="w-100 position-relative"
+ class="gl-w-full gl-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border gl-w-full gl-h-full gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
@@ -416,11 +416,11 @@ exports[`Design management dropzone component when no slot provided renders defa
name="design-dropzone-fade"
>
<div
- class="card design-dropzone-border design-dropzone-overlay w-100 h-100 position-absolute d-flex-center p-3 bg-white"
+ class="card design-dropzone-border design-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
style="display: none;"
>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
>
<h3
class=""
@@ -434,7 +434,7 @@ exports[`Design management dropzone component when no slot provided renders defa
</div>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style="display: none;"
>
<h3
@@ -454,7 +454,7 @@ exports[`Design management dropzone component when no slot provided renders defa
exports[`Design management dropzone component when slot provided renders dropzone with slot content 1`] = `
<div
- class="w-100 position-relative"
+ class="gl-w-full gl-relative"
>
<div>
dropzone slot
@@ -464,11 +464,11 @@ exports[`Design management dropzone component when slot provided renders dropzon
name="design-dropzone-fade"
>
<div
- class="card design-dropzone-border design-dropzone-overlay w-100 h-100 position-absolute d-flex-center p-3 bg-white"
+ class="card design-dropzone-border design-dropzone-overlay gl-w-full gl-h-full gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-3 gl-bg-white"
style="display: none;"
>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
>
<h3
class=""
@@ -482,7 +482,7 @@ exports[`Design management dropzone component when slot provided renders dropzon
</div>
<div
- class="mw-50 text-center"
+ class="mw-50 gl-text-center"
style="display: none;"
>
<h3
diff --git a/spec/frontend/design_management/mock_data/apollo_mock.js b/spec/frontend/design_management/mock_data/apollo_mock.js
index 1c7806c292f..5e41210221b 100644
--- a/spec/frontend/design_management/mock_data/apollo_mock.js
+++ b/spec/frontend/design_management/mock_data/apollo_mock.js
@@ -4,6 +4,7 @@ export const designListQueryResponse = {
id: '1',
issue: {
designCollection: {
+ copyState: 'READY',
designs: {
nodes: [
{
@@ -50,6 +51,34 @@ export const designListQueryResponse = {
},
};
+export const designUploadMutationCreatedResponse = {
+ data: {
+ designManagementUpload: {
+ designs: [
+ {
+ id: '1',
+ event: 'CREATION',
+ filename: 'fox_1.jpg',
+ },
+ ],
+ },
+ },
+};
+
+export const designUploadMutationUpdatedResponse = {
+ data: {
+ designManagementUpload: {
+ designs: [
+ {
+ id: '1',
+ event: 'MODIFICATION',
+ filename: 'fox_1.jpg',
+ },
+ ],
+ },
+ },
+};
+
export const permissionsQueryResponse = {
data: {
project: {
diff --git a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
index b80b7fdb43e..2d29b79e31c 100644
--- a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
@@ -8,7 +8,7 @@ exports[`Design management index page designs does not render toolbar when there
<!---->
<div
- class="mt-4"
+ class="gl-mt-6"
>
<ol
class="list-unstyled row"
@@ -19,6 +19,7 @@ exports[`Design management index page designs does not render toolbar when there
>
<design-dropzone-stub
class="design-list-item design-list-item-new"
+ data-qa-selector="design_dropzone_content"
hasdesigns="true"
/>
</li>
@@ -91,7 +92,7 @@ exports[`Design management index page designs renders designs list and header wi
data-testid="designs-root"
>
<header
- class="row-content-block border-top-0 p-2 d-flex"
+ class="row-content-block gl-border-t-0 gl-p-3 gl-display-flex"
>
<div
class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-w-full"
@@ -110,6 +111,7 @@ exports[`Design management index page designs renders designs list and header wi
class="qa-selector-toolbar gl-display-flex gl-align-items-center"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="gl-mr-4 js-select-all"
icon=""
@@ -126,6 +128,7 @@ exports[`Design management index page designs renders designs list and header wi
buttonclass="gl-mr-3"
buttonsize="small"
buttonvariant="warning"
+ data-qa-selector="archive_button"
>
Archive selected
@@ -139,7 +142,7 @@ exports[`Design management index page designs renders designs list and header wi
</header>
<div
- class="mt-4"
+ class="gl-mt-6"
>
<ol
class="list-unstyled row"
@@ -150,6 +153,7 @@ exports[`Design management index page designs renders designs list and header wi
>
<design-dropzone-stub
class="design-list-item design-list-item-new"
+ data-qa-selector="design_dropzone_content"
hasdesigns="true"
/>
</li>
@@ -171,6 +175,8 @@ exports[`Design management index page designs renders designs list and header wi
<input
class="design-checkbox"
+ data-qa-design="design-1-name"
+ data-qa-selector="design_checkbox"
type="checkbox"
/>
</li>
@@ -192,6 +198,8 @@ exports[`Design management index page designs renders designs list and header wi
<input
class="design-checkbox"
+ data-qa-design="design-2-name"
+ data-qa-selector="design_checkbox"
type="checkbox"
/>
</li>
@@ -213,6 +221,8 @@ exports[`Design management index page designs renders designs list and header wi
<input
class="design-checkbox"
+ data-qa-design="design-3-name"
+ data-qa-selector="design_checkbox"
type="checkbox"
/>
</li>
@@ -233,7 +243,7 @@ exports[`Design management index page designs renders error 1`] = `
<!---->
<div
- class="mt-4"
+ class="gl-mt-6"
>
<gl-alert-stub
dismisslabel="Dismiss"
@@ -264,7 +274,7 @@ exports[`Design management index page designs renders loading icon 1`] = `
<!---->
<div
- class="mt-4"
+ class="gl-mt-6"
>
<gl-loading-icon-stub
color="orange"
@@ -287,7 +297,7 @@ exports[`Design management index page when has no designs renders design dropzon
<!---->
<div
- class="mt-4"
+ class="gl-mt-6"
>
<ol
class="list-unstyled row"
@@ -298,6 +308,7 @@ exports[`Design management index page when has no designs renders design dropzon
>
<design-dropzone-stub
class=""
+ data-qa-selector="design_dropzone_content"
/>
</li>
</ol>
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index c849e4d4ed6..3d6c2561ff6 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -2,10 +2,10 @@
exports[`Design management design index page renders design index 1`] = `
<div
- class="design-detail js-design-detail fixed-top w-100 position-bottom-0 d-flex justify-content-center flex-column flex-lg-row"
+ class="design-detail js-design-detail fixed-top gl-w-full gl-bottom-0 gl-display-flex gl-justify-content-center gl-flex-direction-column gl-lg-flex-direction-row"
>
<div
- class="d-flex overflow-hidden flex-grow-1 flex-column position-relative"
+ class="gl-display-flex gl-overflow-hidden gl-flex-grow-1 gl-flex-direction-column gl-relative"
>
<design-destroyer-stub
filenames="test.jpg"
@@ -23,16 +23,26 @@ exports[`Design management design index page renders design index 1`] = `
/>
<div
- class="design-scaler-wrapper position-absolute mb-4 d-flex-center"
+ class="design-scaler-wrapper gl-absolute gl-mb-6 gl-display-flex gl-justify-content-center gl-align-items-center"
>
<design-scaler-stub />
</div>
</div>
<div
- class="image-notes"
+ class="image-notes gl-pt-0"
>
- <!---->
+ <div
+ class="gl-py-4 gl-mb-4 gl-display-flex gl-justify-content-space-between gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-b-gray-100"
+ >
+ <span>
+ To Do
+ </span>
+
+ <design-todo-button-stub
+ design="[object Object]"
+ />
+ </div>
<h2
class="gl-font-weight-bold gl-mt-0"
@@ -67,6 +77,7 @@ exports[`Design management design index page renders design index 1`] = `
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="link-inherit-color gl-text-body gl-text-decoration-none gl-font-weight-bold gl-mb-4"
data-testid="resolved-comments"
@@ -121,10 +132,10 @@ exports[`Design management design index page renders design index 1`] = `
exports[`Design management design index page sets loading state 1`] = `
<div
- class="design-detail js-design-detail fixed-top w-100 position-bottom-0 d-flex justify-content-center flex-column flex-lg-row"
+ class="design-detail js-design-detail fixed-top gl-w-full gl-bottom-0 gl-display-flex gl-justify-content-center gl-flex-direction-column gl-lg-flex-direction-row"
>
<gl-loading-icon-stub
- class="align-self-center"
+ class="gl-align-self-center"
color="orange"
label="Loading"
size="xl"
@@ -134,10 +145,10 @@ exports[`Design management design index page sets loading state 1`] = `
exports[`Design management design index page with error GlAlert is rendered in correct position with correct content 1`] = `
<div
- class="design-detail js-design-detail fixed-top w-100 position-bottom-0 d-flex justify-content-center flex-column flex-lg-row"
+ class="design-detail js-design-detail fixed-top gl-w-full gl-bottom-0 gl-display-flex gl-justify-content-center gl-flex-direction-column gl-lg-flex-direction-row"
>
<div
- class="d-flex overflow-hidden flex-grow-1 flex-column position-relative"
+ class="gl-display-flex gl-overflow-hidden gl-flex-grow-1 gl-flex-direction-column gl-relative"
>
<design-destroyer-stub
filenames="test.jpg"
@@ -146,7 +157,7 @@ exports[`Design management design index page with error GlAlert is rendered in c
/>
<div
- class="p-3"
+ class="gl-p-5"
>
<gl-alert-stub
dismissible="true"
@@ -172,16 +183,26 @@ exports[`Design management design index page with error GlAlert is rendered in c
/>
<div
- class="design-scaler-wrapper position-absolute mb-4 d-flex-center"
+ class="design-scaler-wrapper gl-absolute gl-mb-6 gl-display-flex gl-justify-content-center gl-align-items-center"
>
<design-scaler-stub />
</div>
</div>
<div
- class="image-notes"
+ class="image-notes gl-pt-0"
>
- <!---->
+ <div
+ class="gl-py-4 gl-mb-4 gl-display-flex gl-justify-content-space-between gl-align-items-center gl-border-b-1 gl-border-b-solid gl-border-b-gray-100"
+ >
+ <span>
+ To Do
+ </span>
+
+ <design-todo-button-stub
+ design="[object Object]"
+ />
+ </div>
<h2
class="gl-font-weight-bold gl-mt-0"
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 661717d29a3..27a91b11448 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -4,6 +4,7 @@ import VueDraggable from 'vuedraggable';
import VueRouter from 'vue-router';
import { GlEmptyState } from '@gitlab/ui';
import createMockApollo from 'jest/helpers/mock_apollo_helper';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Index from '~/design_management/pages/index.vue';
import uploadDesignQuery from '~/design_management/graphql/mutations/upload_design.mutation.graphql';
import DesignDestroyer from '~/design_management/components/design_destroyer.vue';
@@ -21,6 +22,8 @@ import * as utils from '~/design_management/utils/design_management_utils';
import { DESIGN_DETAIL_LAYOUT_CLASSLIST } from '~/design_management/constants';
import {
designListQueryResponse,
+ designUploadMutationCreatedResponse,
+ designUploadMutationUpdatedResponse,
permissionsQueryResponse,
moveDesignMutationResponse,
reorderedDesigns,
@@ -29,6 +32,7 @@ import {
import getDesignListQuery from '~/design_management/graphql/queries/get_design_list.query.graphql';
import permissionsQuery from '~/design_management/graphql/queries/design_permissions.query.graphql';
import moveDesignMutation from '~/design_management/graphql/mutations/move_design.mutation.graphql';
+import { DESIGN_TRACKING_PAGE_NAME } from '~/design_management/utils/tracking';
jest.mock('~/flash.js');
const mockPageEl = {
@@ -92,6 +96,8 @@ describe('Design management index page', () => {
const findDesignCheckboxes = () => wrapper.findAll('.design-checkbox');
const findSelectAllButton = () => wrapper.find('.js-select-all');
const findToolbar = () => wrapper.find('.qa-selector-toolbar');
+ const findDesignCollectionIsCopying = () =>
+    wrapper.find('[data-testid="design-collection-is-copying"]');
const findDeleteButton = () => wrapper.find(DeleteButton);
const findDropzone = () => wrapper.findAll(DesignDropzone).at(0);
const dropzoneClasses = () => findDropzone().classes();
@@ -99,6 +105,7 @@ describe('Design management index page', () => {
const findFirstDropzoneWithDesign = () => wrapper.findAll(DesignDropzone).at(1);
const findDesignsWrapper = () => wrapper.find('[data-testid="designs-root"]');
const findDesigns = () => wrapper.findAll(Design);
+ const draggableAttributes = () => wrapper.find(VueDraggable).vm.$attrs;
async function moveDesigns(localWrapper) {
await jest.runOnlyPendingTimers();
@@ -115,8 +122,8 @@ describe('Design management index page', () => {
function createComponent({
loading = false,
- designs = [],
allVersions = [],
+ designCollection = { designs: mockDesigns, copyState: 'READY' },
createDesign = true,
stubs = {},
mockMutate = jest.fn().mockResolvedValue(),
@@ -124,7 +131,7 @@ describe('Design management index page', () => {
mutate = mockMutate;
const $apollo = {
queries: {
- designs: {
+ designCollection: {
loading,
},
permissions: {
@@ -137,8 +144,8 @@ describe('Design management index page', () => {
wrapper = shallowMount(Index, {
data() {
return {
- designs,
allVersions,
+ designCollection,
permissions: {
createDesign,
},
@@ -200,13 +207,13 @@ describe('Design management index page', () => {
});
it('renders a toolbar with buttons when there are designs', () => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ createComponent({ allVersions: [mockVersion] });
expect(findToolbar().exists()).toBe(true);
});
it('renders designs list and header with upload button', () => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ createComponent({ allVersions: [mockVersion] });
expect(wrapper.element).toMatchSnapshot();
});
@@ -236,7 +243,7 @@ describe('Design management index page', () => {
describe('when has no designs', () => {
beforeEach(() => {
- createComponent();
+ createComponent({ designCollection: { designs: [], copyState: 'READY' } });
});
it('renders design dropzone', () =>
@@ -259,6 +266,21 @@ describe('Design management index page', () => {
}));
});
+ describe('handling design collection copy state', () => {
+ it.each`
+ copyState | isRendered | description
+ ${'IN_PROGRESS'} | ${true} | ${'renders'}
+ ${'READY'} | ${false} | ${'does not render'}
+ ${'ERROR'} | ${false} | ${'does not render'}
+ `(
+ '$description the copying message if design collection copyState is $copyState',
+ ({ copyState, isRendered }) => {
+ createComponent({ designCollection: { designs: [], copyState } });
+ expect(findDesignCollectionIsCopying().exists()).toBe(isRendered);
+ },
+ );
+ });
+
describe('uploading designs', () => {
it('calls mutation on upload', () => {
createComponent({ stubs: { GlEmptyState } });
@@ -282,6 +304,10 @@ describe('Design management index page', () => {
{
__typename: 'Design',
id: expect.anything(),
+ currentUserTodos: {
+ __typename: 'TodoConnection',
+ nodes: [],
+ },
image: '',
imageV432x230: '',
filename: 'test',
@@ -348,7 +374,7 @@ describe('Design management index page', () => {
createComponent({ stubs: { GlEmptyState } });
wrapper.setData({ filesToBeSaved: [{ name: 'test' }] });
- wrapper.vm.onUploadDesignDone();
+ wrapper.vm.onUploadDesignDone(designUploadMutationCreatedResponse);
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.filesToBeSaved).toEqual([]);
expect(wrapper.vm.isSaving).toBeFalsy();
@@ -460,6 +486,34 @@ describe('Design management index page', () => {
expect(createFlash).toHaveBeenCalledWith(message);
});
});
+
+ describe('tracking', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking('_category_', undefined, jest.spyOn);
+
+ createComponent({ stubs: { GlEmptyState } });
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks design creation', () => {
+ wrapper.vm.onUploadDesignDone(designUploadMutationCreatedResponse);
+
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(DESIGN_TRACKING_PAGE_NAME, 'create_design');
+ });
+
+ it('tracks design modification', () => {
+ wrapper.vm.onUploadDesignDone(designUploadMutationUpdatedResponse);
+
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(DESIGN_TRACKING_PAGE_NAME, 'update_design');
+ });
+ });
});
describe('on latest version when has designs', () => {
@@ -531,13 +585,16 @@ describe('Design management index page', () => {
});
it('on latest version when has no designs toolbar buttons are invisible', () => {
- createComponent({ designs: [], allVersions: [mockVersion] });
+ createComponent({
+ designCollection: { designs: [], copyState: 'READY' },
+ allVersions: [mockVersion],
+ });
expect(findToolbar().isVisible()).toBe(false);
});
describe('on non-latest version', () => {
beforeEach(() => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ createComponent({ allVersions: [mockVersion] });
});
it('does not render design checkboxes', async () => {
@@ -626,9 +683,8 @@ describe('Design management index page', () => {
describe('when navigating', () => {
it('ensures fullscreen layout is not applied', () => {
- createComponent(true);
+ createComponent({ loading: true });
- wrapper.vm.$router.push('/');
expect(mockPageEl.classList.remove).toHaveBeenCalledTimes(1);
expect(mockPageEl.classList.remove).toHaveBeenCalledWith(...DESIGN_DETAIL_LAYOUT_CLASSLIST);
});
@@ -637,7 +693,7 @@ describe('Design management index page', () => {
router.replace({
path: '/designs',
});
- createComponent(true);
+ createComponent({ loading: true });
return wrapper.vm.$nextTick().then(() => {
expect(scrollIntoViewMock).toHaveBeenCalled();
@@ -676,6 +732,20 @@ describe('Design management index page', () => {
).toBe('2');
});
+ it('prevents reordering when reorderDesigns mutation is in progress', async () => {
+ createComponentWithApollo({});
+
+ await moveDesigns(wrapper);
+
+ expect(draggableAttributes().disabled).toBe(true);
+
+ await jest.runOnlyPendingTimers(); // kick off the mocked GQL stuff (promises)
+ await wrapper.vm.$nextTick(); // kick off the DOM update
+ await wrapper.vm.$nextTick(); // kick off the DOM update for finally block
+
+ expect(draggableAttributes().disabled).toBe(false);
+ });
+
it('displays flash if mutation had a recoverable error', async () => {
createComponentWithApollo({
moveHandler: jest.fn().mockResolvedValue(moveDesignMutationResponseWithErrors),
diff --git a/spec/frontend/design_management/router_spec.js b/spec/frontend/design_management/router_spec.js
index d4cb9f75a77..fac4f7d368d 100644
--- a/spec/frontend/design_management/router_spec.js
+++ b/spec/frontend/design_management/router_spec.js
@@ -25,7 +25,7 @@ function factory(routeArg) {
mocks: {
$apollo: {
queries: {
- designs: { loading: true },
+ designCollection: { loading: true },
design: { loading: true },
permissions: { loading: true },
},
diff --git a/spec/frontend/design_management/utils/design_management_utils_spec.js b/spec/frontend/design_management/utils/design_management_utils_spec.js
index 7e857d08d25..232cfa2f4ca 100644
--- a/spec/frontend/design_management/utils/design_management_utils_spec.js
+++ b/spec/frontend/design_management/utils/design_management_utils_spec.js
@@ -93,6 +93,10 @@ describe('optimistic responses', () => {
fullPath: '',
notesCount: 0,
event: 'NONE',
+ currentUserTodos: {
+ __typename: 'TodoConnection',
+ nodes: [],
+ },
diffRefs: { __typename: 'DiffRefs', baseSha: '', startSha: '', headSha: '' },
discussions: { __typename: 'DesignDiscussion', nodes: [] },
versions: {
diff --git a/spec/frontend/diff_comments_store_spec.js b/spec/frontend/diff_comments_store_spec.js
deleted file mode 100644
index 6f25c9dd3bc..00000000000
--- a/spec/frontend/diff_comments_store_spec.js
+++ /dev/null
@@ -1,136 +0,0 @@
-/* global CommentsStore */
-
-import '~/diff_notes/models/discussion';
-import '~/diff_notes/models/note';
-import '~/diff_notes/stores/comments';
-
-function createDiscussion(noteId = 1, resolved = true) {
- CommentsStore.create({
- discussionId: 'a',
- noteId,
- canResolve: true,
- resolved,
- resolvedBy: 'test',
- authorName: 'test',
- authorAvatar: 'test',
- noteTruncated: 'test...',
- });
-}
-
-beforeEach(() => {
- CommentsStore.state = {};
-});
-
-describe('New discussion', () => {
- it('creates new discussion', () => {
- expect(Object.keys(CommentsStore.state).length).toBe(0);
- createDiscussion();
-
- expect(Object.keys(CommentsStore.state).length).toBe(1);
- });
-
- it('creates new note in discussion', () => {
- createDiscussion();
- createDiscussion(2);
-
- const discussion = CommentsStore.state.a;
-
- expect(Object.keys(discussion.notes).length).toBe(2);
- });
-});
-
-describe('Get note', () => {
- beforeEach(() => {
- createDiscussion();
- });
-
- it('gets note by ID', () => {
- const note = CommentsStore.get('a', 1);
-
- expect(note).toBeDefined();
- expect(note.id).toBe(1);
- });
-});
-
-describe('Delete discussion', () => {
- beforeEach(() => {
- createDiscussion();
- });
-
- it('deletes discussion by ID', () => {
- CommentsStore.delete('a', 1);
-
- expect(Object.keys(CommentsStore.state).length).toBe(0);
- });
-
- it('deletes discussion when no more notes', () => {
- createDiscussion();
- createDiscussion(2);
-
- expect(Object.keys(CommentsStore.state).length).toBe(1);
- expect(Object.keys(CommentsStore.state.a.notes).length).toBe(2);
-
- CommentsStore.delete('a', 1);
- CommentsStore.delete('a', 2);
-
- expect(Object.keys(CommentsStore.state).length).toBe(0);
- });
-});
-
-describe('Update note', () => {
- beforeEach(() => {
- createDiscussion();
- });
-
- it('updates note to be unresolved', () => {
- CommentsStore.update('a', 1, false, 'test');
-
- const note = CommentsStore.get('a', 1);
-
- expect(note.resolved).toBe(false);
- });
-});
-
-describe('Discussion resolved', () => {
- beforeEach(() => {
- createDiscussion();
- });
-
- it('is resolved with single note', () => {
- const discussion = CommentsStore.state.a;
-
- expect(discussion.isResolved()).toBe(true);
- });
-
- it('is unresolved with 2 notes', () => {
- const discussion = CommentsStore.state.a;
- createDiscussion(2, false);
-
- expect(discussion.isResolved()).toBe(false);
- });
-
- it('is resolved with 2 notes', () => {
- const discussion = CommentsStore.state.a;
- createDiscussion(2);
-
- expect(discussion.isResolved()).toBe(true);
- });
-
- it('resolve all notes', () => {
- const discussion = CommentsStore.state.a;
- createDiscussion(2, false);
-
- discussion.resolveAllNotes();
-
- expect(discussion.isResolved()).toBe(true);
- });
-
- it('unresolve all notes', () => {
- const discussion = CommentsStore.state.a;
- createDiscussion(2);
-
- discussion.unResolveAllNotes();
-
- expect(discussion.isResolved()).toBe(false);
- });
-});
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index cd3a6aa0e28..86560470ada 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -699,7 +699,7 @@ describe('diffs/components/app', () => {
describe('collapsed files', () => {
it('should render the collapsed files warning if there are any collapsed files', () => {
createComponent({}, ({ state }) => {
- state.diffs.diffFiles = [{ viewer: { collapsed: true } }];
+ state.diffs.diffFiles = [{ viewer: { automaticallyCollapsed: true } }];
});
expect(getCollapsedFilesWarning(wrapper).exists()).toBe(true);
@@ -707,7 +707,7 @@ describe('diffs/components/app', () => {
it('should not render the collapsed files warning if the user has dismissed the alert already', async () => {
createComponent({}, ({ state }) => {
- state.diffs.diffFiles = [{ viewer: { collapsed: true } }];
+ state.diffs.diffFiles = [{ viewer: { automaticallyCollapsed: true } }];
});
expect(getCollapsedFilesWarning(wrapper).exists()).toBe(true);
diff --git a/spec/frontend/diffs/components/collapsed_files_warning_spec.js b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
index 670eab5472f..7bbffb7a1cd 100644
--- a/spec/frontend/diffs/components/collapsed_files_warning_spec.js
+++ b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
@@ -50,7 +50,7 @@ describe('CollapsedFilesWarning', () => {
({ limited, containerClasses }) => {
createComponent({ limited });
- expect(wrapper.classes()).toEqual(containerClasses);
+ expect(wrapper.classes()).toEqual(['col-12'].concat(containerClasses));
},
);
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
index c48445790f7..9e4fcddd1b4 100644
--- a/spec/frontend/diffs/components/commit_item_spec.js
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -25,7 +25,7 @@ describe('diffs/components/commit_item', () => {
const getTitleElement = () => wrapper.find('.commit-row-message.item-title');
const getDescElement = () => wrapper.find('pre.commit-row-description');
const getDescExpandElement = () => wrapper.find('.commit-content .js-toggle-button');
- const getShaElement = () => wrapper.find('.commit-sha-group');
+ const getShaElement = () => wrapper.find('[data-testid="commit-sha-group"]');
const getAvatarElement = () => wrapper.find('.user-avatar-link');
const getCommitterElement = () => wrapper.find('.committer');
const getCommitActionsElement = () => wrapper.find('.commit-actions');
@@ -84,8 +84,8 @@ describe('diffs/components/commit_item', () => {
it('renders commit sha', () => {
const shaElement = getShaElement();
- const labelElement = shaElement.find('.label');
- const buttonElement = shaElement.find('button');
+ const labelElement = shaElement.find('[data-testid="commit-sha-group"] button');
+ const buttonElement = shaElement.find('button.input-group-text');
expect(labelElement.text()).toEqual(commit.short_id);
expect(buttonElement.props('text')).toBe(commit.id);
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index a0cad32b9fb..a04486fc5c7 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -1,9 +1,7 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import { GlIcon } from '@gitlab/ui';
import { cloneDeep } from 'lodash';
import DiffFileHeader from '~/diffs/components/diff_file_header.vue';
-import EditButton from '~/diffs/components/edit_button.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import diffDiscussionsMockData from '../mock_data/diff_discussions';
import { truncateSha } from '~/lib/utils/text_utility';
@@ -22,7 +20,7 @@ const diffFile = Object.freeze(
name: 'base.js',
mode: '100644',
readable_text: true,
- icon: 'file-text-o',
+ icon: 'doc-text',
},
}),
);
@@ -76,15 +74,7 @@ describe('DiffFileHeader component', () => {
const findReplacedFileButton = () => wrapper.find({ ref: 'replacedFileButton' });
const findViewFileButton = () => wrapper.find({ ref: 'viewButton' });
const findCollapseIcon = () => wrapper.find({ ref: 'collapseIcon' });
-
- const findIconByName = iconName => {
- const icons = wrapper.findAll(GlIcon).filter(w => w.props('name') === iconName);
- if (icons.length === 0) return icons;
- if (icons.length > 1) {
- throw new Error(`Multiple icons found for ${iconName}`);
- }
- return icons.at(0);
- };
+ const findEditButton = () => wrapper.find({ ref: 'editButton' });
const createComponent = props => {
mockStoreConfig = cloneDeep(defaultMockStoreConfig);
@@ -203,16 +193,6 @@ describe('DiffFileHeader component', () => {
describe('for any file', () => {
const otherModes = Object.keys(diffViewerModes).filter(m => m !== 'mode_changed');
- it('when edit button emits showForkMessage event it is re-emitted', () => {
- createComponent({
- addMergeRequestButtons: true,
- });
- wrapper.find(EditButton).vm.$emit('showForkMessage');
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted().showForkMessage).toBeDefined();
- });
- });
-
it('for mode_changed file mode displays mode changes', () => {
createComponent({
diffFile: {
@@ -271,16 +251,16 @@ describe('DiffFileHeader component', () => {
});
it('should not render edit button', () => {
createComponent({ addMergeRequestButtons: false });
- expect(wrapper.find(EditButton).exists()).toBe(false);
+ expect(findEditButton().exists()).toBe(false);
});
});
describe('when addMergeRequestButtons is true', () => {
describe('without discussions', () => {
- it('renders a disabled toggle discussions button', () => {
+ it('does not render a toggle discussions button', () => {
diffHasDiscussionsResultMock.mockReturnValue(false);
createComponent({ addMergeRequestButtons: true });
- expect(findToggleDiscussionsButton().attributes('disabled')).toBe('true');
+ expect(findToggleDiscussionsButton().exists()).toBe(false);
});
});
@@ -288,7 +268,7 @@ describe('DiffFileHeader component', () => {
it('dispatches toggleFileDiscussionWrappers when user clicks on toggle discussions button', () => {
diffHasDiscussionsResultMock.mockReturnValue(true);
createComponent({ addMergeRequestButtons: true });
- expect(findToggleDiscussionsButton().attributes('disabled')).toBeFalsy();
+ expect(findToggleDiscussionsButton().exists()).toBe(true);
findToggleDiscussionsButton().vm.$emit('click');
expect(
mockStoreConfig.modules.diffs.actions.toggleFileDiscussionWrappers,
@@ -300,7 +280,7 @@ describe('DiffFileHeader component', () => {
createComponent({
addMergeRequestButtons: true,
});
- expect(wrapper.find(EditButton).exists()).toBe(true);
+ expect(findEditButton().exists()).toBe(true);
});
describe('view on environment button', () => {
@@ -334,7 +314,7 @@ describe('DiffFileHeader component', () => {
});
it('should not render edit button', () => {
- expect(wrapper.find(EditButton).exists()).toBe(false);
+ expect(findEditButton().exists()).toBe(false);
});
});
describe('with file blob', () => {
@@ -345,7 +325,7 @@ describe('DiffFileHeader component', () => {
addMergeRequestButtons: true,
});
expect(findViewFileButton().attributes('href')).toBe(viewPath);
- expect(findViewFileButton().attributes('title')).toEqual(
+ expect(findViewFileButton().text()).toEqual(
`View file @ ${diffFile.content_sha.substr(0, 8)}`,
);
});
@@ -375,21 +355,6 @@ describe('DiffFileHeader component', () => {
addMergeRequestButtons: true,
};
- it.each`
- iconName | isShowingFullFile
- ${'doc-expand'} | ${false}
- ${'doc-changes'} | ${true}
- `(
- 'shows $iconName when isShowingFullFile set to $isShowingFullFile',
- ({ iconName, isShowingFullFile }) => {
- createComponent({
- ...fullyNotExpandedFileProps,
- diffFile: { ...fullyNotExpandedFileProps.diffFile, isShowingFullFile },
- });
- expect(findIconByName(iconName).exists()).toBe(true);
- },
- );
-
it('renders expand to full file button if not showing full file already', () => {
createComponent(fullyNotExpandedFileProps);
expect(findExpandButton().exists()).toBe(true);
@@ -455,7 +420,7 @@ describe('DiffFileHeader component', () => {
it('does not show edit button', () => {
createComponent({ diffFile: { ...diffFile, deleted_file: true } });
- expect(wrapper.find(EditButton).exists()).toBe(false);
+ expect(findEditButton().exists()).toBe(false);
});
});
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 4f1376e2c73..a6f0d2bf11d 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -37,7 +37,7 @@ describe('DiffFile', () => {
expect(el.querySelectorAll('.diff-content.hidden').length).toEqual(0);
expect(el.querySelector('.js-file-title')).toBeDefined();
- expect(el.querySelector('.btn-clipboard')).toBeDefined();
+ expect(el.querySelector('[data-testid="diff-file-copy-clipboard"]')).toBeDefined();
expect(el.querySelector('.file-title-name').innerText.indexOf(file_path)).toBeGreaterThan(-1);
expect(el.querySelector('.js-syntax-highlight')).toBeDefined();
@@ -47,7 +47,7 @@ describe('DiffFile', () => {
.then(() => {
expect(el.querySelectorAll('.line_content').length).toBe(8);
expect(el.querySelectorAll('.js-line-expansion-content').length).toBe(1);
- triggerEvent('.btn-clipboard');
+ triggerEvent('[data-testid="diff-file-copy-clipboard"]');
})
.then(done)
.catch(done.fail);
@@ -56,11 +56,11 @@ describe('DiffFile', () => {
it('should track a click event on copy to clip board button', done => {
const el = vm.$el;
- expect(el.querySelector('.btn-clipboard')).toBeDefined();
+ expect(el.querySelector('[data-testid="diff-file-copy-clipboard"]')).toBeDefined();
vm.file.renderIt = true;
vm.$nextTick()
.then(() => {
- triggerEvent('.btn-clipboard');
+ triggerEvent('[data-testid="diff-file-copy-clipboard"]');
expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_copy_file_button', {
label: 'diff_copy_file_path_button',
@@ -181,7 +181,7 @@ describe('DiffFile', () => {
});
it('updates local state when changing file state', done => {
- vm.file.viewer.collapsed = true;
+ vm.file.viewer.automaticallyCollapsed = true;
vm.$nextTick(() => {
expect(vm.isCollapsed).toBe(true);
diff --git a/spec/frontend/diffs/components/diff_row_utils_spec.js b/spec/frontend/diffs/components/diff_row_utils_spec.js
new file mode 100644
index 00000000000..394b6cb1914
--- /dev/null
+++ b/spec/frontend/diffs/components/diff_row_utils_spec.js
@@ -0,0 +1,203 @@
+import * as utils from '~/diffs/components/diff_row_utils';
+import {
+ MATCH_LINE_TYPE,
+ CONTEXT_LINE_TYPE,
+ OLD_NO_NEW_LINE_TYPE,
+ NEW_NO_NEW_LINE_TYPE,
+ EMPTY_CELL_TYPE,
+} from '~/diffs/constants';
+
+const LINE_CODE = 'abc123';
+
+describe('isHighlighted', () => {
+ it('should return true if line is highlighted', () => {
+ const state = { diffs: { highlightedRow: LINE_CODE } };
+ const line = { line_code: LINE_CODE };
+ const isCommented = false;
+ expect(utils.isHighlighted(state, line, isCommented)).toBe(true);
+ });
+
+ it('should return false if line is not highlighted', () => {
+ const state = { diffs: { highlightedRow: 'xxx' } };
+ const line = { line_code: LINE_CODE };
+ const isCommented = false;
+ expect(utils.isHighlighted(state, line, isCommented)).toBe(false);
+ });
+
+ it('should return true if isCommented is true', () => {
+ const state = { diffs: { highlightedRow: 'xxx' } };
+ const line = { line_code: LINE_CODE };
+ const isCommented = true;
+ expect(utils.isHighlighted(state, line, isCommented)).toBe(true);
+ });
+});
+
+describe('isContextLine', () => {
+  it('returns true if line type is context', () => {
+ expect(utils.isContextLine(CONTEXT_LINE_TYPE)).toBe(true);
+ });
+
+  it('returns false if line type is not context', () => {
+ expect(utils.isContextLine('xxx')).toBe(false);
+ });
+});
+
+describe('isMatchLine', () => {
+  it('returns true if line type is match', () => {
+ expect(utils.isMatchLine(MATCH_LINE_TYPE)).toBe(true);
+ });
+
+  it('returns false if line type is not match', () => {
+ expect(utils.isMatchLine('xxx')).toBe(false);
+ });
+});
+
+describe('isMetaLine', () => {
+ it.each`
+ type | expectation
+ ${OLD_NO_NEW_LINE_TYPE} | ${true}
+ ${NEW_NO_NEW_LINE_TYPE} | ${true}
+ ${EMPTY_CELL_TYPE} | ${true}
+ ${'xxx'} | ${false}
+ `('should return $expectation if type is $type', ({ type, expectation }) => {
+ expect(utils.isMetaLine(type)).toBe(expectation);
+ });
+});
+
+describe('shouldRenderCommentButton', () => {
+ it('should return false if comment button is not rendered', () => {
+ expect(utils.shouldRenderCommentButton(true, false)).toBe(false);
+ });
+
+ it('should return false if not logged in', () => {
+ expect(utils.shouldRenderCommentButton(false, true)).toBe(false);
+ });
+
+  it('should return true if logged in and rendered', () => {
+ expect(utils.shouldRenderCommentButton(true, true)).toBe(true);
+ });
+});
+
+describe('hasDiscussions', () => {
+ it('should return false if line is undefined', () => {
+ expect(utils.hasDiscussions()).toBe(false);
+ });
+
+ it('should return false if discussions is undefined', () => {
+ expect(utils.hasDiscussions({})).toBe(false);
+ });
+
+  it('should return false if discussions has a length of 0', () => {
+ expect(utils.hasDiscussions({ discussions: [] })).toBe(false);
+ });
+
+  it('should return true if discussions has a length > 0', () => {
+ expect(utils.hasDiscussions({ discussions: [1] })).toBe(true);
+ });
+});
+
+describe('lineHref', () => {
+ it(`should return #${LINE_CODE}`, () => {
+ expect(utils.lineHref({ line_code: LINE_CODE })).toEqual(`#${LINE_CODE}`);
+ });
+
+ it(`should return '#' if line is undefined`, () => {
+ expect(utils.lineHref()).toEqual('#');
+ });
+
+ it(`should return '#' if line_code is undefined`, () => {
+ expect(utils.lineHref({})).toEqual('#');
+ });
+});
+
+describe('lineCode', () => {
+ it(`should return undefined if line_code is undefined`, () => {
+ expect(utils.lineCode()).toEqual(undefined);
+ expect(utils.lineCode({ left: {} })).toEqual(undefined);
+ expect(utils.lineCode({ right: {} })).toEqual(undefined);
+ });
+
+ it(`should return ${LINE_CODE}`, () => {
+ expect(utils.lineCode({ line_code: LINE_CODE })).toEqual(LINE_CODE);
+ expect(utils.lineCode({ left: { line_code: LINE_CODE } })).toEqual(LINE_CODE);
+ expect(utils.lineCode({ right: { line_code: LINE_CODE } })).toEqual(LINE_CODE);
+ });
+});
+
+describe('classNameMapCell', () => {
+ it.each`
+ line | hll | loggedIn | hovered | expectation
+ ${undefined} | ${true} | ${true} | ${true} | ${[]}
+ ${{ type: 'new' }} | ${false} | ${false} | ${false} | ${['new', { hll: false, 'is-over': false }]}
+ ${{ type: 'new' }} | ${true} | ${true} | ${false} | ${['new', { hll: true, 'is-over': false }]}
+ ${{ type: 'new' }} | ${true} | ${false} | ${true} | ${['new', { hll: true, 'is-over': false }]}
+ ${{ type: 'new' }} | ${true} | ${true} | ${true} | ${['new', { hll: true, 'is-over': true }]}
+ `('should return $expectation', ({ line, hll, loggedIn, hovered, expectation }) => {
+ const classes = utils.classNameMapCell(line, hll, loggedIn, hovered);
+ expect(classes).toEqual(expectation);
+ });
+});
+
+describe('addCommentTooltip', () => {
+ const brokenSymLinkTooltip =
+ 'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
+ const brokenRealTooltip =
+ 'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
+ it('should return default tooltip', () => {
+ expect(utils.addCommentTooltip()).toBeUndefined();
+ });
+
+ it('should return broken symlink tooltip', () => {
+ expect(utils.addCommentTooltip({ commentsDisabled: { wasSymbolic: true } })).toEqual(
+ brokenSymLinkTooltip,
+ );
+ expect(utils.addCommentTooltip({ commentsDisabled: { isSymbolic: true } })).toEqual(
+ brokenSymLinkTooltip,
+ );
+ });
+
+ it('should return broken real tooltip', () => {
+ expect(utils.addCommentTooltip({ commentsDisabled: { wasReal: true } })).toEqual(
+ brokenRealTooltip,
+ );
+ expect(utils.addCommentTooltip({ commentsDisabled: { isReal: true } })).toEqual(
+ brokenRealTooltip,
+ );
+ });
+});
+
+describe('parallelViewLeftLineType', () => {
+ it(`should return ${OLD_NO_NEW_LINE_TYPE}`, () => {
+ expect(utils.parallelViewLeftLineType({ right: { type: NEW_NO_NEW_LINE_TYPE } })).toEqual(
+ OLD_NO_NEW_LINE_TYPE,
+ );
+ });
+
+ it(`should return 'new'`, () => {
+ expect(utils.parallelViewLeftLineType({ left: { type: 'new' } })).toContain('new');
+ });
+
+ it(`should return ${EMPTY_CELL_TYPE}`, () => {
+ expect(utils.parallelViewLeftLineType({})).toContain(EMPTY_CELL_TYPE);
+ });
+
+ it(`should return hll:true`, () => {
+ expect(utils.parallelViewLeftLineType({}, true)[1]).toEqual({ hll: true });
+ });
+});
+
+describe('shouldShowCommentButton', () => {
+ it.each`
+ hover | context | meta | discussions | expectation
+ ${true} | ${false} | ${false} | ${false} | ${true}
+ ${false} | ${false} | ${false} | ${false} | ${false}
+ ${true} | ${true} | ${false} | ${false} | ${false}
+ ${true} | ${true} | ${true} | ${false} | ${false}
+ ${true} | ${true} | ${true} | ${true} | ${false}
+ `(
+ 'should return $expectation when hover is $hover',
+ ({ hover, context, meta, discussions, expectation }) => {
+ expect(utils.shouldShowCommentButton(hover, context, meta, discussions)).toBe(expectation);
+ },
+ );
+});
diff --git a/spec/frontend/diffs/components/diff_table_cell_spec.js b/spec/frontend/diffs/components/diff_table_cell_spec.js
deleted file mode 100644
index 02f5c27eecb..00000000000
--- a/spec/frontend/diffs/components/diff_table_cell_spec.js
+++ /dev/null
@@ -1,279 +0,0 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import Vuex from 'vuex';
-import { TEST_HOST } from 'helpers/test_constants';
-import DiffTableCell from '~/diffs/components/diff_table_cell.vue';
-import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
-import { LINE_POSITION_RIGHT } from '~/diffs/constants';
-import { createStore } from '~/mr_notes/stores';
-import discussionsMockData from '../mock_data/diff_discussions';
-import diffFileMockData from '../mock_data/diff_file';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-const TEST_USER_ID = 'abc123';
-const TEST_USER = { id: TEST_USER_ID };
-const TEST_LINE_NUMBER = 1;
-const TEST_LINE_CODE = 'LC_42';
-const TEST_FILE_HASH = diffFileMockData.file_hash;
-
-describe('DiffTableCell', () => {
- const symlinkishFileTooltip =
- 'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
- const realishFileTooltip =
- 'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
- const otherFileTooltip = 'Add a comment to this line';
-
- let wrapper;
- let line;
- let store;
-
- beforeEach(() => {
- store = createStore();
- store.state.notes.userData = TEST_USER;
-
- line = {
- line_code: TEST_LINE_CODE,
- type: 'new',
- old_line: null,
- new_line: 1,
- discussions: [{ ...discussionsMockData }],
- discussionsExpanded: true,
- text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- meta_data: null,
- };
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- const setWindowLocation = value => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value,
- });
- };
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(DiffTableCell, {
- localVue,
- store,
- propsData: {
- line,
- fileHash: TEST_FILE_HASH,
- contextLinesPath: '/context/lines/path',
- isHighlighted: false,
- ...props,
- },
- });
- };
-
- const findTd = () => wrapper.find({ ref: 'td' });
- const findNoteButton = () => wrapper.find({ ref: 'addDiffNoteButton' });
- const findLineNumber = () => wrapper.find({ ref: 'lineNumberRef' });
- const findTooltip = () => wrapper.find({ ref: 'addNoteTooltip' });
- const findAvatars = () => wrapper.find(DiffGutterAvatars);
-
- describe('td', () => {
- it('highlights when isHighlighted true', () => {
- createComponent({ isHighlighted: true });
-
- expect(findTd().classes()).toContain('hll');
- });
-
- it('does not highlight when isHighlighted false', () => {
- createComponent({ isHighlighted: false });
-
- expect(findTd().classes()).not.toContain('hll');
- });
- });
-
- describe('comment button', () => {
- it.each`
- showCommentButton | userData | query | mergeRefHeadComments | expectation
- ${true} | ${TEST_USER} | ${'diff_head=false'} | ${false} | ${true}
- ${true} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${true}
- ${true} | ${TEST_USER} | ${'diff_head=true'} | ${false} | ${false}
- ${false} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${false}
- ${false} | ${TEST_USER} | ${'bogus'} | ${true} | ${false}
- ${true} | ${null} | ${''} | ${true} | ${false}
- `(
- 'exists is $expectation - with showCommentButton ($showCommentButton) userData ($userData) query ($query)',
- ({ showCommentButton, userData, query, mergeRefHeadComments, expectation }) => {
- store.state.notes.userData = userData;
- gon.features = { mergeRefHeadComments };
- setWindowLocation({ href: `${TEST_HOST}?${query}` });
- createComponent({ showCommentButton });
-
- wrapper.setData({ isCommentButtonRendered: showCommentButton });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoteButton().exists()).toBe(expectation);
- });
- },
- );
-
- it.each`
- isHover | otherProps | discussions | expectation
- ${true} | ${{}} | ${[]} | ${true}
- ${false} | ${{}} | ${[]} | ${false}
- ${true} | ${{ line: { ...line, type: 'context' } }} | ${[]} | ${false}
- ${true} | ${{ line: { ...line, type: 'old-nonewline' } }} | ${[]} | ${false}
- ${true} | ${{}} | ${[{}]} | ${false}
- `(
- 'visible is $expectation - with isHover ($isHover), discussions ($discussions), otherProps ($otherProps)',
- ({ isHover, otherProps, discussions, expectation }) => {
- line.discussions = discussions;
- createComponent({
- showCommentButton: true,
- isHover,
- ...otherProps,
- });
-
- wrapper.setData({
- isCommentButtonRendered: true,
- });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoteButton().isVisible()).toBe(expectation);
- });
- },
- );
-
- it.each`
- disabled | commentsDisabled
- ${'disabled'} | ${true}
- ${undefined} | ${false}
- `(
- 'has attribute disabled=$disabled when the outer component has prop commentsDisabled=$commentsDisabled',
- ({ disabled, commentsDisabled }) => {
- line.commentsDisabled = commentsDisabled;
-
- createComponent({
- showCommentButton: true,
- isHover: true,
- });
-
- wrapper.setData({ isCommentButtonRendered: true });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoteButton().attributes('disabled')).toBe(disabled);
- });
- },
- );
-
- it.each`
- tooltip | commentsDisabled
- ${symlinkishFileTooltip} | ${{ wasSymbolic: true }}
- ${symlinkishFileTooltip} | ${{ isSymbolic: true }}
- ${realishFileTooltip} | ${{ wasReal: true }}
- ${realishFileTooltip} | ${{ isReal: true }}
- ${otherFileTooltip} | ${false}
- `(
- 'has the correct tooltip when commentsDisabled=$commentsDisabled',
- ({ tooltip, commentsDisabled }) => {
- line.commentsDisabled = commentsDisabled;
-
- createComponent({
- showCommentButton: true,
- isHover: true,
- });
-
- wrapper.setData({ isCommentButtonRendered: true });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findTooltip().attributes('title')).toBe(tooltip);
- });
- },
- );
- });
-
- describe('line number', () => {
- describe('without lineNumber prop', () => {
- it('does not render', () => {
- createComponent({ lineType: 'old' });
-
- expect(findLineNumber().exists()).toBe(false);
- });
- });
-
- describe('with lineNumber prop', () => {
- describe.each`
- lineProps | expectedHref | expectedClickArg
- ${{ line_code: TEST_LINE_CODE }} | ${`#${TEST_LINE_CODE}`} | ${TEST_LINE_CODE}
- ${{ line_code: undefined }} | ${'#'} | ${undefined}
- ${{ line_code: undefined, left: { line_code: TEST_LINE_CODE } }} | ${'#'} | ${TEST_LINE_CODE}
- ${{ line_code: undefined, right: { line_code: TEST_LINE_CODE } }} | ${'#'} | ${TEST_LINE_CODE}
- `('with line ($lineProps)', ({ lineProps, expectedHref, expectedClickArg }) => {
- beforeEach(() => {
- jest.spyOn(store, 'dispatch').mockImplementation();
- Object.assign(line, lineProps);
- createComponent({ lineNumber: TEST_LINE_NUMBER });
- });
-
- it('renders', () => {
- expect(findLineNumber().exists()).toBe(true);
- expect(findLineNumber().attributes()).toEqual({
- href: expectedHref,
- 'data-linenumber': TEST_LINE_NUMBER.toString(),
- });
- });
-
- it('on click, dispatches setHighlightedRow', () => {
- expect(store.dispatch).not.toHaveBeenCalled();
-
- findLineNumber().trigger('click');
-
- expect(store.dispatch).toHaveBeenCalledWith('diffs/setHighlightedRow', expectedClickArg);
- });
- });
- });
- });
-
- describe('diff-gutter-avatars', () => {
- describe('with showCommentButton', () => {
- beforeEach(() => {
- jest.spyOn(store, 'dispatch').mockImplementation();
-
- createComponent({ showCommentButton: true });
- });
-
- it('renders', () => {
- expect(findAvatars().props()).toEqual({
- discussions: line.discussions,
- discussionsExpanded: line.discussionsExpanded,
- });
- });
-
- it('toggles line discussion', () => {
- expect(store.dispatch).not.toHaveBeenCalled();
-
- findAvatars().vm.$emit('toggleLineDiscussions');
-
- expect(store.dispatch).toHaveBeenCalledWith('diffs/toggleLineDiscussions', {
- lineCode: TEST_LINE_CODE,
- fileHash: TEST_FILE_HASH,
- expanded: !line.discussionsExpanded,
- });
- });
- });
-
- it.each`
- props | lineProps | expectation
- ${{ showCommentButton: true }} | ${{}} | ${true}
- ${{ showCommentButton: false }} | ${{}} | ${false}
- ${{ showCommentButton: true, linePosition: LINE_POSITION_RIGHT }} | ${{ type: null }} | ${false}
- ${{ showCommentButton: true }} | ${{ discussions: [] }} | ${false}
- `(
- 'exists is $expectation - with props ($props), line ($lineProps)',
- ({ props, lineProps, expectation }) => {
- Object.assign(line, lineProps);
- createComponent(props);
-
- expect(findAvatars().exists()).toBe(expectation);
- },
- );
- });
-});
diff --git a/spec/frontend/diffs/components/edit_button_spec.js b/spec/frontend/diffs/components/edit_button_spec.js
deleted file mode 100644
index 71512c1c4af..00000000000
--- a/spec/frontend/diffs/components/edit_button_spec.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
-import EditButton from '~/diffs/components/edit_button.vue';
-
-const editPath = 'test-path';
-
-describe('EditButton', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(EditButton, {
- propsData: { ...props },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('has correct href attribute', () => {
- createComponent({
- editPath,
- canCurrentUserFork: false,
- });
-
- expect(wrapper.find(GlDeprecatedButton).attributes('href')).toBe(editPath);
- });
-
- it('emits a show fork message event if current user can fork', () => {
- createComponent({
- editPath,
- canCurrentUserFork: true,
- });
- wrapper.find(GlDeprecatedButton).trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('showForkMessage')).toBeTruthy();
- });
- });
-
- it('doesnt emit a show fork message event if current user cannot fork', () => {
- createComponent({
- editPath,
- canCurrentUserFork: false,
- });
- wrapper.find(GlDeprecatedButton).trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('showForkMessage')).toBeFalsy();
- });
- });
-
- it('doesnt emit a show fork message event if current user can modify blob', () => {
- createComponent({
- editPath,
- canCurrentUserFork: true,
- canModifyBlob: true,
- });
- wrapper.find(GlDeprecatedButton).trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('showForkMessage')).toBeFalsy();
- });
- });
-
- it('disables button if editPath is empty', () => {
- createComponent({
- editPath: '',
- canCurrentUserFork: true,
- canModifyBlob: true,
- });
-
- expect(wrapper.find(GlDeprecatedButton).attributes('disabled')).toBe('true');
- });
-});
diff --git a/spec/frontend/diffs/components/inline_diff_table_row_spec.js b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
index 951b3f6258b..c65a39b9083 100644
--- a/spec/frontend/diffs/components/inline_diff_table_row_spec.js
+++ b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
@@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import { TEST_HOST } from 'helpers/test_constants';
import { createStore } from '~/mr_notes/stores';
import InlineDiffTableRow from '~/diffs/components/inline_diff_table_row.vue';
import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
@@ -28,13 +27,6 @@ describe('InlineDiffTableRow', () => {
});
};
- const setWindowLocation = value => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value,
- });
- };
-
beforeEach(() => {
store = createStore();
store.state.notes.userData = TEST_USER;
@@ -122,22 +114,15 @@ describe('InlineDiffTableRow', () => {
const findNoteButton = () => wrapper.find({ ref: 'addDiffNoteButton' });
it.each`
- userData | query | mergeRefHeadComments | expectation
- ${TEST_USER} | ${'diff_head=false'} | ${false} | ${true}
- ${TEST_USER} | ${'diff_head=true'} | ${true} | ${true}
- ${TEST_USER} | ${'diff_head=true'} | ${false} | ${false}
- ${null} | ${''} | ${true} | ${false}
- `(
- 'exists is $expectation - with userData ($userData) query ($query)',
- ({ userData, query, mergeRefHeadComments, expectation }) => {
- store.state.notes.userData = userData;
- gon.features = { mergeRefHeadComments };
- setWindowLocation({ href: `${TEST_HOST}?${query}` });
- createComponent({}, store);
-
- expect(findNoteButton().exists()).toBe(expectation);
- },
- );
+ userData | expectation
+ ${TEST_USER} | ${true}
+ ${null} | ${false}
+ `('exists is $expectation - with userData ($userData)', ({ userData, expectation }) => {
+ store.state.notes.userData = userData;
+ createComponent({}, store);
+
+ expect(findNoteButton().exists()).toBe(expectation);
+ });
it.each`
isHover | line | expectation
diff --git a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
index 13c4ce06f18..13031bd8b66 100644
--- a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
+++ b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
@@ -1,7 +1,6 @@
import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import { TEST_HOST } from 'helpers/test_constants';
import { createStore } from '~/mr_notes/stores';
import ParallelDiffTableRow from '~/diffs/components/parallel_diff_table_row.vue';
import diffFileMockData from '../mock_data/diff_file';
@@ -186,13 +185,6 @@ describe('ParallelDiffTableRow', () => {
});
};
- const setWindowLocation = value => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value,
- });
- };
-
beforeEach(() => {
// eslint-disable-next-line prefer-destructuring
thisLine = diffFileMockData.parallel_diff_lines[2];
@@ -228,19 +220,15 @@ describe('ParallelDiffTableRow', () => {
const findNoteButton = () => wrapper.find({ ref: 'addDiffNoteButtonLeft' });
it.each`
- hover | line | userData | query | mergeRefHeadComments | expectation
- ${true} | ${{}} | ${TEST_USER} | ${'diff_head=false'} | ${false} | ${true}
- ${true} | ${{ line: { left: null } }} | ${TEST_USER} | ${'diff_head=false'} | ${false} | ${false}
- ${true} | ${{}} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${true}
- ${true} | ${{}} | ${TEST_USER} | ${'diff_head=true'} | ${false} | ${false}
- ${true} | ${{}} | ${null} | ${''} | ${true} | ${false}
- ${false} | ${{}} | ${TEST_USER} | ${'diff_head=false'} | ${false} | ${false}
+ hover | line | userData | expectation
+ ${true} | ${{}} | ${TEST_USER} | ${true}
+ ${true} | ${{ line: { left: null } }} | ${TEST_USER} | ${false}
+ ${true} | ${{}} | ${null} | ${false}
+ ${false} | ${{}} | ${TEST_USER} | ${false}
`(
- 'exists is $expectation - with userData ($userData) query ($query)',
- async ({ hover, line, userData, query, mergeRefHeadComments, expectation }) => {
+ 'exists is $expectation - with userData ($userData)',
+ async ({ hover, line, userData, expectation }) => {
store.state.notes.userData = userData;
- gon.features = { mergeRefHeadComments };
- setWindowLocation({ href: `${TEST_HOST}?${query}` });
createComponent(line, store);
if (hover) await wrapper.find('.line_holder').trigger('mouseover');
diff --git a/spec/frontend/diffs/mock_data/diff_discussions.js b/spec/frontend/diffs/mock_data/diff_discussions.js
index 711ab543411..eff949bfb0d 100644
--- a/spec/frontend/diffs/mock_data/diff_discussions.js
+++ b/spec/frontend/diffs/mock_data/diff_discussions.js
@@ -260,11 +260,10 @@ export default {
name: 'CHANGELOG',
mode: '100644',
readable_text: true,
- icon: 'file-text-o',
+ icon: 'doc-text',
},
blob_path: 'CHANGELOG',
blob_name: 'CHANGELOG',
- blob_icon: '<i aria-hidden="true" data-hidden="true" class="fa fa-file-text-o fa-fw"></i>',
file_hash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a',
file_path: 'CHANGELOG.rb',
new_file: false,
diff --git a/spec/frontend/diffs/mock_data/diff_file.js b/spec/frontend/diffs/mock_data/diff_file.js
index c2a4424ee95..d3886819a91 100644
--- a/spec/frontend/diffs/mock_data/diff_file.js
+++ b/spec/frontend/diffs/mock_data/diff_file.js
@@ -7,11 +7,10 @@ export default {
name: 'CHANGELOG',
mode: '100644',
readable_text: true,
- icon: 'file-text-o',
+ icon: 'doc-text',
},
blob_path: 'CHANGELOG',
blob_name: 'CHANGELOG',
- blob_icon: '<i aria-hidden="true" data-hidden="true" class="fa fa-file-text-o fa-fw"></i>',
file_hash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a',
file_identifier_hash: '928f8286952bda02d674b692addcbe077084663a',
file_path: 'CHANGELOG',
@@ -27,7 +26,7 @@ export default {
viewer: {
name: 'text',
error: null,
- collapsed: false,
+ automaticallyCollapsed: false,
},
added_lines: 2,
removed_lines: 0,
diff --git a/spec/frontend/diffs/mock_data/diff_file_unreadable.js b/spec/frontend/diffs/mock_data/diff_file_unreadable.js
index 8c2df45988e..f6cdca9950a 100644
--- a/spec/frontend/diffs/mock_data/diff_file_unreadable.js
+++ b/spec/frontend/diffs/mock_data/diff_file_unreadable.js
@@ -7,11 +7,10 @@ export default {
name: 'CHANGELOG',
mode: '100644',
readable_text: false,
- icon: 'file-text-o',
+ icon: 'doc-text',
},
blob_path: 'CHANGELOG',
blob_name: 'CHANGELOG',
- blob_icon: '<i aria-hidden="true" data-hidden="true" class="fa fa-file-text-o fa-fw"></i>',
file_hash: '1c497fbb3a46b78edf04cc2a2fa33f67e3ffbe2a',
file_path: 'CHANGELOG',
new_file: false,
@@ -26,7 +25,7 @@ export default {
viewer: {
name: 'text',
error: null,
- collapsed: false,
+ automaticallyCollapsed: false,
},
added_lines: 0,
removed_lines: 0,
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 4f647b0cd41..c3e4ee9c531 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -483,14 +483,14 @@ describe('DiffsStoreActions', () => {
id: 1,
renderIt: false,
viewer: {
- collapsed: false,
+ automaticallyCollapsed: false,
},
},
{
id: 2,
renderIt: false,
viewer: {
- collapsed: false,
+ automaticallyCollapsed: false,
},
},
],
@@ -967,7 +967,7 @@ describe('DiffsStoreActions', () => {
{
file_hash: 'HASH',
viewer: {
- collapsed,
+ automaticallyCollapsed: collapsed,
},
renderIt,
},
@@ -1167,7 +1167,7 @@ describe('DiffsStoreActions', () => {
file_hash: 'testhash',
alternate_viewer: { name: updatedViewerName },
};
- const updatedViewer = { name: updatedViewerName, collapsed: false };
+ const updatedViewer = { name: updatedViewerName, automaticallyCollapsed: false };
const testData = [{ rich_text: 'test' }, { rich_text: 'file2' }];
let renamedFile;
let mock;
diff --git a/spec/frontend/diffs/store/getters_spec.js b/spec/frontend/diffs/store/getters_spec.js
index dac5be2d656..0083f1d8b44 100644
--- a/spec/frontend/diffs/store/getters_spec.js
+++ b/spec/frontend/diffs/store/getters_spec.js
@@ -51,13 +51,19 @@ describe('Diffs Module Getters', () => {
describe('hasCollapsedFile', () => {
it('returns true when all files are collapsed', () => {
- localState.diffFiles = [{ viewer: { collapsed: true } }, { viewer: { collapsed: true } }];
+ localState.diffFiles = [
+ { viewer: { automaticallyCollapsed: true } },
+ { viewer: { automaticallyCollapsed: true } },
+ ];
expect(getters.hasCollapsedFile(localState)).toEqual(true);
});
it('returns true when at least one file is collapsed', () => {
- localState.diffFiles = [{ viewer: { collapsed: false } }, { viewer: { collapsed: true } }];
+ localState.diffFiles = [
+ { viewer: { automaticallyCollapsed: false } },
+ { viewer: { automaticallyCollapsed: true } },
+ ];
expect(getters.hasCollapsedFile(localState)).toEqual(true);
});
@@ -139,50 +145,74 @@ describe('Diffs Module Getters', () => {
describe('diffHasExpandedDiscussions', () => {
it('returns true when one of the discussions is expanded', () => {
- discussionMock1.expanded = false;
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [discussionMock, discussionMock],
+ discussionsExpanded: true,
+ },
+ ],
+ };
- expect(
- getters.diffHasExpandedDiscussions(localState, {
- getDiffFileDiscussions: () => [discussionMock, discussionMock],
- })(diffFileMock),
- ).toEqual(true);
+ expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(true);
});
it('returns false when there are no discussions', () => {
- expect(
- getters.diffHasExpandedDiscussions(localState, { getDiffFileDiscussions: () => [] })(
- diffFileMock,
- ),
- ).toEqual(false);
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [],
+ discussionsExpanded: true,
+ },
+ ],
+ };
+ expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(false);
});
it('returns false when no discussion is expanded', () => {
- discussionMock.expanded = false;
- discussionMock1.expanded = false;
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [discussionMock, discussionMock],
+ discussionsExpanded: false,
+ },
+ ],
+ };
- expect(
- getters.diffHasExpandedDiscussions(localState, {
- getDiffFileDiscussions: () => [discussionMock, discussionMock1],
- })(diffFileMock),
- ).toEqual(false);
+ expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(false);
});
});
describe('diffHasDiscussions', () => {
it('returns true when getDiffFileDiscussions returns discussions', () => {
- expect(
- getters.diffHasDiscussions(localState, {
- getDiffFileDiscussions: () => [discussionMock],
- })(diffFileMock),
- ).toEqual(true);
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [discussionMock, discussionMock],
+ discussionsExpanded: false,
+ },
+ ],
+ };
+
+ expect(getters.diffHasDiscussions(localState)(diffFile)).toEqual(true);
});
it('returns false when getDiffFileDiscussions returns no discussions', () => {
- expect(
- getters.diffHasDiscussions(localState, {
- getDiffFileDiscussions: () => [],
- })(diffFileMock),
- ).toEqual(false);
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [],
+ discussionsExpanded: false,
+ },
+ ],
+ };
+
+ expect(getters.diffHasDiscussions(localState)(diffFile)).toEqual(false);
});
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index e1d855ae0cf..a84ad63c695 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -130,14 +130,14 @@ describe('DiffsStoreMutations', () => {
it('should change the collapsed prop from diffFiles', () => {
const diffFile = {
viewer: {
- collapsed: true,
+ automaticallyCollapsed: true,
},
};
const state = { expandAllFiles: true, diffFiles: [diffFile] };
mutations[types.EXPAND_ALL_FILES](state);
- expect(state.diffFiles[0].viewer.collapsed).toEqual(false);
+ expect(state.diffFiles[0].viewer.automaticallyCollapsed).toEqual(false);
});
});
@@ -933,12 +933,12 @@ describe('DiffsStoreMutations', () => {
describe('SET_FILE_COLLAPSED', () => {
it('sets collapsed', () => {
const state = {
- diffFiles: [{ file_path: 'test', viewer: { collapsed: false } }],
+ diffFiles: [{ file_path: 'test', viewer: { automaticallyCollapsed: false } }],
};
mutations[types.SET_FILE_COLLAPSED](state, { filePath: 'test', collapsed: true });
- expect(state.diffFiles[0].viewer.collapsed).toBe(true);
+ expect(state.diffFiles[0].viewer.automaticallyCollapsed).toBe(true);
});
});
diff --git a/spec/frontend/editor/editor_lite_spec.js b/spec/frontend/editor/editor_lite_spec.js
index e566d3a4b38..bc17435c6d4 100644
--- a/spec/frontend/editor/editor_lite_spec.js
+++ b/spec/frontend/editor/editor_lite_spec.js
@@ -1,4 +1,5 @@
import { editor as monacoEditor, languages as monacoLanguages, Uri } from 'monaco-editor';
+import waitForPromises from 'helpers/wait_for_promises';
import Editor from '~/editor/editor_lite';
import { DEFAULT_THEME, themes } from '~/ide/lib/themes';
import { EDITOR_LITE_INSTANCE_ERROR_NO_EL, URI_PREFIX } from '~/editor/constants';
@@ -253,55 +254,125 @@ describe('Base editor', () => {
const MyExt3 = {
foo: foo2,
};
- beforeEach(() => {
- instance = editor.createInstance({ el: editorEl, blobPath, blobContent });
- });
- it('is extensible with the extensions', () => {
- expect(instance.foo).toBeUndefined();
+ describe('basic functionality', () => {
+ beforeEach(() => {
+ instance = editor.createInstance({ el: editorEl, blobPath, blobContent });
+ });
- editor.use(MyExt1);
- expect(instance.foo).toEqual(foo1);
- });
+ it('is extensible with the extensions', () => {
+ expect(instance.foo).toBeUndefined();
- it('does not fail if no extensions supplied', () => {
- const spy = jest.spyOn(global.console, 'error');
- editor.use();
+ instance.use(MyExt1);
+ expect(instance.foo).toEqual(foo1);
+ });
- expect(spy).not.toHaveBeenCalled();
- });
+ it('does not fail if no extensions supplied', () => {
+ const spy = jest.spyOn(global.console, 'error');
+ instance.use();
- it('is extensible with multiple extensions', () => {
- expect(instance.foo).toBeUndefined();
- expect(instance.bar).toBeUndefined();
+ expect(spy).not.toHaveBeenCalled();
+ });
- editor.use([MyExt1, MyExt2]);
+ it('is extensible with multiple extensions', () => {
+ expect(instance.foo).toBeUndefined();
+ expect(instance.bar).toBeUndefined();
- expect(instance.foo).toEqual(foo1);
- expect(instance.bar).toEqual(bar);
- });
+ instance.use([MyExt1, MyExt2]);
- it('uses the last definition of a method in case of an overlap', () => {
- editor.use([MyExt1, MyExt2, MyExt3]);
- expect(instance).toEqual(
- expect.objectContaining({
- foo: foo2,
- bar,
- }),
- );
+ expect(instance.foo).toEqual(foo1);
+ expect(instance.bar).toEqual(bar);
+ });
+
+ it('uses the last definition of a method in case of an overlap', () => {
+ instance.use([MyExt1, MyExt2, MyExt3]);
+ expect(instance).toEqual(
+ expect.objectContaining({
+ foo: foo2,
+ bar,
+ }),
+ );
+ });
+
+      it('correctly resolves references within extensions', () => {
+ const FunctionExt = {
+ inst() {
+ return this;
+ },
+ mod() {
+ return this.getModel();
+ },
+ };
+ instance.use(FunctionExt);
+ expect(instance.inst()).toEqual(editor.instances[0]);
+ });
});
- it('correctly resolves references withing extensions', () => {
- const FunctionExt = {
- inst() {
- return this;
- },
- mod() {
- return this.getModel();
- },
+ describe('extensions as an instance parameter', () => {
+ let editorExtensionSpy;
+ const instanceConstructor = (extensions = []) => {
+ return editor.createInstance({
+ el: editorEl,
+ blobPath,
+ blobContent,
+ blobGlobalId,
+ extensions,
+ });
};
- editor.use(FunctionExt);
- expect(instance.inst()).toEqual(editor.instances[0]);
+
+ beforeEach(() => {
+ editorExtensionSpy = jest.spyOn(Editor, 'pushToImportsArray').mockImplementation(arr => {
+ arr.push(
+ Promise.resolve({
+ default: {},
+ }),
+ );
+ });
+ });
+
+ it.each([undefined, [], [''], ''])(
+ 'does not fail and makes no fetch if extensions is %s',
+ () => {
+ instance = instanceConstructor(null);
+ expect(editorExtensionSpy).not.toHaveBeenCalled();
+ },
+ );
+
+ it.each`
+ type | value | callsCount
+ ${'simple string'} | ${'foo'} | ${1}
+ ${'combined string'} | ${'foo, bar'} | ${2}
+ ${'array of strings'} | ${['foo', 'bar']} | ${2}
+ `('accepts $type as an extension parameter', ({ value, callsCount }) => {
+ instance = instanceConstructor(value);
+ expect(editorExtensionSpy).toHaveBeenCalled();
+ expect(editorExtensionSpy.mock.calls).toHaveLength(callsCount);
+ });
+
+ it.each`
+ desc | path | expectation
+ ${'~/editor'} | ${'foo'} | ${'~/editor/foo'}
+ ${'~/CUSTOM_PATH with leading slash'} | ${'/my_custom_path/bar'} | ${'~/my_custom_path/bar'}
+ ${'~/CUSTOM_PATH without leading slash'} | ${'my_custom_path/delta'} | ${'~/my_custom_path/delta'}
+ `('fetches extensions from $desc path', ({ path, expectation }) => {
+ instance = instanceConstructor(path);
+ expect(editorExtensionSpy).toHaveBeenCalledWith(expect.any(Array), expectation);
+ });
+
+ it('emits editor-ready event after all extensions were applied', async () => {
+ const calls = [];
+ const eventSpy = jest.fn().mockImplementation(() => {
+ calls.push('event');
+ });
+ const useSpy = jest.spyOn(editor, 'use').mockImplementation(() => {
+ calls.push('use');
+ });
+ editorEl.addEventListener('editor-ready', eventSpy);
+ instance = instanceConstructor('foo, bar');
+ await waitForPromises();
+ expect(useSpy.mock.calls).toHaveLength(2);
+ expect(calls).toEqual(['use', 'use', 'event']);
+ });
});
describe('multiple instances', () => {
diff --git a/spec/frontend/emoji/emoji_spec.js b/spec/frontend/emoji/emoji_spec.js
index 53c6d0835bc..f528313ef02 100644
--- a/spec/frontend/emoji/emoji_spec.js
+++ b/spec/frontend/emoji/emoji_spec.js
@@ -1,7 +1,6 @@
-import MockAdapter from 'axios-mock-adapter';
import { trimText } from 'helpers/text_helper';
-import axios from '~/lib/utils/axios_utils';
-import { initEmojiMap, glEmojiTag, EMOJI_VERSION } from '~/emoji';
+import { emojiFixtureMap, initEmojiMock, describeEmojiFields } from 'helpers/emoji';
+import { glEmojiTag, searchEmoji, getEmoji } from '~/emoji';
import isEmojiUnicodeSupported, {
isFlagEmoji,
isRainbowFlagEmoji,
@@ -30,37 +29,11 @@ const emptySupportMap = {
1.1: false,
};
-const emojiFixtureMap = {
- bomb: {
- name: 'bomb',
- moji: '💣',
- unicodeVersion: '6.0',
- },
- construction_worker_tone5: {
- name: 'construction_worker_tone5',
- moji: '👷🏿',
- unicodeVersion: '8.0',
- },
- five: {
- name: 'five',
- moji: '5️⃣',
- unicodeVersion: '3.0',
- },
- grey_question: {
- name: 'grey_question',
- moji: '❔',
- unicodeVersion: '6.0',
- },
-};
-
describe('gl_emoji', () => {
let mock;
- beforeEach(() => {
- mock = new MockAdapter(axios);
- mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200);
-
- return initEmojiMap().catch(() => {});
+ beforeEach(async () => {
+ mock = await initEmojiMock();
});
afterEach(() => {
@@ -378,4 +351,126 @@ describe('gl_emoji', () => {
expect(isSupported).toBeFalsy();
});
});
+
+ describe('getEmoji', () => {
+ const { grey_question } = emojiFixtureMap;
+
+ describe('when query is undefined', () => {
+ it('should return null by default', () => {
+ expect(getEmoji()).toBe(null);
+ });
+
+ it('should return fallback emoji when fallback is true', () => {
+ expect(getEmoji(undefined, true).name).toEqual(grey_question.name);
+ });
+ });
+ });
+
+ describe('searchEmoji', () => {
+ const { atom, grey_question } = emojiFixtureMap;
+ const search = (query, opts) => searchEmoji(query, opts).map(({ name }) => name);
+ const mangle = str => str.slice(0, 1) + str.slice(-1);
+ const partial = str => str.slice(0, 2);
+
+ describe('with default options', () => {
+ const subject = query => search(query);
+
+ describeEmojiFields('with $field', ({ accessor }) => {
+ it(`should match by lower case: ${accessor(atom)}`, () => {
+ expect(subject(accessor(atom))).toContain(atom.name);
+ });
+
+ it(`should match by upper case: ${accessor(atom).toUpperCase()}`, () => {
+ expect(subject(accessor(atom).toUpperCase())).toContain(atom.name);
+ });
+
+ it(`should not match by partial: ${mangle(accessor(atom))}`, () => {
+ expect(subject(mangle(accessor(atom)))).not.toContain(atom.name);
+ });
+ });
+
+ it(`should match by unicode value: ${atom.moji}`, () => {
+ expect(subject(atom.moji)).toContain(atom.name);
+ });
+
+ it('should not return a fallback value', () => {
+ expect(subject('foo bar baz')).toHaveLength(0);
+ });
+
+      it('should not return a fallback value when query is falsy', () => {
+ expect(subject()).toHaveLength(0);
+ });
+ });
+
+ describe('with fuzzy match', () => {
+ const subject = query => search(query, { match: 'fuzzy' });
+
+ describeEmojiFields('with $field', ({ accessor }) => {
+ it(`should match by lower case: ${accessor(atom)}`, () => {
+ expect(subject(accessor(atom))).toContain(atom.name);
+ });
+
+ it(`should match by upper case: ${accessor(atom).toUpperCase()}`, () => {
+ expect(subject(accessor(atom).toUpperCase())).toContain(atom.name);
+ });
+
+ it(`should match by partial: ${mangle(accessor(atom))}`, () => {
+ expect(subject(mangle(accessor(atom)))).toContain(atom.name);
+ });
+ });
+ });
+
+ describe('with contains match', () => {
+ const subject = query => search(query, { match: 'contains' });
+
+ describeEmojiFields('with $field', ({ accessor }) => {
+ it(`should match by lower case: ${accessor(atom)}`, () => {
+ expect(subject(accessor(atom))).toContain(atom.name);
+ });
+
+ it(`should match by upper case: ${accessor(atom).toUpperCase()}`, () => {
+ expect(subject(accessor(atom).toUpperCase())).toContain(atom.name);
+ });
+
+ it(`should match by partial: ${partial(accessor(atom))}`, () => {
+ expect(subject(partial(accessor(atom)))).toContain(atom.name);
+ });
+
+ it(`should not match by mangled: ${mangle(accessor(atom))}`, () => {
+ expect(subject(mangle(accessor(atom)))).not.toContain(atom.name);
+ });
+ });
+ });
+
+ describe('with fallback', () => {
+ const subject = query => search(query, { fallback: true });
+
+ it.each`
+ query
+ ${'foo bar baz'} | ${undefined}
+ `('should return a fallback value when given $query', ({ query }) => {
+ expect(subject(query)).toContain(grey_question.name);
+ });
+ });
+
+ describe('with name and alias fields', () => {
+ const subject = query => search(query, { fields: ['name', 'alias'] });
+
+ it(`should match by name: ${atom.name}`, () => {
+ expect(subject(atom.name)).toContain(atom.name);
+ });
+
+ it(`should match by alias: ${atom.aliases[0]}`, () => {
+ expect(subject(atom.aliases[0])).toContain(atom.name);
+ });
+
+ it(`should not match by description: ${atom.description}`, () => {
+ expect(subject(atom.description)).not.toContain(atom.name);
+ });
+
+ it(`should not match by unicode value: ${atom.moji}`, () => {
+ expect(subject(atom.moji)).not.toContain(atom.name);
+ });
+ });
+ });
});
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index 35ca323f5a9..733bf4378eb 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -1,8 +1,8 @@
-/* eslint-disable import/no-commonjs */
+/* eslint-disable import/no-commonjs, max-classes-per-file */
const path = require('path');
const { ErrorWithStack } = require('jest-util');
-const JSDOMEnvironment = require('jest-environment-jsdom-sixteen');
+const JSDOMEnvironment = require('jest-environment-jsdom');
const { TEST_HOST } = require('./helpers/test_constants');
const ROOT_PATH = path.resolve(__dirname, '../..');
@@ -58,6 +58,14 @@ class CustomEnvironment extends JSDOMEnvironment {
measure: () => null,
getEntriesByName: () => [],
});
+
+ this.global.PerformanceObserver = class {
+ /* eslint-disable no-useless-constructor, no-unused-vars, no-empty-function, class-methods-use-this */
+ constructor(callback) {}
+ disconnect() {}
+ observe(element, initObject) {}
+ /* eslint-enable no-useless-constructor, no-unused-vars, no-empty-function, class-methods-use-this */
+ };
}
async teardown() {
diff --git a/spec/frontend/environments/enable_review_app_button_spec.js b/spec/frontend/environments/enable_review_app_modal_spec.js
index 5549a1737fc..7ea49a6e1d0 100644
--- a/spec/frontend/environments/enable_review_app_button_spec.js
+++ b/spec/frontend/environments/enable_review_app_modal_spec.js
@@ -1,6 +1,6 @@
-import { shallowMount, mount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
-import EnableReviewAppButton from '~/environments/components/enable_review_app_button.vue';
+import EnableReviewAppButton from '~/environments/components/enable_review_app_modal.vue';
describe('Enable Review App Button', () => {
let wrapper;
@@ -9,19 +9,13 @@ describe('Enable Review App Button', () => {
wrapper.destroy();
});
- describe('renders button with text', () => {
- beforeEach(() => {
- wrapper = mount(EnableReviewAppButton);
- });
-
- it('renders Enable Review text', () => {
- expect(wrapper.text()).toBe('Enable review app');
- });
- });
-
describe('renders the modal', () => {
beforeEach(() => {
- wrapper = shallowMount(EnableReviewAppButton);
+ wrapper = shallowMount(EnableReviewAppButton, {
+ propsData: {
+ modalId: 'fake-id',
+ },
+ });
});
it('renders the copyToClipboard button', () => {
diff --git a/spec/frontend/environments/environment_actions_spec.js b/spec/frontend/environments/environment_actions_spec.js
index ebdc4923045..d305f5e90bd 100644
--- a/spec/frontend/environments/environment_actions_spec.js
+++ b/spec/frontend/environments/environment_actions_spec.js
@@ -1,14 +1,22 @@
import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
import { GlLoadingIcon, GlIcon } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import eventHub from '~/environments/event_hub';
import EnvironmentActions from '~/environments/components/environment_actions.vue';
describe('EnvironmentActions Component', () => {
let vm;
+ const findEnvironmentActionsButton = () => vm.find('[data-testid="environment-actions-button"]');
+
beforeEach(() => {
- vm = shallowMount(EnvironmentActions, { propsData: { actions: [] } });
+ vm = shallowMount(EnvironmentActions, {
+ propsData: { actions: [] },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
});
afterEach(() => {
@@ -23,6 +31,11 @@ describe('EnvironmentActions Component', () => {
expect(vm.find('.dropdown-new').attributes('aria-label')).toEqual('Deploy to...');
});
+ it('should render a tooltip', () => {
+ const tooltip = getBinding(findEnvironmentActionsButton().element, 'gl-tooltip');
+ expect(tooltip).toBeDefined();
+ });
+
describe('is loading', () => {
beforeEach(() => {
vm.setData({ isLoading: true });
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index fe32bf918dd..bb114e31063 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -1,9 +1,11 @@
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import EnableReviewAppModal from '~/environments/components/enable_review_app_modal.vue';
import Container from '~/environments/components/container.vue';
import EmptyState from '~/environments/components/empty_state.vue';
import EnvironmentsApp from '~/environments/components/environments_app.vue';
+import axios from '~/lib/utils/axios_utils';
import { environment, folder } from './mock_data';
describe('Environment', () => {
@@ -34,12 +36,18 @@ describe('Environment', () => {
});
};
- const createWrapper = (shallow = false) => {
+ const createWrapper = (shallow = false, props = {}) => {
const fn = shallow ? shallowMount : mount;
- wrapper = fn(EnvironmentsApp, { propsData: mockData });
+ wrapper = extendedWrapper(fn(EnvironmentsApp, { propsData: { ...mockData, ...props } }));
return axios.waitForAll();
};
+ const findEnableReviewAppButton = () => wrapper.findByTestId('enable-review-app');
+ const findEnableReviewAppModal = () => wrapper.findAll(EnableReviewAppModal);
+ const findNewEnvironmentButton = () => wrapper.findByTestId('new-environment');
+ const findEnvironmentsTabAvailable = () => wrapper.find('.js-environments-tab-available > a');
+ const findEnvironmentsTabStopped = () => wrapper.find('.js-environments-tab-stopped > a');
+
beforeEach(() => {
mock = new MockAdapter(axios);
});
@@ -59,19 +67,6 @@ describe('Environment', () => {
it('should render the empty state', () => {
expect(wrapper.find(EmptyState).exists()).toBe(true);
});
-
- describe('when it is possible to enable a review app', () => {
- beforeEach(() => {
- mockRequest(200, { environments: [], review_app: { can_setup_review_app: true } });
- return createWrapper();
- });
-
- it('should render the enable review app button', () => {
- expect(wrapper.find('.js-enable-review-app-button').text()).toContain(
- 'Enable review app',
- );
- });
- });
});
describe('with paginated environments', () => {
@@ -86,7 +81,7 @@ describe('Environment', () => {
return createWrapper();
});
- it('should render a conatiner table with environments', () => {
+ it('should render a container table with environments', () => {
const containerTable = wrapper.find(Container);
expect(containerTable.exists()).toBe(true);
@@ -108,9 +103,16 @@ describe('Environment', () => {
it('should make an API request when using tabs', () => {
jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
- wrapper.find('.js-environments-tab-stopped').trigger('click');
+ findEnvironmentsTabStopped().trigger('click');
expect(wrapper.vm.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
});
+
+ it('should not make the same API request when clicking on the current scope tab', () => {
+ // component starts at available
+ jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
+ findEnvironmentsTabAvailable().trigger('click');
+ expect(wrapper.vm.updateContent).toHaveBeenCalledTimes(0);
+ });
});
});
});
@@ -165,4 +167,65 @@ describe('Environment', () => {
expect(wrapper.find('.text-center > a.btn').text()).toContain('Show all');
});
});
+
+ describe('environment button', () => {
+ describe('when user can create environment', () => {
+ beforeEach(() => {
+ mockRequest(200, { environments: [] });
+ return createWrapper(true);
+ });
+
+ it('should render', () => {
+ expect(findNewEnvironmentButton().exists()).toBe(true);
+ });
+ });
+
+ describe('when user can not create environment', () => {
+ beforeEach(() => {
+ mockRequest(200, { environments: [] });
+ return createWrapper(true, { ...mockData, canCreateEnvironment: false });
+ });
+
+ it('should not render', () => {
+ expect(findNewEnvironmentButton().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('review app modal', () => {
+ describe('when it is not possible to enable a review app', () => {
+ beforeEach(() => {
+ mockRequest(200, { environments: [] });
+ return createWrapper(true);
+ });
+
+ it('should not render the enable review app button', () => {
+ expect(findEnableReviewAppButton().exists()).toBe(false);
+ });
+
+ it('should not render a review app modal', () => {
+ const modal = findEnableReviewAppModal();
+ expect(modal).toHaveLength(0);
+ expect(modal.exists()).toBe(false);
+ });
+ });
+
+ describe('when it is possible to enable a review app', () => {
+ beforeEach(() => {
+ mockRequest(200, { environments: [], review_app: { can_setup_review_app: true } });
+ return createWrapper(true);
+ });
+
+ it('should render the enable review app button', () => {
+ expect(findEnableReviewAppButton().exists()).toBe(true);
+ expect(findEnableReviewAppButton().text()).toContain('Enable review app');
+ });
+
+ it('should render only one review app modal', () => {
+ const modal = findEnableReviewAppModal();
+ expect(modal).toHaveLength(1);
+ expect(modal.at(0).exists()).toBe(true);
+ });
+ });
+ });
});
diff --git a/spec/frontend/environments/folder/environments_folder_view_spec.js b/spec/frontend/environments/folder/environments_folder_view_spec.js
index f33c8de0094..f55cb851dde 100644
--- a/spec/frontend/environments/folder/environments_folder_view_spec.js
+++ b/spec/frontend/environments/folder/environments_folder_view_spec.js
@@ -1,10 +1,10 @@
+import { GlPagination } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { removeBreakLine, removeWhitespace } from 'helpers/text_helper';
-import { GlPagination } from '@gitlab/ui';
-import axios from '~/lib/utils/axios_utils';
-import EnvironmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
import EnvironmentTable from '~/environments/components/environments_table.vue';
+import EnvironmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
+import axios from '~/lib/utils/axios_utils';
import { environmentsList } from '../mock_data';
describe('Environments Folder View', () => {
@@ -46,9 +46,10 @@ describe('Environments Folder View', () => {
wrapper = mount(EnvironmentsFolderViewComponent, { propsData: mockData });
};
- const findEnvironmentsTabAvailable = () => wrapper.find('.js-environments-tab-available');
+ const findEnvironmentsTabAvailable = () =>
+ wrapper.find('[data-testid="environments-tab-available"]');
- const findEnvironmentsTabStopped = () => wrapper.find('.js-environments-tab-stopped');
+ const findEnvironmentsTabStopped = () => wrapper.find('[data-testid="environments-tab-stopped"]');
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -88,9 +89,9 @@ describe('Environments Folder View', () => {
});
it('should render parent folder name', () => {
- expect(removeBreakLine(removeWhitespace(wrapper.find('.js-folder-name').text()))).toContain(
- 'Environments / review',
- );
+ expect(
+ removeBreakLine(removeWhitespace(wrapper.find('[data-testid="folder-name"]').text())),
+ ).toContain('Environments / review');
});
describe('pagination', () => {
diff --git a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
index 21edcb7235a..f4a765a3d73 100644
--- a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
+++ b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
@@ -1,7 +1,6 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import { GlFormInput } from '@gitlab/ui';
-import LoadingButton from '~/vue_shared/components/loading_button.vue';
+import { GlFormInput, GlButton } from '@gitlab/ui';
import ErrorTrackingForm from '~/error_tracking_settings/components/error_tracking_form.vue';
import createStore from '~/error_tracking_settings/store';
import { defaultProps } from '../mock';
@@ -43,7 +42,7 @@ describe('error tracking settings form', () => {
.attributes('id'),
).toBe('error-tracking-token');
- expect(wrapper.findAll(LoadingButton).exists()).toBe(true);
+ expect(wrapper.findAll(GlButton).exists()).toBe(true);
});
it('is rendered with labels and placeholders', () => {
@@ -72,9 +71,10 @@ describe('error tracking settings form', () => {
});
it('shows loading spinner', () => {
- const { label, loading } = wrapper.find(LoadingButton).props();
- expect(loading).toBe(true);
- expect(label).toBe('Connecting');
+ const buttonEl = wrapper.find(GlButton);
+
+ expect(buttonEl.props('loading')).toBe(true);
+ expect(buttonEl.text()).toBe('Connecting');
});
});
diff --git a/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
new file mode 100644
index 00000000000..0e364c47f8d
--- /dev/null
+++ b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
@@ -0,0 +1,159 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal, GlSprintf } from '@gitlab/ui';
+import Component from '~/feature_flags/components/configure_feature_flags_modal.vue';
+import Callout from '~/vue_shared/components/callout.vue';
+
+describe('Configure Feature Flags Modal', () => {
+ const mockEvent = { preventDefault: jest.fn() };
+ const provide = {
+ projectName: 'fakeProjectName',
+ featureFlagsHelpPagePath: '/help/path',
+ featureFlagsClientLibrariesHelpPagePath: '/help/path/#flags',
+ featureFlagsClientExampleHelpPagePath: '/feature-flags#clientexample',
+ unleashApiUrl: '/api/url',
+ };
+
+ const propsData = {
+ instanceId: 'instance-id-token',
+ isRotating: false,
+ hasRotateError: false,
+ canUserRotateToken: true,
+ };
+
+ let wrapper;
+ const factory = (props = {}, { mountFn = shallowMount, ...options } = {}) => {
+ wrapper = mountFn(Component, {
+ provide,
+ stubs: { GlSprintf },
+ propsData: {
+ ...propsData,
+ ...props,
+ },
+ ...options,
+ });
+ };
+
+ const findGlModal = () => wrapper.find(GlModal);
+ const findPrimaryAction = () => findGlModal().props('actionPrimary');
+ const findProjectNameInput = () => wrapper.find('#project_name_verification');
+ const findDangerCallout = () =>
+ wrapper.findAll(Callout).filter(c => c.props('category') === 'danger');
+
+ describe('idle', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory);
+
+ it('should have Primary and Cancel actions', () => {
+ expect(findGlModal().props('actionCancel').text).toBe('Close');
+ expect(findPrimaryAction().text).toBe('Regenerate instance ID');
+ });
+
+    it('should disable the primary action by default', async () => {
+ const [{ disabled }] = findPrimaryAction().attributes;
+ expect(disabled).toBe(true);
+ });
+
+ it('should emit a `token` event when clicking on the Primary action', async () => {
+ findGlModal().vm.$emit('primary', mockEvent);
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('token')).toEqual([[]]);
+ expect(mockEvent.preventDefault).toHaveBeenCalled();
+ });
+
+ it('should clear the project name input after generating the token', async () => {
+ findProjectNameInput().vm.$emit('input', provide.projectName);
+ findGlModal().vm.$emit('primary', mockEvent);
+ await wrapper.vm.$nextTick();
+ expect(findProjectNameInput().attributes('value')).toBe('');
+ });
+
+ it('should provide an input for filling the project name', () => {
+ expect(findProjectNameInput().exists()).toBe(true);
+ expect(findProjectNameInput().attributes('value')).toBe('');
+ });
+
+    it('should display a help text', () => {
+ const help = wrapper.find('p');
+ expect(help.text()).toMatch(/More Information/);
+ });
+
+ it('should have links to the documentation', () => {
+ expect(wrapper.find('[data-testid="help-link"]').attributes('href')).toBe(
+ provide.featureFlagsHelpPagePath,
+ );
+ expect(wrapper.find('[data-testid="help-client-link"]').attributes('href')).toBe(
+ provide.featureFlagsClientLibrariesHelpPagePath,
+ );
+ });
+
+ it('should display one and only one danger callout', () => {
+ const dangerCallout = findDangerCallout();
+ expect(dangerCallout.length).toBe(1);
+ expect(dangerCallout.at(0).props('message')).toMatch(/Regenerating the instance ID/);
+ });
+
+ it('should display a message asking to fill the project name', () => {
+ expect(wrapper.find('[data-testid="prevent-accident-text"]').text()).toMatch(
+ provide.projectName,
+ );
+ });
+
+ it('should display the api URL in an input box', () => {
+ const input = wrapper.find('#api_url');
+ expect(input.element.value).toBe('/api/url');
+ });
+
+ it('should display the instance ID in an input box', () => {
+ const input = wrapper.find('#instance_id');
+ expect(input.element.value).toBe('instance-id-token');
+ });
+ });
+
+ describe('verified', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory);
+
+ it('should enable the primary action', async () => {
+ findProjectNameInput().vm.$emit('input', provide.projectName);
+ await wrapper.vm.$nextTick();
+ const [{ disabled }] = findPrimaryAction().attributes;
+ expect(disabled).toBe(false);
+ });
+ });
+
+ describe('cannot rotate token', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory.bind(null, { canUserRotateToken: false }));
+
+ it('should not display the primary action', async () => {
+ expect(findPrimaryAction()).toBe(null);
+ });
+
+    it('should not display regenerating instance ID', async () => {
+ expect(findDangerCallout().exists()).toBe(false);
+ });
+
+ it('should disable the project name input', async () => {
+ expect(findProjectNameInput().exists()).toBe(false);
+ });
+ });
+
+ describe('has rotate error', () => {
+ afterEach(() => wrapper.destroy());
+    beforeEach(factory.bind(null, { hasRotateError: true }));
+
+ it('should display an error', async () => {
+ expect(wrapper.find('.text-danger')).toExist();
+ expect(wrapper.find('[name="warning"]')).toExist();
+ });
+ });
+
+ describe('is rotating', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory.bind(null, { isRotating: true }));
+
+ it('should disable the project name input', async () => {
+ expect(findProjectNameInput().attributes('disabled')).toBeTruthy();
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
new file mode 100644
index 00000000000..6a394251060
--- /dev/null
+++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
@@ -0,0 +1,183 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { GlToggle, GlAlert } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import { mockTracking } from 'helpers/tracking_helper';
+import { LEGACY_FLAG, NEW_VERSION_FLAG, NEW_FLAG_ALERT } from '~/feature_flags/constants';
+import Form from '~/feature_flags/components/form.vue';
+import createStore from '~/feature_flags/store/edit';
+import EditFeatureFlag from '~/feature_flags/components/edit_feature_flag.vue';
+import axios from '~/lib/utils/axios_utils';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const userCalloutId = 'feature_flags_new_version';
+const userCalloutsPath = `${TEST_HOST}/user_callouts`;
+
+describe('Edit feature flag form', () => {
+ let wrapper;
+ let mock;
+
+ const store = createStore({
+ path: '/feature_flags',
+ endpoint: `${TEST_HOST}/feature_flags.json`,
+ });
+
+ const factory = (opts = {}) => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ wrapper = shallowMount(EditFeatureFlag, {
+ localVue,
+ store,
+ provide: {
+ showUserCallout: true,
+ userCalloutId,
+ userCalloutsPath,
+ glFeatures: {
+ featureFlagsNewVersion: true,
+ },
+ ...opts,
+ },
+ });
+ };
+
+ beforeEach(done => {
+ mock = new MockAdapter(axios);
+ mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(200, {
+ id: 21,
+ iid: 5,
+ active: true,
+ created_at: '2019-01-17T17:27:39.778Z',
+ updated_at: '2019-01-17T17:27:39.778Z',
+ name: 'feature_flag',
+ description: '',
+ version: LEGACY_FLAG,
+ edit_path: '/h5bp/html5-boilerplate/-/feature_flags/21/edit',
+ destroy_path: '/h5bp/html5-boilerplate/-/feature_flags/21',
+ scopes: [
+ {
+ id: 21,
+ active: false,
+ environment_scope: '*',
+ created_at: '2019-01-17T17:27:39.778Z',
+ updated_at: '2019-01-17T17:27:39.778Z',
+ },
+ ],
+ });
+ factory();
+ setImmediate(() => done());
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ const findAlert = () => wrapper.find(GlAlert);
+
+ it('should display the iid', () => {
+ expect(wrapper.find('h3').text()).toContain('^5');
+ });
+
+ it('should render the toggle', () => {
+ expect(wrapper.find(GlToggle).exists()).toBe(true);
+ });
+
+ it('should set the value of the toggle to whether or not the flag is active', () => {
+ expect(wrapper.find(GlToggle).props('value')).toBe(true);
+ });
+
+ it('should not alert users that feature flags are changing soon', () => {
+ expect(findAlert().text()).toContain('GitLab is moving to a new way of managing feature flags');
+ });
+
+ describe('with error', () => {
+ it('should render the error', () => {
+ store.dispatch('receiveUpdateFeatureFlagError', { message: ['The name is required'] });
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.alert-danger').exists()).toEqual(true);
+ expect(wrapper.find('.alert-danger').text()).toContain('The name is required');
+ });
+ });
+ });
+
+ describe('without error', () => {
+ it('renders form title', () => {
+ expect(wrapper.text()).toContain('^5 feature_flag');
+ });
+
+ it('should render feature flag form', () => {
+ expect(wrapper.find(Form).exists()).toEqual(true);
+ });
+
+ it('should set the version of the form from the feature flag', () => {
+ expect(wrapper.find(Form).props('version')).toBe(LEGACY_FLAG);
+
+ mock.resetHandlers();
+
+ mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(200, {
+ id: 21,
+ iid: 5,
+ active: true,
+ created_at: '2019-01-17T17:27:39.778Z',
+ updated_at: '2019-01-17T17:27:39.778Z',
+ name: 'feature_flag',
+ description: '',
+ version: NEW_VERSION_FLAG,
+ edit_path: '/h5bp/html5-boilerplate/-/feature_flags/21/edit',
+ destroy_path: '/h5bp/html5-boilerplate/-/feature_flags/21',
+ strategies: [],
+ });
+
+ factory();
+
+ return axios.waitForAll().then(() => {
+ expect(wrapper.find(Form).props('version')).toBe(NEW_VERSION_FLAG);
+ });
+ });
+
+ it('should track when the toggle is clicked', () => {
+ const toggle = wrapper.find(GlToggle);
+ const spy = mockTracking('_category_', toggle.element, jest.spyOn);
+
+ toggle.trigger('click');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'feature_flag_toggle',
+ });
+ });
+ });
+
+ describe('without new version flags', () => {
+ beforeEach(() => factory({ glFeatures: { featureFlagsNewVersion: false } }));
+
+ it('should alert users that feature flags are changing soon', () => {
+ expect(findAlert().text()).toBe(NEW_FLAG_ALERT);
+ });
+ });
+
+ describe('dismissing new version alert', () => {
+ beforeEach(() => {
+ factory({ glFeatures: { featureFlagsNewVersion: false } });
+ mock.onPost(userCalloutsPath, { feature_name: userCalloutId }).reply(200);
+ findAlert().vm.$emit('dismiss');
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('should hide the alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should send the dismissal event', () => {
+ expect(mock.history.post.length).toBe(1);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/environments_dropdown_spec.js b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
new file mode 100644
index 00000000000..917f5f5ccd3
--- /dev/null
+++ b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
@@ -0,0 +1,147 @@
+import MockAdapter from 'axios-mock-adapter';
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon, GlDeprecatedButton, GlSearchBoxByType } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import EnvironmentsDropdown from '~/feature_flags/components/environments_dropdown.vue';
+import axios from '~/lib/utils/axios_utils';
+import httpStatusCodes from '~/lib/utils/http_status';
+
+describe('Feature flags > Environments dropdown ', () => {
+ let wrapper;
+ let mock;
+ const results = ['production', 'staging'];
+ const factory = props => {
+ wrapper = shallowMount(EnvironmentsDropdown, {
+ propsData: {
+ ...props,
+ },
+ provide: {
+ environmentsEndpoint: `${TEST_HOST}/environments.json'`,
+ },
+ });
+ };
+
+ const findEnvironmentSearchInput = () => wrapper.find(GlSearchBoxByType);
+ const findDropdownMenu = () => wrapper.find('.dropdown-menu');
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ describe('without value', () => {
+ it('renders the placeholder', () => {
+ factory();
+ expect(findEnvironmentSearchInput().vm.$attrs.placeholder).toBe('Search an environment spec');
+ });
+ });
+
+ describe('with value', () => {
+ it('sets filter to equal the value', () => {
+ factory({ value: 'production' });
+ expect(findEnvironmentSearchInput().props('value')).toBe('production');
+ });
+ });
+
+ describe('on focus', () => {
+ it('sets results with the received data', async () => {
+ mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(httpStatusCodes.OK, results);
+ factory();
+ findEnvironmentSearchInput().vm.$emit('focus');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.dropdown-content > ul').exists()).toBe(true);
+ expect(wrapper.findAll('.dropdown-content > ul > li').exists()).toBe(true);
+ });
+ });
+
+ describe('on keyup', () => {
+ it('sets results with the received data', async () => {
+ mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(httpStatusCodes.OK, results);
+ factory();
+ findEnvironmentSearchInput().vm.$emit('keyup');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.dropdown-content > ul').exists()).toBe(true);
+ expect(wrapper.findAll('.dropdown-content > ul > li').exists()).toBe(true);
+ });
+ });
+
+ describe('on input change', () => {
+ describe('on success', () => {
+ beforeEach(async () => {
+ mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(httpStatusCodes.OK, results);
+ factory();
+ findEnvironmentSearchInput().vm.$emit('focus');
+ findEnvironmentSearchInput().vm.$emit('input', 'production');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('sets filter value', () => {
+ expect(findEnvironmentSearchInput().props('value')).toBe('production');
+ });
+
+ describe('with received data', () => {
+ it('sets is loading to false', () => {
+ expect(wrapper.vm.isLoading).toBe(false);
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ });
+
+ it('shows the suggestions', () => {
+ expect(findDropdownMenu().exists()).toBe(true);
+ });
+
+ it('emits event when a suggestion is clicked', async () => {
+ const button = wrapper
+ .findAll(GlDeprecatedButton)
+ .filter(b => b.text() === 'production')
+ .at(0);
+ button.vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('selectEnvironment')).toEqual([['production']]);
+ });
+ });
+
+ describe('on click clear button', () => {
+ beforeEach(async () => {
+ wrapper.find(GlDeprecatedButton).vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ });
+
+ it('resets filter value', () => {
+ expect(findEnvironmentSearchInput().props('value')).toBe('');
+ });
+
+ it('closes list of suggestions', () => {
+ expect(wrapper.vm.showSuggestions).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('on click create button', () => {
+ beforeEach(async () => {
+ mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(httpStatusCodes.OK, []);
+ factory();
+ findEnvironmentSearchInput().vm.$emit('focus');
+ findEnvironmentSearchInput().vm.$emit('input', 'production');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('emits create event', async () => {
+ wrapper
+ .findAll(GlDeprecatedButton)
+ .at(0)
+ .vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('createClicked')).toEqual([['production']]);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/feature_flags_spec.js b/spec/frontend/feature_flags/components/feature_flags_spec.js
new file mode 100644
index 00000000000..3c1234fea94
--- /dev/null
+++ b/spec/frontend/feature_flags/components/feature_flags_spec.js
@@ -0,0 +1,371 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import MockAdapter from 'axios-mock-adapter';
+import { GlAlert, GlEmptyState, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import Api from '~/api';
+import createStore from '~/feature_flags/store/index';
+import FeatureFlagsTab from '~/feature_flags/components/feature_flags_tab.vue';
+import FeatureFlagsComponent from '~/feature_flags/components/feature_flags.vue';
+import FeatureFlagsTable from '~/feature_flags/components/feature_flags_table.vue';
+import UserListsTable from '~/feature_flags/components/user_lists_table.vue';
+import ConfigureFeatureFlagsModal from '~/feature_flags/components/configure_feature_flags_modal.vue';
+import { FEATURE_FLAG_SCOPE, USER_LIST_SCOPE } from '~/feature_flags/constants';
+import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
+import axios from '~/lib/utils/axios_utils';
+import { getRequestData, userList } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Feature flags', () => {
+ const mockData = {
+ canUserConfigure: true,
+ csrfToken: 'testToken',
+ featureFlagsClientExampleHelpPagePath: '/help/feature-flags#client-example',
+ featureFlagsClientLibrariesHelpPagePath: '/help/feature-flags#unleash-clients',
+ featureFlagsHelpPagePath: '/help/feature-flags',
+ featureFlagsLimit: '200',
+ featureFlagsLimitExceeded: false,
+ newFeatureFlagPath: 'feature-flags/new',
+ newUserListPath: '/user-list/new',
+ unleashApiUrl: `${TEST_HOST}/api/unleash`,
+ projectName: 'fakeProjectName',
+ errorStateSvgPath: '/assets/illustrations/feature_flag.svg',
+ };
+
+ const mockState = {
+ endpoint: `${TEST_HOST}/endpoint.json`,
+ projectId: '8',
+ unleashApiInstanceId: 'oP6sCNRqtRHmpy1gw2-F',
+ };
+
+ let wrapper;
+ let mock;
+ let store;
+
+ const factory = (provide = mockData, fn = shallowMount) => {
+ store = createStore(mockState);
+ wrapper = fn(FeatureFlagsComponent, {
+ localVue,
+ store,
+ provide,
+ stubs: {
+ FeatureFlagsTab,
+ },
+ });
+ };
+
+ const configureButton = () => wrapper.find('[data-testid="ff-configure-button"]');
+ const newButton = () => wrapper.find('[data-testid="ff-new-button"]');
+ const newUserListButton = () => wrapper.find('[data-testid="ff-new-list-button"]');
+ const limitAlert = () => wrapper.find(GlAlert);
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ jest.spyOn(Api, 'fetchFeatureFlagUserLists').mockResolvedValue({
+ data: [userList],
+ headers: {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '8',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '40',
+ 'X-Total-Pages': '5',
+ },
+ });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when limit exceeded', () => {
+ const provideData = { ...mockData, featureFlagsLimitExceeded: true };
+
+ beforeEach(done => {
+ mock
+ .onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .reply(200, getRequestData, {});
+ factory(provideData);
+ setImmediate(done);
+ });
+
+ it('makes the new feature flag button do nothing if clicked', () => {
+ expect(newButton().exists()).toBe(true);
+ expect(newButton().props('disabled')).toBe(false);
+ expect(newButton().props('href')).toBe(undefined);
+ });
+
+ it('shows a feature flags limit reached alert', () => {
+ expect(limitAlert().exists()).toBe(true);
+ expect(
+ limitAlert()
+ .find(GlSprintf)
+ .attributes('message'),
+ ).toContain('Feature flags limit reached');
+ });
+
+ describe('when the alert is dismissed', () => {
+ beforeEach(async () => {
+ await limitAlert().vm.$emit('dismiss');
+ });
+
+ it('hides the alert', async () => {
+ expect(limitAlert().exists()).toBe(false);
+ });
+
+ it('re-shows the alert if the new feature flag button is clicked', async () => {
+ await newButton().vm.$emit('click');
+
+ expect(limitAlert().exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('without permissions', () => {
+ const provideData = {
+ ...mockData,
+ canUserConfigure: false,
+ canUserRotateToken: false,
+ newFeatureFlagPath: null,
+ newUserListPath: null,
+ };
+
+ beforeEach(done => {
+ mock
+ .onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .reply(200, getRequestData, {});
+ factory(provideData);
+ setImmediate(done);
+ });
+
+ it('does not render configure button', () => {
+ expect(configureButton().exists()).toBe(false);
+ });
+
+ it('does not render new feature flag button', () => {
+ expect(newButton().exists()).toBe(false);
+ });
+
+ it('does not render new user list button', () => {
+ expect(newUserListButton().exists()).toBe(false);
+ });
+ });
+
+ describe('loading state', () => {
+ it('renders a loading icon', () => {
+ mock
+ .onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .replyOnce(200, getRequestData, {});
+
+ factory();
+
+ const loadingElement = wrapper.find(GlLoadingIcon);
+
+ expect(loadingElement.exists()).toBe(true);
+ expect(loadingElement.props('label')).toEqual('Loading feature flags');
+ });
+ });
+
+ describe('successful request', () => {
+ describe('without feature flags', () => {
+ let emptyState;
+
+ beforeEach(async () => {
+ mock.onGet(mockState.endpoint, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } }).reply(
+ 200,
+ {
+ feature_flags: [],
+ count: {
+ all: 0,
+ enabled: 0,
+ disabled: 0,
+ },
+ },
+ {},
+ );
+
+ factory();
+ await wrapper.vm.$nextTick();
+
+ emptyState = wrapper.find(GlEmptyState);
+ });
+
+ it('should render the empty state', async () => {
+ expect(emptyState.exists()).toBe(true);
+ });
+
+ it('renders configure button', () => {
+ expect(configureButton().exists()).toBe(true);
+ });
+
+ it('renders new feature flag button', () => {
+ expect(newButton().exists()).toBe(true);
+ });
+
+ it('renders new user list button', () => {
+ expect(newUserListButton().exists()).toBe(true);
+ expect(newUserListButton().attributes('href')).toBe('/user-list/new');
+ });
+
+ describe('in feature flags tab', () => {
+ it('renders generic title', () => {
+ expect(emptyState.props('title')).toEqual('Get started with feature flags');
+ });
+ });
+ });
+
+ describe('with paginated feature flags', () => {
+ beforeEach(done => {
+ mock
+ .onGet(mockState.endpoint, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .replyOnce(200, getRequestData, {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '5',
+ });
+
+ factory();
+ jest.spyOn(store, 'dispatch');
+ setImmediate(done);
+ });
+
+ it('should render a table with feature flags', () => {
+ const table = wrapper.find(FeatureFlagsTable);
+ expect(table.exists()).toBe(true);
+ expect(table.props(FEATURE_FLAG_SCOPE)).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ name: getRequestData.feature_flags[0].name,
+ description: getRequestData.feature_flags[0].description,
+ }),
+ ]),
+ );
+ });
+
+ it('should toggle a flag when receiving the toggle-flag event', () => {
+ const table = wrapper.find(FeatureFlagsTable);
+
+ const [flag] = table.props(FEATURE_FLAG_SCOPE);
+ table.vm.$emit('toggle-flag', flag);
+
+ expect(store.dispatch).toHaveBeenCalledWith('toggleFeatureFlag', flag);
+ });
+
+ it('renders configure button', () => {
+ expect(configureButton().exists()).toBe(true);
+ });
+
+ it('renders new feature flag button', () => {
+ expect(newButton().exists()).toBe(true);
+ });
+
+ it('renders new user list button', () => {
+ expect(newUserListButton().exists()).toBe(true);
+ expect(newUserListButton().attributes('href')).toBe('/user-list/new');
+ });
+
+ describe('pagination', () => {
+ it('should render pagination', () => {
+ expect(wrapper.find(TablePagination).exists()).toBe(true);
+ });
+
+ it('should make an API request when page is clicked', () => {
+ jest.spyOn(wrapper.vm, 'updateFeatureFlagOptions');
+ wrapper.find(TablePagination).vm.change(4);
+
+ expect(wrapper.vm.updateFeatureFlagOptions).toHaveBeenCalledWith({
+ scope: FEATURE_FLAG_SCOPE,
+ page: '4',
+ });
+ });
+
+ it('should make an API request when using tabs', () => {
+ jest.spyOn(wrapper.vm, 'updateFeatureFlagOptions');
+ wrapper.find('[data-testid="user-lists-tab"]').vm.$emit('changeTab');
+
+ expect(wrapper.vm.updateFeatureFlagOptions).toHaveBeenCalledWith({
+ scope: USER_LIST_SCOPE,
+ page: '1',
+ });
+ });
+ });
+ });
+
+ describe('in user lists tab', () => {
+ beforeEach(done => {
+ factory();
+ setImmediate(done);
+ });
+ beforeEach(() => {
+ wrapper.find('[data-testid="user-lists-tab"]').vm.$emit('changeTab');
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should display the user list table', () => {
+ expect(wrapper.find(UserListsTable).exists()).toBe(true);
+ });
+
+ it('should set the user lists to display', () => {
+ expect(wrapper.find(UserListsTable).props('userLists')).toEqual([userList]);
+ });
+ });
+ });
+
+ describe('unsuccessful request', () => {
+ beforeEach(done => {
+ mock
+ .onGet(mockState.endpoint, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .replyOnce(500, {});
+ Api.fetchFeatureFlagUserLists.mockRejectedValueOnce();
+
+ factory();
+ setImmediate(done);
+ });
+
+ it('should render error state', () => {
+ const emptyState = wrapper.find(GlEmptyState);
+ expect(emptyState.props('title')).toEqual('There was an error fetching the feature flags.');
+ expect(emptyState.props('description')).toEqual(
+ 'Try again in a few moments or contact your support team.',
+ );
+ });
+
+ it('renders configure button', () => {
+ expect(configureButton().exists()).toBe(true);
+ });
+
+ it('renders new feature flag button', () => {
+ expect(newButton().exists()).toBe(true);
+ });
+
+ it('renders new user list button', () => {
+ expect(newUserListButton().exists()).toBe(true);
+ expect(newUserListButton().attributes('href')).toBe('/user-list/new');
+ });
+ });
+
+ describe('rotate instance id', () => {
+ beforeEach(done => {
+ mock
+ .onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .reply(200, getRequestData, {});
+ factory();
+ setImmediate(done);
+ });
+
+ it('should fire the rotate action when a `token` event is received', () => {
+ const actionSpy = jest.spyOn(wrapper.vm, 'rotateInstanceId');
+ const modal = wrapper.find(ConfigureFeatureFlagsModal);
+ modal.vm.$emit('token');
+
+ expect(actionSpy).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/feature_flags_tab_spec.js b/spec/frontend/feature_flags/components/feature_flags_tab_spec.js
new file mode 100644
index 00000000000..bc90c5ceb2d
--- /dev/null
+++ b/spec/frontend/feature_flags/components/feature_flags_tab_spec.js
@@ -0,0 +1,168 @@
+import { mount } from '@vue/test-utils';
+import { GlAlert, GlBadge, GlEmptyState, GlLink, GlLoadingIcon, GlTabs } from '@gitlab/ui';
+import FeatureFlagsTab from '~/feature_flags/components/feature_flags_tab.vue';
+
+const DEFAULT_PROPS = {
+ title: 'test',
+ count: 5,
+ alerts: ['an alert', 'another alert'],
+ isLoading: false,
+ loadingLabel: 'test loading',
+ errorState: false,
+ errorTitle: 'test title',
+ emptyState: true,
+ emptyTitle: 'test empty',
+};
+
+const DEFAULT_PROVIDE = {
+ errorStateSvgPath: '/error.svg',
+ featureFlagsHelpPagePath: '/help/page/path',
+};
+
+describe('feature_flags/components/feature_flags_tab.vue', () => {
+ let wrapper;
+
+ const factory = (props = {}) =>
+ mount(
+ {
+ components: {
+ GlTabs,
+ FeatureFlagsTab,
+ },
+ render(h) {
+ return h(GlTabs, [
+ h(FeatureFlagsTab, { props: this.$attrs, on: this.$listeners }, this.$slots.default),
+ ]);
+ },
+ },
+ {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...props,
+ },
+ provide: DEFAULT_PROVIDE,
+ slots: {
+ default: '<p data-testid="test-slot">testing</p>',
+ },
+ },
+ );
+
+ afterEach(() => {
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ describe('alerts', () => {
+ let alerts;
+
+ beforeEach(() => {
+ wrapper = factory();
+ alerts = wrapper.findAll(GlAlert);
+ });
+
+ it('should show any alerts', () => {
+ expect(alerts).toHaveLength(DEFAULT_PROPS.alerts.length);
+ alerts.wrappers.forEach((alert, i) => expect(alert.text()).toBe(DEFAULT_PROPS.alerts[i]));
+ });
+
+ it('should emit a dismiss event for a dismissed alert', () => {
+ alerts.at(0).vm.$emit('dismiss');
+
+ expect(wrapper.find(FeatureFlagsTab).emitted('dismissAlert')).toEqual([[0]]);
+ });
+ });
+
+ describe('loading', () => {
+ beforeEach(() => {
+ wrapper = factory({ isLoading: true });
+ });
+
+ it('should show a loading icon and nothing else', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findAll(GlEmptyState)).toHaveLength(0);
+ });
+ });
+
+ describe('error', () => {
+ let emptyState;
+
+ beforeEach(() => {
+ wrapper = factory({ errorState: true });
+ emptyState = wrapper.find(GlEmptyState);
+ });
+
+ it('should show an error state if there has been an error', () => {
+ expect(emptyState.text()).toContain(DEFAULT_PROPS.errorTitle);
+ expect(emptyState.text()).toContain(
+ 'Try again in a few moments or contact your support team.',
+ );
+ expect(emptyState.props('svgPath')).toBe(DEFAULT_PROVIDE.errorStateSvgPath);
+ });
+ });
+
+ describe('empty', () => {
+ let emptyState;
+ let emptyStateLink;
+
+ beforeEach(() => {
+ wrapper = factory({ emptyState: true });
+ emptyState = wrapper.find(GlEmptyState);
+ emptyStateLink = emptyState.find(GlLink);
+ });
+
+ it('should show an empty state if it is empty', () => {
+ expect(emptyState.text()).toContain(DEFAULT_PROPS.emptyTitle);
+ expect(emptyState.text()).toContain(
+ 'Feature flags allow you to configure your code into different flavors by dynamically toggling certain functionality.',
+ );
+ expect(emptyState.props('svgPath')).toBe(DEFAULT_PROVIDE.errorStateSvgPath);
+ expect(emptyStateLink.attributes('href')).toBe(DEFAULT_PROVIDE.featureFlagsHelpPagePath);
+ expect(emptyStateLink.text()).toBe('More information');
+ });
+ });
+
+ describe('slot', () => {
+ let slot;
+
+ beforeEach(async () => {
+ wrapper = factory();
+ await wrapper.vm.$nextTick();
+
+ slot = wrapper.find('[data-testid="test-slot"]');
+ });
+
+ it('should display the passed slot', () => {
+ expect(slot.exists()).toBe(true);
+ expect(slot.text()).toBe('testing');
+ });
+ });
+
+ describe('count', () => {
+ it('should display a count if there is one', async () => {
+ wrapper = factory();
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlBadge).text()).toBe(DEFAULT_PROPS.count.toString());
+    });
+
+    it('should display 0 if there is no count', async () => {
+ wrapper = factory({ count: undefined });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlBadge).text()).toBe('0');
+ });
+ });
+
+ describe('title', () => {
+ it('should show the title', async () => {
+ wrapper = factory();
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('[data-testid="feature-flags-tab-title"]').text()).toBe(
+ DEFAULT_PROPS.title,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/feature_flags_table_spec.js b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
new file mode 100644
index 00000000000..a488662470e
--- /dev/null
+++ b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
@@ -0,0 +1,266 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlToggle, GlBadge } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ ROLLOUT_STRATEGY_USER_ID,
+ ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ NEW_VERSION_FLAG,
+ LEGACY_FLAG,
+ DEFAULT_PERCENT_ROLLOUT,
+} from '~/feature_flags/constants';
+import FeatureFlagsTable from '~/feature_flags/components/feature_flags_table.vue';
+
+const getDefaultProps = () => ({
+ featureFlags: [
+ {
+ id: 1,
+ iid: 1,
+ active: true,
+ name: 'flag name',
+ description: 'flag description',
+ destroy_path: 'destroy/path',
+ edit_path: 'edit/path',
+ version: LEGACY_FLAG,
+ scopes: [
+ {
+ id: 1,
+ active: true,
+ environmentScope: 'scope',
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ shouldBeDestroyed: false,
+ },
+ ],
+ },
+ ],
+});
+
+describe('Feature flag table', () => {
+ let wrapper;
+ let props;
+
+ const createWrapper = (propsData, opts = {}) => {
+ wrapper = shallowMount(FeatureFlagsTable, {
+ propsData,
+ provide: {
+ csrfToken: 'fakeToken',
+ },
+ ...opts,
+ });
+ };
+
+ beforeEach(() => {
+ props = getDefaultProps();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('with an active scope and a standard rollout strategy', () => {
+ beforeEach(() => {
+ createWrapper(props);
+ });
+
+    it('should render a table', () => {
+ expect(wrapper.classes('table-holder')).toBe(true);
+ });
+
+    it('should render rows', () => {
+ expect(wrapper.find('.gl-responsive-table-row').exists()).toBe(true);
+ });
+
+ it('should render an ID column', () => {
+ expect(wrapper.find('.js-feature-flag-id').exists()).toBe(true);
+ expect(trimText(wrapper.find('.js-feature-flag-id').text())).toEqual('^1');
+ });
+
+    it('should render a status column', () => {
+ const badge = wrapper.find('[data-testid="feature-flag-status-badge"]');
+
+ expect(badge.exists()).toBe(true);
+ expect(trimText(badge.text())).toEqual('Active');
+ });
+
+    it('should render a feature flag column', () => {
+ expect(wrapper.find('.js-feature-flag-title').exists()).toBe(true);
+ expect(trimText(wrapper.find('.feature-flag-name').text())).toEqual('flag name');
+
+ expect(trimText(wrapper.find('.feature-flag-description').text())).toEqual(
+ 'flag description',
+ );
+ });
+
+ it('should render an environments specs column', () => {
+ const envColumn = wrapper.find('.js-feature-flag-environments');
+
+    expect(envColumn.exists()).toBe(true);
+ expect(trimText(envColumn.text())).toBe('scope');
+ });
+
+ it('should render an environments specs badge with active class', () => {
+ const envColumn = wrapper.find('.js-feature-flag-environments');
+
+ expect(trimText(envColumn.find(GlBadge).text())).toBe('scope');
+ });
+
+ it('should render an actions column', () => {
+ expect(wrapper.find('.table-action-buttons').exists()).toBe(true);
+ expect(wrapper.find('.js-feature-flag-delete-button').exists()).toBe(true);
+ expect(wrapper.find('.js-feature-flag-edit-button').exists()).toBe(true);
+ expect(wrapper.find('.js-feature-flag-edit-button').attributes('href')).toEqual('edit/path');
+ });
+ });
+
+ describe('when active and with an update toggle', () => {
+ let toggle;
+ let spy;
+
+ beforeEach(() => {
+ props.featureFlags[0].update_path = props.featureFlags[0].destroy_path;
+ createWrapper(props);
+ toggle = wrapper.find(GlToggle);
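+      // Attach a tracking spy to the toggle element so the click can be asserted below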
+ spy = mockTracking('_category_', toggle.element, jest.spyOn);
+ });
+
+ it('should have a toggle', () => {
+ expect(toggle.exists()).toBe(true);
+ expect(toggle.props('value')).toBe(true);
+ });
+
+ it('should trigger a toggle event', () => {
+ toggle.vm.$emit('change');
+ const flag = { ...props.featureFlags[0], active: !props.featureFlags[0].active };
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('toggle-flag')).toEqual([[flag]]);
+ });
+ });
+
+ it('should track a click', () => {
+ toggle.trigger('click');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'feature_flag_toggle',
+ });
+ });
+ });
+
+ describe('with an active scope and a percentage rollout strategy', () => {
+ beforeEach(() => {
+ props.featureFlags[0].scopes[0].rolloutStrategy = ROLLOUT_STRATEGY_PERCENT_ROLLOUT;
+ props.featureFlags[0].scopes[0].rolloutPercentage = '54';
+ createWrapper(props);
+ });
+
+ it('should render an environments specs badge with percentage', () => {
+ const envColumn = wrapper.find('.js-feature-flag-environments');
+
+ expect(trimText(envColumn.find(GlBadge).text())).toBe('scope: 54%');
+ });
+ });
+
+ describe('with an inactive scope', () => {
+ beforeEach(() => {
+ props.featureFlags[0].scopes[0].active = false;
+ createWrapper(props);
+ });
+
+ it('should render an environments specs badge with inactive class', () => {
+ const envColumn = wrapper.find('.js-feature-flag-environments');
+
+ expect(trimText(envColumn.find(GlBadge).text())).toBe('scope');
+ });
+ });
+
+ describe('with a new version flag', () => {
+ let badges;
+
+ beforeEach(() => {
+ const newVersionProps = {
+ ...props,
+ featureFlags: [
+ {
+ id: 1,
+ iid: 1,
+ active: true,
+ name: 'flag name',
+ description: 'flag description',
+ destroy_path: 'destroy/path',
+ edit_path: 'edit/path',
+ version: NEW_VERSION_FLAG,
+ scopes: [],
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [{ environment_scope: '*' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50' },
+ scopes: [{ environment_scope: 'production' }, { environment_scope: 'staging' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: { userIds: '1,2,3,4' },
+ scopes: [{ environment_scope: 'review/*' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ parameters: {},
+ user_list: { name: 'test list' },
+ scopes: [{ environment_scope: '*' }],
+ },
+ ],
+ },
+ ],
+ };
+ createWrapper(newVersionProps, {
+ provide: { csrfToken: 'fakeToken', glFeatures: { featureFlagsNewVersion: true } },
+ });
+
+ badges = wrapper.findAll('[data-testid="strategy-badge"]');
+ });
+
+ it('shows All Environments if the environment scope is *', () => {
+ expect(badges.at(0).text()).toContain('All Environments');
+ });
+
+ it('shows the environment scope if another is set', () => {
+ expect(badges.at(1).text()).toContain('production');
+ expect(badges.at(1).text()).toContain('staging');
+ expect(badges.at(2).text()).toContain('review/*');
+ });
+
+ it('shows All Users for the default strategy', () => {
+ expect(badges.at(0).text()).toContain('All Users');
+ });
+
+ it('shows the percent for a percent rollout', () => {
+ expect(badges.at(1).text()).toContain('Percent of users - 50%');
+ });
+
+ it('shows the number of users for users with ID', () => {
+ expect(badges.at(2).text()).toContain('User IDs - 4 users');
+ });
+
+ it('shows the name of a user list for user list', () => {
+ expect(badges.at(3).text()).toContain('User List - test list');
+ });
+ });
+
+ it('renders a feature flag without an iid', () => {
+ delete props.featureFlags[0].iid;
+ createWrapper(props);
+
+ expect(wrapper.find('.js-feature-flag-id').exists()).toBe(true);
+ expect(trimText(wrapper.find('.js-feature-flag-id').text())).toBe('');
+ });
+});
diff --git a/spec/frontend/feature_flags/components/form_spec.js b/spec/frontend/feature_flags/components/form_spec.js
new file mode 100644
index 00000000000..33c7eeb54b7
--- /dev/null
+++ b/spec/frontend/feature_flags/components/form_spec.js
@@ -0,0 +1,493 @@
+import { uniqueId } from 'lodash';
+import { shallowMount } from '@vue/test-utils';
+import { GlFormTextarea, GlFormCheckbox, GlButton } from '@gitlab/ui';
+import Api from '~/api';
+import Form from '~/feature_flags/components/form.vue';
+import EnvironmentsDropdown from '~/feature_flags/components/environments_dropdown.vue';
+import Strategy from '~/feature_flags/components/strategy.vue';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ INTERNAL_ID_PREFIX,
+ DEFAULT_PERCENT_ROLLOUT,
+ LEGACY_FLAG,
+ NEW_VERSION_FLAG,
+} from '~/feature_flags/constants';
+import RelatedIssuesRoot from '~/related_issues/components/related_issues_root.vue';
+import ToggleButton from '~/vue_shared/components/toggle_button.vue';
+import { featureFlag, userList, allUsersStrategy } from '../mock_data';
+
+jest.mock('~/api.js');
+
+describe('feature flag form', () => {
+ let wrapper;
+ const requiredProps = {
+ cancelPath: 'feature_flags',
+ submitText: 'Create',
+ };
+
+ const requiredInjections = {
+ environmentsEndpoint: '/environments.json',
+ projectId: '1',
+ glFeatures: {
+ featureFlagPermissions: true,
+ featureFlagsNewVersion: true,
+ },
+ };
+
+ const factory = (props = {}, provide = {}) => {
+ wrapper = shallowMount(Form, {
+ propsData: { ...requiredProps, ...props },
+ provide: {
+ ...requiredInjections,
+ ...provide,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserLists.mockResolvedValue({ data: [] });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render provided submitText', () => {
+ factory(requiredProps);
+
+ expect(wrapper.find('.js-ff-submit').text()).toEqual(requiredProps.submitText);
+ });
+
+ it('should render provided cancelPath', () => {
+ factory(requiredProps);
+
+ expect(wrapper.find('.js-ff-cancel').attributes('href')).toEqual(requiredProps.cancelPath);
+ });
+
+ it('does not render the related issues widget without the featureFlagIssuesEndpoint', () => {
+ factory(requiredProps);
+
+ expect(wrapper.find(RelatedIssuesRoot).exists()).toBe(false);
+ });
+
+ it('renders the related issues widget when the featureFlagIssuesEndpoint is provided', () => {
+ factory(
+ {},
+ {
+ ...requiredInjections,
+ featureFlagIssuesEndpoint: '/some/endpoint',
+ },
+ );
+
+ expect(wrapper.find(RelatedIssuesRoot).exists()).toBe(true);
+ });
+
+ describe('without provided data', () => {
+ beforeEach(() => {
+ factory(requiredProps);
+ });
+
+ it('should render name input text', () => {
+ expect(wrapper.find('#feature-flag-name').exists()).toBe(true);
+ });
+
+ it('should render description textarea', () => {
+ expect(wrapper.find('#feature-flag-description').exists()).toBe(true);
+ });
+
+ describe('scopes', () => {
+ it('should render scopes table', () => {
+ expect(wrapper.find('.js-scopes-table').exists()).toBe(true);
+ });
+
+      it('should render scopes table with a new row', () => {
+ expect(wrapper.find('.js-add-new-scope').exists()).toBe(true);
+ });
+
+ describe('status toggle', () => {
+ describe('without filled text input', () => {
+ it('should add a new scope with the text value empty and the status', () => {
+ wrapper.find(ToggleButton).vm.$emit('change', true);
+
+ expect(wrapper.vm.formScopes).toHaveLength(1);
+ expect(wrapper.vm.formScopes[0].active).toEqual(true);
+ expect(wrapper.vm.formScopes[0].environmentScope).toEqual('');
+
+ expect(wrapper.vm.newScope).toEqual('');
+ });
+ });
+
+ it('should be disabled if the feature flag is not active', done => {
+ wrapper.setProps({ active: false });
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
+ done();
+ });
+ });
+ });
+ });
+ });
+
+ describe('with provided data', () => {
+ beforeEach(() => {
+ factory({
+ ...requiredProps,
+ name: featureFlag.name,
+ description: featureFlag.description,
+ active: true,
+ version: LEGACY_FLAG,
+ scopes: [
+ {
+ id: 1,
+ active: true,
+ environmentScope: 'scope',
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '54',
+ rolloutUserIds: '123',
+ shouldIncludeUserIds: true,
+ },
+ {
+ id: 2,
+ active: true,
+ environmentScope: 'scope',
+ canUpdate: false,
+ protected: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '54',
+ rolloutUserIds: '123',
+ shouldIncludeUserIds: true,
+ },
+ ],
+ });
+ });
+
+ describe('scopes', () => {
+ it('should be possible to remove a scope', () => {
+ expect(wrapper.find('.js-feature-flag-delete').exists()).toEqual(true);
+ });
+
+ it('renders empty row to add a new scope', () => {
+ expect(wrapper.find('.js-add-new-scope').exists()).toEqual(true);
+ });
+
+ it('renders the user id checkbox', () => {
+ expect(wrapper.find(GlFormCheckbox).exists()).toBe(true);
+ });
+
+ it('renders the user id text area', () => {
+ expect(wrapper.find(GlFormTextarea).exists()).toBe(true);
+
+ expect(wrapper.find(GlFormTextarea).vm.value).toBe('123');
+ });
+
+ describe('update scope', () => {
+ describe('on click on toggle', () => {
+ it('should update the scope', () => {
+ wrapper.find(ToggleButton).vm.$emit('change', false);
+
+ expect(wrapper.vm.formScopes[0].active).toBe(false);
+ });
+
+ it('should be disabled if the feature flag is not active', done => {
+ wrapper.setProps({ active: false });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
+ done();
+ });
+ });
+ });
+ describe('on strategy change', () => {
+ it('should not include user IDs if All Users is selected', () => {
+ const scope = wrapper.find({ ref: 'scopeRow' });
+ scope.find('select').setValue(ROLLOUT_STRATEGY_ALL_USERS);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(scope.find('#rollout-user-id-0').exists()).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('deleting an existing scope', () => {
+ beforeEach(() => {
+ wrapper.find('.js-delete-scope').vm.$emit('click');
+ });
+
+      it('should add `shouldBeDestroyed` key to the clicked scope', () => {
+ expect(wrapper.vm.formScopes[0].shouldBeDestroyed).toBe(true);
+ });
+
+ it('should not render deleted scopes', () => {
+ expect(wrapper.vm.filteredScopes).toEqual([expect.objectContaining({ id: 2 })]);
+ });
+ });
+
+ describe('deleting a new scope', () => {
+ it('should remove the scope from formScopes', () => {
+ factory({
+ ...requiredProps,
+ name: 'feature_flag_1',
+ description: 'this is a feature flag',
+ scopes: [
+ {
+ environmentScope: 'new_scope',
+ active: false,
+ id: uniqueId(INTERNAL_ID_PREFIX),
+ canUpdate: true,
+ protected: false,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ },
+ ],
+ },
+ ],
+ });
+
+ wrapper.find('.js-delete-scope').vm.$emit('click');
+
+ expect(wrapper.vm.formScopes).toEqual([]);
+ });
+ });
+
+ describe('with * scope', () => {
+ beforeEach(() => {
+ factory({
+ ...requiredProps,
+ name: 'feature_flag_1',
+ description: 'this is a feature flag',
+ scopes: [
+ {
+ environmentScope: '*',
+ active: false,
+ canUpdate: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ },
+ ],
+ });
+ });
+
+ it('renders read only name', () => {
+ expect(wrapper.find('.js-scope-all').exists()).toEqual(true);
+ });
+ });
+
+ describe('without permission to update', () => {
+ it('should have the flag name input disabled', () => {
+ const input = wrapper.find('#feature-flag-name');
+
+ expect(input.element.disabled).toBe(true);
+ });
+
+      it('should have the flag description text area disabled', () => {
+ const textarea = wrapper.find('#feature-flag-description');
+
+ expect(textarea.element.disabled).toBe(true);
+ });
+
+ it('should have the scope that cannot be updated be disabled', () => {
+ const row = wrapper.findAll('.gl-responsive-table-row').at(2);
+
+ expect(row.find(EnvironmentsDropdown).vm.disabled).toBe(true);
+ expect(row.find(ToggleButton).vm.disabledInput).toBe(true);
+ expect(row.find('.js-delete-scope').exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('on submit', () => {
+ const selectFirstRolloutStrategyOption = dropdownIndex => {
+ wrapper
+ .findAll('select.js-rollout-strategy')
+ .at(dropdownIndex)
+ .findAll('option')
+ .at(1)
+ .setSelected();
+ };
+
+ beforeEach(() => {
+ factory({
+ ...requiredProps,
+ name: 'feature_flag_1',
+ active: true,
+ description: 'this is a feature flag',
+ scopes: [
+ {
+ id: 1,
+ environmentScope: 'production',
+ canUpdate: true,
+ protected: true,
+ active: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ },
+ ],
+ });
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should emit handleSubmit with the updated data', () => {
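+      // Rename the flag, add a 'review' scope, toggle the scope rows, change the
+      // rollout strategies and percentage, then submit and inspect the emitted payload.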
+ wrapper.find('#feature-flag-name').setValue('feature_flag_2');
+
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper
+ .find('.js-new-scope-name')
+ .find(EnvironmentsDropdown)
+ .vm.$emit('selectEnvironment', 'review');
+
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ wrapper
+ .find('.js-add-new-scope')
+ .find(ToggleButton)
+ .vm.$emit('change', true);
+ })
+ .then(() => {
+ wrapper.find(ToggleButton).vm.$emit('change', true);
+ return wrapper.vm.$nextTick();
+ })
+
+ .then(() => {
+ selectFirstRolloutStrategyOption(0);
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ selectFirstRolloutStrategyOption(2);
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ wrapper.find('.js-rollout-percentage').setValue('55');
+
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ wrapper.find({ ref: 'submitButton' }).vm.$emit('click');
+
+ const data = wrapper.emitted().handleSubmit[0][0];
+
+ expect(data.name).toEqual('feature_flag_2');
+ expect(data.description).toEqual('this is a feature flag');
+ expect(data.active).toBe(true);
+
+ expect(data.scopes).toEqual([
+ {
+ id: 1,
+ active: true,
+ environmentScope: 'production',
+ canUpdate: true,
+ protected: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '55',
+ rolloutUserIds: '',
+ shouldIncludeUserIds: false,
+ },
+ {
+ id: expect.any(String),
+ active: false,
+ environmentScope: 'review',
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ },
+ {
+ id: expect.any(String),
+ active: true,
+ environmentScope: '',
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ shouldIncludeUserIds: false,
+ },
+ ]);
+ });
+ });
+ });
+ });
+
+ describe('with strategies', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserLists.mockResolvedValue({ data: [userList] });
+ factory({
+ ...requiredProps,
+ name: featureFlag.name,
+ description: featureFlag.description,
+ active: true,
+ version: NEW_VERSION_FLAG,
+ strategies: [
+ {
+ type: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '30' },
+ scopes: [],
+ },
+ {
+ type: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [{ environment_scope: 'review/*' }],
+ },
+ ],
+ });
+ });
+
+ it('should request the user lists on mount', () => {
+ return wrapper.vm.$nextTick(() => {
+ expect(Api.fetchFeatureFlagUserLists).toHaveBeenCalledWith('1');
+ });
+ });
+
+ it('should show the strategy component', () => {
+ const strategy = wrapper.find(Strategy);
+ expect(strategy.exists()).toBe(true);
+ expect(strategy.props('strategy')).toEqual({
+ type: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '30' },
+ scopes: [],
+ });
+ });
+
+ it('should show one strategy component per strategy', () => {
+ expect(wrapper.findAll(Strategy)).toHaveLength(2);
+ });
+
+ it('adds an all users strategy when clicking the Add button', () => {
+ wrapper.find(GlButton).vm.$emit('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ const strategies = wrapper.findAll(Strategy);
+
+ expect(strategies).toHaveLength(3);
+ expect(strategies.at(2).props('strategy')).toEqual(allUsersStrategy);
+ });
+ });
+
+ it('should remove a strategy on delete', () => {
+ const strategy = {
+ type: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '30' },
+ scopes: [],
+ };
+ wrapper.find(Strategy).vm.$emit('delete');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(Strategy)).toHaveLength(1);
+ expect(wrapper.find(Strategy).props('strategy')).not.toEqual(strategy);
+ });
+ });
+
+ it('should provide the user lists to the strategy', () => {
+ expect(wrapper.find(Strategy).props('userLists')).toEqual([userList]);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
new file mode 100644
index 00000000000..12dc98fbde8
--- /dev/null
+++ b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
@@ -0,0 +1,105 @@
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { GlLoadingIcon, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
+import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
+import axios from '~/lib/utils/axios_utils';
+import httpStatusCodes from '~/lib/utils/http_status';
+
+const TEST_HOST = '/test';
+const TEST_SEARCH = 'production';
+
+describe('New Environments Dropdown', () => {
+ let wrapper;
+ let axiosMock;
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ wrapper = shallowMount(NewEnvironmentsDropdown, {
+ provide: { environmentsEndpoint: TEST_HOST },
+ });
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ describe('before results', () => {
+ it('should show a loading icon', () => {
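+      // Assert inside the mocked reply so the check runs while the request is still in flight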
+ axiosMock.onGet(TEST_HOST).reply(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ return axios.waitForAll();
+ });
+
+ it('should not show any dropdown items', () => {
+ axiosMock.onGet(TEST_HOST).reply(() => {
+ expect(wrapper.findAll(GlDropdownItem)).toHaveLength(0);
+ });
+ wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ return axios.waitForAll();
+ });
+ });
+
+ describe('with empty results', () => {
+ let item;
+ beforeEach(() => {
+ axiosMock.onGet(TEST_HOST).reply(200, []);
+ wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ wrapper.find(GlSearchBoxByType).vm.$emit('input', TEST_SEARCH);
+ return axios
+ .waitForAll()
+ .then(() => wrapper.vm.$nextTick())
+ .then(() => {
+ item = wrapper.find(GlDropdownItem);
+ });
+ });
+
+ it('should display a Create item label', () => {
+ expect(item.text()).toBe('Create production');
+ });
+
+ it('should display that no matching items are found', () => {
+ expect(wrapper.find({ ref: 'noResults' }).exists()).toBe(true);
+ });
+
+ it('should emit a new scope when selected', () => {
+ item.vm.$emit('click');
+ expect(wrapper.emitted('add')).toEqual([[TEST_SEARCH]]);
+ });
+ });
+
+ describe('with results', () => {
+ let items;
+ beforeEach(() => {
+ axiosMock.onGet(TEST_HOST).reply(httpStatusCodes.OK, ['prod', 'production']);
+ wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ wrapper.find(GlSearchBoxByType).vm.$emit('input', 'prod');
+ return axios.waitForAll().then(() => {
+ items = wrapper.findAll(GlDropdownItem);
+ });
+ });
+
+ it('should display one item per result', () => {
+ expect(items).toHaveLength(2);
+ });
+
+ it('should emit an add if an item is clicked', () => {
+ items.at(0).vm.$emit('click');
+ expect(wrapper.emitted('add')).toEqual([['prod']]);
+ });
+
+ it('should not display a create label', () => {
+ items = items.filter(i => i.text().startsWith('Create'));
+ expect(items).toHaveLength(0);
+ });
+
+ it('should not display a message about no results', () => {
+ expect(wrapper.find({ ref: 'noResults' }).exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/new_feature_flag_spec.js b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
new file mode 100644
index 00000000000..dbc6e03d922
--- /dev/null
+++ b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
@@ -0,0 +1,136 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import MockAdapter from 'axios-mock-adapter';
+import { GlAlert } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import Form from '~/feature_flags/components/form.vue';
+import createStore from '~/feature_flags/store/new';
+import NewFeatureFlag from '~/feature_flags/components/new_feature_flag.vue';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ DEFAULT_PERCENT_ROLLOUT,
+ NEW_FLAG_ALERT,
+} from '~/feature_flags/constants';
+import axios from '~/lib/utils/axios_utils';
+import { allUsersStrategy } from '../mock_data';
+
+const userCalloutId = 'feature_flags_new_version';
+const userCalloutsPath = `${TEST_HOST}/user_callouts`;
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('New feature flag form', () => {
+ let wrapper;
+
+ const store = createStore({
+ endpoint: `${TEST_HOST}/feature_flags.json`,
+ path: '/feature_flags',
+ });
+
+ const factory = (opts = {}) => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ wrapper = shallowMount(NewFeatureFlag, {
+ localVue,
+ store,
+ provide: {
+ showUserCallout: true,
+ userCalloutId,
+ userCalloutsPath,
+ environmentsEndpoint: 'environments.json',
+ projectId: '8',
+ glFeatures: {
+ featureFlagsNewVersion: true,
+ },
+ ...opts,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ factory();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findAlert = () => wrapper.find(GlAlert);
+
+ describe('with error', () => {
+ it('should render the error', () => {
+ store.dispatch('receiveCreateFeatureFlagError', { message: ['The name is required'] });
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.alert').exists()).toEqual(true);
+ expect(wrapper.find('.alert').text()).toContain('The name is required');
+ });
+ });
+ });
+
+ it('renders form title', () => {
+ expect(wrapper.find('h3').text()).toEqual('New feature flag');
+ });
+
+ it('should render feature flag form', () => {
+ expect(wrapper.find(Form).exists()).toEqual(true);
+ });
+
+ it('should render default * row', () => {
+ const defaultScope = {
+ id: expect.any(String),
+ environmentScope: '*',
+ active: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ };
+ expect(wrapper.vm.scopes).toEqual([defaultScope]);
+
+ expect(wrapper.find(Form).props('scopes')).toContainEqual(defaultScope);
+ });
+
+ it('should not alert users that feature flags are changing soon', () => {
+ expect(wrapper.find(GlAlert).exists()).toBe(false);
+ });
+
+ it('has an all users strategy by default', () => {
+ const strategies = wrapper.find(Form).props('strategies');
+
+ expect(strategies).toEqual([allUsersStrategy]);
+ });
+
+ describe('without new version flags', () => {
+ beforeEach(() => factory({ glFeatures: { featureFlagsNewVersion: false } }));
+
+ it('should alert users that feature flags are changing soon', () => {
+ expect(findAlert().text()).toBe(NEW_FLAG_ALERT);
+ });
+ });
+
+ describe('dismissing new version alert', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
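+      // Dismissing the legacy-flags alert persists a user callout via this POST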
+ mock.onPost(userCalloutsPath, { feature_name: userCalloutId }).reply(200);
+ factory({ glFeatures: { featureFlagsNewVersion: false } });
+ findAlert().vm.$emit('dismiss');
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('should hide the alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should send the dismissal event', () => {
+ expect(mock.history.post.length).toBe(1);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategies/default_spec.js b/spec/frontend/feature_flags/components/strategies/default_spec.js
new file mode 100644
index 00000000000..1315cd7d735
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategies/default_spec.js
@@ -0,0 +1,10 @@
+import { shallowMount } from '@vue/test-utils';
+import Default from '~/feature_flags/components/strategies/default.vue';
+
+describe('~/feature_flags/components/strategies/default.vue', () => {
+ it('should emit an empty parameter object on mount', () => {
+ const wrapper = shallowMount(Default);
+
+ expect(wrapper.emitted('change')).toEqual([[{ parameters: {} }]]);
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js b/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js
new file mode 100644
index 00000000000..f3f70a325d0
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js
@@ -0,0 +1,116 @@
+import { mount } from '@vue/test-utils';
+import { GlFormInput, GlFormSelect } from '@gitlab/ui';
+import FlexibleRollout from '~/feature_flags/components/strategies/flexible_rollout.vue';
+import ParameterFormGroup from '~/feature_flags/components/strategies/parameter_form_group.vue';
+import { PERCENT_ROLLOUT_GROUP_ID } from '~/feature_flags/constants';
+import { flexibleRolloutStrategy } from '../../mock_data';
+
+const DEFAULT_PROPS = {
+ strategy: flexibleRolloutStrategy,
+};
+
+describe('feature_flags/components/strategies/flexible_rollout.vue', () => {
+ let wrapper;
+ let percentageFormGroup;
+ let percentageInput;
+ let stickinessFormGroup;
+ let stickinessSelect;
+
+ const factory = (props = {}) =>
+ mount(FlexibleRollout, { propsData: { ...DEFAULT_PROPS, ...props } });
+
+ afterEach(() => {
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ describe('with valid percentage', () => {
+ beforeEach(() => {
+ wrapper = factory();
+
+ percentageFormGroup = wrapper
+ .find('[data-testid="strategy-flexible-rollout-percentage"]')
+ .find(ParameterFormGroup);
+ percentageInput = percentageFormGroup.find(GlFormInput);
+ stickinessFormGroup = wrapper
+ .find('[data-testid="strategy-flexible-rollout-stickiness"]')
+ .find(ParameterFormGroup);
+ stickinessSelect = stickinessFormGroup.find(GlFormSelect);
+ });
+
+ it('displays the current percentage value', () => {
+ expect(percentageInput.element.value).toBe(flexibleRolloutStrategy.parameters.rollout);
+ });
+
+ it('displays the current stickiness value', () => {
+ expect(stickinessSelect.element.value).toBe(flexibleRolloutStrategy.parameters.stickiness);
+ });
+
+ it('emits a change when the percentage value changes', async () => {
+ percentageInput.setValue('75');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ parameters: {
+ rollout: '75',
+ groupId: PERCENT_ROLLOUT_GROUP_ID,
+ stickiness: flexibleRolloutStrategy.parameters.stickiness,
+ },
+ },
+ ],
+ ]);
+ });
+
+ it('emits a change when the stickiness value changes', async () => {
+ stickinessSelect.setValue('USERID');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ parameters: {
+ rollout: flexibleRolloutStrategy.parameters.rollout,
+ groupId: PERCENT_ROLLOUT_GROUP_ID,
+ stickiness: 'USERID',
+ },
+ },
+ ],
+ ]);
+ });
+
+ it('does not show errors', () => {
+ expect(percentageFormGroup.attributes('state')).toBe('true');
+ });
+ });
+
+ describe('with percentage that is out of range', () => {
+ beforeEach(() => {
+ wrapper = factory({ strategy: { parameters: { rollout: '101' } } });
+ });
+
+ it('shows errors', () => {
+ const formGroup = wrapper
+ .find('[data-testid="strategy-flexible-rollout-percentage"]')
+ .find(ParameterFormGroup);
+
+ expect(formGroup.attributes('state')).toBeUndefined();
+ });
+ });
+
+ describe('with percentage that is not a whole number', () => {
+ beforeEach(() => {
+ wrapper = factory({ strategy: { parameters: { rollout: '3.14' } } });
+ });
+
+ it('shows errors', () => {
+ const formGroup = wrapper
+ .find('[data-testid="strategy-flexible-rollout-percentage"]')
+ .find(ParameterFormGroup);
+
+ expect(formGroup.attributes('state')).toBeUndefined();
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
new file mode 100644
index 00000000000..014c6dd98b9
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
@@ -0,0 +1,51 @@
+import { mount } from '@vue/test-utils';
+import { GlFormSelect } from '@gitlab/ui';
+import GitlabUserList from '~/feature_flags/components/strategies/gitlab_user_list.vue';
+import { userListStrategy, userList } from '../../mock_data';
+
+const DEFAULT_PROPS = {
+ strategy: userListStrategy,
+ userLists: [userList],
+};
+
+describe('~/feature_flags/components/strategies/gitlab_user_list.vue', () => {
+ let wrapper;
+
+ const factory = (props = {}) =>
+ mount(GitlabUserList, { propsData: { ...DEFAULT_PROPS, ...props } });
+
+ describe('with user lists', () => {
+ beforeEach(() => {
+ wrapper = factory();
+ });
+
+ it('should show the input for userListId with the correct value', () => {
+ const inputWrapper = wrapper.find(GlFormSelect);
+ expect(inputWrapper.exists()).toBe(true);
+ expect(inputWrapper.element.value).toBe('2');
+ });
+
+ it('should emit a change event when altering the userListId', () => {
+ const inputWrapper = wrapper.find(GitlabUserList);
+ inputWrapper.vm.$emit('change', {
+ userListId: '3',
+ });
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ userListId: '3',
+ },
+ ],
+ ]);
+ });
+ });
+ describe('without user lists', () => {
+ beforeEach(() => {
+ wrapper = factory({ userLists: [] });
+ });
+
+ it('should display a message that there are no user lists', () => {
+ expect(wrapper.text()).toContain('There are no configured user lists');
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js b/spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js
new file mode 100644
index 00000000000..a0ffdb1fca0
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js
@@ -0,0 +1,50 @@
+import { mount } from '@vue/test-utils';
+import { GlFormGroup, GlFormInput } from '@gitlab/ui';
+import ParameterFormGroup from '~/feature_flags/components/strategies/parameter_form_group.vue';
+
+describe('~/feature_flags/components/strategies/parameter_form_group.vue', () => {
+ let wrapper;
+ let formGroup;
+ let slot;
+
+ beforeEach(() => {
+ wrapper = mount(ParameterFormGroup, {
+ propsData: { inputId: 'test-id', label: 'test' },
+ attrs: { description: 'test description' },
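+      // Render a GlFormInput in the default scoped slot, wired to the exposed inputId,
+      // so the id and label-for bindings can be asserted.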
+ scopedSlots: {
+ default(props) {
+ return this.$createElement(GlFormInput, {
+ attrs: { id: props.inputId, 'data-testid': 'slot' },
+ });
+ },
+ },
+ });
+
+ formGroup = wrapper.find(GlFormGroup);
+ slot = wrapper.find('[data-testid="slot"]');
+ });
+
+ afterEach(() => {
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ it('should display the default slot', () => {
+ expect(slot.exists()).toBe(true);
+ });
+
+ it('should bind the input id to the slot', () => {
+ expect(slot.attributes('id')).toBe('test-id');
+ });
+
+ it('should bind the label-for to the input id', () => {
+ expect(formGroup.find('[for="test-id"]').exists()).toBe(true);
+ });
+
+ it('should bind extra attributes to the form group', () => {
+ expect(formGroup.attributes('description')).toBe('test description');
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js b/spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js
new file mode 100644
index 00000000000..de0b439f1c5
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js
@@ -0,0 +1,78 @@
+import { mount } from '@vue/test-utils';
+import { GlFormInput } from '@gitlab/ui';
+import PercentRollout from '~/feature_flags/components/strategies/percent_rollout.vue';
+import ParameterFormGroup from '~/feature_flags/components/strategies/parameter_form_group.vue';
+import { PERCENT_ROLLOUT_GROUP_ID } from '~/feature_flags/constants';
+import { percentRolloutStrategy } from '../../mock_data';
+
+const DEFAULT_PROPS = {
+ strategy: percentRolloutStrategy,
+};
+
+describe('~/feature_flags/components/strategies/percent_rollout.vue', () => {
+ let wrapper;
+ let input;
+ let formGroup;
+
+ const factory = (props = {}) =>
+ mount(PercentRollout, { propsData: { ...DEFAULT_PROPS, ...props } });
+
+ afterEach(() => {
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ describe('with valid percentage', () => {
+ beforeEach(() => {
+ wrapper = factory();
+
+ input = wrapper.find(GlFormInput);
+ formGroup = wrapper.find(ParameterFormGroup);
+ });
+
+ it('displays the current value', () => {
+ expect(input.element.value).toBe(percentRolloutStrategy.parameters.percentage);
+ });
+
+ it('emits a change when the value changes', async () => {
+ input.setValue('75');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('change')).toEqual([
+ [{ parameters: { percentage: '75', groupId: PERCENT_ROLLOUT_GROUP_ID } }],
+ ]);
+ });
+
+ it('does not show errors', () => {
+ expect(formGroup.attributes('state')).toBe('true');
+ });
+ });
+
+ describe('with percentage that is out of range', () => {
+ beforeEach(() => {
+ wrapper = factory({ strategy: { parameters: { percentage: '101' } } });
+
+ input = wrapper.find(GlFormInput);
+ formGroup = wrapper.find(ParameterFormGroup);
+ });
+
+ it('shows errors', () => {
+ expect(formGroup.attributes('state')).toBeUndefined();
+ });
+ });
+
+ describe('with percentage that is not a whole number', () => {
+ beforeEach(() => {
+ wrapper = factory({ strategy: { parameters: { percentage: '3.14' } } });
+
+ input = wrapper.find(GlFormInput);
+ formGroup = wrapper.find(ParameterFormGroup);
+ });
+
+ it('shows errors', () => {
+ expect(formGroup.attributes('state')).toBeUndefined();
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategies/users_with_id_spec.js b/spec/frontend/feature_flags/components/strategies/users_with_id_spec.js
new file mode 100644
index 00000000000..460df6ef2ec
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategies/users_with_id_spec.js
@@ -0,0 +1,38 @@
+import { mount } from '@vue/test-utils';
+import { GlFormTextarea } from '@gitlab/ui';
+import UsersWithId from '~/feature_flags/components/strategies/users_with_id.vue';
+import { usersWithIdStrategy } from '../../mock_data';
+
+const DEFAULT_PROPS = {
+ strategy: usersWithIdStrategy,
+};
+
+describe('~/feature_flags/components/strategies/users_with_id.vue', () => {
+ let wrapper;
+ let textarea;
+
+ const factory = (props = {}) => mount(UsersWithId, { propsData: { ...DEFAULT_PROPS, ...props } });
+
+ beforeEach(() => {
+ wrapper = factory();
+ textarea = wrapper.find(GlFormTextarea);
+ });
+
+ afterEach(() => {
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ it('should display the current value of the parameters', () => {
+ expect(textarea.element.value).toBe(usersWithIdStrategy.parameters.userIds);
+ });
+
+ it('should emit a change event when the IDs change', () => {
+ textarea.setValue('4,5,6');
+
+ expect(wrapper.emitted('change')).toEqual([[{ parameters: { userIds: '4,5,6' } }]]);
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategy_parameters_spec.js b/spec/frontend/feature_flags/components/strategy_parameters_spec.js
new file mode 100644
index 00000000000..314fb0f21f4
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategy_parameters_spec.js
@@ -0,0 +1,83 @@
+import { shallowMount } from '@vue/test-utils';
+import { last } from 'lodash';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ ROLLOUT_STRATEGY_USER_ID,
+ ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+} from '~/feature_flags/constants';
+import Default from '~/feature_flags/components/strategies/default.vue';
+import GitlabUserList from '~/feature_flags/components/strategies/gitlab_user_list.vue';
+import PercentRollout from '~/feature_flags/components/strategies/percent_rollout.vue';
+import UsersWithId from '~/feature_flags/components/strategies/users_with_id.vue';
+import StrategyParameters from '~/feature_flags/components/strategy_parameters.vue';
+import { allUsersStrategy, userList } from '../mock_data';
+
+const DEFAULT_PROPS = {
+ strategy: allUsersStrategy,
+ userLists: [userList],
+};
+
+describe('~/feature_flags/components/strategy_parameters.vue', () => {
+ let wrapper;
+
+ const factory = (props = {}) =>
+ shallowMount(StrategyParameters, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...props,
+ },
+ });
+
+ afterEach(() => {
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ describe.each`
+ name | component
+ ${ROLLOUT_STRATEGY_ALL_USERS} | ${Default}
+ ${ROLLOUT_STRATEGY_PERCENT_ROLLOUT} | ${PercentRollout}
+ ${ROLLOUT_STRATEGY_USER_ID} | ${UsersWithId}
+ ${ROLLOUT_STRATEGY_GITLAB_USER_LIST} | ${GitlabUserList}
+ `('with $name', ({ name, component }) => {
+ let strategy;
+
+ beforeEach(() => {
+ strategy = { name, parameters: {} };
+ wrapper = factory({ strategy });
+ });
+
+ it('should show the correct component', () => {
+ expect(wrapper.contains(component)).toBe(true);
+ });
+
+ it('should emit changes from the lower component', () => {
+ const strategyParameterWrapper = wrapper.find(component);
+
+ strategyParameterWrapper.vm.$emit('change', { parameters: { foo: 'bar' } });
+
+ expect(last(wrapper.emitted('change'))).toEqual([
+ {
+ name,
+ parameters: { foo: 'bar' },
+ },
+ ]);
+ });
+ });
+
+ describe('pass through props', () => {
+ it('should pass through any extra props that might be needed', () => {
+ wrapper = factory({
+ strategy: {
+ name: ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ },
+ });
+
+ expect(wrapper.find(GitlabUserList).props('userLists')).toEqual([userList]);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategy_spec.js b/spec/frontend/feature_flags/components/strategy_spec.js
new file mode 100644
index 00000000000..7d6700ba184
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategy_spec.js
@@ -0,0 +1,264 @@
+import { mount } from '@vue/test-utils';
+import { last } from 'lodash';
+import { GlAlert, GlFormSelect, GlLink, GlToken, GlButton } from '@gitlab/ui';
+import {
+ PERCENT_ROLLOUT_GROUP_ID,
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ ROLLOUT_STRATEGY_FLEXIBLE_ROLLOUT,
+ ROLLOUT_STRATEGY_USER_ID,
+ ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+} from '~/feature_flags/constants';
+import Strategy from '~/feature_flags/components/strategy.vue';
+import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
+import StrategyParameters from '~/feature_flags/components/strategy_parameters.vue';
+
+import { userList } from '../mock_data';
+
+const provide = {
+ strategyTypeDocsPagePath: 'link-to-strategy-docs',
+ environmentsScopeDocsPath: 'link-scope-docs',
+ environmentsEndpoint: '',
+};
+
+describe('Feature flags strategy', () => {
+ let wrapper;
+
+ const findStrategyParameters = () => wrapper.find(StrategyParameters);
+ const findDocsLinks = () => wrapper.findAll(GlLink);
+
+ const factory = (
+ opts = {
+ propsData: {
+ strategy: {},
+ index: 0,
+ userLists: [userList],
+ },
+ provide,
+ },
+ ) => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ wrapper = mount(Strategy, opts);
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ describe('helper links', () => {
+ const propsData = { strategy: {}, index: 0, userLists: [userList] };
+    beforeEach(() => factory({ propsData, provide }));
+
+ it('should display 2 helper links', () => {
+ const links = findDocsLinks();
+ expect(links.exists()).toBe(true);
+ expect(links.at(0).attributes('href')).toContain('docs');
+ expect(links.at(1).attributes('href')).toContain('docs');
+ });
+ });
+
+ describe.each`
+ name
+ ${ROLLOUT_STRATEGY_ALL_USERS}
+ ${ROLLOUT_STRATEGY_PERCENT_ROLLOUT}
+ ${ROLLOUT_STRATEGY_FLEXIBLE_ROLLOUT}
+ ${ROLLOUT_STRATEGY_USER_ID}
+ ${ROLLOUT_STRATEGY_GITLAB_USER_LIST}
+ `('with strategy $name', ({ name }) => {
+ let propsData;
+ let strategy;
+
+ beforeEach(() => {
+ strategy = { name, parameters: {}, scopes: [] };
+ propsData = { strategy, index: 0 };
+ factory({ propsData, provide });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should set the select to match the strategy name', () => {
+ expect(wrapper.find(GlFormSelect).element.value).toBe(name);
+ });
+
+ it('should emit a change if the parameters component does', () => {
+ findStrategyParameters().vm.$emit('change', { name, parameters: { test: 'parameters' } });
+ expect(last(wrapper.emitted('change'))).toEqual([
+ { name, parameters: { test: 'parameters' } },
+ ]);
+ });
+ });
+
+ describe('with the gradualRolloutByUserId strategy', () => {
+ let strategy;
+
+ beforeEach(() => {
+ strategy = {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: 'default' },
+ scopes: [{ environmentScope: 'production' }],
+ };
+ const propsData = { strategy, index: 0 };
+ factory({ propsData, provide });
+ });
+
+ it('shows an alert asking users to consider using flexibleRollout instead', () => {
+ expect(wrapper.find(GlAlert).text()).toContain(
+ 'Consider using the more flexible "Percent rollout" strategy instead.',
+ );
+ });
+ });
+
+ describe('with a strategy', () => {
+ describe('with a single environment scope defined', () => {
+ let strategy;
+
+ beforeEach(() => {
+ strategy = {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: 'default' },
+ scopes: [{ environmentScope: 'production' }],
+ };
+ const propsData = { strategy, index: 0 };
+ factory({ propsData, provide });
+ });
+
+ it('should revert to all-environments scope when last scope is removed', () => {
+ const token = wrapper.find(GlToken);
+ token.vm.$emit('close');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(GlToken)).toHaveLength(0);
+ expect(last(wrapper.emitted('change'))).toEqual([
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: PERCENT_ROLLOUT_GROUP_ID },
+ scopes: [{ environmentScope: '*' }],
+ },
+ ]);
+ });
+ });
+ });
+
+ describe('with an all-environments scope defined', () => {
+ let strategy;
+
+ beforeEach(() => {
+ strategy = {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: PERCENT_ROLLOUT_GROUP_ID },
+ scopes: [{ environmentScope: '*' }],
+ };
+ const propsData = { strategy, index: 0 };
+ factory({ propsData, provide });
+ });
+
+ it('should change the parameters if a different strategy is chosen', () => {
+ const select = wrapper.find(GlFormSelect);
+ select.setValue(ROLLOUT_STRATEGY_ALL_USERS);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(last(wrapper.emitted('change'))).toEqual([
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [{ environmentScope: '*' }],
+ },
+ ]);
+ });
+ });
+
+ it('should display selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(GlToken)).toHaveLength(1);
+ expect(wrapper.find(GlToken).text()).toBe('production');
+ });
+ });
+
+ it('should display all selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ dropdown.vm.$emit('add', 'staging');
+ return wrapper.vm.$nextTick().then(() => {
+ const tokens = wrapper.findAll(GlToken);
+ expect(tokens).toHaveLength(2);
+ expect(tokens.at(0).text()).toBe('production');
+ expect(tokens.at(1).text()).toBe('staging');
+ });
+ });
+
+ it('should emit selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(last(wrapper.emitted('change'))).toEqual([
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: PERCENT_ROLLOUT_GROUP_ID },
+ scopes: [
+ { environmentScope: '*', shouldBeDestroyed: true },
+ { environmentScope: 'production' },
+ ],
+ },
+ ]);
+ });
+ });
+
+ it('should emit a delete if the delete button is clicked', () => {
+ wrapper.find(GlButton).vm.$emit('click');
+ expect(wrapper.emitted('delete')).toEqual([[]]);
+ });
+ });
+
+ describe('without scopes defined', () => {
+ beforeEach(() => {
+ const strategy = {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: PERCENT_ROLLOUT_GROUP_ID },
+ scopes: [],
+ };
+ const propsData = { strategy, index: 0 };
+ factory({ propsData, provide });
+ });
+
+ it('should display selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(GlToken)).toHaveLength(1);
+ expect(wrapper.find(GlToken).text()).toBe('production');
+ });
+ });
+
+ it('should display all selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ dropdown.vm.$emit('add', 'staging');
+ return wrapper.vm.$nextTick().then(() => {
+ const tokens = wrapper.findAll(GlToken);
+ expect(tokens).toHaveLength(2);
+ expect(tokens.at(0).text()).toBe('production');
+ expect(tokens.at(1).text()).toBe('staging');
+ });
+ });
+
+ it('should emit selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(last(wrapper.emitted('change'))).toEqual([
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: PERCENT_ROLLOUT_GROUP_ID },
+ scopes: [{ environmentScope: 'production' }],
+ },
+ ]);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/user_lists_table_spec.js b/spec/frontend/feature_flags/components/user_lists_table_spec.js
new file mode 100644
index 00000000000..d6ced3be168
--- /dev/null
+++ b/spec/frontend/feature_flags/components/user_lists_table_spec.js
@@ -0,0 +1,98 @@
+import { mount } from '@vue/test-utils';
+import * as timeago from 'timeago.js';
+import { GlModal } from '@gitlab/ui';
+import UserListsTable from '~/feature_flags/components/user_lists_table.vue';
+import { userList } from '../mock_data';
+
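+// Stub timeago so the relative timestamp renders deterministically as '2 weeks ago'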
+jest.mock('timeago.js', () => ({
+ format: jest.fn().mockReturnValue('2 weeks ago'),
+ register: jest.fn(),
+}));
+
+describe('User Lists Table', () => {
+ let wrapper;
+ let userLists;
+
+ beforeEach(() => {
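+    // Five copies of the mock list, each with a unique id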
+ userLists = new Array(5).fill(userList).map((x, i) => ({ ...x, id: i }));
+ wrapper = mount(UserListsTable, {
+ propsData: { userLists },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should display the details of a user list', () => {
+ expect(wrapper.find('[data-testid="ffUserListName"]').text()).toBe(userList.name);
+ expect(wrapper.find('[data-testid="ffUserListIds"]').text()).toBe(
+ userList.user_xids.replace(/,/g, ', '),
+ );
+ expect(wrapper.find('[data-testid="ffUserListTimestamp"]').text()).toBe('created 2 weeks ago');
+ expect(timeago.format).toHaveBeenCalledWith(userList.created_at);
+ });
+
+ it('should set the title for a tooltip on the created stamp', () => {
+ expect(wrapper.find('[data-testid="ffUserListTimestamp"]').attributes('title')).toBe(
+ 'Feb 4, 2020 8:13am GMT+0000',
+ );
+ });
+
+ it('should display a user list entry per user list', () => {
+ const lists = wrapper.findAll('[data-testid="ffUserList"]');
+ expect(lists).toHaveLength(5);
+ lists.wrappers.forEach(list => {
+ expect(list.find('[data-testid="ffUserListName"]').exists()).toBe(true);
+ expect(list.find('[data-testid="ffUserListIds"]').exists()).toBe(true);
+ expect(list.find('[data-testid="ffUserListTimestamp"]').exists()).toBe(true);
+ });
+ });
+
+ describe('edit button', () => {
+ it('should link to the path for the user list', () => {
+ expect(wrapper.find('[data-testid="edit-user-list"]').attributes('href')).toBe(userList.path);
+ });
+ });
+
+ describe('delete button', () => {
+ it('should display the confirmation modal', () => {
+ const modal = wrapper.find(GlModal);
+
+ wrapper.find('[data-testid="delete-user-list"]').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(modal.text()).toContain(`Delete ${userList.name}?`);
+ expect(modal.text()).toContain(`User list ${userList.name} will be removed.`);
+ });
+ });
+ });
+
+ describe('confirmation modal', () => {
+ let modal;
+
+ beforeEach(() => {
+ modal = wrapper.find(GlModal);
+
+ wrapper.find('button').trigger('click');
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should emit delete with list on confirmation', () => {
+ modal.find('[data-testid="modal-confirm"]').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('delete')).toEqual([[userLists[0]]]);
+ });
+ });
+
+ it('should not emit delete with list when not confirmed', () => {
+ modal.find('button').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('delete')).toBeUndefined();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/mock_data.js b/spec/frontend/feature_flags/mock_data.js
new file mode 100644
index 00000000000..ed06ea059a7
--- /dev/null
+++ b/spec/frontend/feature_flags/mock_data.js
@@ -0,0 +1,155 @@
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ ROLLOUT_STRATEGY_FLEXIBLE_ROLLOUT,
+ ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ ROLLOUT_STRATEGY_USER_ID,
+} from '~/feature_flags/constants';
+
+export const featureFlag = {
+ id: 1,
+ active: true,
+ created_at: '2018-12-12T22:07:31.401Z',
+ updated_at: '2018-12-12T22:07:31.401Z',
+ name: 'test flag',
+ description: 'flag for tests',
+ destroy_path: 'feature_flags/1',
+ update_path: 'feature_flags/1',
+ edit_path: 'feature_flags/1/edit',
+ scopes: [
+ {
+ id: 1,
+ active: true,
+ environment_scope: '*',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ },
+ ],
+ },
+ {
+ id: 2,
+ active: false,
+ environment_scope: 'production',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ },
+ ],
+ },
+ {
+ id: 3,
+ active: false,
+ environment_scope: 'review/*',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ },
+ ],
+ },
+ {
+ id: 4,
+ active: true,
+ environment_scope: 'development',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ percentage: '86',
+ },
+ },
+ ],
+ },
+ {
+ id: 5,
+ active: true,
+ environment_scope: 'development',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_FLEXIBLE_ROLLOUT,
+ parameters: {
+ rollout: '42',
+ stickiness: 'DEFAULT',
+ },
+ },
+ ],
+ },
+ ],
+};
+
+export const getRequestData = {
+ feature_flags: [featureFlag],
+ count: {
+ all: 1,
+ disabled: 1,
+ enabled: 0,
+ },
+};
+
+export const rotateData = { token: 'oP6sCNRqtRHmpy1gw2-F' };
+
+export const userList = {
+ name: 'test_users',
+ user_xids: 'user3,user4,user5',
+ id: 2,
+ iid: 2,
+ project_id: 1,
+ created_at: '2020-02-04T08:13:10.507Z',
+ updated_at: '2020-02-04T08:13:10.507Z',
+ path: '/path/to/user/list',
+ edit_path: '/path/to/user/list/edit',
+};
+
+export const userListStrategy = {
+ name: ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ parameters: {},
+ scopes: [],
+ userListId: userList.id,
+};
+
+export const percentRolloutStrategy = {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: 'default' },
+ scopes: [],
+};
+
+export const flexibleRolloutStrategy = {
+ name: ROLLOUT_STRATEGY_FLEXIBLE_ROLLOUT,
+ parameters: { rollout: '50', groupId: 'default', stickiness: 'DEFAULT' },
+ scopes: [],
+};
+
+export const usersWithIdStrategy = {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: { userIds: '1,2,3' },
+ scopes: [],
+};
+
+export const allUsersStrategy = {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [],
+};
diff --git a/spec/frontend/feature_flags/store/edit/actions_spec.js b/spec/frontend/feature_flags/store/edit/actions_spec.js
new file mode 100644
index 00000000000..9d764799d09
--- /dev/null
+++ b/spec/frontend/feature_flags/store/edit/actions_spec.js
@@ -0,0 +1,303 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import {
+ updateFeatureFlag,
+ requestUpdateFeatureFlag,
+ receiveUpdateFeatureFlagSuccess,
+ receiveUpdateFeatureFlagError,
+ fetchFeatureFlag,
+ requestFeatureFlag,
+ receiveFeatureFlagSuccess,
+ receiveFeatureFlagError,
+ toggleActive,
+} from '~/feature_flags/store/edit/actions';
+import state from '~/feature_flags/store/edit/state';
+import { mapStrategiesToRails, mapFromScopesViewModel } from '~/feature_flags/store/helpers';
+import {
+ NEW_VERSION_FLAG,
+ LEGACY_FLAG,
+ ROLLOUT_STRATEGY_ALL_USERS,
+} from '~/feature_flags/constants';
+import * as types from '~/feature_flags/store/edit/mutation_types';
+import axios from '~/lib/utils/axios_utils';
+
+jest.mock('~/lib/utils/url_utility');
+
+describe('Feature flags Edit Module actions', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state({ endpoint: 'feature_flags.json', path: '/feature_flags' });
+ });
+
+ describe('updateFeatureFlag', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagSuccess ', done => {
+ const featureFlag = {
+ name: 'feature_flag',
+ description: 'feature flag',
+ scopes: [
+ {
+ id: '1',
+ environmentScope: '*',
+ active: true,
+ shouldBeDestroyed: false,
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ },
+ ],
+ version: LEGACY_FLAG,
+ active: true,
+ };
+ mock.onPut(mockedState.endpoint, mapFromScopesViewModel(featureFlag)).replyOnce(200);
+
+ testAction(
+ updateFeatureFlag,
+ featureFlag,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUpdateFeatureFlag',
+ },
+ {
+ type: 'receiveUpdateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ it('handles new version flags as well', done => {
+ const featureFlag = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ version: NEW_VERSION_FLAG,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ id: 1,
+ scopes: [{ id: 1, environmentScope: 'environmentScope', shouldBeDestroyed: false }],
+ shouldBeDestroyed: false,
+ },
+ ],
+ };
+ mock.onPut(mockedState.endpoint, mapStrategiesToRails(featureFlag)).replyOnce(200);
+
+ testAction(
+ updateFeatureFlag,
+ featureFlag,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUpdateFeatureFlag',
+ },
+ {
+ type: 'receiveUpdateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagError ', done => {
+ mock.onPut(`${TEST_HOST}/endpoint.json`).replyOnce(500, { message: [] });
+
+ testAction(
+ updateFeatureFlag,
+ {
+ name: 'feature_flag',
+ description: 'feature flag',
+ scopes: [{ environment_scope: '*', active: true }],
+ },
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUpdateFeatureFlag',
+ },
+ {
+ type: 'receiveUpdateFeatureFlagError',
+ payload: { message: [] },
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestUpdateFeatureFlag', () => {
+ it('should commit REQUEST_UPDATE_FEATURE_FLAG mutation', done => {
+ testAction(
+ requestUpdateFeatureFlag,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_UPDATE_FEATURE_FLAG }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUpdateFeatureFlagSuccess', () => {
+ it('should commit RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS mutation', done => {
+ testAction(
+ receiveUpdateFeatureFlagSuccess,
+ null,
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUpdateFeatureFlagError', () => {
+ it('should commit RECEIVE_UPDATE_FEATURE_FLAG_ERROR mutation', done => {
+ testAction(
+ receiveUpdateFeatureFlagError,
+ 'There was an error',
+ mockedState,
+ [{ type: types.RECEIVE_UPDATE_FEATURE_FLAG_ERROR, payload: 'There was an error' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchFeatureFlag', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestFeatureFlag and receiveFeatureFlagSuccess ', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, { id: 1 });
+
+ testAction(
+ fetchFeatureFlag,
+ { id: 1 },
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestFeatureFlag',
+ },
+ {
+ type: 'receiveFeatureFlagSuccess',
+ payload: { id: 1 },
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+      it('dispatches requestFeatureFlag and receiveFeatureFlagError', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {});
+
+ testAction(
+ fetchFeatureFlag,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestFeatureFlag',
+ },
+ {
+ type: 'receiveFeatureFlagError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestFeatureFlag', () => {
+ it('should commit REQUEST_FEATURE_FLAG mutation', done => {
+ testAction(
+ requestFeatureFlag,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_FEATURE_FLAG }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFeatureFlagSuccess', () => {
+ it('should commit RECEIVE_FEATURE_FLAG_SUCCESS mutation', done => {
+ testAction(
+ receiveFeatureFlagSuccess,
+ { id: 1 },
+ mockedState,
+ [{ type: types.RECEIVE_FEATURE_FLAG_SUCCESS, payload: { id: 1 } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFeatureFlagError', () => {
+ it('should commit RECEIVE_FEATURE_FLAG_ERROR mutation', done => {
+ testAction(
+ receiveFeatureFlagError,
+ null,
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_FEATURE_FLAG_ERROR,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+  describe('toggleActive', () => {
+ it('should commit TOGGLE_ACTIVE mutation', done => {
+ testAction(
+ toggleActive,
+ true,
+ mockedState,
+ [{ type: types.TOGGLE_ACTIVE, payload: true }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/edit/mutations_spec.js b/spec/frontend/feature_flags/store/edit/mutations_spec.js
new file mode 100644
index 00000000000..1d817fb8004
--- /dev/null
+++ b/spec/frontend/feature_flags/store/edit/mutations_spec.js
@@ -0,0 +1,134 @@
+import state from '~/feature_flags/store/edit/state';
+import mutations from '~/feature_flags/store/edit/mutations';
+import * as types from '~/feature_flags/store/edit/mutation_types';
+
+describe('Feature flags Edit Module Mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state({ endpoint: 'feature_flags.json', path: '/feature_flags' });
+ });
+
+ describe('REQUEST_FEATURE_FLAG', () => {
+ it('should set isLoading to true', () => {
+ mutations[types.REQUEST_FEATURE_FLAG](stateCopy);
+
+ expect(stateCopy.isLoading).toEqual(true);
+ });
+
+ it('should set error to an empty array', () => {
+ mutations[types.REQUEST_FEATURE_FLAG](stateCopy);
+
+ expect(stateCopy.error).toEqual([]);
+ });
+ });
+
+ describe('RECEIVE_FEATURE_FLAG_SUCCESS', () => {
+ const data = {
+ name: '*',
+ description: 'All environments',
+ scopes: [{ id: 1 }],
+ iid: 5,
+ version: 'new_version_flag',
+ strategies: [
+ { id: 1, scopes: [{ environment_scope: '*' }], name: 'default', parameters: {} },
+ ],
+ };
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_FEATURE_FLAG_SUCCESS](stateCopy, data);
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to false', () => {
+ expect(stateCopy.hasError).toEqual(false);
+ });
+
+ it('should set name with the provided one', () => {
+ expect(stateCopy.name).toEqual(data.name);
+ });
+
+ it('should set description with the provided one', () => {
+ expect(stateCopy.description).toEqual(data.description);
+ });
+
+ it('should set scope with the provided one', () => {
+ expect(stateCopy.scope).toEqual(data.scope);
+ });
+
+ it('should set the iid to the provided one', () => {
+ expect(stateCopy.iid).toEqual(data.iid);
+ });
+
+ it('should set the version to the provided one', () => {
+ expect(stateCopy.version).toBe('new_version_flag');
+ });
+
+ it('should set the strategies to the provided one', () => {
+ expect(stateCopy.strategies).toEqual([
+ {
+ id: 1,
+ scopes: [{ environmentScope: '*', shouldBeDestroyed: false }],
+ name: 'default',
+ parameters: {},
+ shouldBeDestroyed: false,
+ },
+ ]);
+ });
+ });
+
+ describe('RECEIVE_FEATURE_FLAG_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_FEATURE_FLAG_ERROR](stateCopy);
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to true', () => {
+ expect(stateCopy.hasError).toEqual(true);
+ });
+ });
+
+ describe('REQUEST_UPDATE_FEATURE_FLAG', () => {
+ beforeEach(() => {
+ mutations[types.REQUEST_UPDATE_FEATURE_FLAG](stateCopy);
+ });
+
+ it('should set isSendingRequest to true', () => {
+ expect(stateCopy.isSendingRequest).toEqual(true);
+ });
+
+ it('should set error to an empty array', () => {
+ expect(stateCopy.error).toEqual([]);
+ });
+ });
+
+ describe('RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS', () => {
+ it('should set isSendingRequest to false', () => {
+ mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS](stateCopy);
+
+ expect(stateCopy.isSendingRequest).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_UPDATE_FEATURE_FLAG_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_ERROR](stateCopy, {
+ message: ['Name is required'],
+ });
+ });
+
+ it('should set isSendingRequest to false', () => {
+ expect(stateCopy.isSendingRequest).toEqual(false);
+ });
+
+ it('should set error to the given message', () => {
+ expect(stateCopy.error).toEqual(['Name is required']);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/helpers_spec.js b/spec/frontend/feature_flags/store/helpers_spec.js
new file mode 100644
index 00000000000..301b1d09fcc
--- /dev/null
+++ b/spec/frontend/feature_flags/store/helpers_spec.js
@@ -0,0 +1,514 @@
+import { uniqueId } from 'lodash';
+import {
+ mapToScopesViewModel,
+ mapFromScopesViewModel,
+ createNewEnvironmentScope,
+ mapStrategiesToViewModel,
+ mapStrategiesToRails,
+} from '~/feature_flags/store/helpers';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ ROLLOUT_STRATEGY_USER_ID,
+ PERCENT_ROLLOUT_GROUP_ID,
+ INTERNAL_ID_PREFIX,
+ DEFAULT_PERCENT_ROLLOUT,
+ LEGACY_FLAG,
+ NEW_VERSION_FLAG,
+} from '~/feature_flags/constants';
+
+describe('feature flags helpers spec', () => {
+ describe('mapToScopesViewModel', () => {
+ it('converts the data object from the Rails API into something more usable by Vue', () => {
+ const input = [
+ {
+ id: 3,
+ environment_scope: 'environment_scope',
+ active: true,
+ can_update: true,
+ protected: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ percentage: '56',
+ },
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: {
+ userIds: '123,234',
+ },
+ },
+ ],
+
+ _destroy: true,
+ },
+ ];
+
+ const expected = [
+ expect.objectContaining({
+ id: 3,
+ environmentScope: 'environment_scope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '56',
+ rolloutUserIds: '123, 234',
+ shouldBeDestroyed: true,
+ }),
+ ];
+
+ const actual = mapToScopesViewModel(input);
+
+ expect(actual).toEqual(expected);
+ });
+
+ it('returns Boolean properties even when their Rails counterparts were not provided (are `undefined`)', () => {
+ const input = [
+ {
+ id: 3,
+ environment_scope: 'environment_scope',
+ },
+ ];
+
+ const [result] = mapToScopesViewModel(input);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ active: false,
+ canUpdate: false,
+ protected: false,
+ shouldBeDestroyed: false,
+ }),
+ );
+ });
+
+ it('returns an empty array if null or undefined is provided as a parameter', () => {
+ expect(mapToScopesViewModel(null)).toEqual([]);
+ expect(mapToScopesViewModel(undefined)).toEqual([]);
+ });
+
+ describe('with user IDs per environment', () => {
+ let oldGon;
+
+ beforeEach(() => {
+ oldGon = window.gon;
+ window.gon = { features: { featureFlagsUsersPerEnvironment: true } };
+ });
+
+ afterEach(() => {
+ window.gon = oldGon;
+ });
+
+ it('sets the user IDs as a comma separated string', () => {
+ const input = [
+ {
+ id: 3,
+ environment_scope: 'environment_scope',
+ active: true,
+ can_update: true,
+ protected: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ percentage: '56',
+ },
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: {
+ userIds: '123,234',
+ },
+ },
+ ],
+
+ _destroy: true,
+ },
+ ];
+
+ const expected = [
+ {
+ id: 3,
+ environmentScope: 'environment_scope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '56',
+ rolloutUserIds: '123, 234',
+ shouldBeDestroyed: true,
+ shouldIncludeUserIds: true,
+ },
+ ];
+
+ const actual = mapToScopesViewModel(input);
+
+ expect(actual).toEqual(expected);
+ });
+ });
+ });
+
+ describe('mapFromScopesViewModel', () => {
+    it('converts the object emitted from the Vue component into an object that is in the right format to be submitted to the Rails API', () => {
+ const input = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ scopes: [
+ {
+ id: 4,
+ environmentScope: 'environmentScope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ shouldBeDestroyed: true,
+ shouldIncludeUserIds: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '48',
+ rolloutUserIds: '123, 234',
+ },
+ ],
+ };
+
+ const expected = {
+ operations_feature_flag: {
+ name: 'name',
+ description: 'description',
+ active: true,
+ version: LEGACY_FLAG,
+ scopes_attributes: [
+ {
+ id: 4,
+ environment_scope: 'environmentScope',
+ active: true,
+ can_update: true,
+ protected: true,
+ _destroy: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ groupId: PERCENT_ROLLOUT_GROUP_ID,
+ percentage: '48',
+ },
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: {
+ userIds: '123,234',
+ },
+ },
+ ],
+ },
+ ],
+ },
+ };
+
+ const actual = mapFromScopesViewModel(input);
+
+ expect(actual).toEqual(expected);
+ });
+
+ it('should strip out internal IDs', () => {
+ const input = {
+ scopes: [{ id: 3 }, { id: uniqueId(INTERNAL_ID_PREFIX) }],
+ };
+
+ const result = mapFromScopesViewModel(input);
+ const [realId, internalId] = result.operations_feature_flag.scopes_attributes;
+
+ expect(realId.id).toBe(3);
+ expect(internalId.id).toBeUndefined();
+ });
+
+ it('returns scopes_attributes as [] if param.scopes is null or undefined', () => {
+ let {
+ operations_feature_flag: { scopes_attributes: actualScopes },
+ } = mapFromScopesViewModel({ scopes: null });
+
+ expect(actualScopes).toEqual([]);
+
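+      // Parentheses are required to destructure into the already-declared `actualScopes` binding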
+ ({
+ operations_feature_flag: { scopes_attributes: actualScopes },
+ } = mapFromScopesViewModel({ scopes: undefined }));
+
+ expect(actualScopes).toEqual([]);
+ });
+ describe('with user IDs per environment', () => {
+ it('sets the user IDs as a comma separated string', () => {
+ const input = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ scopes: [
+ {
+ id: 4,
+ environmentScope: 'environmentScope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ shouldBeDestroyed: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '48',
+ rolloutUserIds: '123, 234',
+ shouldIncludeUserIds: true,
+ },
+ ],
+ };
+
+ const expected = {
+ operations_feature_flag: {
+ name: 'name',
+ description: 'description',
+ version: LEGACY_FLAG,
+ active: true,
+ scopes_attributes: [
+ {
+ id: 4,
+ environment_scope: 'environmentScope',
+ active: true,
+ can_update: true,
+ protected: true,
+ _destroy: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ groupId: PERCENT_ROLLOUT_GROUP_ID,
+ percentage: '48',
+ },
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: {
+ userIds: '123,234',
+ },
+ },
+ ],
+ },
+ ],
+ },
+ };
+
+ const actual = mapFromScopesViewModel(input);
+
+ expect(actual).toEqual(expected);
+ });
+ });
+ });
+
+ describe('createNewEnvironmentScope', () => {
+ it('should return a new environment scope object populated with the default options', () => {
+ const expected = {
+ environmentScope: '',
+ active: false,
+ id: expect.stringContaining(INTERNAL_ID_PREFIX),
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ };
+
+ const actual = createNewEnvironmentScope();
+
+ expect(actual).toEqual(expected);
+ });
+
+ it('should return a new environment scope object with overrides applied', () => {
+ const overrides = {
+ environmentScope: 'environmentScope',
+ active: true,
+ };
+
+ const expected = {
+ environmentScope: 'environmentScope',
+ active: true,
+ id: expect.stringContaining(INTERNAL_ID_PREFIX),
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ };
+
+ const actual = createNewEnvironmentScope(overrides);
+
+ expect(actual).toEqual(expected);
+ });
+
+ it('sets canUpdate and protected when called with featureFlagPermissions=true', () => {
+ expect(createNewEnvironmentScope({}, true)).toEqual(
+ expect.objectContaining({
+ canUpdate: true,
+ protected: false,
+ }),
+ );
+ });
+ });
+
+ describe('mapStrategiesToViewModel', () => {
+ it('should map rails casing to view model casing', () => {
+ expect(
+ mapStrategiesToViewModel([
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ scopes: [
+ {
+ environment_scope: '*',
+ id: '1',
+ },
+ ],
+ },
+ ]),
+ ).toEqual([
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ shouldBeDestroyed: false,
+ scopes: [
+ {
+ shouldBeDestroyed: false,
+ environmentScope: '*',
+ id: '1',
+ },
+ ],
+ },
+ ]);
+ });
+
+ it('inserts spaces between user ids', () => {
+ const strategy = mapStrategiesToViewModel([
+ {
+ id: '1',
+ name: 'userWithId',
+ parameters: { userIds: 'user1,user2,user3' },
+ scopes: [],
+ },
+ ])[0];
+
+ expect(strategy.parameters).toEqual({ userIds: 'user1, user2, user3' });
+ });
+ });
+
+ describe('mapStrategiesToRails', () => {
+    it('should map view model casing to rails casing', () => {
+ expect(
+ mapStrategiesToRails({
+ name: 'test',
+ description: 'test description',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies: [
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ shouldBeDestroyed: true,
+ scopes: [
+ {
+ environmentScope: '*',
+ id: '1',
+ shouldBeDestroyed: true,
+ },
+ ],
+ },
+ ],
+ }),
+ ).toEqual({
+ operations_feature_flag: {
+ name: 'test',
+ description: 'test description',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies_attributes: [
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ _destroy: true,
+ scopes_attributes: [
+ {
+ environment_scope: '*',
+ id: '1',
+ _destroy: true,
+ },
+ ],
+ },
+ ],
+ },
+ });
+ });
+
+ it('should insert a default * scope if there are none', () => {
+ expect(
+ mapStrategiesToRails({
+ name: 'test',
+ description: 'test description',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies: [
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ scopes: [],
+ },
+ ],
+ }),
+ ).toEqual({
+ operations_feature_flag: {
+ name: 'test',
+ description: 'test description',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies_attributes: [
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ scopes_attributes: [
+ {
+ environment_scope: '*',
+ },
+ ],
+ },
+ ],
+ },
+ });
+ });
+
+ it('removes white space between user ids', () => {
+ const result = mapStrategiesToRails({
+ name: 'test',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies: [
+ {
+ id: '1',
+ name: 'userWithId',
+ parameters: { userIds: 'user1, user2, user3' },
+ scopes: [],
+ },
+ ],
+ });
+
+ const strategyAttrs = result.operations_feature_flag.strategies_attributes[0];
+
+ expect(strategyAttrs.parameters).toEqual({ userIds: 'user1,user2,user3' });
+ });
+
+ it('preserves the value of active', () => {
+ const result = mapStrategiesToRails({
+ name: 'test',
+ version: NEW_VERSION_FLAG,
+ active: false,
+ strategies: [],
+ });
+
+ expect(result.operations_feature_flag.active).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/index/actions_spec.js b/spec/frontend/feature_flags/store/index/actions_spec.js
new file mode 100644
index 00000000000..d223bb2c292
--- /dev/null
+++ b/spec/frontend/feature_flags/store/index/actions_spec.js
@@ -0,0 +1,563 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import Api from '~/api';
+import {
+ requestFeatureFlags,
+ receiveFeatureFlagsSuccess,
+ receiveFeatureFlagsError,
+ fetchFeatureFlags,
+ setFeatureFlagsOptions,
+ rotateInstanceId,
+ requestRotateInstanceId,
+ receiveRotateInstanceIdSuccess,
+ receiveRotateInstanceIdError,
+ toggleFeatureFlag,
+ updateFeatureFlag,
+ receiveUpdateFeatureFlagSuccess,
+ receiveUpdateFeatureFlagError,
+ requestUserLists,
+ receiveUserListsSuccess,
+ receiveUserListsError,
+ fetchUserLists,
+ deleteUserList,
+ receiveDeleteUserListError,
+ clearAlert,
+} from '~/feature_flags/store/index/actions';
+import { mapToScopesViewModel } from '~/feature_flags/store/helpers';
+import state from '~/feature_flags/store/index/state';
+import * as types from '~/feature_flags/store/index/mutation_types';
+import axios from '~/lib/utils/axios_utils';
+import { getRequestData, rotateData, featureFlag, userList } from '../../mock_data';
+
+jest.mock('~/api.js');
+
+describe('Feature flags actions', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state({});
+ });
+
+ describe('setFeatureFlagsOptions', () => {
+ it('should commit SET_FEATURE_FLAGS_OPTIONS mutation', done => {
+ testAction(
+ setFeatureFlagsOptions,
+ { page: '1', scope: 'all' },
+ mockedState,
+ [{ type: types.SET_FEATURE_FLAGS_OPTIONS, payload: { page: '1', scope: 'all' } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchFeatureFlags', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestFeatureFlags and receiveFeatureFlagsSuccess ', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, getRequestData, {});
+
+ testAction(
+ fetchFeatureFlags,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestFeatureFlags',
+ },
+ {
+ payload: { data: getRequestData, headers: {} },
+ type: 'receiveFeatureFlagsSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ it('dispatches requestFeatureFlags and receiveFeatureFlagsError ', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {});
+
+ testAction(
+ fetchFeatureFlags,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestFeatureFlags',
+ },
+ {
+ type: 'receiveFeatureFlagsError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestFeatureFlags', () => {
+    it('should commit REQUEST_FEATURE_FLAGS mutation', done => {
+ testAction(
+ requestFeatureFlags,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_FEATURE_FLAGS }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFeatureFlagsSuccess', () => {
+ it('should commit RECEIVE_FEATURE_FLAGS_SUCCESS mutation', done => {
+ testAction(
+ receiveFeatureFlagsSuccess,
+ { data: getRequestData, headers: {} },
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_FEATURE_FLAGS_SUCCESS,
+ payload: { data: getRequestData, headers: {} },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFeatureFlagsError', () => {
+ it('should commit RECEIVE_FEATURE_FLAGS_ERROR mutation', done => {
+ testAction(
+ receiveFeatureFlagsError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_FEATURE_FLAGS_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchUserLists', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserLists.mockResolvedValue({ data: [userList], headers: {} });
+ });
+
+ describe('success', () => {
+ it('dispatches requestUserLists and receiveUserListsSuccess ', done => {
+ testAction(
+ fetchUserLists,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUserLists',
+ },
+ {
+ payload: { data: [userList], headers: {} },
+ type: 'receiveUserListsSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ it('dispatches requestUserLists and receiveUserListsError ', done => {
+ Api.fetchFeatureFlagUserLists.mockRejectedValue();
+
+ testAction(
+ fetchUserLists,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUserLists',
+ },
+ {
+ type: 'receiveUserListsError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestUserLists', () => {
+    it('should commit REQUEST_USER_LISTS mutation', done => {
+ testAction(
+ requestUserLists,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_USER_LISTS }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUserListsSuccess', () => {
+ it('should commit RECEIVE_USER_LISTS_SUCCESS mutation', done => {
+ testAction(
+ receiveUserListsSuccess,
+ { data: [userList], headers: {} },
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_USER_LISTS_SUCCESS,
+ payload: { data: [userList], headers: {} },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUserListsError', () => {
+ it('should commit RECEIVE_USER_LISTS_ERROR mutation', done => {
+ testAction(
+ receiveUserListsError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_USER_LISTS_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('rotateInstanceId', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.rotateEndpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestRotateInstanceId and receiveRotateInstanceIdSuccess ', done => {
+ mock.onPost(`${TEST_HOST}/endpoint.json`).replyOnce(200, rotateData, {});
+
+ testAction(
+ rotateInstanceId,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestRotateInstanceId',
+ },
+ {
+ payload: { data: rotateData, headers: {} },
+ type: 'receiveRotateInstanceIdSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ it('dispatches requestRotateInstanceId and receiveRotateInstanceIdError ', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {});
+
+ testAction(
+ rotateInstanceId,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestRotateInstanceId',
+ },
+ {
+ type: 'receiveRotateInstanceIdError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestRotateInstanceId', () => {
+ it('should commit REQUEST_ROTATE_INSTANCE_ID mutation', done => {
+ testAction(
+ requestRotateInstanceId,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_ROTATE_INSTANCE_ID }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveRotateInstanceIdSuccess', () => {
+ it('should commit RECEIVE_ROTATE_INSTANCE_ID_SUCCESS mutation', done => {
+ testAction(
+ receiveRotateInstanceIdSuccess,
+ { data: rotateData, headers: {} },
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_ROTATE_INSTANCE_ID_SUCCESS,
+ payload: { data: rotateData, headers: {} },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveRotateInstanceIdError', () => {
+ it('should commit RECEIVE_ROTATE_INSTANCE_ID_ERROR mutation', done => {
+ testAction(
+ receiveRotateInstanceIdError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_ROTATE_INSTANCE_ID_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggleFeatureFlag', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.featureFlags = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+ describe('success', () => {
+ it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagSuccess', done => {
+ mock.onPut(featureFlag.update_path).replyOnce(200, featureFlag, {});
+
+ testAction(
+ toggleFeatureFlag,
+ featureFlag,
+ mockedState,
+ [],
+ [
+ {
+ type: 'updateFeatureFlag',
+ payload: featureFlag,
+ },
+ {
+ payload: featureFlag,
+ type: 'receiveUpdateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ describe('error', () => {
+    it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagError', done => {
+ mock.onPut(featureFlag.update_path).replyOnce(500);
+
+ testAction(
+ toggleFeatureFlag,
+ featureFlag,
+ mockedState,
+ [],
+ [
+ {
+ type: 'updateFeatureFlag',
+ payload: featureFlag,
+ },
+ {
+ payload: featureFlag.id,
+ type: 'receiveUpdateFeatureFlagError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+ describe('updateFeatureFlag', () => {
+ beforeEach(() => {
+ mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ ...f,
+ scopes: mapToScopesViewModel(f.scopes || []),
+ }));
+ });
+
+ it('commits UPDATE_FEATURE_FLAG with the given flag', done => {
+ testAction(
+ updateFeatureFlag,
+ featureFlag,
+ mockedState,
+ [
+ {
+ type: 'UPDATE_FEATURE_FLAG',
+ payload: featureFlag,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ describe('receiveUpdateFeatureFlagSuccess', () => {
+ beforeEach(() => {
+ mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ ...f,
+ scopes: mapToScopesViewModel(f.scopes || []),
+ }));
+ });
+
+ it('commits RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS with the given flag', done => {
+ testAction(
+ receiveUpdateFeatureFlagSuccess,
+ featureFlag,
+ mockedState,
+ [
+ {
+ type: 'RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS',
+ payload: featureFlag,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ describe('receiveUpdateFeatureFlagError', () => {
+ beforeEach(() => {
+ mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ ...f,
+ scopes: mapToScopesViewModel(f.scopes || []),
+ }));
+ });
+
+ it('commits RECEIVE_UPDATE_FEATURE_FLAG_ERROR with the given flag id', done => {
+ testAction(
+ receiveUpdateFeatureFlagError,
+ featureFlag.id,
+ mockedState,
+ [
+ {
+ type: 'RECEIVE_UPDATE_FEATURE_FLAG_ERROR',
+ payload: featureFlag.id,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ describe('deleteUserList', () => {
+ beforeEach(() => {
+ mockedState.userLists = [userList];
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ Api.deleteFeatureFlagUserList.mockResolvedValue();
+ });
+
+ it('should refresh the user lists', done => {
+ testAction(
+ deleteUserList,
+ userList,
+ mockedState,
+ [],
+ [{ type: 'requestDeleteUserList', payload: userList }, { type: 'fetchUserLists' }],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ Api.deleteFeatureFlagUserList.mockRejectedValue({ response: { data: 'some error' } });
+ });
+
+ it('should dispatch receiveDeleteUserListError', done => {
+ testAction(
+ deleteUserList,
+ userList,
+ mockedState,
+ [],
+ [
+ { type: 'requestDeleteUserList', payload: userList },
+ {
+ type: 'receiveDeleteUserListError',
+ payload: { list: userList, error: 'some error' },
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('receiveDeleteUserListError', () => {
+ it('should commit RECEIVE_DELETE_USER_LIST_ERROR with the given list', done => {
+ testAction(
+ receiveDeleteUserListError,
+ { list: userList, error: 'mock error' },
+ mockedState,
+ [
+ {
+ type: 'RECEIVE_DELETE_USER_LIST_ERROR',
+ payload: { list: userList, error: 'mock error' },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('clearAlert', () => {
+ it('should commit RECEIVE_CLEAR_ALERT', done => {
+ const alertIndex = 3;
+
+ testAction(
+ clearAlert,
+ alertIndex,
+ mockedState,
+ [{ type: 'RECEIVE_CLEAR_ALERT', payload: alertIndex }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/index/mutations_spec.js b/spec/frontend/feature_flags/store/index/mutations_spec.js
new file mode 100644
index 00000000000..376c7b069fa
--- /dev/null
+++ b/spec/frontend/feature_flags/store/index/mutations_spec.js
@@ -0,0 +1,307 @@
+import state from '~/feature_flags/store/index/state';
+import mutations from '~/feature_flags/store/index/mutations';
+import * as types from '~/feature_flags/store/index/mutation_types';
+import { mapToScopesViewModel } from '~/feature_flags/store/helpers';
+import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
+import { getRequestData, rotateData, featureFlag, userList } from '../../mock_data';
+
+describe('Feature flags store Mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state({});
+ });
+
+ describe('SET_FEATURE_FLAGS_OPTIONS', () => {
+ it('should set provided options', () => {
+ mutations[types.SET_FEATURE_FLAGS_OPTIONS](stateCopy, { page: '1', scope: 'all' });
+
+ expect(stateCopy.options).toEqual({ page: '1', scope: 'all' });
+ });
+ });
+ describe('REQUEST_FEATURE_FLAGS', () => {
+ it('should set isLoading to true', () => {
+ mutations[types.REQUEST_FEATURE_FLAGS](stateCopy);
+
+ expect(stateCopy.isLoading).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_FEATURE_FLAGS_SUCCESS', () => {
+ const headers = {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '5',
+ };
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_FEATURE_FLAGS_SUCCESS](stateCopy, { data: getRequestData, headers });
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to false', () => {
+ expect(stateCopy.hasError).toEqual(false);
+ });
+
+ it('should set featureFlags with the transformed data', () => {
+ const expected = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+
+ expect(stateCopy.featureFlags).toEqual(expected);
+ });
+
+ it('should set count with the given data', () => {
+ expect(stateCopy.count.featureFlags).toEqual(37);
+ });
+
+ it('should set pagination', () => {
+ expect(stateCopy.pageInfo.featureFlags).toEqual(
+ parseIntPagination(normalizeHeaders(headers)),
+ );
+ });
+ });
+
+ describe('RECEIVE_FEATURE_FLAGS_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_FEATURE_FLAGS_ERROR](stateCopy);
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to true', () => {
+ expect(stateCopy.hasError).toEqual(true);
+ });
+ });
+
+ describe('REQUEST_USER_LISTS', () => {
+ it('sets isLoading to true', () => {
+ mutations[types.REQUEST_USER_LISTS](stateCopy);
+ expect(stateCopy.isLoading).toBe(true);
+ });
+ });
+
+ describe('RECEIVE_USER_LISTS_SUCCESS', () => {
+ const headers = {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '5',
+ };
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LISTS_SUCCESS](stateCopy, { data: [userList], headers });
+ });
+
+ it('sets isLoading to false', () => {
+ expect(stateCopy.isLoading).toBe(false);
+ });
+
+ it('sets userLists to the received userLists', () => {
+ expect(stateCopy.userLists).toEqual([userList]);
+ });
+
+    it('sets pagination info for user lists', () => {
+ expect(stateCopy.pageInfo.userLists).toEqual(parseIntPagination(normalizeHeaders(headers)));
+ });
+
+ it('sets the count for user lists', () => {
+ expect(stateCopy.count.userLists).toBe(parseInt(headers['X-TOTAL'], 10));
+ });
+ });
+
+ describe('RECEIVE_USER_LISTS_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LISTS_ERROR](stateCopy);
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to true', () => {
+ expect(stateCopy.hasError).toEqual(true);
+ });
+ });
+
+ describe('REQUEST_ROTATE_INSTANCE_ID', () => {
+ beforeEach(() => {
+ mutations[types.REQUEST_ROTATE_INSTANCE_ID](stateCopy);
+ });
+
+ it('should set isRotating to true', () => {
+ expect(stateCopy.isRotating).toBe(true);
+ });
+
+ it('should set hasRotateError to false', () => {
+ expect(stateCopy.hasRotateError).toBe(false);
+ });
+ });
+
+ describe('RECEIVE_ROTATE_INSTANCE_ID_SUCCESS', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_ROTATE_INSTANCE_ID_SUCCESS](stateCopy, { data: rotateData });
+ });
+
+ it('should set the instance id to the received data', () => {
+ expect(stateCopy.instanceId).toBe(rotateData.token);
+ });
+
+ it('should set isRotating to false', () => {
+ expect(stateCopy.isRotating).toBe(false);
+ });
+
+ it('should set hasRotateError to false', () => {
+ expect(stateCopy.hasRotateError).toBe(false);
+ });
+ });
+
+ describe('RECEIVE_ROTATE_INSTANCE_ID_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_ROTATE_INSTANCE_ID_ERROR](stateCopy);
+ });
+
+ it('should set isRotating to false', () => {
+ expect(stateCopy.isRotating).toBe(false);
+ });
+
+ it('should set hasRotateError to true', () => {
+ expect(stateCopy.hasRotateError).toBe(true);
+ });
+ });
+
+ describe('UPDATE_FEATURE_FLAG', () => {
+ beforeEach(() => {
+ stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+ stateCopy.count = { featureFlags: 1, userLists: 0 };
+
+ mutations[types.UPDATE_FEATURE_FLAG](stateCopy, {
+ ...featureFlag,
+ scopes: mapToScopesViewModel(featureFlag.scopes || []),
+ active: false,
+ });
+ });
+
+ it('should update the flag with the matching ID', () => {
+ expect(stateCopy.featureFlags).toEqual([
+ {
+ ...featureFlag,
+ scopes: mapToScopesViewModel(featureFlag.scopes || []),
+ active: false,
+ },
+ ]);
+ });
+ });
+
+ describe('RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS', () => {
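+    // Helper: seeds the store with the fixture flags merged with `flagState`, sets the count,
+    // then applies the success mutation with `featureFlagUpdateParams` merged into the fixture flag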
+ const runUpdate = (stateCount, flagState, featureFlagUpdateParams) => {
+ stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ ...flagState,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+ stateCopy.count.featureFlags = stateCount;
+
+ mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS](stateCopy, {
+ ...featureFlag,
+ ...featureFlagUpdateParams,
+ });
+ };
+
+ it('updates the flag with the matching ID', () => {
+ runUpdate({ all: 1, enabled: 1, disabled: 0 }, { active: true }, { active: false });
+
+ expect(stateCopy.featureFlags).toEqual([
+ {
+ ...featureFlag,
+ scopes: mapToScopesViewModel(featureFlag.scopes || []),
+ active: false,
+ },
+ ]);
+ });
+ });
+
+ describe('RECEIVE_UPDATE_FEATURE_FLAG_ERROR', () => {
+ beforeEach(() => {
+ stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+ stateCopy.count = { enabled: 1, disabled: 0 };
+
+ mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_ERROR](stateCopy, featureFlag.id);
+ });
+
+ it('should update the flag with the matching ID, toggling active', () => {
+ expect(stateCopy.featureFlags).toEqual([
+ {
+ ...featureFlag,
+ scopes: mapToScopesViewModel(featureFlag.scopes || []),
+ active: false,
+ },
+ ]);
+ });
+ });
+
+ describe('REQUEST_DELETE_USER_LIST', () => {
+ beforeEach(() => {
+ stateCopy.userLists = [userList];
+ mutations[types.REQUEST_DELETE_USER_LIST](stateCopy, userList);
+ });
+
+ it('should remove the deleted list', () => {
+ expect(stateCopy.userLists).not.toContain(userList);
+ });
+ });
+
+ describe('RECEIVE_DELETE_USER_LIST_ERROR', () => {
+ beforeEach(() => {
+ stateCopy.userLists = [];
+ mutations[types.RECEIVE_DELETE_USER_LIST_ERROR](stateCopy, {
+ list: userList,
+ error: 'some error',
+ });
+ });
+
+ it('should set isLoading to false and hasError to false', () => {
+ expect(stateCopy.isLoading).toBe(false);
+ expect(stateCopy.hasError).toBe(false);
+ });
+
+ it('should add the user list back to the list of user lists', () => {
+ expect(stateCopy.userLists).toContain(userList);
+ });
+ });
+
+ describe('RECEIVE_CLEAR_ALERT', () => {
+ it('clears the alert', () => {
+ stateCopy.alerts = ['a server error'];
+
+ mutations[types.RECEIVE_CLEAR_ALERT](stateCopy, 0);
+
+ expect(stateCopy.alerts).toEqual([]);
+ });
+
+ it('clears the alert at the specified index', () => {
+ stateCopy.alerts = ['a server error', 'another error', 'final error'];
+
+ mutations[types.RECEIVE_CLEAR_ALERT](stateCopy, 1);
+
+ expect(stateCopy.alerts).toEqual(['a server error', 'final error']);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/new/actions_spec.js b/spec/frontend/feature_flags/store/new/actions_spec.js
new file mode 100644
index 00000000000..130c5235aa0
--- /dev/null
+++ b/spec/frontend/feature_flags/store/new/actions_spec.js
@@ -0,0 +1,192 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import {
+ createFeatureFlag,
+ requestCreateFeatureFlag,
+ receiveCreateFeatureFlagSuccess,
+ receiveCreateFeatureFlagError,
+} from '~/feature_flags/store/new/actions';
+import state from '~/feature_flags/store/new/state';
+import * as types from '~/feature_flags/store/new/mutation_types';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ LEGACY_FLAG,
+ NEW_VERSION_FLAG,
+} from '~/feature_flags/constants';
+import { mapFromScopesViewModel, mapStrategiesToRails } from '~/feature_flags/store/helpers';
+import axios from '~/lib/utils/axios_utils';
+
+jest.mock('~/lib/utils/url_utility');
+
+describe('Feature flags New Module Actions', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state({ endpoint: 'feature_flags.json', path: '/feature_flags' });
+ });
+
+ describe('createFeatureFlag', () => {
+ let mock;
+
+ const actionParams = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ version: LEGACY_FLAG,
+ scopes: [
+ {
+ id: 1,
+ environmentScope: 'environmentScope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ shouldBeDestroyed: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ },
+ ],
+ };
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagSuccess ', done => {
+ const convertedActionParams = mapFromScopesViewModel(actionParams);
+
+ mock.onPost(`${TEST_HOST}/endpoint.json`, convertedActionParams).replyOnce(200);
+
+ testAction(
+ createFeatureFlag,
+ actionParams,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestCreateFeatureFlag',
+ },
+ {
+ type: 'receiveCreateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+
+ it('sends strategies for new style feature flags', done => {
+ const newVersionFlagParams = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ version: NEW_VERSION_FLAG,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ id: 1,
+ scopes: [{ id: 1, environmentScope: 'environmentScope', shouldBeDestroyed: false }],
+ shouldBeDestroyed: false,
+ },
+ ],
+ };
+ mock
+ .onPost(`${TEST_HOST}/endpoint.json`, mapStrategiesToRails(newVersionFlagParams))
+ .replyOnce(200);
+
+ testAction(
+ createFeatureFlag,
+ newVersionFlagParams,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestCreateFeatureFlag',
+ },
+ {
+ type: 'receiveCreateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagError ', done => {
+ const convertedActionParams = mapFromScopesViewModel(actionParams);
+
+ mock
+ .onPost(`${TEST_HOST}/endpoint.json`, convertedActionParams)
+ .replyOnce(500, { message: [] });
+
+ testAction(
+ createFeatureFlag,
+ actionParams,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestCreateFeatureFlag',
+ },
+ {
+ type: 'receiveCreateFeatureFlagError',
+ payload: { message: [] },
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestCreateFeatureFlag', () => {
+ it('should commit REQUEST_CREATE_FEATURE_FLAG mutation', done => {
+ testAction(
+ requestCreateFeatureFlag,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_CREATE_FEATURE_FLAG }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveCreateFeatureFlagSuccess', () => {
+ it('should commit RECEIVE_CREATE_FEATURE_FLAG_SUCCESS mutation', done => {
+ testAction(
+ receiveCreateFeatureFlagSuccess,
+ null,
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_CREATE_FEATURE_FLAG_SUCCESS,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveCreateFeatureFlagError', () => {
+ it('should commit RECEIVE_CREATE_FEATURE_FLAG_ERROR mutation', done => {
+ testAction(
+ receiveCreateFeatureFlagError,
+ 'There was an error',
+ mockedState,
+ [{ type: types.RECEIVE_CREATE_FEATURE_FLAG_ERROR, payload: 'There was an error' }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/new/mutations_spec.js b/spec/frontend/feature_flags/store/new/mutations_spec.js
new file mode 100644
index 00000000000..e8609a6d116
--- /dev/null
+++ b/spec/frontend/feature_flags/store/new/mutations_spec.js
@@ -0,0 +1,49 @@
+import state from '~/feature_flags/store/new/state';
+import mutations from '~/feature_flags/store/new/mutations';
+import * as types from '~/feature_flags/store/new/mutation_types';
+
+describe('Feature flags New Module Mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state({ endpoint: 'feature_flags.json', path: 'feature_flags' });
+ });
+
+ describe('REQUEST_CREATE_FEATURE_FLAG', () => {
+ it('should set isSendingRequest to true', () => {
+ mutations[types.REQUEST_CREATE_FEATURE_FLAG](stateCopy);
+
+ expect(stateCopy.isSendingRequest).toEqual(true);
+ });
+
+ it('should set error to an empty array', () => {
+ mutations[types.REQUEST_CREATE_FEATURE_FLAG](stateCopy);
+
+ expect(stateCopy.error).toEqual([]);
+ });
+ });
+
+ describe('RECEIVE_CREATE_FEATURE_FLAG_SUCCESS', () => {
+ it('should set isSendingRequest to false', () => {
+ mutations[types.RECEIVE_CREATE_FEATURE_FLAG_SUCCESS](stateCopy);
+
+ expect(stateCopy.isSendingRequest).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_CREATE_FEATURE_FLAG_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_CREATE_FEATURE_FLAG_ERROR](stateCopy, {
+ message: ['Name is required'],
+ });
+ });
+
+ it('should set isSendingRequest to false', () => {
+ expect(stateCopy.isSendingRequest).toEqual(false);
+ });
+
+    it('should set error to the given message', () => {
+ expect(stateCopy.error).toEqual(['Name is required']);
+ });
+ });
+});
diff --git a/spec/frontend/fixtures/blob.rb b/spec/frontend/fixtures/blob.rb
index 712c3bd9b23..a365ee805af 100644
--- a/spec/frontend/fixtures/blob.rb
+++ b/spec/frontend/fixtures/blob.rb
@@ -33,4 +33,14 @@ RSpec.describe Projects::BlobController, '(JavaScript fixtures)', type: :control
expect(response).to be_successful
end
+
+ it 'blob/show_readme.html' do
+ get(:show, params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: 'master/README.md'
+ })
+
+ expect(response).to be_successful
+ end
end
diff --git a/spec/frontend/fixtures/releases.rb b/spec/frontend/fixtures/releases.rb
new file mode 100644
index 00000000000..dc282b49be5
--- /dev/null
+++ b/spec/frontend/fixtures/releases.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Releases (JavaScript fixtures)' do
+ include ApiHelpers
+ include JavaScriptFixturesHelpers
+
+ let_it_be(:admin) { create(:admin, username: 'administrator', email: 'admin@example.gitlab.com') }
+ let_it_be(:namespace) { create(:namespace, path: 'releases-namespace') }
+ let_it_be(:project) { create(:project, :repository, namespace: namespace, path: 'releases-project') }
+
+ let_it_be(:milestone_12_3) do
+ create(:milestone,
+ id: 123,
+ project: project,
+ title: '12.3',
+ description: 'The 12.3 milestone',
+ start_date: Time.zone.parse('2018-12-10'),
+ due_date: Time.zone.parse('2019-01-10'))
+ end
+
+ let_it_be(:milestone_12_4) do
+ create(:milestone,
+ id: 124,
+ project: project,
+ title: '12.4',
+ description: 'The 12.4 milestone',
+ start_date: Time.zone.parse('2019-01-10'),
+ due_date: Time.zone.parse('2019-02-10'))
+ end
+
+ let_it_be(:open_issues_12_3) do
+ create_list(:issue, 2, milestone: milestone_12_3, project: project)
+ end
+
+ let_it_be(:closed_issues_12_3) do
+ create_list(:issue, 3, :closed, milestone: milestone_12_3, project: project)
+ end
+
+ let_it_be(:open_issues_12_4) do
+ create_list(:issue, 3, milestone: milestone_12_4, project: project)
+ end
+
+ let_it_be(:closed_issues_12_4) do
+ create_list(:issue, 1, :closed, milestone: milestone_12_4, project: project)
+ end
+
+ let_it_be(:release) do
+ create(:release,
+ milestones: [milestone_12_3, milestone_12_4],
+ project: project,
+ tag: 'v1.1',
+ name: 'The first release',
+ author: admin,
+ description: 'Best. Release. **Ever.** :rocket:',
+ created_at: Time.zone.parse('2018-12-3'),
+ released_at: Time.zone.parse('2018-12-10'))
+ end
+
+ let_it_be(:evidence) do
+ create(:evidence,
+ release: release,
+ collected_at: Time.zone.parse('2018-12-03'))
+ end
+
+ let_it_be(:other_link) do
+ create(:release_link,
+ id: 10,
+ release: release,
+ name: 'linux-amd64 binaries',
+ filepath: '/binaries/linux-amd64',
+ url: 'https://downloads.example.com/bin/gitlab-linux-amd64')
+ end
+
+ let_it_be(:runbook_link) do
+ create(:release_link,
+ id: 11,
+ release: release,
+ name: 'Runbook',
+ url: "#{release.project.web_url}/runbook",
+ link_type: :runbook)
+ end
+
+ let_it_be(:package_link) do
+ create(:release_link,
+ id: 12,
+ release: release,
+ name: 'Package',
+ url: 'https://example.com/package',
+ link_type: :package)
+ end
+
+ let_it_be(:image_link) do
+ create(:release_link,
+ id: 13,
+ release: release,
+ name: 'Image',
+ url: 'https://example.com/image',
+ link_type: :image)
+ end
+
+ after(:all) do
+ remove_repository(project)
+ end
+
+ describe API::Releases, type: :request do
+ before(:all) do
+ clean_frontend_fixtures('api/releases/')
+ end
+
+ it 'api/releases/release.json' do
+ get api("/projects/#{project.id}/releases/#{release.tag}", admin)
+
+ expect(response).to be_successful
+ end
+ end
+
+ describe GraphQL::Query, type: :request do
+ include GraphqlHelpers
+
+ all_releases_query_path = 'releases/queries/all_releases.query.graphql'
+ one_release_query_path = 'releases/queries/one_release.query.graphql'
+ fragment_paths = ['releases/queries/release.fragment.graphql']
+
+ before(:all) do
+ clean_frontend_fixtures('graphql/releases/')
+ end
+
+ it "graphql/#{all_releases_query_path}.json" do
+ query = get_graphql_query_as_string(all_releases_query_path, fragment_paths)
+
+ post_graphql(query, current_user: admin, variables: { fullPath: project.full_path })
+
+ expect_graphql_errors_to_be_empty
+ end
+
+ it "graphql/#{one_release_query_path}.json" do
+ query = get_graphql_query_as_string(one_release_query_path, fragment_paths)
+
+ post_graphql(query, current_user: admin, variables: { fullPath: project.full_path, tagName: release.tag })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/snippet.rb b/spec/frontend/fixtures/snippet.rb
index 26b088bbd88..2e67a2ecfe3 100644
--- a/spec/frontend/fixtures/snippet.rb
+++ b/spec/frontend/fixtures/snippet.rb
@@ -17,7 +17,6 @@ RSpec.describe SnippetsController, '(JavaScript fixtures)', type: :controller do
end
before do
- stub_feature_flags(snippets_vue: false)
sign_in(admin)
allow(Discussion).to receive(:build_discussion_id).and_return(['discussionid:ceterumcenseo'])
end
diff --git a/spec/frontend/fixtures/static/issue_sidebar_label.html b/spec/frontend/fixtures/static/issue_sidebar_label.html
deleted file mode 100644
index ec8fb30f219..00000000000
--- a/spec/frontend/fixtures/static/issue_sidebar_label.html
+++ /dev/null
@@ -1,26 +0,0 @@
-<div class="block labels">
-<div class="sidebar-collapsed-icon js-sidebar-labels-tooltip"></div>
-<div class="title hide-collapsed">
-<a class="edit-link float-right" href="#">
-Edit
-</a>
-</div>
-<div class="selectbox hide-collapsed" style="display: none;">
-<div class="dropdown">
-<button class="dropdown-menu-toggle js-label-select js-multiselect" data-ability-name="issue" data-field-name="issue[label_names][]" data-issue-update="/root/test/issues/2.json" data-labels="/root/test/labels.json" data-project-id="12" data-show-any="true" data-show-no="true" data-toggle="dropdown" type="button">
-<span class="dropdown-toggle-text">
-Label
-</span>
-<i class="fa fa-chevron-down"></i>
-</button>
-<div class="dropdown-menu dropdown-select dropdown-menu-paging dropdown-menu-labels dropdown-menu-selectable">
-<div class="dropdown-page-one">
-<div class="dropdown-content"></div>
-<div class="dropdown-loading">
-<i class="fa fa-spinner fa-spin"></i>
-</div>
-</div>
-</div>
-</div>
-</div>
-</div>
diff --git a/spec/frontend/fixtures/static/pipeline_graph.html b/spec/frontend/fixtures/static/pipeline_graph.html
index 422372bb7d5..d2c30ff9211 100644
--- a/spec/frontend/fixtures/static/pipeline_graph.html
+++ b/spec/frontend/fixtures/static/pipeline_graph.html
@@ -10,7 +10,7 @@ Test
<div class="curve"></div>
<a>
<svg></svg>
-<div class="ci-status-text">
+<div>
stop_review
</div>
</a>
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 6c40b1ba3a7..8da4320d993 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -1,6 +1,7 @@
/* eslint no-param-reassign: "off" */
import $ from 'jquery';
+import { emojiFixtureMap, initEmojiMock, describeEmojiFields } from 'helpers/emoji';
import '~/lib/utils/jquery_at_who';
import GfmAutoComplete, { membersBeforeSave } from 'ee_else_ce/gfm_auto_complete';
@@ -119,7 +120,7 @@ describe('GfmAutoComplete', () => {
const defaultMatcher = (context, flag, subtext) =>
gfmAutoCompleteCallbacks.matcher.call(context, flag, subtext);
- const flagsUseDefaultMatcher = ['@', '#', '!', '~', '%', '$', '+'];
+ const flagsUseDefaultMatcher = ['@', '#', '!', '~', '%', '$'];
const otherFlags = ['/', ':'];
const flags = flagsUseDefaultMatcher.concat(otherFlags);
@@ -153,6 +154,7 @@ describe('GfmAutoComplete', () => {
'я',
'.',
"'",
+ '+',
'-',
'_',
];
@@ -416,8 +418,9 @@ describe('GfmAutoComplete', () => {
let $textarea;
beforeEach(() => {
+ setFixtures('<textarea></textarea>');
autocomplete = new GfmAutoComplete(dataSources);
- $textarea = $('<textarea></textarea>');
+ $textarea = $('textarea');
autocomplete.setup($textarea, { labels: true });
});
@@ -488,4 +491,114 @@ describe('GfmAutoComplete', () => {
`('$input shows $output.length labels', expectLabels);
});
});
+
+ describe('emoji', () => {
+ const { atom, heart, star } = emojiFixtureMap;
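+    // `assertInserted` checks the `:name:` text inserted into the field;
+    // `assertTemplated` checks the rendered dropdown `<li>` markup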
+ const assertInserted = ({ input, subject, emoji }) =>
+ expect(subject).toBe(`:${emoji?.name || input}:`);
+ const assertTemplated = ({ input, subject, emoji, field }) =>
+ expect(subject.replace(/\s+/g, ' ')).toBe(
+ `<li>${field || input} <gl-emoji data-name="${emoji?.name || input}"></gl-emoji> </li>`,
+ );
+
+ let mock;
+
+ beforeEach(async () => {
+ mock = await initEmojiMock();
+
+ await new GfmAutoComplete({}).loadEmojiData({ atwho() {}, trigger() {} }, ':');
+ if (!GfmAutoComplete.glEmojiTag) throw new Error('emoji not loaded');
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe.each`
+ name | inputFormat | assert
+ ${'insertTemplateFunction'} | ${name => ({ name })} | ${assertInserted}
+ ${'templateFunction'} | ${name => name} | ${assertTemplated}
+ `('Emoji.$name', ({ name, inputFormat, assert }) => {
+ const execute = (accessor, input, emoji) =>
+ assert({
+ input,
+ emoji,
+ field: accessor && accessor(emoji),
+ subject: GfmAutoComplete.Emoji[name](inputFormat(input)),
+ });
+
+ describeEmojiFields('for $field', ({ accessor }) => {
+ it('should work with lowercase', () => {
+ execute(accessor, accessor(atom), atom);
+ });
+
+ it('should work with uppercase', () => {
+ execute(accessor, accessor(atom).toUpperCase(), atom);
+ });
+
+ it('should work with partial value', () => {
+ execute(accessor, accessor(atom).slice(1), atom);
+ });
+ });
+
+ it('should work with unicode value', () => {
+ execute(null, atom.moji, atom);
+ });
+
+ it('should pass through unknown value', () => {
+ execute(null, 'foo bar baz');
+ });
+ });
+
+ const expectEmojiOrder = (first, second) => {
+ const keys = Object.keys(emojiFixtureMap);
+ const firstIndex = keys.indexOf(first);
+ const secondIndex = keys.indexOf(second);
+ expect(firstIndex).toBeGreaterThanOrEqual(0);
+ expect(secondIndex).toBeGreaterThanOrEqual(0);
+ expect(firstIndex).toBeLessThan(secondIndex);
+ };
+
+ describe('Emoji.insertTemplateFunction', () => {
+ it('should map ":heart" to :heart: [regression]', () => {
+ // the bug mapped heart to black_heart because the latter sorted first
+ expectEmojiOrder('black_heart', 'heart');
+
+ const item = GfmAutoComplete.Emoji.insertTemplateFunction({ name: 'heart' });
+ expect(item).toEqual(`:${heart.name}:`);
+ });
+
+ it('should map ":star" to :star: [regression]', () => {
+ // the bug mapped star to custard because the latter sorted first
+ expectEmojiOrder('custard', 'star');
+
+ const item = GfmAutoComplete.Emoji.insertTemplateFunction({ name: 'star' });
+ expect(item).toEqual(`:${star.name}:`);
+ });
+ });
+
+ describe('Emoji.templateFunction', () => {
+ it('should map ":heart" to ❤ [regression]', () => {
+ // the bug mapped heart to black_heart because the latter sorted first
+ expectEmojiOrder('black_heart', 'heart');
+
+ const item = GfmAutoComplete.Emoji.templateFunction('heart')
+ .replace(/(<gl-emoji)\s+(data-name)/, '$1 $2')
+ .replace(/>\s+|\s+</g, s => s.trim());
+ expect(item).toEqual(
+ `<li>${heart.name}<gl-emoji data-name="${heart.name}"></gl-emoji></li>`,
+ );
+ });
+
+ it('should map ":star" to ⭐ [regression]', () => {
+ // the bug mapped star to custard because the latter sorted first
+ expectEmojiOrder('custard', 'star');
+
+ const item = GfmAutoComplete.Emoji.templateFunction('star')
+ .replace(/(<gl-emoji)\s+(data-name)/, '$1 $2')
+ .replace(/>\s+|\s+</g, s => s.trim());
+ expect(item).toEqual(`<li>${star.name}<gl-emoji data-name="${star.name}"></gl-emoji></li>`);
+ });
+ });
+ });
});
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
index 0befe1aa192..e880f585daa 100644
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -17,6 +17,7 @@ exports[`grafana integration component default state to match the default snapsh
</h3>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-settings-toggle"
icon=""
@@ -92,20 +93,17 @@ exports[`grafana integration component default state to match the default snapsh
</p>
</gl-form-group-stub>
- <div
- class="gl-display-flex gl-justify-content-end"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ icon=""
+ size="medium"
+ variant="success"
>
- <gl-button-stub
- category="primary"
- icon=""
- size="medium"
- variant="success"
- >
-
- Save Changes
- </gl-button-stub>
- </div>
+ Save Changes
+
+ </gl-button-stub>
</form>
</div>
</section>
diff --git a/spec/frontend/group_settings/components/shared_runners_form_spec.js b/spec/frontend/group_settings/components/shared_runners_form_spec.js
new file mode 100644
index 00000000000..9e3ee8a2cb1
--- /dev/null
+++ b/spec/frontend/group_settings/components/shared_runners_form_spec.js
@@ -0,0 +1,169 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
+import MockAxiosAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import SharedRunnersForm from '~/group_settings/components/shared_runners_form.vue';
+import { ENABLED, DISABLED, ALLOW_OVERRIDE } from '~/group_settings/constants';
+import axios from '~/lib/utils/axios_utils';
+
+const TEST_UPDATE_PATH = '/test/update';
+const DISABLED_PAYLOAD = { shared_runners_setting: DISABLED };
+const ENABLED_PAYLOAD = { shared_runners_setting: ENABLED };
+const OVERRIDE_PAYLOAD = { shared_runners_setting: ALLOW_OVERRIDE };
+
+jest.mock('~/flash');
+
+describe('group_settings/components/shared_runners_form', () => {
+ let wrapper;
+ let mock;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(SharedRunnersForm, {
+ propsData: {
+ updatePath: TEST_UPDATE_PATH,
+ sharedRunnersAvailability: ENABLED,
+ parentSharedRunnersAvailability: null,
+ ...props,
+ },
+ });
+ };
+
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findErrorAlert = () => wrapper.find(GlAlert);
+ const findEnabledToggle = () => wrapper.find('[data-testid="enable-runners-toggle"]');
+ const findOverrideToggle = () => wrapper.find('[data-testid="override-runners-toggle"]');
+ const changeToggle = toggle => toggle.vm.$emit('change', !toggle.props('value'));
+ const getRequestPayload = () => JSON.parse(mock.history.put[0].data);
+ const isLoadingIconVisible = () => findLoadingIcon().exists();
+
+ beforeEach(() => {
+ mock = new MockAxiosAdapter(axios);
+
+ mock.onPut(TEST_UPDATE_PATH).reply(200);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+
+ mock.restore();
+ });
+
+ describe('with default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('loading icon does not exist', () => {
+ expect(isLoadingIconVisible()).toBe(false);
+ });
+
+ it('enabled toggle exists', () => {
+ expect(findEnabledToggle().exists()).toBe(true);
+ });
+
+ it('override toggle does not exist', () => {
+ expect(findOverrideToggle().exists()).toBe(false);
+ });
+ });
+
+ describe('loading icon', () => {
+ it('shows and hides the loading icon on request', async () => {
+ createComponent();
+
+ expect(isLoadingIconVisible()).toBe(false);
+
+ findEnabledToggle().vm.$emit('change', true);
+
+ await wrapper.vm.$nextTick();
+
+ expect(isLoadingIconVisible()).toBe(true);
+
+ await waitForPromises();
+
+ expect(isLoadingIconVisible()).toBe(false);
+ });
+ });
+
+ describe('enable toggle', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('enabling the toggle sends correct payload', async () => {
+ findEnabledToggle().vm.$emit('change', true);
+
+ await waitForPromises();
+
+ expect(getRequestPayload()).toEqual(ENABLED_PAYLOAD);
+ expect(findOverrideToggle().exists()).toBe(false);
+ });
+
+ it('disabling the toggle sends correct payload', async () => {
+ findEnabledToggle().vm.$emit('change', false);
+
+ await waitForPromises();
+
+ expect(getRequestPayload()).toEqual(DISABLED_PAYLOAD);
+ expect(findOverrideToggle().exists()).toBe(true);
+ });
+ });
+
+ describe('override toggle', () => {
+ beforeEach(() => {
+ createComponent({ sharedRunnersAvailability: ALLOW_OVERRIDE });
+ });
+
+ it('enabling the override toggle sends correct payload', async () => {
+ findOverrideToggle().vm.$emit('change', true);
+
+ await waitForPromises();
+
+ expect(getRequestPayload()).toEqual(OVERRIDE_PAYLOAD);
+ });
+
+ it('disabling the override toggle sends correct payload', async () => {
+ findOverrideToggle().vm.$emit('change', false);
+
+ await waitForPromises();
+
+ expect(getRequestPayload()).toEqual(DISABLED_PAYLOAD);
+ });
+ });
+
+ describe('toggle disabled state', () => {
+ it(`toggles are not disabled with setting ${DISABLED}`, () => {
+ createComponent({ sharedRunnersAvailability: DISABLED });
+ expect(findEnabledToggle().props('disabled')).toBe(false);
+ expect(findOverrideToggle().props('disabled')).toBe(false);
+ });
+
+ it('toggles are disabled', () => {
+ createComponent({
+ sharedRunnersAvailability: DISABLED,
+ parentSharedRunnersAvailability: DISABLED,
+ });
+ expect(findEnabledToggle().props('disabled')).toBe(true);
+ expect(findOverrideToggle().props('disabled')).toBe(true);
+ });
+ });
+
+ describe.each`
+ errorObj | message
+ ${{}} | ${'An error occurred while updating configuration. Refresh the page and try again.'}
+ ${{ error: 'Undefined error' }} | ${'Undefined error Refresh the page and try again.'}
+ `(`with error $errorObj`, ({ errorObj, message }) => {
+ beforeEach(async () => {
+ mock.onPut(TEST_UPDATE_PATH).reply(500, errorObj);
+
+ createComponent();
+ changeToggle(findEnabledToggle());
+
+ await waitForPromises();
+ });
+
+ it('error should be shown', () => {
+ expect(findErrorAlert().text()).toBe(message);
+ });
+ });
+});
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 7eb1c54ddb2..83acbb152b5 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -203,7 +203,7 @@ describe('GroupItemComponent', () => {
expect(vm.$el.querySelector('.title a.no-expand')).toBeDefined();
expect(visibilityIconEl).not.toBe(null);
- expect(visibilityIconEl.dataset.originalTitle).toBe(vm.visibilityTooltip);
+ expect(visibilityIconEl.title).toBe(vm.visibilityTooltip);
expect(visibilityIconEl.querySelectorAll('svg').length).toBeGreaterThan(0);
expect(vm.$el.querySelector('.access-type')).toBeDefined();
diff --git a/spec/frontend/groups/components/item_actions_spec.js b/spec/frontend/groups/components/item_actions_spec.js
index f5df8c180d5..d4aa29eaadd 100644
--- a/spec/frontend/groups/components/item_actions_spec.js
+++ b/spec/frontend/groups/components/item_actions_spec.js
@@ -1,84 +1,87 @@
-import Vue from 'vue';
-
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemActionsComponent from '~/groups/components/item_actions.vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import ItemActions from '~/groups/components/item_actions.vue';
import eventHub from '~/groups/event_hub';
import { mockParentGroupItem, mockChildren } from '../mock_data';
-const createComponent = (group = mockParentGroupItem, parentGroup = mockChildren[0]) => {
- const Component = Vue.extend(itemActionsComponent);
+describe('ItemActions', () => {
+ let wrapper;
+ const parentGroup = mockChildren[0];
- return mountComponent(Component, {
- group,
+ const defaultProps = {
+ group: mockParentGroupItem,
parentGroup,
- });
-};
-
-describe('ItemActionsComponent', () => {
- let vm;
+ };
- beforeEach(() => {
- vm = createComponent();
- });
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemActions, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
- describe('methods', () => {
- describe('onLeaveGroup', () => {
- it('emits `showLeaveGroupModal` event with `group` and `parentGroup` props', () => {
- jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- vm.onLeaveGroup();
-
- expect(eventHub.$emit).toHaveBeenCalledWith(
- 'showLeaveGroupModal',
- vm.group,
- vm.parentGroup,
- );
- });
- });
- });
+ const findEditGroupBtn = () => wrapper.find('[data-testid="edit-group-btn"]');
+ const findEditGroupIcon = () => findEditGroupBtn().find(GlIcon);
+ const findLeaveGroupBtn = () => wrapper.find('[data-testid="leave-group-btn"]');
+ const findLeaveGroupIcon = () => findLeaveGroupBtn().find(GlIcon);
describe('template', () => {
- it('should render component template correctly', () => {
- expect(vm.$el.classList.contains('controls')).toBeTruthy();
- });
+ it('renders component template correctly', () => {
+ createComponent();
- it('should render Edit Group button with correct attribute values', () => {
- const group = { ...mockParentGroupItem };
- group.canEdit = true;
- const newVm = createComponent(group);
+ expect(wrapper.classes()).toContain('controls');
+ });
- const editBtn = newVm.$el.querySelector('a.edit-group');
+ it('renders "Edit group" button with correct attribute values', () => {
+ const group = {
+ ...mockParentGroupItem,
+ canEdit: true,
+ };
+
+ createComponent({ group });
+
+ expect(findEditGroupBtn().exists()).toBe(true);
+ expect(findEditGroupBtn().classes()).toContain('no-expand');
+ expect(findEditGroupBtn().attributes('href')).toBe(group.editPath);
+ expect(findEditGroupBtn().attributes('aria-label')).toBe('Edit group');
+ expect(findEditGroupBtn().attributes('data-original-title')).toBe('Edit group');
+ expect(findEditGroupIcon().exists()).toBe(true);
+ expect(findEditGroupIcon().props('name')).toBe('settings');
+ });
- expect(editBtn).toBeDefined();
- expect(editBtn.classList.contains('no-expand')).toBeTruthy();
- expect(editBtn.getAttribute('href')).toBe(group.editPath);
- expect(editBtn.getAttribute('aria-label')).toBe('Edit group');
- expect(editBtn.dataset.originalTitle).toBe('Edit group');
- expect(editBtn.querySelectorAll('svg').length).not.toBe(0);
- expect(editBtn.querySelector('svg').getAttribute('data-testid')).toBe('settings-icon');
+ describe('`canLeave` is true', () => {
+ const group = {
+ ...mockParentGroupItem,
+ canLeave: true,
+ };
- newVm.$destroy();
- });
+ beforeEach(() => {
+ createComponent({ group });
+ });
- it('should render Leave Group button with correct attribute values', () => {
- const group = { ...mockParentGroupItem };
- group.canLeave = true;
- const newVm = createComponent(group);
+ it('renders "Leave this group" button with correct attribute values', () => {
+ expect(findLeaveGroupBtn().exists()).toBe(true);
+ expect(findLeaveGroupBtn().classes()).toContain('no-expand');
+ expect(findLeaveGroupBtn().attributes('href')).toBe(group.leavePath);
+ expect(findLeaveGroupBtn().attributes('aria-label')).toBe('Leave this group');
+ expect(findLeaveGroupBtn().attributes('data-original-title')).toBe('Leave this group');
+ expect(findLeaveGroupIcon().exists()).toBe(true);
+ expect(findLeaveGroupIcon().props('name')).toBe('leave');
+ });
- const leaveBtn = newVm.$el.querySelector('a.leave-group');
+ it('emits event on "Leave this group" button click', () => {
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- expect(leaveBtn).toBeDefined();
- expect(leaveBtn.classList.contains('no-expand')).toBeTruthy();
- expect(leaveBtn.getAttribute('href')).toBe(group.leavePath);
- expect(leaveBtn.getAttribute('aria-label')).toBe('Leave this group');
- expect(leaveBtn.dataset.originalTitle).toBe('Leave this group');
- expect(leaveBtn.querySelectorAll('svg').length).not.toBe(0);
- expect(leaveBtn.querySelector('svg').getAttribute('data-testid')).toBe('leave-icon');
+ findLeaveGroupBtn().trigger('click');
- newVm.$destroy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('showLeaveGroupModal', group, parentGroup);
+ });
});
});
});
diff --git a/spec/frontend/groups/components/item_caret_spec.js b/spec/frontend/groups/components/item_caret_spec.js
index 4ff7482414c..b2915607a06 100644
--- a/spec/frontend/groups/components/item_caret_spec.js
+++ b/spec/frontend/groups/components/item_caret_spec.js
@@ -1,38 +1,48 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import ItemCaret from '~/groups/components/item_caret.vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemCaretComponent from '~/groups/components/item_caret.vue';
+describe('ItemCaret', () => {
+ let wrapper;
-const createComponent = (isGroupOpen = false) => {
- const Component = Vue.extend(itemCaretComponent);
+ const defaultProps = {
+ isGroupOpen: false,
+ };
- return mountComponent(Component, {
- isGroupOpen,
- });
-};
-
-describe('ItemCaretComponent', () => {
- let vm;
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemCaret, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
+ const findAllGlIcons = () => wrapper.findAll(GlIcon);
+ const findGlIcon = () => wrapper.find(GlIcon);
+
describe('template', () => {
- it('should render component template correctly', () => {
- vm = createComponent();
- expect(vm.$el.classList.contains('folder-caret')).toBeTruthy();
- expect(vm.$el.querySelectorAll('svg').length).toBe(1);
- });
+ it('renders component template correctly', () => {
+ createComponent();
- it('should render caret down icon if `isGroupOpen` prop is `true`', () => {
- vm = createComponent(true);
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('angle-down-icon');
+ expect(wrapper.classes()).toContain('folder-caret');
+ expect(findAllGlIcons()).toHaveLength(1);
});
- it('should render caret right icon if `isGroupOpen` prop is `false`', () => {
- vm = createComponent();
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('angle-right-icon');
+ it.each`
+ isGroupOpen | icon
+ ${true} | ${'angle-down'}
+ ${false} | ${'angle-right'}
+ `('renders "$icon" icon when `isGroupOpen` is $isGroupOpen', ({ isGroupOpen, icon }) => {
+ createComponent({
+ isGroupOpen,
+ });
+
+ expect(findGlIcon().props('name')).toBe(icon);
});
});
});
diff --git a/spec/frontend/groups/components/item_stats_spec.js b/spec/frontend/groups/components/item_stats_spec.js
index 771643609ec..d8c88a608ac 100644
--- a/spec/frontend/groups/components/item_stats_spec.js
+++ b/spec/frontend/groups/components/item_stats_spec.js
@@ -1,119 +1,50 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import ItemStats from '~/groups/components/item_stats.vue';
+import ItemStatsValue from '~/groups/components/item_stats_value.vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemStatsComponent from '~/groups/components/item_stats.vue';
-import {
- mockParentGroupItem,
- ITEM_TYPE,
- VISIBILITY_TYPE_ICON,
- GROUP_VISIBILITY_TYPE,
- PROJECT_VISIBILITY_TYPE,
-} from '../mock_data';
+import { mockParentGroupItem, ITEM_TYPE } from '../mock_data';
-const createComponent = (item = mockParentGroupItem) => {
- const Component = Vue.extend(itemStatsComponent);
+describe('ItemStats', () => {
+ let wrapper;
- return mountComponent(Component, {
- item,
- });
-};
-
-describe('ItemStatsComponent', () => {
- describe('computed', () => {
- describe('visibilityIcon', () => {
- it('should return icon class based on `item.visibility` value', () => {
- Object.keys(VISIBILITY_TYPE_ICON).forEach(visibility => {
- const item = { ...mockParentGroupItem, visibility };
- const vm = createComponent(item);
+ const defaultProps = {
+ item: mockParentGroupItem,
+ };
- expect(vm.visibilityIcon).toBe(VISIBILITY_TYPE_ICON[visibility]);
- vm.$destroy();
- });
- });
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemStats, {
+ propsData: { ...defaultProps, ...props },
});
+ };
- describe('visibilityTooltip', () => {
- it('should return tooltip string for Group based on `item.visibility` value', () => {
- Object.keys(GROUP_VISIBILITY_TYPE).forEach(visibility => {
- const item = { ...mockParentGroupItem, visibility, type: ITEM_TYPE.GROUP };
- const vm = createComponent(item);
-
- expect(vm.visibilityTooltip).toBe(GROUP_VISIBILITY_TYPE[visibility]);
- vm.$destroy();
- });
- });
-
- it('should return tooltip string for Project based on `item.visibility` value', () => {
- Object.keys(PROJECT_VISIBILITY_TYPE).forEach(visibility => {
- const item = { ...mockParentGroupItem, visibility, type: ITEM_TYPE.PROJECT };
- const vm = createComponent(item);
-
- expect(vm.visibilityTooltip).toBe(PROJECT_VISIBILITY_TYPE[visibility]);
- vm.$destroy();
- });
- });
- });
-
- describe('isProject', () => {
- it('should return boolean value representing whether `item.type` is Project or not', () => {
- let item;
- let vm;
-
- item = { ...mockParentGroupItem, type: ITEM_TYPE.PROJECT };
- vm = createComponent(item);
-
- expect(vm.isProject).toBeTruthy();
- vm.$destroy();
-
- item = { ...mockParentGroupItem, type: ITEM_TYPE.GROUP };
- vm = createComponent(item);
-
- expect(vm.isProject).toBeFalsy();
- vm.$destroy();
- });
- });
-
- describe('isGroup', () => {
- it('should return boolean value representing whether `item.type` is Group or not', () => {
- let item;
- let vm;
-
- item = { ...mockParentGroupItem, type: ITEM_TYPE.GROUP };
- vm = createComponent(item);
-
- expect(vm.isGroup).toBeTruthy();
- vm.$destroy();
-
- item = { ...mockParentGroupItem, type: ITEM_TYPE.PROJECT };
- vm = createComponent(item);
-
- expect(vm.isGroup).toBeFalsy();
- vm.$destroy();
- });
- });
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
+ const findItemStatsValue = () => wrapper.find(ItemStatsValue);
+
describe('template', () => {
it('renders component container element correctly', () => {
- const vm = createComponent();
+ createComponent();
- expect(vm.$el.classList.contains('stats')).toBeTruthy();
-
- vm.$destroy();
+ expect(wrapper.classes()).toContain('stats');
});
it('renders start count and last updated information for project item correctly', () => {
- const item = { ...mockParentGroupItem, type: ITEM_TYPE.PROJECT, starCount: 4 };
- const vm = createComponent(item);
-
- const projectStarIconEl = vm.$el.querySelector('.project-stars');
+ const item = {
+ ...mockParentGroupItem,
+ type: ITEM_TYPE.PROJECT,
+ starCount: 4,
+ };
- expect(projectStarIconEl).not.toBeNull();
- expect(projectStarIconEl.querySelectorAll('svg').length).toBeGreaterThan(0);
- expect(projectStarIconEl.querySelectorAll('.stat-value').length).toBeGreaterThan(0);
- expect(vm.$el.querySelectorAll('.last-updated').length).toBeGreaterThan(0);
+ createComponent({ item });
- vm.$destroy();
+ expect(findItemStatsValue().exists()).toBe(true);
+ expect(findItemStatsValue().props('cssClass')).toBe('project-stars');
+ expect(wrapper.contains('.last-updated')).toBe(true);
});
});
});
diff --git a/spec/frontend/groups/components/item_stats_value_spec.js b/spec/frontend/groups/components/item_stats_value_spec.js
index 11246390444..bca233883af 100644
--- a/spec/frontend/groups/components/item_stats_value_spec.js
+++ b/spec/frontend/groups/components/item_stats_value_spec.js
@@ -1,82 +1,67 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import ItemStatsValue from '~/groups/components/item_stats_value.vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemStatsValueComponent from '~/groups/components/item_stats_value.vue';
+describe('ItemStatsValue', () => {
+ let wrapper;
-const createComponent = ({ title, cssClass, iconName, tooltipPlacement, value }) => {
- const Component = Vue.extend(itemStatsValueComponent);
+ const defaultProps = {
+ title: 'Subgroups',
+ cssClass: 'number-subgroups',
+ iconName: 'folder',
+ tooltipPlacement: 'left',
+ };
- return mountComponent(Component, {
- title,
- cssClass,
- iconName,
- tooltipPlacement,
- value,
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemStatsValue, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
-};
-describe('ItemStatsValueComponent', () => {
- describe('computed', () => {
- let vm;
- const itemConfig = {
- title: 'Subgroups',
- cssClass: 'number-subgroups',
- iconName: 'folder',
- tooltipPlacement: 'left',
- };
+ const findGlIcon = () => wrapper.find(GlIcon);
+ const findStatValue = () => wrapper.find('[data-testid="itemStatValue"]');
- describe('isValuePresent', () => {
- it('returns true if non-empty `value` is present', () => {
- vm = createComponent({ ...itemConfig, value: 10 });
+ describe('template', () => {
+ describe('when `value` is not provided', () => {
+ it('does not render value count', () => {
+ createComponent();
- expect(vm.isValuePresent).toBeTruthy();
+ expect(findStatValue().exists()).toBe(false);
});
+ });
- it('returns false if empty `value` is present', () => {
- vm = createComponent(itemConfig);
-
- expect(vm.isValuePresent).toBeFalsy();
+ describe('when `value` is provided', () => {
+ beforeEach(() => {
+ createComponent({
+ value: 10,
+ });
});
- afterEach(() => {
- vm.$destroy();
+ it('renders component element correctly', () => {
+ expect(wrapper.classes()).toContain('number-subgroups');
});
- });
- });
- describe('template', () => {
- let vm;
- beforeEach(() => {
- vm = createComponent({
- title: 'Subgroups',
- cssClass: 'number-subgroups',
- iconName: 'folder',
- tooltipPlacement: 'left',
- value: 10,
+ it('renders element tooltip correctly', () => {
+ expect(wrapper.attributes('title')).toBe('Subgroups');
+ expect(wrapper.attributes('data-placement')).toBe('left');
});
- });
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders component element correctly', () => {
- expect(vm.$el.classList.contains('number-subgroups')).toBeTruthy();
- expect(vm.$el.querySelectorAll('svg').length).toBeGreaterThan(0);
- expect(vm.$el.querySelectorAll('.stat-value').length).toBeGreaterThan(0);
- });
-
- it('renders element tooltip correctly', () => {
- expect(vm.$el.dataset.originalTitle).toBe('Subgroups');
- expect(vm.$el.dataset.placement).toBe('left');
- });
-
- it('renders element icon correctly', () => {
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('folder-icon');
- });
+ it('renders element icon correctly', () => {
+ expect(findGlIcon().exists()).toBe(true);
+ expect(findGlIcon().props('name')).toBe('folder');
+ });
- it('renders value count correctly', () => {
- expect(vm.$el.querySelector('.stat-value').innerText.trim()).toContain('10');
+ it('renders value count correctly', () => {
+ expect(findStatValue().classes()).toContain('stat-value');
+ expect(findStatValue().text()).toBe('10');
+ });
});
});
});
diff --git a/spec/frontend/groups/components/item_type_icon_spec.js b/spec/frontend/groups/components/item_type_icon_spec.js
index 477c413ddcd..5e7056be218 100644
--- a/spec/frontend/groups/components/item_type_icon_spec.js
+++ b/spec/frontend/groups/components/item_type_icon_spec.js
@@ -1,53 +1,53 @@
-import Vue from 'vue';
-
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemTypeIconComponent from '~/groups/components/item_type_icon.vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import ItemTypeIcon from '~/groups/components/item_type_icon.vue';
import { ITEM_TYPE } from '../mock_data';
-const createComponent = (itemType = ITEM_TYPE.GROUP, isGroupOpen = false) => {
- const Component = Vue.extend(itemTypeIconComponent);
-
- return mountComponent(Component, {
- itemType,
- isGroupOpen,
- });
-};
+describe('ItemTypeIcon', () => {
+ let wrapper;
-describe('ItemTypeIconComponent', () => {
- describe('template', () => {
- it('should render component template correctly', () => {
- const vm = createComponent();
+ const defaultProps = {
+ itemType: ITEM_TYPE.GROUP,
+ isGroupOpen: false,
+ };
- expect(vm.$el.classList.contains('item-type-icon')).toBeTruthy();
- vm.$destroy();
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemTypeIcon, {
+ propsData: { ...defaultProps, ...props },
});
+ };
- it('should render folder open or close icon based `isGroupOpen` prop value', () => {
- let vm;
-
- vm = createComponent(ITEM_TYPE.GROUP, true);
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('folder-open-icon');
- vm.$destroy();
+ const findGlIcon = () => wrapper.find(GlIcon);
- vm = createComponent(ITEM_TYPE.GROUP);
+ describe('template', () => {
+ it('renders component template correctly', () => {
+ createComponent();
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('folder-o-icon');
- vm.$destroy();
+ expect(wrapper.classes()).toContain('item-type-icon');
});
- it('should render bookmark icon based on `isProject` prop value', () => {
- let vm;
-
- vm = createComponent(ITEM_TYPE.PROJECT);
-
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('bookmark-icon');
- vm.$destroy();
-
- vm = createComponent(ITEM_TYPE.GROUP);
-
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).not.toBe('bookmark-icon');
- vm.$destroy();
- });
+ it.each`
+ type | isGroupOpen | icon
+ ${ITEM_TYPE.GROUP} | ${true} | ${'folder-open'}
+ ${ITEM_TYPE.GROUP} | ${false} | ${'folder-o'}
+ ${ITEM_TYPE.PROJECT} | ${true} | ${'bookmark'}
+ ${ITEM_TYPE.PROJECT} | ${false} | ${'bookmark'}
+ `(
+ 'shows "$icon" icon when `itemType` is "$type" and `isGroupOpen` is $isGroupOpen',
+ ({ type, isGroupOpen, icon }) => {
+ createComponent({
+ itemType: type,
+ isGroupOpen,
+ });
+ expect(findGlIcon().props('name')).toBe(icon);
+ },
+ );
});
});
diff --git a/spec/frontend/groups/members/components/app_spec.js b/spec/frontend/groups/members/components/app_spec.js
new file mode 100644
index 00000000000..de9f30649e9
--- /dev/null
+++ b/spec/frontend/groups/members/components/app_spec.js
@@ -0,0 +1,89 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import Vuex from 'vuex';
+import { GlAlert } from '@gitlab/ui';
+import App from '~/groups/members/components/app.vue';
+import * as commonUtils from '~/lib/utils/common_utils';
+import {
+ RECEIVE_MEMBER_ROLE_ERROR,
+ HIDE_ERROR,
+} from '~/vuex_shared/modules/members/mutation_types';
+import mutations from '~/vuex_shared/modules/members/mutations';
+
+describe('GroupMembersApp', () => {
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+
+ let wrapper;
+ let store;
+
+ const createComponent = (state = {}) => {
+ store = new Vuex.Store({
+ state: {
+ showError: true,
+ errorMessage: 'Something went wrong, please try again.',
+ ...state,
+ },
+ mutations,
+ });
+
+ wrapper = shallowMount(App, {
+ localVue,
+ store,
+ });
+ };
+
+ const findAlert = () => wrapper.find(GlAlert);
+
+ beforeEach(() => {
+ commonUtils.scrollToElement = jest.fn();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ store = null;
+ });
+
+ describe('when `showError` is changed to `true`', () => {
+ it('renders and scrolls to error alert', async () => {
+ createComponent({ showError: false, errorMessage: '' });
+
+ store.commit(RECEIVE_MEMBER_ROLE_ERROR);
+
+ await nextTick();
+
+ const alert = findAlert();
+
+ expect(alert.exists()).toBe(true);
+ expect(alert.text()).toBe(
+ "An error occurred while updating the member's role, please try again.",
+ );
+ expect(commonUtils.scrollToElement).toHaveBeenCalledWith(alert.element);
+ });
+ });
+
+ describe('when `showError` is changed to `false`', () => {
+ it('does not render and scroll to error alert', async () => {
+ createComponent();
+
+ store.commit(HIDE_ERROR);
+
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(false);
+ expect(commonUtils.scrollToElement).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when alert is dismissed', () => {
+ it('hides alert', async () => {
+ createComponent();
+
+ findAlert().vm.$emit('dismiss');
+
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/groups/members/index_spec.js b/spec/frontend/groups/members/index_spec.js
index 70fce0d60fb..2fb7904bcfe 100644
--- a/spec/frontend/groups/members/index_spec.js
+++ b/spec/frontend/groups/members/index_spec.js
@@ -1,5 +1,5 @@
import { createWrapper } from '@vue/test-utils';
-import initGroupMembersApp from '~/groups/members';
+import { initGroupMembersApp } from '~/groups/members';
import GroupMembersApp from '~/groups/members/components/app.vue';
import { membersJsonString, membersParsed } from './mock_data';
@@ -9,7 +9,7 @@ describe('initGroupMembersApp', () => {
let wrapper;
const setup = () => {
- vm = initGroupMembersApp(el);
+ vm = initGroupMembersApp(el, ['account'], () => ({}));
wrapper = createWrapper(vm);
};
@@ -17,14 +17,12 @@ describe('initGroupMembersApp', () => {
el = document.createElement('div');
el.setAttribute('data-members', membersJsonString);
el.setAttribute('data-group-id', '234');
+ el.setAttribute('data-member-path', '/groups/foo-bar/-/group_members/:id');
window.gon = { current_user_id: 123 };
-
- document.body.appendChild(el);
});
afterEach(() => {
- document.body.innerHTML = '';
el = null;
wrapper.destroy();
@@ -63,4 +61,22 @@ describe('initGroupMembersApp', () => {
expect(vm.$store.state.members).toEqual(membersParsed);
});
+
+ it('sets `tableFields` in Vuex store', () => {
+ setup();
+
+ expect(vm.$store.state.tableFields).toEqual(['account']);
+ });
+
+ it('sets `requestFormatter` in Vuex store', () => {
+ setup();
+
+ expect(vm.$store.state.requestFormatter()).toEqual({});
+ });
+
+ it('sets `memberPath` in Vuex store', () => {
+ setup();
+
+ expect(vm.$store.state.memberPath).toBe('/groups/foo-bar/-/group_members/:id');
+ });
});
diff --git a/spec/frontend/groups/members/utils_spec.js b/spec/frontend/groups/members/utils_spec.js
new file mode 100644
index 00000000000..b0921c7642f
--- /dev/null
+++ b/spec/frontend/groups/members/utils_spec.js
@@ -0,0 +1,51 @@
+import { membersJsonString, membersParsed } from './mock_data';
+import {
+ parseDataAttributes,
+ memberRequestFormatter,
+ groupLinkRequestFormatter,
+} from '~/groups/members/utils';
+
+describe('group member utils', () => {
+ describe('parseDataAttributes', () => {
+ let el;
+
+ beforeEach(() => {
+ el = document.createElement('div');
+ el.setAttribute('data-members', membersJsonString);
+ el.setAttribute('data-group-id', '234');
+ });
+
+ afterEach(() => {
+ el = null;
+ });
+
+ it('correctly parses the data attributes', () => {
+ expect(parseDataAttributes(el)).toEqual({
+ members: membersParsed,
+ sourceId: 234,
+ });
+ });
+ });
+
+ describe('memberRequestFormatter', () => {
+ it('returns expected format', () => {
+ expect(
+ memberRequestFormatter({
+ accessLevel: 50,
+ expires_at: '2020-10-16',
+ }),
+ ).toEqual({ group_member: { access_level: 50, expires_at: '2020-10-16' } });
+ });
+ });
+
+ describe('groupLinkRequestFormatter', () => {
+ it('returns expected format', () => {
+ expect(
+ groupLinkRequestFormatter({
+ accessLevel: 50,
+ expires_at: '2020-10-16',
+ }),
+ ).toEqual({ group_link: { group_access: 50, expires_at: '2020-10-16' } });
+ });
+ });
+});
diff --git a/spec/frontend/helpers/dom_shims/create_object_url.js b/spec/frontend/helpers/dom_shims/create_object_url.js
new file mode 100644
index 00000000000..94d060cab08
--- /dev/null
+++ b/spec/frontend/helpers/dom_shims/create_object_url.js
@@ -0,0 +1,3 @@
+URL.createObjectURL = function createObjectURL() {
+ return 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b';
+};
diff --git a/spec/frontend/helpers/dom_shims/index.js b/spec/frontend/helpers/dom_shims/index.js
index 2ba5701fc77..9b70cb86b8b 100644
--- a/spec/frontend/helpers/dom_shims/index.js
+++ b/spec/frontend/helpers/dom_shims/index.js
@@ -1,3 +1,4 @@
+import './create_object_url';
import './element_scroll_into_view';
import './element_scroll_by';
import './element_scroll_to';
diff --git a/spec/frontend/helpers/emoji.js b/spec/frontend/helpers/emoji.js
new file mode 100644
index 00000000000..e8a93e21818
--- /dev/null
+++ b/spec/frontend/helpers/emoji.js
@@ -0,0 +1,88 @@
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import { initEmojiMap, EMOJI_VERSION } from '~/emoji';
+
+export const emojiFixtureMap = {
+ atom: {
+ moji: '⚛',
+ description: 'atom symbol',
+ unicodeVersion: '4.1',
+ aliases: ['atom_symbol'],
+ },
+ bomb: {
+ moji: '💣',
+ unicodeVersion: '6.0',
+ description: 'bomb',
+ },
+ construction_worker_tone5: {
+ moji: '👷🏿',
+ unicodeVersion: '8.0',
+ description: 'construction worker tone 5',
+ },
+ five: {
+ moji: '5️⃣',
+ unicodeVersion: '3.0',
+ description: 'keycap digit five',
+ },
+ grey_question: {
+ moji: '❔',
+ unicodeVersion: '6.0',
+ description: 'white question mark ornament',
+ },
+
+ // used for regression tests
+ // black_heart MUST come before heart
+ // custard MUST come before star
+ black_heart: {
+ moji: '🖤',
+ unicodeVersion: '1.1',
+ description: 'black heart',
+ },
+ heart: {
+ moji: '❤',
+ unicodeVersion: '1.1',
+ description: 'heavy black heart',
+ },
+ custard: {
+ moji: '🍮',
+ unicodeVersion: '6.0',
+ description: 'custard',
+ },
+ star: {
+ moji: '⭐',
+ unicodeVersion: '5.1',
+ description: 'white medium star',
+ },
+};
+
+Object.keys(emojiFixtureMap).forEach(k => {
+ emojiFixtureMap[k].name = k;
+ if (!emojiFixtureMap[k].aliases) {
+ emojiFixtureMap[k].aliases = [];
+ }
+});
+
+export async function initEmojiMock() {
+ const emojiData = Object.fromEntries(
+ Object.values(emojiFixtureMap).map(m => {
+ const { name: n, moji: e, unicodeVersion: u, category: c, description: d } = m;
+ return [n, { c, e, d, u }];
+ }),
+ );
+
+ const mock = new MockAdapter(axios);
+ mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200, JSON.stringify(emojiData));
+
+ await initEmojiMap();
+
+ return mock;
+}
+
+export function describeEmojiFields(label, tests) {
+ describe.each`
+ field | accessor
+ ${'name'} | ${e => e.name}
+ ${'alias'} | ${e => e.aliases[0]}
+ ${'description'} | ${e => e.description}
+ `(label, tests);
+}
diff --git a/spec/frontend/helpers/experimentation_helper.js b/spec/frontend/helpers/experimentation_helper.js
new file mode 100644
index 00000000000..c08c25155e8
--- /dev/null
+++ b/spec/frontend/helpers/experimentation_helper.js
@@ -0,0 +1,14 @@
+import { merge } from 'lodash';
+
+export function withGonExperiment(experimentKey, value = true) {
+ let origGon;
+
+ beforeEach(() => {
+ origGon = window.gon;
+ window.gon = merge({}, window.gon || {}, { experiments: { [experimentKey]: value } });
+ });
+
+ afterEach(() => {
+ window.gon = origGon;
+ });
+}
diff --git a/spec/frontend/helpers/keep_alive_component_helper.js b/spec/frontend/helpers/keep_alive_component_helper.js
new file mode 100644
index 00000000000..54f40bf9093
--- /dev/null
+++ b/spec/frontend/helpers/keep_alive_component_helper.js
@@ -0,0 +1,29 @@
+import Vue from 'vue';
+
+export function keepAlive(KeptAliveComponent) {
+ return Vue.extend({
+ components: {
+ KeptAliveComponent,
+ },
+ data() {
+ return {
+ view: 'KeptAliveComponent',
+ };
+ },
+ methods: {
+ async activate() {
+ this.view = 'KeptAliveComponent';
+ await this.$nextTick();
+ },
+ async deactivate() {
+ this.view = 'div';
+ await this.$nextTick();
+ },
+ async reactivate() {
+ await this.deactivate();
+ await this.activate();
+ },
+ },
+ template: `<keep-alive><component :is="view"></component></keep-alive>`,
+ });
+}
diff --git a/spec/frontend/helpers/keep_alive_component_helper_spec.js b/spec/frontend/helpers/keep_alive_component_helper_spec.js
new file mode 100644
index 00000000000..dcccc14f396
--- /dev/null
+++ b/spec/frontend/helpers/keep_alive_component_helper_spec.js
@@ -0,0 +1,32 @@
+import { mount } from '@vue/test-utils';
+import { keepAlive } from './keep_alive_component_helper';
+
+const component = {
+ template: '<div>Test Component</div>',
+};
+
+describe('keepAlive', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = mount(keepAlive(component));
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('converts a component to a keep-alive component', async () => {
+ const { element } = wrapper.find(component);
+
+ await wrapper.vm.deactivate();
+ expect(wrapper.find(component).exists()).toBe(false);
+
+ await wrapper.vm.activate();
+
+ // assert that when the component is deactivated and then reactivated, the
+ // re-rendered component keeps the reference to the original element
+ // (i.e. the component was kept alive rather than destroyed)
+ expect(wrapper.find(component).element).toBe(element);
+ });
+});
diff --git a/spec/frontend/helpers/local_storage_helper.js b/spec/frontend/helpers/local_storage_helper.js
index cd39b660bfd..0318b80aaef 100644
--- a/spec/frontend/helpers/local_storage_helper.js
+++ b/spec/frontend/helpers/local_storage_helper.js
@@ -35,7 +35,7 @@ export const createLocalStorageSpy = () => {
clear: jest.fn(() => {
storage = {};
}),
- getItem: jest.fn(key => storage[key]),
+ getItem: jest.fn(key => (key in storage ? storage[key] : null)),
setItem: jest.fn((key, value) => {
storage[key] = value;
}),
diff --git a/spec/frontend/helpers/local_storage_helper_spec.js b/spec/frontend/helpers/local_storage_helper_spec.js
index 6b44ea3a4c3..5d9961e7631 100644
--- a/spec/frontend/helpers/local_storage_helper_spec.js
+++ b/spec/frontend/helpers/local_storage_helper_spec.js
@@ -18,11 +18,11 @@ describe('localStorage helper', () => {
localStorage.removeItem('test', 'testing');
- expect(localStorage.getItem('test')).toBeUndefined();
+ expect(localStorage.getItem('test')).toBe(null);
expect(localStorage.getItem('test2')).toBe('testing');
localStorage.clear();
- expect(localStorage.getItem('test2')).toBeUndefined();
+ expect(localStorage.getItem('test2')).toBe(null);
});
});
diff --git a/spec/frontend/helpers/startup_css_helper_spec.js b/spec/frontend/helpers/startup_css_helper_spec.js
index 7b83f0aefca..1a88e80344e 100644
--- a/spec/frontend/helpers/startup_css_helper_spec.js
+++ b/spec/frontend/helpers/startup_css_helper_spec.js
@@ -1,4 +1,4 @@
-import { waitForCSSLoaded } from '../../../app/assets/javascripts/helpers/startup_css_helper';
+import { waitForCSSLoaded } from '~/helpers/startup_css_helper';
describe('waitForCSSLoaded', () => {
let mockedCallback;
diff --git a/spec/frontend/helpers/vue_test_utils_helper.js b/spec/frontend/helpers/vue_test_utils_helper.js
index 68326e37ae7..ead898f04d3 100644
--- a/spec/frontend/helpers/vue_test_utils_helper.js
+++ b/spec/frontend/helpers/vue_test_utils_helper.js
@@ -33,3 +33,10 @@ export const waitForMutation = (store, expectedMutationType) =>
}
});
});
+
+export const extendedWrapper = wrapper =>
+ Object.defineProperty(wrapper, 'findByTestId', {
+ value(id) {
+ return this.find(`[data-testid="${id}"]`);
+ },
+ });
diff --git a/spec/frontend/helpers/wait_for_text.js b/spec/frontend/helpers/wait_for_text.js
new file mode 100644
index 00000000000..6bed8a90a98
--- /dev/null
+++ b/spec/frontend/helpers/wait_for_text.js
@@ -0,0 +1,3 @@
+import { findByText } from '@testing-library/dom';
+
+export const waitForText = async (text, container = document) => findByText(container, text);
diff --git a/spec/frontend/ide/components/commit_sidebar/actions_spec.js b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
index a303e2b9bee..0003e13c92f 100644
--- a/spec/frontend/ide/components/commit_sidebar/actions_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
@@ -83,12 +83,12 @@ describe('IDE commit sidebar actions', () => {
});
});
- describe('commitToCurrentBranchText', () => {
+ describe('currentBranchText', () => {
it('escapes current branch', () => {
const injectedSrc = '<img src="x" />';
createComponent({ currentBranchId: injectedSrc });
- expect(vm.commitToCurrentBranchText).not.toContain(injectedSrc);
+ expect(vm.currentBranchText).not.toContain(injectedSrc);
});
});
diff --git a/spec/frontend/ide/components/commit_sidebar/form_spec.js b/spec/frontend/ide/components/commit_sidebar/form_spec.js
index 56667d6b03d..abd7e3bb8fc 100644
--- a/spec/frontend/ide/components/commit_sidebar/form_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/form_spec.js
@@ -7,7 +7,12 @@ import { createStore } from '~/ide/stores';
import consts from '~/ide/stores/modules/commit/constants';
import CommitForm from '~/ide/components/commit_sidebar/form.vue';
import { leftSidebarViews } from '~/ide/constants';
-import { createCodeownersCommitError, createUnexpectedCommitError } from '~/ide/lib/errors';
+import {
+ createCodeownersCommitError,
+ createUnexpectedCommitError,
+ createBranchChangedCommitError,
+ branchAlreadyExistsCommitError,
+} from '~/ide/lib/errors';
describe('IDE commit form', () => {
const Component = Vue.extend(CommitForm);
@@ -290,20 +295,30 @@ describe('IDE commit form', () => {
jest.spyOn(vm.$store, 'dispatch').mockReturnValue(Promise.resolve());
});
- it('updates commit action and commits', async () => {
- store.state.commit.commitError = createCodeownersCommitError('test message');
+ const commitActions = [
+ ['commit/updateCommitAction', consts.COMMIT_TO_NEW_BRANCH],
+ ['commit/commitChanges'],
+ ];
- await vm.$nextTick();
+ it.each`
+ commitError | expectedActions
+ ${createCodeownersCommitError} | ${commitActions}
+ ${createBranchChangedCommitError} | ${commitActions}
+ ${branchAlreadyExistsCommitError} | ${[['commit/addSuffixToBranchName'], ...commitActions]}
+ `(
+ 'updates commit action and commits for error: $commitError',
+ async ({ commitError, expectedActions }) => {
+ store.state.commit.commitError = commitError('test message');
- getByText(document.body, 'Create new branch').click();
+ await vm.$nextTick();
- await waitForPromises();
+ getByText(document.body, 'Create new branch').click();
- expect(vm.$store.dispatch.mock.calls).toEqual([
- ['commit/updateCommitAction', consts.COMMIT_TO_NEW_BRANCH],
- ['commit/commitChanges', undefined],
- ]);
- });
+ await waitForPromises();
+
+ expect(vm.$store.dispatch.mock.calls).toEqual(expectedActions);
+ },
+ );
});
});
diff --git a/spec/frontend/ide/components/ide_review_spec.js b/spec/frontend/ide/components/ide_review_spec.js
index c9ac2ac423d..bcc98669427 100644
--- a/spec/frontend/ide/components/ide_review_spec.js
+++ b/spec/frontend/ide/components/ide_review_spec.js
@@ -1,14 +1,19 @@
import Vue from 'vue';
+import Vuex from 'vuex';
+import { createLocalVue, mount } from '@vue/test-utils';
import IdeReview from '~/ide/components/ide_review.vue';
+import EditorModeDropdown from '~/ide/components/editor_mode_dropdown.vue';
import { createStore } from '~/ide/stores';
-import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
import { trimText } from '../../helpers/text_helper';
+import { keepAlive } from '../../helpers/keep_alive_component_helper';
import { file } from '../helpers';
import { projectData } from '../mock_data';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('IDE review mode', () => {
- const Component = Vue.extend(IdeReview);
- let vm;
+ let wrapper;
let store;
beforeEach(() => {
@@ -21,15 +26,53 @@ describe('IDE review mode', () => {
loading: false,
});
- vm = createComponentWithStore(Component, store).$mount();
+ wrapper = mount(keepAlive(IdeReview), {
+ store,
+ localVue,
+ });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders list of files', () => {
- expect(vm.$el.textContent).toContain('fileName');
+ expect(wrapper.text()).toContain('fileName');
+ });
+
+ describe('activated', () => {
+ let initializeSpy;
+
+ beforeEach(async () => {
+ initializeSpy = jest.spyOn(wrapper.find(IdeReview).vm, 'initialize');
+ store.state.viewer = 'editor';
+
+ await wrapper.vm.reactivate();
+ });
+
+ it('re-initializes the component', () => {
+ expect(initializeSpy).toHaveBeenCalled();
+ });
+
+ it('updates viewer to "diff" by default', () => {
+ expect(store.state.viewer).toBe('diff');
+ });
+
+ describe('merge request is defined', () => {
+ beforeEach(async () => {
+ store.state.currentMergeRequestId = '1';
+ store.state.projects.abcproject.mergeRequests['1'] = {
+ iid: 123,
+ web_url: 'testing123',
+ };
+
+ await wrapper.vm.reactivate();
+ });
+
+ it('updates viewer to "mrdiff"', async () => {
+ expect(store.state.viewer).toBe('mrdiff');
+ });
+ });
});
describe('merge request', () => {
@@ -40,32 +83,27 @@ describe('IDE review mode', () => {
web_url: 'testing123',
};
- return vm.$nextTick();
+ return wrapper.vm.$nextTick();
});
it('renders edit dropdown', () => {
- expect(vm.$el.querySelector('.btn')).not.toBe(null);
+ expect(wrapper.find(EditorModeDropdown).exists()).toBe(true);
});
- it('renders merge request link & IID', () => {
+ it('renders merge request link & IID', async () => {
store.state.viewer = 'mrdiff';
- return vm.$nextTick(() => {
- const link = vm.$el.querySelector('.ide-review-sub-header');
+ await wrapper.vm.$nextTick();
- expect(link.querySelector('a').getAttribute('href')).toBe('testing123');
- expect(trimText(link.textContent)).toBe('Merge request (!123)');
- });
+ expect(trimText(wrapper.text())).toContain('Merge request (!123)');
});
- it('changes text to latest changes when viewer is not mrdiff', () => {
+ it('changes text to latest changes when viewer is not mrdiff', async () => {
store.state.viewer = 'diff';
- return vm.$nextTick(() => {
- expect(trimText(vm.$el.querySelector('.ide-review-sub-header').textContent)).toBe(
- 'Latest changes',
- );
- });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.text()).toContain('Latest changes');
});
});
});
diff --git a/spec/frontend/ide/components/ide_side_bar_spec.js b/spec/frontend/ide/components/ide_side_bar_spec.js
index 67257b40879..86e4e8d8f89 100644
--- a/spec/frontend/ide/components/ide_side_bar_spec.js
+++ b/spec/frontend/ide/components/ide_side_bar_spec.js
@@ -1,57 +1,88 @@
-import Vue from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlSkeletonLoading } from '@gitlab/ui';
import { createStore } from '~/ide/stores';
-import ideSidebar from '~/ide/components/ide_side_bar.vue';
+import IdeSidebar from '~/ide/components/ide_side_bar.vue';
+import IdeTree from '~/ide/components/ide_tree.vue';
+import RepoCommitSection from '~/ide/components/repo_commit_section.vue';
import { leftSidebarViews } from '~/ide/constants';
import { projectData } from '../mock_data';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('IdeSidebar', () => {
- let vm;
+ let wrapper;
let store;
- beforeEach(() => {
+ function createComponent() {
store = createStore();
- const Component = Vue.extend(ideSidebar);
-
store.state.currentProjectId = 'abcproject';
store.state.projects.abcproject = projectData;
- vm = createComponentWithStore(Component, store).$mount();
- });
+ return mount(IdeSidebar, {
+ store,
+ localVue,
+ });
+ }
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
it('renders a sidebar', () => {
- expect(vm.$el.querySelector('.multi-file-commit-panel-inner')).not.toBeNull();
+ wrapper = createComponent();
+
+ expect(wrapper.find('[data-testid="ide-side-bar-inner"]').exists()).toBe(true);
});
- it('renders loading icon component', done => {
- vm.$store.state.loading = true;
+ it('renders loading components', async () => {
+ wrapper = createComponent();
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.multi-file-loading-container')).not.toBeNull();
- expect(vm.$el.querySelectorAll('.multi-file-loading-container').length).toBe(3);
+ store.state.loading = true;
- done();
- });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.findAll(GlSkeletonLoading)).toHaveLength(3);
});
describe('activityBarComponent', () => {
it('renders tree component', () => {
- expect(vm.$el.querySelector('.ide-file-list')).not.toBeNull();
+ wrapper = createComponent();
+
+ expect(wrapper.find(IdeTree).exists()).toBe(true);
});
- it('renders commit component', done => {
- vm.$store.state.currentActivityView = leftSidebarViews.commit.name;
+ it('renders commit component', async () => {
+ wrapper = createComponent();
+
+ store.state.currentActivityView = leftSidebarViews.commit.name;
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.multi-file-commit-panel-section')).not.toBeNull();
+ await wrapper.vm.$nextTick();
- done();
- });
+ expect(wrapper.find(RepoCommitSection).exists()).toBe(true);
});
});
+
+ it('keeps the current activity view components alive', async () => {
+ wrapper = createComponent();
+
+ const ideTreeComponent = wrapper.find(IdeTree).element;
+
+ store.state.currentActivityView = leftSidebarViews.commit.name;
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(IdeTree).exists()).toBe(false);
+ expect(wrapper.find(RepoCommitSection).exists()).toBe(true);
+
+ store.state.currentActivityView = leftSidebarViews.edit.name;
+
+ await wrapper.vm.$nextTick();
+
+ // reference to the elements remains the same, meaning the components were kept alive
+ expect(wrapper.find(IdeTree).element).toEqual(ideTreeComponent);
+ });
});
diff --git a/spec/frontend/ide/components/ide_tree_list_spec.js b/spec/frontend/ide/components/ide_tree_list_spec.js
index 4593ef6049b..dd57a5c5f4d 100644
--- a/spec/frontend/ide/components/ide_tree_list_spec.js
+++ b/spec/frontend/ide/components/ide_tree_list_spec.js
@@ -38,15 +38,9 @@ describe('IDE tree list', () => {
beforeEach(() => {
bootstrapWithTree();
- jest.spyOn(vm, 'updateViewer');
-
vm.$mount();
});
- it('updates viewer on mount', () => {
- expect(vm.updateViewer).toHaveBeenCalledWith('edit');
- });
-
it('renders loading indicator', done => {
store.state.trees['abcproject/master'].loading = true;
@@ -67,8 +61,6 @@ describe('IDE tree list', () => {
beforeEach(() => {
bootstrapWithTree(emptyBranchTree);
- jest.spyOn(vm, 'updateViewer');
-
vm.$mount();
});
diff --git a/spec/frontend/ide/components/ide_tree_spec.js b/spec/frontend/ide/components/ide_tree_spec.js
index 899daa0bf57..ad00dec2e48 100644
--- a/spec/frontend/ide/components/ide_tree_spec.js
+++ b/spec/frontend/ide/components/ide_tree_spec.js
@@ -1,19 +1,22 @@
import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount, createLocalVue } from '@vue/test-utils';
import IdeTree from '~/ide/components/ide_tree.vue';
import { createStore } from '~/ide/stores';
-import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
+import { keepAlive } from '../../helpers/keep_alive_component_helper';
import { file } from '../helpers';
import { projectData } from '../mock_data';
-describe('IdeRepoTree', () => {
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('IdeTree', () => {
let store;
- let vm;
+ let wrapper;
beforeEach(() => {
store = createStore();
- const IdeRepoTree = Vue.extend(IdeTree);
-
store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = 'master';
store.state.projects.abcproject = { ...projectData };
@@ -22,14 +25,36 @@ describe('IdeRepoTree', () => {
loading: false,
});
- vm = createComponentWithStore(IdeRepoTree, store).$mount();
+ wrapper = mount(keepAlive(IdeTree), {
+ store,
+ localVue,
+ });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders list of files', () => {
- expect(vm.$el.textContent).toContain('fileName');
+ expect(wrapper.text()).toContain('fileName');
+ });
+
+ describe('activated', () => {
+ let initializeSpy;
+
+ beforeEach(async () => {
+ initializeSpy = jest.spyOn(wrapper.find(IdeTree).vm, 'initialize');
+ store.state.viewer = 'diff';
+
+ await wrapper.vm.reactivate();
+ });
+
+ it('re-initializes the component', () => {
+ expect(initializeSpy).toHaveBeenCalled();
+ });
+
+ it('updates viewer to "editor" by default', () => {
+ expect(store.state.viewer).toBe('editor');
+ });
});
});
diff --git a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
index a65d9e6f78b..faa70982fac 100644
--- a/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
+++ b/spec/frontend/ide/components/jobs/__snapshots__/stage_spec.js.snap
@@ -16,8 +16,6 @@ exports[`IDE pipeline stage renders stage details & icon 1`] = `
<strong
class="gl-ml-3 text-truncate"
data-container="body"
- data-original-title=""
- title=""
>
build
diff --git a/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
index 42526590ebb..57174181a3d 100644
--- a/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
+++ b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
@@ -31,7 +31,7 @@ describe('IDE job log scroll button', () => {
});
it('returns proper title', () => {
- expect(wrapper.attributes('data-original-title')).toBe(title);
+ expect(wrapper.attributes('title')).toBe(title);
});
});
diff --git a/spec/frontend/ide/components/new_dropdown/upload_spec.js b/spec/frontend/ide/components/new_dropdown/upload_spec.js
index ae497106f73..3f3784dbb3a 100644
--- a/spec/frontend/ide/components/new_dropdown/upload_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/upload_spec.js
@@ -59,14 +59,11 @@ describe('new dropdown upload', () => {
result: 'base64,cGxhaW4gdGV4dA==',
};
const binaryTarget = {
- result: 'base64,w4I=',
+ result: 'base64,8PDw8A==', // ðððð
};
- const textFile = new File(['plain text'], 'textFile');
- const binaryFile = {
- name: 'binaryFile',
- type: 'image/png',
- };
+ const textFile = new File(['plain text'], 'textFile');
+ const binaryFile = new File(['😺'], 'binaryFile');
beforeEach(() => {
jest.spyOn(FileReader.prototype, 'readAsText');
@@ -92,16 +89,16 @@ describe('new dropdown upload', () => {
.catch(done.fail);
});
- it('splits content on base64 if binary', () => {
+ it('creates a blob URL for the content if binary', () => {
vm.createFile(binaryTarget, binaryFile);
- expect(FileReader.prototype.readAsText).not.toHaveBeenCalledWith(textFile);
+ expect(FileReader.prototype.readAsText).not.toHaveBeenCalled();
expect(vm.$emit).toHaveBeenCalledWith('create', {
name: binaryFile.name,
type: 'blob',
- content: binaryTarget.result.split('base64,')[1],
- rawPath: binaryTarget.result,
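+ // the binary file is now emitted with a blob: object URL as its rawPath instead of a data URL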
+ content: 'ðððð',
+ rawPath: 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b',
});
});
});
diff --git a/spec/frontend/ide/components/repo_commit_section_spec.js b/spec/frontend/ide/components/repo_commit_section_spec.js
index 3b837622720..096079308cd 100644
--- a/spec/frontend/ide/components/repo_commit_section_spec.js
+++ b/spec/frontend/ide/components/repo_commit_section_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { createStore } from '~/ide/stores';
import { createRouter } from '~/ide/ide_router';
+import { keepAlive } from '../../helpers/keep_alive_component_helper';
import RepoCommitSection from '~/ide/components/repo_commit_section.vue';
import EmptyState from '~/ide/components/commit_sidebar/empty_state.vue';
import { stageKeys } from '~/ide/constants';
@@ -14,7 +15,7 @@ describe('RepoCommitSection', () => {
let store;
function createComponent() {
- wrapper = mount(RepoCommitSection, { store });
+ wrapper = mount(keepAlive(RepoCommitSection), { store });
}
function setupDefaultState() {
@@ -64,6 +65,7 @@ describe('RepoCommitSection', () => {
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
});
describe('empty state', () => {
@@ -168,4 +170,21 @@ describe('RepoCommitSection', () => {
expect(wrapper.find(EmptyState).exists()).toBe(false);
});
});
+
+ describe('activated', () => {
+ let initializeSpy;
+
+ beforeEach(async () => {
+ createComponent();
+
+ initializeSpy = jest.spyOn(wrapper.find(RepoCommitSection).vm, 'initialize');
+ store.state.viewer = 'diff';
+
+ await wrapper.vm.reactivate();
+ });
+
+ it('re-initializes the component', () => {
+ expect(initializeSpy).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/ide/lib/errors_spec.js b/spec/frontend/ide/lib/errors_spec.js
index 8c3fb378302..733d5a5da3c 100644
--- a/spec/frontend/ide/lib/errors_spec.js
+++ b/spec/frontend/ide/lib/errors_spec.js
@@ -2,6 +2,7 @@ import {
createUnexpectedCommitError,
createCodeownersCommitError,
createBranchChangedCommitError,
+ branchAlreadyExistsCommitError,
parseCommitError,
} from '~/ide/lib/errors';
@@ -21,35 +22,22 @@ describe('~/ide/lib/errors', () => {
},
});
- describe('createCodeownersCommitError', () => {
- it('uses given message', () => {
- expect(createCodeownersCommitError(TEST_MESSAGE)).toEqual({
- title: 'CODEOWNERS rule violation',
- messageHTML: TEST_MESSAGE,
- canCreateBranch: true,
- });
- });
+ const NEW_BRANCH_SUFFIX = `<br/><br/>Would you like to create a new branch?`;
+ const AUTOGENERATE_SUFFIX = `<br/><br/>Would you like to try auto-generating a branch name?`;
- it('escapes special chars', () => {
- expect(createCodeownersCommitError(TEST_SPECIAL)).toEqual({
- title: 'CODEOWNERS rule violation',
- messageHTML: TEST_SPECIAL_ESCAPED,
- canCreateBranch: true,
- });
- });
- });
-
- describe('createBranchChangedCommitError', () => {
- it.each`
- message | expectedMessage
- ${TEST_MESSAGE} | ${`${TEST_MESSAGE}<br/><br/>Would you like to create a new branch?`}
- ${TEST_SPECIAL} | ${`${TEST_SPECIAL_ESCAPED}<br/><br/>Would you like to create a new branch?`}
- `('uses given message="$message"', ({ message, expectedMessage }) => {
- expect(createBranchChangedCommitError(message)).toEqual({
- title: 'Branch changed',
- messageHTML: expectedMessage,
- canCreateBranch: true,
- });
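+ // each factory escapes the given message; the branch-related errors also append a follow-up prompt suffix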
+ it.each`
+ fn | title | message | messageHTML
+ ${createCodeownersCommitError} | ${'CODEOWNERS rule violation'} | ${TEST_MESSAGE} | ${TEST_MESSAGE}
+ ${createCodeownersCommitError} | ${'CODEOWNERS rule violation'} | ${TEST_SPECIAL} | ${TEST_SPECIAL_ESCAPED}
+ ${branchAlreadyExistsCommitError} | ${'Branch already exists'} | ${TEST_MESSAGE} | ${`${TEST_MESSAGE}${AUTOGENERATE_SUFFIX}`}
+ ${branchAlreadyExistsCommitError} | ${'Branch already exists'} | ${TEST_SPECIAL} | ${`${TEST_SPECIAL_ESCAPED}${AUTOGENERATE_SUFFIX}`}
+ ${createBranchChangedCommitError} | ${'Branch changed'} | ${TEST_MESSAGE} | ${`${TEST_MESSAGE}${NEW_BRANCH_SUFFIX}`}
+ ${createBranchChangedCommitError} | ${'Branch changed'} | ${TEST_SPECIAL} | ${`${TEST_SPECIAL_ESCAPED}${NEW_BRANCH_SUFFIX}`}
+ `('$fn escapes and uses given message="$message"', ({ fn, title, message, messageHTML }) => {
+ expect(fn(message)).toEqual({
+ title,
+ messageHTML,
+ primaryAction: { text: 'Create new branch', callback: expect.any(Function) },
});
});
@@ -60,7 +48,7 @@ describe('~/ide/lib/errors', () => {
${{}} | ${createUnexpectedCommitError()}
${{ response: {} }} | ${createUnexpectedCommitError()}
${{ response: { data: {} } }} | ${createUnexpectedCommitError()}
- ${createResponseError('test')} | ${createUnexpectedCommitError()}
+ ${createResponseError(TEST_MESSAGE)} | ${createUnexpectedCommitError(TEST_MESSAGE)}
${createResponseError(CODEOWNERS_MESSAGE)} | ${createCodeownersCommitError(CODEOWNERS_MESSAGE)}
${createResponseError(CHANGED_MESSAGE)} | ${createBranchChangedCommitError(CHANGED_MESSAGE)}
`('parses message into error object with "$message"', ({ message, expectation }) => {
diff --git a/spec/frontend/ide/lib/languages/hcl_spec.js b/spec/frontend/ide/lib/languages/hcl_spec.js
new file mode 100644
index 00000000000..a39673a3225
--- /dev/null
+++ b/spec/frontend/ide/lib/languages/hcl_spec.js
@@ -0,0 +1,290 @@
+import { editor } from 'monaco-editor';
+import { registerLanguages } from '~/ide/utils';
+import hcl from '~/ide/lib/languages/hcl';
+
+describe('tokenization for .tf files', () => {
+ beforeEach(() => {
+ registerLanguages(hcl);
+ });
+
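+ // each case pairs a line of HCL source with the Monaco tokens (offset + scope) expected for it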
+ it.each([
+ ['// Foo', [[{ language: 'hcl', offset: 0, type: 'comment.hcl' }]]],
+ ['/* Bar */', [[{ language: 'hcl', offset: 0, type: 'comment.hcl' }]]],
+ ['/*', [[{ language: 'hcl', offset: 0, type: 'comment.hcl' }]]],
+ [
+ 'foo = "bar"',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'string.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'variable "foo" {',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'type.hcl' },
+ { language: 'hcl', offset: 8, type: '' },
+ { language: 'hcl', offset: 9, type: 'string.hcl' },
+ { language: 'hcl', offset: 14, type: '' },
+ { language: 'hcl', offset: 15, type: 'delimiter.curly.hcl' },
+ ],
+ ],
+ ],
+ [
+ // eslint-disable-next-line no-template-curly-in-string
+ ' api_key = "${var.foo}"',
+ [
+ [
+ { language: 'hcl', offset: 0, type: '' },
+ { language: 'hcl', offset: 2, type: 'variable.hcl' },
+ { language: 'hcl', offset: 9, type: '' },
+ { language: 'hcl', offset: 10, type: 'operator.hcl' },
+ { language: 'hcl', offset: 11, type: '' },
+ { language: 'hcl', offset: 12, type: 'string.hcl' },
+ { language: 'hcl', offset: 13, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 15, type: 'keyword.var.hcl' },
+ { language: 'hcl', offset: 18, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 19, type: 'variable.hcl' },
+ { language: 'hcl', offset: 22, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 23, type: 'string.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'resource "aws_security_group" "firewall" {',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'type.hcl' },
+ { language: 'hcl', offset: 8, type: '' },
+ { language: 'hcl', offset: 9, type: 'string.hcl' },
+ { language: 'hcl', offset: 29, type: '' },
+ { language: 'hcl', offset: 30, type: 'string.hcl' },
+ { language: 'hcl', offset: 40, type: '' },
+ { language: 'hcl', offset: 41, type: 'delimiter.curly.hcl' },
+ ],
+ ],
+ ],
+ [
+ ' network_interface {',
+ [
+ [
+ { language: 'hcl', offset: 0, type: '' },
+ { language: 'hcl', offset: 2, type: 'identifier.hcl' },
+ { language: 'hcl', offset: 20, type: 'delimiter.curly.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'foo = [1, 2, "foo"]',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'delimiter.square.hcl' },
+ { language: 'hcl', offset: 7, type: 'number.hcl' },
+ { language: 'hcl', offset: 8, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 9, type: '' },
+ { language: 'hcl', offset: 10, type: 'number.hcl' },
+ { language: 'hcl', offset: 11, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 12, type: '' },
+ { language: 'hcl', offset: 13, type: 'string.hcl' },
+ { language: 'hcl', offset: 18, type: 'delimiter.square.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'resource "foo" "bar" {}',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'type.hcl' },
+ { language: 'hcl', offset: 8, type: '' },
+ { language: 'hcl', offset: 9, type: 'string.hcl' },
+ { language: 'hcl', offset: 14, type: '' },
+ { language: 'hcl', offset: 15, type: 'string.hcl' },
+ { language: 'hcl', offset: 20, type: '' },
+ { language: 'hcl', offset: 21, type: 'delimiter.curly.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'foo = "bar"',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'string.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'bar = 7',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'number.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'baz = [1,2,3]',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'delimiter.square.hcl' },
+ { language: 'hcl', offset: 7, type: 'number.hcl' },
+ { language: 'hcl', offset: 8, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 9, type: 'number.hcl' },
+ { language: 'hcl', offset: 10, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 11, type: 'number.hcl' },
+ { language: 'hcl', offset: 12, type: 'delimiter.square.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'foo = -12',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'operator.hcl' },
+ { language: 'hcl', offset: 7, type: 'number.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'bar = 3.14159',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'number.float.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'foo = true',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'keyword.true.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'foo = false',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'keyword.false.hcl' },
+ ],
+ ],
+ ],
+ [
+ // eslint-disable-next-line no-template-curly-in-string
+ 'bar = "${file("bing/bong.txt")}"',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'operator.hcl' },
+ { language: 'hcl', offset: 5, type: '' },
+ { language: 'hcl', offset: 6, type: 'string.hcl' },
+ { language: 'hcl', offset: 7, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 9, type: 'type.hcl' },
+ { language: 'hcl', offset: 13, type: 'delimiter.parenthesis.hcl' },
+ { language: 'hcl', offset: 14, type: 'string.hcl' },
+ { language: 'hcl', offset: 29, type: 'delimiter.parenthesis.hcl' },
+ { language: 'hcl', offset: 30, type: 'delimiter.hcl' },
+ { language: 'hcl', offset: 31, type: 'string.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'a = 1e-10',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 1, type: '' },
+ { language: 'hcl', offset: 2, type: 'operator.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'number.float.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'b = 1e+10',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 1, type: '' },
+ { language: 'hcl', offset: 2, type: 'operator.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'number.float.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'c = 1e10',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 1, type: '' },
+ { language: 'hcl', offset: 2, type: 'operator.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'number.float.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'd = 1.2e-10',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 1, type: '' },
+ { language: 'hcl', offset: 2, type: 'operator.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'number.float.hcl' },
+ ],
+ ],
+ ],
+ [
+ 'e = 1.2e+10',
+ [
+ [
+ { language: 'hcl', offset: 0, type: 'variable.hcl' },
+ { language: 'hcl', offset: 1, type: '' },
+ { language: 'hcl', offset: 2, type: 'operator.hcl' },
+ { language: 'hcl', offset: 3, type: '' },
+ { language: 'hcl', offset: 4, type: 'number.float.hcl' },
+ ],
+ ],
+ ],
+ ])('%s', (string, tokens) => {
+ expect(editor.tokenize(string, 'hcl')).toEqual(tokens);
+ });
+});
diff --git a/spec/frontend/ide/stores/actions/file_spec.js b/spec/frontend/ide/stores/actions/file_spec.js
index 974c0715c06..8f7fcc25cf0 100644
--- a/spec/frontend/ide/stores/actions/file_spec.js
+++ b/spec/frontend/ide/stores/actions/file_spec.js
@@ -291,6 +291,20 @@ describe('IDE store file actions', () => {
expect(store.state.openFiles[0].name).toBe(localFile.name);
});
});
+
+ it('does not toggle loading if toggleLoading=false', () => {
+ expect(localFile.loading).toBe(false);
+
+ return store
+ .dispatch('getFileData', {
+ path: localFile.path,
+ makeFileActive: false,
+ toggleLoading: false,
+ })
+ .then(() => {
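+ // with toggleLoading=false the action leaves the file's loading flag switched on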
+ expect(localFile.loading).toBe(true);
+ });
+ });
});
describe('Re-named success', () => {
diff --git a/spec/frontend/ide/stores/getters_spec.js b/spec/frontend/ide/stores/getters_spec.js
index e24f08fa802..5ae87f5f9cd 100644
--- a/spec/frontend/ide/stores/getters_spec.js
+++ b/spec/frontend/ide/stores/getters_spec.js
@@ -449,16 +449,16 @@ describe('IDE store getters', () => {
describe('getAvailableFileName', () => {
it.each`
path | newPath
- ${'foo'} | ${'foo_1'}
+ ${'foo'} | ${'foo-1'}
${'foo__93.png'} | ${'foo__94.png'}
- ${'foo/bar.png'} | ${'foo/bar_1.png'}
+ ${'foo/bar.png'} | ${'foo/bar-1.png'}
${'foo/bar--34.png'} | ${'foo/bar--35.png'}
${'foo/bar 2.png'} | ${'foo/bar 3.png'}
${'foo/bar-621.png'} | ${'foo/bar-622.png'}
- ${'jquery.min.js'} | ${'jquery_1.min.js'}
+ ${'jquery.min.js'} | ${'jquery-1.min.js'}
${'my_spec_22.js.snap'} | ${'my_spec_23.js.snap'}
- ${'subtitles5.mp4.srt'} | ${'subtitles_6.mp4.srt'}
- ${'sample_file.mp3'} | ${'sample_file_1.mp3'}
+ ${'subtitles5.mp4.srt'} | ${'subtitles-6.mp4.srt'}
+ ${'sample-file.mp3'} | ${'sample-file-1.mp3'}
${'Screenshot 2020-05-26 at 10.53.08 PM.png'} | ${'Screenshot 2020-05-26 at 11.53.08 PM.png'}
`('suffixes the path with a number if the path already exists', ({ path, newPath }) => {
localState.entries[path] = file();
diff --git a/spec/frontend/ide/stores/modules/commit/actions_spec.js b/spec/frontend/ide/stores/modules/commit/actions_spec.js
index babc50e54f1..cfe2bddf76c 100644
--- a/spec/frontend/ide/stores/modules/commit/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/actions_spec.js
@@ -76,59 +76,38 @@ describe('IDE commit module actions', () => {
.then(done)
.catch(done.fail);
});
+ });
- it('sets shouldCreateMR to true if "Create new MR" option is visible', done => {
- Object.assign(store.state, {
- shouldHideNewMrOption: false,
- });
+ describe('updateBranchName', () => {
+ let originalGon;
- testAction(
- actions.updateCommitAction,
- {},
- store.state,
- [
- {
- type: mutationTypes.UPDATE_COMMIT_ACTION,
- payload: { commitAction: expect.anything() },
- },
- { type: mutationTypes.TOGGLE_SHOULD_CREATE_MR, payload: true },
- ],
- [],
- done,
- );
+ beforeEach(() => {
+ originalGon = window.gon;
+ window.gon = { current_username: 'johndoe' };
+
+ store.state.currentBranchId = 'master';
});
- it('sets shouldCreateMR to false if "Create new MR" option is hidden', done => {
- Object.assign(store.state, {
- shouldHideNewMrOption: true,
- });
+ afterEach(() => {
+ window.gon = originalGon;
+ });
- testAction(
- actions.updateCommitAction,
- {},
- store.state,
- [
- {
- type: mutationTypes.UPDATE_COMMIT_ACTION,
- payload: { commitAction: expect.anything() },
- },
- { type: mutationTypes.TOGGLE_SHOULD_CREATE_MR, payload: false },
- ],
- [],
- done,
- );
+ it('updates store with new branch name', async () => {
+ await store.dispatch('commit/updateBranchName', 'branch-name');
+
+ expect(store.state.commit.newBranchName).toBe('branch-name');
});
});
- describe('updateBranchName', () => {
- it('updates store with new branch name', done => {
- store
- .dispatch('commit/updateBranchName', 'branch-name')
- .then(() => {
- expect(store.state.commit.newBranchName).toBe('branch-name');
- })
- .then(done)
- .catch(done.fail);
+ describe('addSuffixToBranchName', () => {
+ it('adds suffix to branchName', async () => {
+ jest.spyOn(Math, 'random').mockReturnValue(0.391352525);
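+ // the mocked value makes the generated suffix deterministic: '39135'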
+
+ store.state.commit.newBranchName = 'branch-name';
+
+ await store.dispatch('commit/addSuffixToBranchName');
+
+ expect(store.state.commit.newBranchName).toBe('branch-name-39135');
});
});
@@ -318,13 +297,16 @@ describe('IDE commit module actions', () => {
currentBranchId: 'master',
projects: {
abcproject: {
+ default_branch: 'master',
web_url: 'webUrl',
branches: {
master: {
+ name: 'master',
workingReference: '1',
commit: {
id: TEST_COMMIT_SHA,
},
+ can_push: true,
},
},
userPermissions: {
@@ -499,6 +481,16 @@ describe('IDE commit module actions', () => {
.catch(done.fail);
});
+ it('does not redirect to merge request page if shouldCreateMR is checked, but branch is the default branch', async () => {
+ jest.spyOn(eventHub, '$on').mockImplementation();
+
+ store.state.commit.commitAction = consts.COMMIT_TO_CURRENT_BRANCH;
+ store.state.commit.shouldCreateMR = true;
+
+ await store.dispatch('commit/commitChanges');
+ expect(visitUrl).not.toHaveBeenCalled();
+ });
+
it('resets changed files before redirecting', () => {
jest.spyOn(eventHub, '$on').mockImplementation();
diff --git a/spec/frontend/ide/stores/mutations/file_spec.js b/spec/frontend/ide/stores/mutations/file_spec.js
index b53e40be980..d303de6e9ef 100644
--- a/spec/frontend/ide/stores/mutations/file_spec.js
+++ b/spec/frontend/ide/stores/mutations/file_spec.js
@@ -39,20 +39,34 @@ describe('IDE store file mutations', () => {
});
describe('TOGGLE_FILE_OPEN', () => {
- beforeEach(() => {
+ it('adds into opened files', () => {
mutations.TOGGLE_FILE_OPEN(localState, localFile.path);
- });
- it('adds into opened files', () => {
expect(localFile.opened).toBeTruthy();
expect(localState.openFiles.length).toBe(1);
});
- it('removes from opened files', () => {
+ describe('if already open', () => {
+ it('removes from opened files', () => {
+ mutations.TOGGLE_FILE_OPEN(localState, localFile.path);
+ mutations.TOGGLE_FILE_OPEN(localState, localFile.path);
+
+ expect(localFile.opened).toBeFalsy();
+ expect(localState.openFiles.length).toBe(0);
+ });
+ });
+
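+ // loading is only switched on when opening a previously closed, non-temp file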
+ it.each`
+ entry | loading
+ ${{ opened: false }} | ${true}
+ ${{ opened: false, tempFile: true }} | ${false}
+ ${{ opened: true }} | ${false}
+ `('for state: $entry, sets loading=$loading', ({ entry, loading }) => {
+ Object.assign(localFile, entry);
+
mutations.TOGGLE_FILE_OPEN(localState, localFile.path);
- expect(localFile.opened).toBeFalsy();
- expect(localState.openFiles.length).toBe(0);
+ expect(localFile.loading).toBe(loading);
});
});
diff --git a/spec/frontend/ide/stores/utils_spec.js b/spec/frontend/ide/stores/utils_spec.js
index d1eb4304c79..b185013050e 100644
--- a/spec/frontend/ide/stores/utils_spec.js
+++ b/spec/frontend/ide/stores/utils_spec.js
@@ -46,7 +46,7 @@ describe('Multi-file store utils', () => {
path: 'added',
tempFile: true,
content: 'new file content',
- rawPath: 'data:image/png;base64,abc',
+ rawPath: 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b',
lastCommitSha: '123456789',
},
{ ...file('deletedFile'), path: 'deletedFile', deleted: true },
@@ -77,7 +77,8 @@ describe('Multi-file store utils', () => {
{
action: commitActionTypes.create,
file_path: 'added',
- content: 'new file content',
+ // btoa("new file content")
+ content: 'bmV3IGZpbGUgY29udGVudA==',
encoding: 'base64',
last_commit_id: '123456789',
previous_path: undefined,
@@ -117,7 +118,7 @@ describe('Multi-file store utils', () => {
path: 'added',
tempFile: true,
content: 'new file content',
- rawPath: 'data:image/png;base64,abc',
+ rawPath: 'blob:https://gitlab.com/048c7ac1-98de-4a37-ab1b-0206d0ea7e1b',
lastCommitSha: '123456789',
},
],
@@ -148,7 +149,8 @@ describe('Multi-file store utils', () => {
{
action: commitActionTypes.create,
file_path: 'added',
- content: 'new file content',
+ // btoa("new file content")
+ content: 'bmV3IGZpbGUgY29udGVudA==',
encoding: 'base64',
last_commit_id: '123456789',
previous_path: undefined,
diff --git a/spec/frontend/ide/utils_spec.js b/spec/frontend/ide/utils_spec.js
index 97dc8217ecc..6cd2128d356 100644
--- a/spec/frontend/ide/utils_spec.js
+++ b/spec/frontend/ide/utils_spec.js
@@ -9,6 +9,7 @@ import {
getPathParents,
getPathParent,
readFileAsDataURL,
+ addNumericSuffix,
} from '~/ide/utils';
describe('WebIDE utils', () => {
@@ -291,4 +292,43 @@ describe('WebIDE utils', () => {
});
});
});
+
+ /*
+ * hello-2425 -> hello-2426
+ * hello.md -> hello-1.md
+ * hello_2.md -> hello_3.md
+ * hello_ -> hello_1
+ * master-patch-22432 -> master-patch-22433
+ * patch_332 -> patch_333
+ */
+
+ describe('addNumericSuffix', () => {
+ it.each`
+ input | output
+ ${'hello'} | ${'hello-1'}
+ ${'hello2'} | ${'hello-3'}
+ ${'hello.md'} | ${'hello-1.md'}
+ ${'hello_2.md'} | ${'hello_3.md'}
+ ${'hello_'} | ${'hello_1'}
+ ${'master-patch-22432'} | ${'master-patch-22433'}
+ ${'patch_332'} | ${'patch_333'}
+ `('adds a numeric suffix to a given filename/branch name: $input', ({ input, output }) => {
+ expect(addNumericSuffix(input)).toBe(output);
+ });
+
+ it.each`
+ input | output
+ ${'hello'} | ${'hello-39135'}
+ ${'hello2'} | ${'hello-39135'}
+ ${'hello.md'} | ${'hello-39135.md'}
+ ${'hello_2.md'} | ${'hello_39135.md'}
+ ${'hello_'} | ${'hello_39135'}
+ ${'master-patch-22432'} | ${'master-patch-39135'}
+ ${'patch_332'} | ${'patch_39135'}
+ `('adds a random suffix if randomize=true is passed for name: $input', ({ input, output }) => {
+ jest.spyOn(Math, 'random').mockReturnValue(0.391352525);
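+ // same mocked value as above, so every name receives the fixed '39135' suffix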
+
+ expect(addNumericSuffix(input, true)).toBe(output);
+ });
+ });
});
diff --git a/spec/frontend/incidents/components/incidents_list_spec.js b/spec/frontend/incidents/components/incidents_list_spec.js
index 307806e0a8a..709f66bb352 100644
--- a/spec/frontend/incidents/components/incidents_list_spec.js
+++ b/spec/frontend/incidents/components/incidents_list_spec.js
@@ -1,28 +1,28 @@
import { mount } from '@vue/test-utils';
-import {
- GlAlert,
- GlLoadingIcon,
- GlTable,
- GlAvatar,
- GlPagination,
- GlSearchBoxByType,
- GlTab,
- GlTabs,
- GlBadge,
- GlEmptyState,
-} from '@gitlab/ui';
+import { GlAlert, GlLoadingIcon, GlTable, GlAvatar, GlEmptyState } from '@gitlab/ui';
+import Tracking from '~/tracking';
import { visitUrl, joinPaths, mergeUrlParams } from '~/lib/utils/url_utility';
import IncidentsList from '~/incidents/components/incidents_list.vue';
import SeverityToken from '~/sidebar/components/severity/severity.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import { I18N, INCIDENT_STATUS_TABS } from '~/incidents/constants';
+import {
+ I18N,
+ TH_CREATED_AT_TEST_ID,
+ TH_SEVERITY_TEST_ID,
+ TH_PUBLISHED_TEST_ID,
+ trackIncidentCreateNewOptions,
+ trackIncidentListViewsOptions,
+} from '~/incidents/constants';
import mockIncidents from '../mocks/incidents.json';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn().mockName('visitUrlMock'),
- joinPaths: jest.fn().mockName('joinPaths'),
- mergeUrlParams: jest.fn().mockName('mergeUrlParams'),
+ joinPaths: jest.fn(),
+ mergeUrlParams: jest.fn(),
+ setUrlParams: jest.fn(),
+ updateHistory: jest.fn(),
}));
+jest.mock('~/tracking');
describe('Incidents List', () => {
let wrapper;
@@ -41,23 +41,22 @@ describe('Incidents List', () => {
const findAlert = () => wrapper.find(GlAlert);
const findLoader = () => wrapper.find(GlLoadingIcon);
const findTimeAgo = () => wrapper.findAll(TimeAgoTooltip);
- const findDateColumnHeader = () =>
- wrapper.find('[data-testid="incident-management-created-at-sort"]');
- const findSearch = () => wrapper.find(GlSearchBoxByType);
- const findAssingees = () => wrapper.findAll('[data-testid="incident-assignees"]');
+ const findAssignees = () => wrapper.findAll('[data-testid="incident-assignees"]');
+ const findIncidentSlaHeader = () => wrapper.find('[data-testid="incident-management-sla"]');
const findCreateIncidentBtn = () => wrapper.find('[data-testid="createIncidentBtn"]');
const findClosedIcon = () => wrapper.findAll("[data-testid='incident-closed']");
- const findPagination = () => wrapper.find(GlPagination);
- const findStatusFilterTabs = () => wrapper.findAll(GlTab);
- const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
- const findStatusTabs = () => wrapper.find(GlTabs);
const findEmptyState = () => wrapper.find(GlEmptyState);
const findSeverity = () => wrapper.findAll(SeverityToken);
+ const findIncidentSla = () => wrapper.findAll("[data-testid='incident-sla']");
- function mountComponent({ data = { incidents: [], incidentsCount: {} }, loading = false }) {
+ function mountComponent({ data = {}, loading = false, provide = {} } = {}) {
wrapper = mount(IncidentsList, {
data() {
- return data;
+ return {
+ incidents: [],
+ incidentsCount: {},
+ ...data,
+ };
},
mocks: {
$apollo: {
@@ -73,14 +72,20 @@ describe('Incidents List', () => {
newIssuePath,
incidentTemplateName,
incidentType,
- issuePath: '/project/isssues',
+ issuePath: '/project/issues',
publishedAvailable: true,
emptyListSvgPath,
+ textQuery: '',
+ authorUsernameQuery: '',
+ assigneeUsernameQuery: '',
+ slaFeatureAvailable: true,
+ ...provide,
},
stubs: {
GlButton: true,
GlAvatar: true,
GlEmptyState: true,
+ ServiceLevelAgreementCell: true,
},
});
}
@@ -153,14 +158,14 @@ describe('Incidents List', () => {
describe('Assignees', () => {
it('shows Unassigned when there are no assignees', () => {
expect(
- findAssingees()
+ findAssignees()
.at(0)
.text(),
).toBe(I18N.unassigned);
});
it('renders an avatar component when there is an assignee', () => {
- const avatar = findAssingees()
+ const avatar = findAssignees()
.at(1)
.find(GlAvatar);
const { src, label } = avatar.attributes();
@@ -171,13 +176,6 @@ describe('Incidents List', () => {
expect(src).toBe(avatarUrl);
});
- it('contains a link to the issue details', () => {
- findTableRows()
- .at(0)
- .trigger('click');
- expect(visitUrl).toHaveBeenCalledWith(joinPaths(`/project/isssues/`, mockIncidents[0].iid));
- });
-
it('renders a closed icon for closed incidents', () => {
expect(findClosedIcon().length).toBe(
mockIncidents.filter(({ state }) => state === 'closed').length,
@@ -188,6 +186,44 @@ describe('Incidents List', () => {
it('renders severity per row', () => {
expect(findSeverity().length).toBe(mockIncidents.length);
});
+
+ it('contains a link to the incident details page', async () => {
+ findTableRows()
+ .at(0)
+ .trigger('click');
+ expect(visitUrl).toHaveBeenCalledWith(
+ joinPaths(`/project/issues/incident`, mockIncidents[0].iid),
+ );
+ });
+
+ describe('Incident SLA field', () => {
+ it('displays the column when the feature is available', () => {
+ mountComponent({
+ data: { incidents: { list: mockIncidents } },
+ provide: { slaFeatureAvailable: true },
+ });
+
+ expect(findIncidentSlaHeader().text()).toContain('Time to SLA');
+ });
+
+ it('does not display the column when the feature is not available', () => {
+ mountComponent({
+ data: { incidents: { list: mockIncidents } },
+ provide: { slaFeatureAvailable: false },
+ });
+
+ expect(findIncidentSlaHeader().exists()).toBe(false);
+ });
+
+ it('renders an SLA for each incident', () => {
+ mountComponent({
+ data: { incidents: { list: mockIncidents } },
+ provide: { slaFeatureAvailable: true },
+ });
+
+ expect(findIncidentSla().length).toBe(mockIncidents.length);
+ });
+ });
});
describe('Create Incident', () => {
@@ -198,7 +234,7 @@ describe('Incidents List', () => {
});
});
- it('shows the button linking to new incidents page with prefilled incident template when clicked', () => {
+ it('shows the button linking to new incidents page with pre-filled incident template when clicked', () => {
expect(findCreateIncidentBtn().exists()).toBe(true);
findCreateIncidentBtn().trigger('click');
expect(mergeUrlParams).toHaveBeenCalledWith(
@@ -207,11 +243,10 @@ describe('Incidents List', () => {
);
});
- it('sets button loading on click', () => {
+ it('sets button loading on click', async () => {
findCreateIncidentBtn().vm.$emit('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(findCreateIncidentBtn().attributes('loading')).toBe('true');
- });
+ await wrapper.vm.$nextTick();
+ expect(findCreateIncidentBtn().attributes('loading')).toBe('true');
});
it("doesn't show the button when list is empty", () => {
@@ -221,175 +256,62 @@ describe('Incidents List', () => {
});
expect(findCreateIncidentBtn().exists()).toBe(false);
});
+
+ it('should track the create new incident button click', async () => {
+ findCreateIncidentBtn().vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ expect(Tracking.event).toHaveBeenCalled();
+ });
});
- describe('Pagination', () => {
+ describe('sorting the incident list by column', () => {
beforeEach(() => {
mountComponent({
- data: {
- incidents: {
- list: mockIncidents,
- pageInfo: { hasNextPage: true, hasPreviousPage: true },
- },
- incidentsCount,
- errored: false,
- },
+ data: { incidents: { list: mockIncidents }, incidentsCount },
loading: false,
});
});
- it('should render pagination', () => {
- expect(wrapper.find(GlPagination).exists()).toBe(true);
- });
-
- describe('prevPage', () => {
- it('returns prevPage button', () => {
- findPagination().vm.$emit('input', 3);
-
- return wrapper.vm.$nextTick(() => {
- expect(
- findPagination()
- .findAll('.page-item')
- .at(0)
- .text(),
- ).toBe('Prev');
- });
- });
-
- it('returns prevPage number', () => {
- findPagination().vm.$emit('input', 3);
-
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.prevPage).toBe(2);
- });
- });
-
- it('returns 0 when it is the first page', () => {
- findPagination().vm.$emit('input', 1);
-
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.prevPage).toBe(0);
- });
- });
- });
-
- describe('nextPage', () => {
- it('returns nextPage button', () => {
- findPagination().vm.$emit('input', 3);
-
- return wrapper.vm.$nextTick(() => {
- expect(
- findPagination()
- .findAll('.page-item')
- .at(1)
- .text(),
- ).toBe('Next');
- });
- });
-
- it('returns nextPage number', () => {
- mountComponent({
- data: {
- incidents: {
- list: [...mockIncidents, ...mockIncidents, ...mockIncidents],
- pageInfo: { hasNextPage: true, hasPreviousPage: true },
- },
- incidentsCount,
- errored: false,
- },
- loading: false,
- });
- findPagination().vm.$emit('input', 1);
-
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.nextPage).toBe(2);
- });
- });
-
- it('returns `null` when currentPage is already last page', () => {
- findStatusTabs().vm.$emit('input', 1);
- findPagination().vm.$emit('input', 1);
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.nextPage).toBeNull();
- });
- });
- });
-
- describe('Search', () => {
- beforeEach(() => {
- mountComponent({
- data: {
- incidents: {
- list: mockIncidents,
- pageInfo: { hasNextPage: true, hasPreviousPage: true },
- },
- incidentsCount,
- errored: false,
- },
- loading: false,
- });
- });
-
- it('renders the search component for incidents', () => {
- expect(findSearch().exists()).toBe(true);
- });
-
- it('sets the `searchTerm` graphql variable', () => {
- const SEARCH_TERM = 'Simple Incident';
-
- findSearch().vm.$emit('input', SEARCH_TERM);
-
- expect(wrapper.vm.$data.searchTerm).toBe(SEARCH_TERM);
- });
- });
-
- describe('Status Filter Tabs', () => {
- beforeEach(() => {
- mountComponent({
- data: { incidents: { list: mockIncidents }, incidentsCount },
- loading: false,
- stubs: {
- GlTab: true,
- },
- });
- });
-
- it('should display filter tabs', () => {
- const tabs = findStatusFilterTabs().wrappers;
-
- tabs.forEach((tab, i) => {
- expect(tab.attributes('data-testid')).toContain(INCIDENT_STATUS_TABS[i].status);
- });
- });
-
- it('should display filter tabs with alerts count badge for each status', () => {
- const tabs = findStatusFilterTabs().wrappers;
- const badges = findStatusFilterBadge();
+ const descSort = 'descending';
+ const ascSort = 'ascending';
+ const noneSort = 'none';
- tabs.forEach((tab, i) => {
- const status = INCIDENT_STATUS_TABS[i].status.toLowerCase();
- expect(tab.attributes('data-testid')).toContain(INCIDENT_STATUS_TABS[i].status);
- expect(badges.at(i).text()).toContain(incidentsCount[status]);
- });
- });
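+ // clicking a sortable column header cycles its aria-sort value through the directions below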
+ it.each`
+ selector | initialSort | firstSort | nextSort
+ ${TH_CREATED_AT_TEST_ID} | ${descSort} | ${ascSort} | ${descSort}
+ ${TH_SEVERITY_TEST_ID} | ${noneSort} | ${descSort} | ${ascSort}
+ ${TH_PUBLISHED_TEST_ID} | ${noneSort} | ${descSort} | ${ascSort}
+ `('updates sort with new direction', async ({ selector, initialSort, firstSort, nextSort }) => {
+ const [[attr, value]] = Object.entries(selector);
+ const columnHeader = () => wrapper.find(`[${attr}="${value}"]`);
+ expect(columnHeader().attributes('aria-sort')).toBe(initialSort);
+ columnHeader().trigger('click');
+ await wrapper.vm.$nextTick();
+ expect(columnHeader().attributes('aria-sort')).toBe(firstSort);
+ columnHeader().trigger('click');
+ await wrapper.vm.$nextTick();
+ expect(columnHeader().attributes('aria-sort')).toBe(nextSort);
});
});
- describe('sorting the incident list by column', () => {
+ describe('Snowplow tracking', () => {
beforeEach(() => {
mountComponent({
- data: { incidents: { list: mockIncidents }, incidentsCount },
+ data: { incidents: { list: mockIncidents }, incidentsCount: {} },
loading: false,
});
});
- it('updates sort with new direction and column key', () => {
- expect(findDateColumnHeader().attributes('aria-sort')).toBe('descending');
+ it('should track incident list views', () => {
+ const { category, action } = trackIncidentListViewsOptions;
+ expect(Tracking.event).toHaveBeenCalledWith(category, action);
+ });
- findDateColumnHeader().trigger('click');
- return wrapper.vm.$nextTick(() => {
- expect(findDateColumnHeader().attributes('aria-sort')).toBe('ascending');
- });
+ it('should track incident creation events', async () => {
+ findCreateIncidentBtn().vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ const { category, action } = trackIncidentCreateNewOptions;
+ expect(Tracking.event).toHaveBeenCalledWith(category, action);
});
});
});
diff --git a/spec/frontend/incidents/mocks/incidents.json b/spec/frontend/incidents/mocks/incidents.json
index 42b3d6d3eb6..07c87a5d43d 100644
--- a/spec/frontend/incidents/mocks/incidents.json
+++ b/spec/frontend/incidents/mocks/incidents.json
@@ -5,7 +5,8 @@
"createdAt": "2020-06-03T15:46:08Z",
"assignees": {},
"state": "opened",
- "severity": "CRITICAL"
+ "severity": "CRITICAL",
+ "slaDueAt": "2020-06-04T12:46:08Z"
},
{
"iid": "14",
@@ -22,7 +23,8 @@
]
},
"state": "opened",
- "severity": "HIGH"
+ "severity": "HIGH",
+ "slaDueAt": null
},
{
"iid": "13",
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
index cab2165b5db..e4620590e62 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -93,23 +93,20 @@ exports[`Alert integration settings form default state should match the default
</gl-form-checkbox-stub>
</gl-form-group-stub>
- <div
- class="gl-display-flex gl-justify-content-end"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="js-no-auto-disable"
+ data-qa-selector="save_changes_button"
+ icon=""
+ size="medium"
+ type="submit"
+ variant="success"
>
- <gl-button-stub
- category="primary"
- class="js-no-auto-disable"
- data-qa-selector="save_changes_button"
- icon=""
- size="medium"
- type="submit"
- variant="success"
- >
-
- Save changes
- </gl-button-stub>
- </div>
+ Save changes
+
+ </gl-button-stub>
</form>
</div>
`;
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
index 3ad4c13382d..072e611b9a4 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
@@ -18,6 +18,7 @@ exports[`IncidentsSettingTabs should render the component 1`] = `
</h4>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-settings-toggle"
icon=""
@@ -57,6 +58,8 @@ exports[`IncidentsSettingTabs should render the component 1`] = `
/>
</gl-tab-stub>
<!---->
+
+ <!---->
</gl-tabs-stub>
</div>
</section>
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
index 78bb238fcb6..273356151fc 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
@@ -23,7 +23,6 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
<gl-form-group-stub
class="col-8 col-md-9 gl-p-0"
label="Webhook URL"
- label-class="label-bold"
label-for="url"
>
<gl-form-input-group-stub
@@ -42,24 +41,21 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
/>
</div>
- <div
- class="gl-display-flex gl-justify-content-end"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="gl-mt-3"
+ data-testid="webhook-reset-btn"
+ icon=""
+ role="button"
+ size="medium"
+ tabindex="0"
+ variant="default"
>
- <gl-button-stub
- category="primary"
- class="gl-mt-3"
- data-testid="webhook-reset-btn"
- icon=""
- role="button"
- size="medium"
- tabindex="0"
- variant="default"
- >
-
- Reset webhook URL
- </gl-button-stub>
- </div>
+ Reset webhook URL
+
+ </gl-button-stub>
<gl-modal-stub
modalclass=""
@@ -76,22 +72,19 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
</gl-modal-stub>
</gl-form-group-stub>
- <div
- class="gl-display-flex gl-justify-content-end"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="js-no-auto-disable"
+ icon=""
+ size="medium"
+ type="submit"
+ variant="success"
>
- <gl-button-stub
- category="primary"
- class="js-no-auto-disable"
- icon=""
- size="medium"
- type="submit"
- variant="success"
- >
-
- Save changes
- </gl-button-stub>
- </div>
+ Save changes
+
+ </gl-button-stub>
</form>
</div>
`;
diff --git a/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js b/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
index c56b9ed2a69..11b9eda2585 100644
--- a/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
+++ b/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
@@ -6,7 +6,12 @@ describe('IncidentsSettingTabs', () => {
let wrapper;
beforeEach(() => {
- wrapper = shallowMount(IncidentsSettingTabs);
+ wrapper = shallowMount(IncidentsSettingTabs, {
+ provide: {
+ service: {},
+ serviceLevelAgreementSettings: {},
+ },
+ });
});
afterEach(() => {
diff --git a/spec/frontend/integrations/edit/components/confirmation_modal_spec.js b/spec/frontend/integrations/edit/components/confirmation_modal_spec.js
new file mode 100644
index 00000000000..02f311f579f
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/confirmation_modal_spec.js
@@ -0,0 +1,51 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import { createStore } from '~/integrations/edit/store';
+
+import ConfirmationModal from '~/integrations/edit/components/confirmation_modal.vue';
+
+describe('ConfirmationModal', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(ConfirmationModal, {
+ store: createStore(),
+ });
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ const findGlModal = () => wrapper.find(GlModal);
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders GlModal with correct copy', () => {
+ expect(findGlModal().exists()).toBe(true);
+ expect(findGlModal().attributes('title')).toBe('Save settings?');
+ expect(findGlModal().text()).toContain(
+ 'Saving will update the default settings for all projects that are not using custom settings.',
+ );
+ expect(findGlModal().text()).toContain(
+ 'Projects using custom settings will not be impacted unless the project owner chooses to use instance-level defaults.',
+ );
+ });
+
+ it('emits `submit` event when `primary` event is emitted on GlModal', async () => {
+ expect(wrapper.emitted().submit).toBeUndefined();
+
+ findGlModal().vm.$emit('primary');
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted().submit).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index eeb5d21d62c..efcc727277a 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -4,6 +4,7 @@ import { createStore } from '~/integrations/edit/store';
import IntegrationForm from '~/integrations/edit/components/integration_form.vue';
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
import ActiveCheckbox from '~/integrations/edit/components/active_checkbox.vue';
+import ConfirmationModal from '~/integrations/edit/components/confirmation_modal.vue';
import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_fields.vue';
import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue';
import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
@@ -22,6 +23,7 @@ describe('IntegrationForm', () => {
stubs: {
OverrideDropdown,
ActiveCheckbox,
+ ConfirmationModal,
JiraTriggerFields,
TriggerFields,
},
@@ -40,6 +42,7 @@ describe('IntegrationForm', () => {
const findOverrideDropdown = () => wrapper.find(OverrideDropdown);
const findActiveCheckbox = () => wrapper.find(ActiveCheckbox);
+ const findConfirmationModal = () => wrapper.find(ConfirmationModal);
const findJiraTriggerFields = () => wrapper.find(JiraTriggerFields);
const findJiraIssuesFields = () => wrapper.find(JiraIssuesFields);
const findTriggerFields = () => wrapper.find(TriggerFields);
@@ -63,6 +66,26 @@ describe('IntegrationForm', () => {
});
});
+ describe('integrationLevel is instance', () => {
+ it('renders ConfirmationModal', () => {
+ createComponent({
+ integrationLevel: 'instance',
+ });
+
+ expect(findConfirmationModal().exists()).toBe(true);
+ });
+ });
+
+ describe('integrationLevel is not instance', () => {
+ it('does not render ConfirmationModal', () => {
+ createComponent({
+ integrationLevel: 'project',
+ });
+
+ expect(findConfirmationModal().exists()).toBe(false);
+ });
+ });
+
describe('type is "slack"', () => {
beforeEach(() => {
createComponent({ type: 'slack' });
diff --git a/spec/frontend/integrations/edit/mock_data.js b/spec/frontend/integrations/edit/mock_data.js
index 821972b7698..27ba0768331 100644
--- a/spec/frontend/integrations/edit/mock_data.js
+++ b/spec/frontend/integrations/edit/mock_data.js
@@ -2,6 +2,7 @@ export const mockIntegrationProps = {
id: 25,
initialActivated: true,
showActive: true,
+ editable: true,
triggerFieldsProps: {
initialTriggerCommit: false,
initialTriggerMergeRequest: false,
diff --git a/spec/frontend/invite_member/components/invite_member_modal_spec.js b/spec/frontend/invite_member/components/invite_member_modal_spec.js
new file mode 100644
index 00000000000..1d0adb3ab4c
--- /dev/null
+++ b/spec/frontend/invite_member/components/invite_member_modal_spec.js
@@ -0,0 +1,63 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
+import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
+import InviteMemberModal from '~/invite_member/components/invite_member_modal.vue';
+
+const memberPath = 'member_path';
+
+const createComponent = () => {
+ return shallowMount(InviteMemberModal, {
+ provide: {
+ membersPath: memberPath,
+ },
+ stubs: {
+ 'gl-emoji': '<img/>',
+ 'gl-modal': '<div><slot name="modal-title"></slot><slot></slot></div>',
+ },
+ });
+};
+
+describe('InviteMemberModal', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findLink = () => wrapper.find(GlLink);
+
+ describe('rendering the modal', () => {
+ it('renders the modal with the correct title', () => {
+ expect(wrapper.text()).toContain("Oops, this feature isn't ready yet");
+ });
+
+ describe('rendering the see who link', () => {
+ it('renders the correct link', () => {
+ expect(findLink().attributes('href')).toBe(memberPath);
+ });
+ });
+ });
+
+ describe('tracking', () => {
+ let trackingSpy;
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('sends a tracking event when the link is clicked', () => {
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
+
+ triggerEvent(findLink().element);
+
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_who_can_invite_link', {
+ label: 'invite_members_message',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/invite_member/components/invite_member_trigger_mock_data.js b/spec/frontend/invite_member/components/invite_member_trigger_mock_data.js
new file mode 100644
index 00000000000..9b34a8027e9
--- /dev/null
+++ b/spec/frontend/invite_member/components/invite_member_trigger_mock_data.js
@@ -0,0 +1,7 @@
+const triggerProvides = {
+ displayText: 'Invite member',
+ event: 'click_invite_members_version_b',
+ label: 'edit_assignee',
+};
+
+export default triggerProvides;
diff --git a/spec/frontend/invite_member/components/invite_member_trigger_spec.js b/spec/frontend/invite_member/components/invite_member_trigger_spec.js
new file mode 100644
index 00000000000..57b8918e3da
--- /dev/null
+++ b/spec/frontend/invite_member/components/invite_member_trigger_spec.js
@@ -0,0 +1,48 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink } from '@gitlab/ui';
+import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
+import InviteMemberTrigger from '~/invite_member/components/invite_member_trigger.vue';
+import triggerProvides from './invite_member_trigger_mock_data';
+
+const createComponent = () => {
+ return shallowMount(InviteMemberTrigger, { provide: triggerProvides });
+};
+
+describe('InviteMemberTrigger', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findLink = () => wrapper.find(GlLink);
+
+ describe('displayText', () => {
+ it('includes the correct displayText for the link', () => {
+ expect(findLink().text()).toBe(triggerProvides.displayText);
+ });
+ });
+
+ describe('tracking', () => {
+ let trackingSpy;
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('sends a tracking event when the link is clicked', () => {
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
+
+ triggerEvent(findLink().element);
+
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', triggerProvides.event, {
+ label: triggerProvides.label,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
new file mode 100644
index 00000000000..0be0fbbde2d
--- /dev/null
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -0,0 +1,115 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem, GlDatepicker, GlSprintf, GlLink } from '@gitlab/ui';
+import Api from '~/api';
+import InviteMembersModal from '~/invite_members/components/invite_members_modal.vue';
+
+const groupId = '1';
+const groupName = 'testgroup';
+const accessLevels = { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 };
+const defaultAccessLevel = '10';
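+// '10' corresponds to Guest in accessLevels above, so the dropdown defaults to 'Guest'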
+const helpLink = 'https://example.com';
+
+const createComponent = () => {
+ return shallowMount(InviteMembersModal, {
+ propsData: {
+ groupId,
+ groupName,
+ accessLevels,
+ defaultAccessLevel,
+ helpLink,
+ },
+ stubs: {
+ GlSprintf,
+ 'gl-modal': '<div><slot name="modal-footer"></slot><slot></slot></div>',
+ },
+ });
+};
+
+describe('InviteMembersModal', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findDropdown = () => wrapper.find(GlDropdown);
+ const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findDatepicker = () => wrapper.find(GlDatepicker);
+ const findLink = () => wrapper.find(GlLink);
+ const findCancelButton = () => wrapper.find({ ref: 'cancelButton' });
+ const findInviteButton = () => wrapper.find({ ref: 'inviteButton' });
+
+ describe('rendering the modal', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('renders the modal with the correct title', () => {
+ expect(wrapper.attributes('title')).toBe('Invite team members');
+ });
+
+ it('renders the Cancel button text correctly', () => {
+ expect(findCancelButton().text()).toBe('Cancel');
+ });
+
+ it('renders the Invite button text correctly', () => {
+ expect(findInviteButton().text()).toBe('Invite');
+ });
+
+ describe('rendering the access levels dropdown', () => {
+ it('sets the default dropdown text to the default access level name', () => {
+ expect(findDropdown().attributes('text')).toBe('Guest');
+ });
+
+ it('renders dropdown items for each accessLevel', () => {
+ expect(findDropdownItems()).toHaveLength(5);
+ });
+ });
+
+ describe('rendering the help link', () => {
+ it('renders the correct link', () => {
+ expect(findLink().attributes('href')).toBe(helpLink);
+ });
+ });
+
+ describe('rendering the access expiration date field', () => {
+ it('renders the datepicker', () => {
+ expect(findDatepicker()).toExist();
+ });
+ });
+ });
+
+ describe('submitting the invite form', () => {
+ const postData = {
+ user_id: '1',
+ access_level: '10',
+ expires_at: new Date(),
+ format: 'json',
+ };
+
+ beforeEach(() => {
+ wrapper = createComponent();
+
+ jest.spyOn(Api, 'inviteGroupMember').mockResolvedValue({ data: postData });
+ wrapper.vm.$toast = { show: jest.fn() };
+
+ wrapper.vm.submitForm(postData);
+ });
+
+ it('calls Api inviteGroupMember with the correct params', () => {
+ expect(Api.inviteGroupMember).toHaveBeenCalledWith(groupId, postData);
+ });
+
+ describe('when the invite was sent successfully', () => {
+ const toastMessageSuccessful = 'Users were succesfully added';
+
+ it('displays the successful toastMessage', () => {
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(
+ toastMessageSuccessful,
+ wrapper.vm.toastOptions,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/invite_members/components/invite_members_trigger_spec.js b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
new file mode 100644
index 00000000000..450d37a9748
--- /dev/null
+++ b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
@@ -0,0 +1,58 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon, GlLink } from '@gitlab/ui';
+import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
+
+const displayText = 'Invite team members';
+const icon = 'plus';
+
+const createComponent = (props = {}) => {
+ return shallowMount(InviteMembersTrigger, {
+ propsData: {
+ displayText,
+ ...props,
+ },
+ });
+};
+
+describe('InviteMembersTrigger', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('displayText', () => {
+ const findLink = () => wrapper.find(GlLink);
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('includes the correct displayText for the link', () => {
+ expect(findLink().text()).toBe(displayText);
+ });
+ });
+
+ describe('icon', () => {
+ const findIcon = () => wrapper.find(GlIcon);
+
+ it('includes the correct icon when an icon is sent', () => {
+ wrapper = createComponent({ icon });
+
+ expect(findIcon().attributes('name')).toBe(icon);
+ });
+
+ it('does not include an icon when icon is not sent', () => {
+ wrapper = createComponent();
+
+ expect(findIcon().exists()).toBe(false);
+ });
+
+ it('does not include an icon when empty string is sent', () => {
+ wrapper = createComponent({ icon: '' });
+
+ expect(findIcon().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
index bfbe4ec8e70..17a195df494 100644
--- a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
+++ b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
@@ -48,7 +48,10 @@ describe('AddIssuableForm', () => {
const input = findFormInput(wrapper);
if (input) input.blur();
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
describe('with data', () => {
diff --git a/spec/frontend/issuable/related_issues/components/issue_token_spec.js b/spec/frontend/issuable/related_issues/components/issue_token_spec.js
index 553721fa783..f2cb9042ba6 100644
--- a/spec/frontend/issuable/related_issues/components/issue_token_spec.js
+++ b/spec/frontend/issuable/related_issues/components/issue_token_spec.js
@@ -1,241 +1,146 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
import { PathIdSeparator } from '~/related_issues/constants';
-import issueToken from '~/related_issues/components/issue_token.vue';
+import IssueToken from '~/related_issues/components/issue_token.vue';
describe('IssueToken', () => {
const idKey = 200;
const displayReference = 'foo/bar#123';
- const title = 'some title';
- const pathIdSeparator = PathIdSeparator.Issue;
const eventNamespace = 'pendingIssuable';
- let IssueToken;
- let vm;
+ const path = '/foo/bar/issues/123';
+ const pathIdSeparator = PathIdSeparator.Issue;
+ const title = 'some title';
- beforeEach(() => {
- IssueToken = Vue.extend(issueToken);
- });
+ let wrapper;
+
+ const defaultProps = {
+ idKey,
+ displayReference,
+ pathIdSeparator,
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(IssueToken, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
afterEach(() => {
- if (vm) {
- vm.$destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
}
});
+ const findLink = () => wrapper.find({ ref: 'link' });
+ const findReference = () => wrapper.find({ ref: 'reference' });
+ const findReferenceIcon = () => wrapper.find('[data-testid="referenceIcon"]');
+ const findRemoveBtn = () => wrapper.find('[data-testid="removeBtn"]');
+ const findTitle = () => wrapper.find({ ref: 'title' });
+
describe('with reference supplied', () => {
beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- },
- }).$mount();
+ createComponent();
});
it('shows reference', () => {
- expect(vm.$el.textContent.trim()).toEqual(displayReference);
+ expect(wrapper.text()).toContain(displayReference);
});
it('does not link without path specified', () => {
- expect(vm.$refs.link.tagName.toLowerCase()).toEqual('span');
- expect(vm.$refs.link.getAttribute('href')).toBeNull();
+ expect(findLink().element.tagName).toBe('SPAN');
+ expect(findLink().attributes('href')).toBeUndefined();
});
});
describe('with reference and title supplied', () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- title,
- },
- }).$mount();
- });
-
it('shows reference and title', () => {
- expect(vm.$refs.reference.textContent.trim()).toEqual(displayReference);
- expect(vm.$refs.title.textContent.trim()).toEqual(title);
- });
- });
-
- describe('with path supplied', () => {
- const path = '/foo/bar/issues/123';
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- title,
- path,
- },
- }).$mount();
- });
+ createComponent({
+ title,
+ });
- it('links reference and title', () => {
- expect(vm.$refs.link.getAttribute('href')).toEqual(path);
+ expect(findReference().text()).toBe(displayReference);
+ expect(findTitle().text()).toBe(title);
});
});
- describe('with state supplied', () => {
- describe("`state: 'opened'`", () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- state: 'opened',
- },
- }).$mount();
+ describe('with path and title supplied', () => {
+ it('links reference and title', () => {
+ createComponent({
+ path,
+ title,
});
- it('shows green circle icon', () => {
- expect(vm.$el.querySelector('.issue-token-state-icon-open.fa.fa-circle-o')).toBeDefined();
- });
- });
-
- describe("`state: 'reopened'`", () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- state: 'reopened',
- },
- }).$mount();
- });
-
- it('shows green circle icon', () => {
- expect(vm.$el.querySelector('.issue-token-state-icon-open.fa.fa-circle-o')).toBeDefined();
- });
+ expect(findLink().attributes('href')).toBe(path);
});
+ });
- describe("`state: 'closed'`", () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- state: 'closed',
- },
- }).$mount();
+ describe('with state supplied', () => {
+ it.each`
+ state | icon | cssClass
+ ${'opened'} | ${'issue-open-m'} | ${'issue-token-state-icon-open'}
+ ${'reopened'} | ${'issue-open-m'} | ${'issue-token-state-icon-open'}
+ ${'closed'} | ${'issue-close'} | ${'issue-token-state-icon-closed'}
+ `('shows "$icon" icon when "$state"', ({ state, icon, cssClass }) => {
+ createComponent({
+ path,
+ state,
});
- it('shows red minus icon', () => {
- expect(vm.$el.querySelector('.issue-token-state-icon-closed.fa.fa-minus')).toBeDefined();
- });
+ expect(findReferenceIcon().props('name')).toBe(icon);
+ expect(findReferenceIcon().classes()).toContain(cssClass);
});
});
describe('with reference, title, state', () => {
const state = 'opened';
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- title,
- state,
- },
- }).$mount();
- });
it('shows reference, title, and state', () => {
- const stateIcon = vm.$refs.reference.querySelector('svg');
+ createComponent({
+ title,
+ state,
+ });
- expect(stateIcon.getAttribute('aria-label')).toEqual(state);
- expect(vm.$refs.reference.textContent.trim()).toEqual(displayReference);
- expect(vm.$refs.title.textContent.trim()).toEqual(title);
+ expect(findReferenceIcon().attributes('aria-label')).toBe(state);
+ expect(findReference().text()).toBe(displayReference);
+ expect(findTitle().text()).toBe(title);
});
});
describe('with canRemove', () => {
describe('`canRemove: false` (default)', () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- },
- }).$mount();
- });
-
it('does not have remove button', () => {
- expect(vm.$el.querySelector('.issue-token-remove-button')).toBeNull();
+ createComponent();
+
+ expect(findRemoveBtn().exists()).toBe(false);
});
});
describe('`canRemove: true`', () => {
beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- canRemove: true,
- },
- }).$mount();
+ createComponent({
+ eventNamespace,
+ canRemove: true,
+ });
});
it('has remove button', () => {
- expect(vm.$el.querySelector('.issue-token-remove-button')).toBeDefined();
+ expect(findRemoveBtn().exists()).toBe(true);
});
- });
- });
-
- describe('methods', () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- },
- }).$mount();
- });
- it('when getting checked', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- vm.onRemoveRequest();
+ it('emits event when clicked', () => {
+ findRemoveBtn().trigger('click');
- expect(vm.$emit).toHaveBeenCalledWith('pendingIssuableRemoveRequest', vm.idKey);
- });
- });
+ const emitted = wrapper.emitted(`${eventNamespace}RemoveRequest`);
- describe('tooltip', () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- canRemove: true,
- },
- }).$mount();
- });
-
- it('should not be escaped', () => {
- const { originalTitle } = vm.$refs.removeButton.dataset;
+ expect(emitted).toHaveLength(1);
+ expect(emitted[0]).toEqual([idKey]);
+ });
- expect(originalTitle).toEqual(`Remove ${displayReference}`);
+ it('tooltip should not be escaped', () => {
+ expect(findRemoveBtn().attributes('data-original-title')).toBe(
+ `Remove ${displayReference}`,
+ );
+ });
});
});
});
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
index 0f88e4d71fe..b758b85beef 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
@@ -18,7 +18,10 @@ describe('RelatedIssuesBlock', () => {
const findIssueCountBadgeAddButton = () => wrapper.find(GlButton);
afterEach(() => {
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
describe('with defaults', () => {
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
index 6cf0b9d21ea..39bc244297b 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
@@ -14,7 +14,10 @@ describe('RelatedIssuesList', () => {
let wrapper;
afterEach(() => {
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
describe('with defaults', () => {
diff --git a/spec/frontend/issuable_create/components/issuable_form_spec.js b/spec/frontend/issuable_create/components/issuable_form_spec.js
index e2c6b4d9521..e489d1dae3e 100644
--- a/spec/frontend/issuable_create/components/issuable_form_spec.js
+++ b/spec/frontend/issuable_create/components/issuable_form_spec.js
@@ -79,6 +79,7 @@ describe('IssuableForm', () => {
markdownDocsPath: wrapper.vm.descriptionHelpPath,
addSpacingClasses: false,
showSuggestPopover: true,
+ textareaValue: '',
});
expect(descriptionFieldEl.find('textarea').exists()).toBe(true);
expect(descriptionFieldEl.find('textarea').attributes('placeholder')).toBe(
diff --git a/spec/frontend/issuable_list/mock_data.js b/spec/frontend/issuable_list/mock_data.js
index f6f914a595d..8eab2ca6f94 100644
--- a/spec/frontend/issuable_list/mock_data.js
+++ b/spec/frontend/issuable_list/mock_data.js
@@ -30,13 +30,23 @@ export const mockScopedLabel = {
export const mockLabels = [mockRegularLabel, mockScopedLabel];
+export const mockCurrentUserTodo = {
+ id: 'gid://gitlab/Todo/489',
+ state: 'done',
+};
+
export const mockIssuable = {
iid: '30',
title: 'Dismiss Cipher with no integrity',
- description: null,
+ titleHtml: 'Dismiss Cipher with no integrity',
+ description: 'fortitudinis _fomentis_ dolor mitigari solet.',
+ descriptionHtml: 'fortitudinis <i>fomentis</i> dolor mitigari solet.',
+ state: 'opened',
createdAt: '2020-06-29T13:52:56Z',
updatedAt: '2020-09-10T11:41:13Z',
webUrl: 'http://0.0.0.0:3000/gitlab-org/gitlab-shell/-/issues/30',
+ blocked: false,
+ confidential: false,
author: mockAuthor,
labels: {
nodes: mockLabels,
diff --git a/spec/frontend/issuable_show/components/issuable_body_spec.js b/spec/frontend/issuable_show/components/issuable_body_spec.js
new file mode 100644
index 00000000000..0e4475e8103
--- /dev/null
+++ b/spec/frontend/issuable_show/components/issuable_body_spec.js
@@ -0,0 +1,140 @@
+import { shallowMount } from '@vue/test-utils';
+
+import IssuableBody from '~/issuable_show/components/issuable_body.vue';
+
+import IssuableTitle from '~/issuable_show/components/issuable_title.vue';
+import IssuableDescription from '~/issuable_show/components/issuable_description.vue';
+import IssuableEditForm from '~/issuable_show/components/issuable_edit_form.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+import { mockIssuableShowProps, mockIssuable } from '../mock_data';
+
+jest.mock('~/autosave');
+
+const issuableBodyProps = {
+ ...mockIssuableShowProps,
+ issuable: mockIssuable,
+};
+
+const createComponent = (propsData = issuableBodyProps) =>
+ shallowMount(IssuableBody, {
+ propsData,
+ stubs: {
+ IssuableTitle,
+ IssuableDescription,
+ IssuableEditForm,
+ TimeAgoTooltip,
+ },
+ slots: {
+ 'status-badge': 'Open',
+ 'edit-form-actions': `
+ <button class="js-save">Save changes</button>
+ <button class="js-cancel">Cancel</button>
+ `,
+ },
+ });
+
+describe('IssuableBody', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ describe('isUpdated', () => {
+ it.each`
+ updatedAt | returnValue
+ ${mockIssuable.updatedAt} | ${true}
+ ${null} | ${false}
+ ${''} | ${false}
+ `(
+ 'returns $returnValue when value of `updatedAt` is `$updatedAt`',
+ async ({ updatedAt, returnValue }) => {
+ wrapper.setProps({
+ issuable: {
+ ...mockIssuable,
+ updatedAt,
+ },
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.isUpdated).toBe(returnValue);
+ },
+ );
+ });
+
+ describe('updatedBy', () => {
+ it('returns value of `issuable.updatedBy`', () => {
+ expect(wrapper.vm.updatedBy).toBe(mockIssuable.updatedBy);
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders issuable-title component', () => {
+ const titleEl = wrapper.find(IssuableTitle);
+
+ expect(titleEl.exists()).toBe(true);
+ expect(titleEl.props()).toMatchObject({
+ issuable: issuableBodyProps.issuable,
+ statusBadgeClass: issuableBodyProps.statusBadgeClass,
+ statusIcon: issuableBodyProps.statusIcon,
+ enableEdit: issuableBodyProps.enableEdit,
+ });
+ });
+
+ it('renders issuable-description component', () => {
+ const descriptionEl = wrapper.find(IssuableDescription);
+
+ expect(descriptionEl.exists()).toBe(true);
+ expect(descriptionEl.props('issuable')).toEqual(issuableBodyProps.issuable);
+ });
+
+ it('renders issuable edit info', () => {
+ const editedEl = wrapper.find('small');
+ const sanitizedText = editedEl
+ .text()
+ .replace(/\n/g, ' ')
+ .replace(/\s+/g, ' ');
+
+ expect(sanitizedText).toContain('Edited');
+ expect(sanitizedText).toContain('ago');
+ expect(sanitizedText).toContain(`by ${mockIssuable.updatedBy.name}`);
+ });
+
+ it('renders issuable-edit-form when `editFormVisible` prop is true', async () => {
+ wrapper.setProps({
+ editFormVisible: true,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ const editFormEl = wrapper.find(IssuableEditForm);
+ expect(editFormEl.exists()).toBe(true);
+ expect(editFormEl.props()).toMatchObject({
+ issuable: issuableBodyProps.issuable,
+ enableAutocomplete: issuableBodyProps.enableAutocomplete,
+ descriptionPreviewPath: issuableBodyProps.descriptionPreviewPath,
+ descriptionHelpPath: issuableBodyProps.descriptionHelpPath,
+ });
+ expect(editFormEl.find('button.js-save').exists()).toBe(true);
+ expect(editFormEl.find('button.js-cancel').exists()).toBe(true);
+ });
+
+ describe('events', () => {
+ it('component emits `edit-issuable` event bubbled via issuable-title', () => {
+ const issuableTitle = wrapper.find(IssuableTitle);
+
+ issuableTitle.vm.$emit('edit-issuable');
+
+ expect(wrapper.emitted('edit-issuable')).toBeTruthy();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issuable_show/components/issuable_description_spec.js b/spec/frontend/issuable_show/components/issuable_description_spec.js
new file mode 100644
index 00000000000..1dd8348b098
--- /dev/null
+++ b/spec/frontend/issuable_show/components/issuable_description_spec.js
@@ -0,0 +1,41 @@
+import $ from 'jquery';
+import { shallowMount } from '@vue/test-utils';
+
+import IssuableDescription from '~/issuable_show/components/issuable_description.vue';
+
+import { mockIssuable } from '../mock_data';
+
+const createComponent = (issuable = mockIssuable) =>
+ shallowMount(IssuableDescription, {
+ propsData: { issuable },
+ });
+
+describe('IssuableDescription', () => {
+ let renderGFMSpy;
+ let wrapper;
+
+ beforeEach(() => {
+ renderGFMSpy = jest.spyOn($.fn, 'renderGFM');
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('mounted', () => {
+ it('calls `renderGFM`', () => {
+ expect(renderGFMSpy).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('methods', () => {
+ describe('renderGFM', () => {
+ it('calls `renderGFM` on container element', () => {
+ wrapper.vm.renderGFM();
+
+ expect(renderGFMSpy).toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issuable_show/components/issuable_edit_form_spec.js b/spec/frontend/issuable_show/components/issuable_edit_form_spec.js
new file mode 100644
index 00000000000..352e66cdffe
--- /dev/null
+++ b/spec/frontend/issuable_show/components/issuable_edit_form_spec.js
@@ -0,0 +1,122 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlFormInput } from '@gitlab/ui';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+
+import IssuableEditForm from '~/issuable_show/components/issuable_edit_form.vue';
+import IssuableEventHub from '~/issuable_show/event_hub';
+
+import { mockIssuableShowProps, mockIssuable } from '../mock_data';
+
+const issuableEditFormProps = {
+ issuable: mockIssuable,
+ ...mockIssuableShowProps,
+};
+
+const createComponent = ({ propsData = issuableEditFormProps } = {}) =>
+ shallowMount(IssuableEditForm, {
+ propsData,
+ stubs: {
+ MarkdownField,
+ },
+ slots: {
+ 'edit-form-actions': `
+ <button class="js-save">Save changes</button>
+ <button class="js-cancel">Cancel</button>
+ `,
+ },
+ });
+
+describe('IssuableEditForm', () => {
+ let wrapper;
+ const assertEvent = eventSpy => {
+ expect(eventSpy).toHaveBeenNthCalledWith(1, 'update.issuable', expect.any(Function));
+ expect(eventSpy).toHaveBeenNthCalledWith(2, 'close.form', expect.any(Function));
+ };
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('created', () => {
+ it('binds `update.issuable` and `close.form` event listeners', () => {
+ const eventOnSpy = jest.spyOn(IssuableEventHub, '$on');
+ const wrapperTemp = createComponent();
+
+ assertEvent(eventOnSpy);
+
+ wrapperTemp.destroy();
+ });
+ });
+
+ describe('beforeDestroy', () => {
+ it('unbinds `update.issuable` and `close.form` event listeners', () => {
+ const wrapperTemp = createComponent();
+ const eventOffSpy = jest.spyOn(IssuableEventHub, '$off');
+
+ wrapperTemp.destroy();
+
+ assertEvent(eventOffSpy);
+ });
+ });
+
+ describe('methods', () => {
+ describe('initAutosave', () => {
+ it('initializes `autosaveTitle` and `autosaveDescription` props', () => {
+ expect(wrapper.vm.autosaveTitle).toBeDefined();
+ expect(wrapper.vm.autosaveDescription).toBeDefined();
+ });
+ });
+
+ describe('resetAutosave', () => {
+ it('calls `reset` on `autosaveTitle` and `autosaveDescription` props', () => {
+ jest.spyOn(wrapper.vm.autosaveTitle, 'reset').mockImplementation(jest.fn);
+ jest.spyOn(wrapper.vm.autosaveDescription, 'reset').mockImplementation(jest.fn);
+
+ wrapper.vm.resetAutosave();
+
+ expect(wrapper.vm.autosaveTitle.reset).toHaveBeenCalled();
+ expect(wrapper.vm.autosaveDescription.reset).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders title input field', () => {
+ const titleInputEl = wrapper.find('[data-testid="title"]');
+
+ expect(titleInputEl.exists()).toBe(true);
+ expect(titleInputEl.find(GlFormInput).attributes()).toMatchObject({
+ 'aria-label': 'Title',
+ placeholder: 'Title',
+ });
+ });
+
+ it('renders description textarea field', () => {
+ const descriptionEl = wrapper.find('[data-testid="description"]');
+
+ expect(descriptionEl.exists()).toBe(true);
+ expect(descriptionEl.find(MarkdownField).props()).toMatchObject({
+ markdownPreviewPath: issuableEditFormProps.descriptionPreviewPath,
+ markdownDocsPath: issuableEditFormProps.descriptionHelpPath,
+ enableAutocomplete: issuableEditFormProps.enableAutocomplete,
+ textareaValue: mockIssuable.description,
+ });
+ expect(descriptionEl.find('textarea').attributes()).toMatchObject({
+ 'data-supports-quick-actions': 'true',
+ 'aria-label': 'Description',
+ placeholder: 'Write a comment or drag your files here…',
+ });
+ });
+
+ it('renders form actions', () => {
+ const actionsEl = wrapper.find('[data-testid="actions"]');
+
+ expect(actionsEl.find('button.js-save').exists()).toBe(true);
+ expect(actionsEl.find('button.js-cancel').exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/issuable_show/components/issuable_header_spec.js b/spec/frontend/issuable_show/components/issuable_header_spec.js
new file mode 100644
index 00000000000..fad8ec8a891
--- /dev/null
+++ b/spec/frontend/issuable_show/components/issuable_header_spec.js
@@ -0,0 +1,132 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon, GlAvatarLabeled } from '@gitlab/ui';
+
+import IssuableHeader from '~/issuable_show/components/issuable_header.vue';
+
+import { mockIssuableShowProps, mockIssuable } from '../mock_data';
+
+const issuableHeaderProps = {
+ ...mockIssuable,
+ ...mockIssuableShowProps,
+};
+
+const createComponent = (propsData = issuableHeaderProps) =>
+ shallowMount(IssuableHeader, {
+ propsData,
+ slots: {
+ 'status-badge': 'Open',
+ 'header-actions': `
+ <button class="js-close">Close issuable</button>
+ <a class="js-new" href="/gitlab-org/gitlab-shell/-/issues/new">New issuable</a>
+ `,
+ },
+ });
+
+describe('IssuableHeader', () => {
+ let wrapper;
+ const findByTestId = testId => wrapper.find(`[data-testid="${testId}"]`);
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ describe('authorId', () => {
+ it('returns numeric ID from GraphQL ID of `author` prop', () => {
+ expect(wrapper.vm.authorId).toBe(1);
+ });
+ });
+ });
+
+ describe('handleRightSidebarToggleClick', () => {
+ beforeEach(() => {
+ setFixtures('<button class="js-toggle-right-sidebar-button">Collapse sidebar</button>');
+ });
+
+ it('dispatches `click` event on sidebar toggle button', () => {
+ wrapper.vm.toggleSidebarButtonEl = document.querySelector('.js-toggle-right-sidebar-button');
+ jest.spyOn(wrapper.vm.toggleSidebarButtonEl, 'dispatchEvent').mockImplementation(jest.fn);
+
+ wrapper.vm.handleRightSidebarToggleClick();
+
+ expect(wrapper.vm.toggleSidebarButtonEl.dispatchEvent).toHaveBeenCalledWith(
+ expect.objectContaining({
+ type: 'click',
+ }),
+ );
+ });
+ });
+
+ describe('template', () => {
+ it('renders issuable status icon and text', () => {
+ const statusBoxEl = findByTestId('status');
+
+ expect(statusBoxEl.exists()).toBe(true);
+ expect(statusBoxEl.find(GlIcon).props('name')).toBe(mockIssuableShowProps.statusIcon);
+ expect(statusBoxEl.text()).toContain('Open');
+ });
+
+ it('renders blocked icon when issuable is blocked', async () => {
+ wrapper.setProps({
+ blocked: true,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ const blockedEl = findByTestId('blocked');
+
+ expect(blockedEl.exists()).toBe(true);
+ expect(blockedEl.find(GlIcon).props('name')).toBe('lock');
+ });
+
+ it('renders confidential icon when issuable is confidential', async () => {
+ wrapper.setProps({
+ confidential: true,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ const confidentialEl = findByTestId('confidential');
+
+ expect(confidentialEl.exists()).toBe(true);
+ expect(confidentialEl.find(GlIcon).props('name')).toBe('eye-slash');
+ });
+
+ it('renders issuable author avatar', () => {
+ const { username, name, webUrl, avatarUrl } = mockIssuable.author;
+ const avatarElAttrs = {
+ 'data-user-id': '1',
+ 'data-username': username,
+ 'data-name': name,
+ href: webUrl,
+ target: '_blank',
+ };
+ const avatarEl = findByTestId('avatar');
+ expect(avatarEl.exists()).toBe(true);
+ expect(avatarEl.attributes()).toMatchObject(avatarElAttrs);
+ expect(avatarEl.find(GlAvatarLabeled).attributes()).toMatchObject({
+ size: '24',
+ src: avatarUrl,
+ label: name,
+ });
+ });
+
+ it('renders sidebar toggle button', () => {
+ const toggleButtonEl = findByTestId('sidebar-toggle');
+
+ expect(toggleButtonEl.exists()).toBe(true);
+ expect(toggleButtonEl.props('icon')).toBe('chevron-double-lg-left');
+ });
+
+ it('renders header actions', () => {
+ const actionsEl = findByTestId('header-actions');
+
+ expect(actionsEl.find('button.js-close').exists()).toBe(true);
+ expect(actionsEl.find('a.js-new').exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/issuable_show/components/issuable_show_root_spec.js b/spec/frontend/issuable_show/components/issuable_show_root_spec.js
new file mode 100644
index 00000000000..112e4ccd340
--- /dev/null
+++ b/spec/frontend/issuable_show/components/issuable_show_root_spec.js
@@ -0,0 +1,123 @@
+import { shallowMount } from '@vue/test-utils';
+
+import IssuableShowRoot from '~/issuable_show/components/issuable_show_root.vue';
+
+import IssuableHeader from '~/issuable_show/components/issuable_header.vue';
+import IssuableBody from '~/issuable_show/components/issuable_body.vue';
+import IssuableSidebar from '~/issuable_sidebar/components/issuable_sidebar_root.vue';
+
+import { mockIssuableShowProps, mockIssuable } from '../mock_data';
+
+const createComponent = (propsData = mockIssuableShowProps) =>
+ shallowMount(IssuableShowRoot, {
+ propsData,
+ stubs: {
+ IssuableHeader,
+ IssuableBody,
+ IssuableSidebar,
+ },
+ slots: {
+ 'status-badge': 'Open',
+ 'header-actions': `
+ <button class="js-close">Close issuable</button>
+ <a class="js-new" href="/gitlab-org/gitlab-shell/-/issues/new">New issuable</a>
+ `,
+ 'edit-form-actions': `
+ <button class="js-save">Save changes</button>
+ <button class="js-cancel">Cancel</button>
+ `,
+ 'right-sidebar-items': `
+ <div class="js-todo">
+ To Do <button class="js-add-todo">Add a To Do</button>
+ </div>
+ `,
+ },
+ });
+
+describe('IssuableShowRoot', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ const {
+ statusBadgeClass,
+ statusIcon,
+ enableEdit,
+ enableAutocomplete,
+ editFormVisible,
+ descriptionPreviewPath,
+ descriptionHelpPath,
+ } = mockIssuableShowProps;
+ const { blocked, confidential, createdAt, author } = mockIssuable;
+
+ it('renders component container element with class `issuable-show-container`', () => {
+ expect(wrapper.classes()).toContain('issuable-show-container');
+ });
+
+ it('renders issuable-header component', () => {
+ const issuableHeader = wrapper.find(IssuableHeader);
+
+ expect(issuableHeader.exists()).toBe(true);
+ expect(issuableHeader.props()).toMatchObject({
+ statusBadgeClass,
+ statusIcon,
+ blocked,
+ confidential,
+ createdAt,
+ author,
+ });
+ expect(issuableHeader.find('.issuable-status-box').text()).toContain('Open');
+ expect(issuableHeader.find('.detail-page-header-actions button.js-close').exists()).toBe(
+ true,
+ );
+ expect(issuableHeader.find('.detail-page-header-actions a.js-new').exists()).toBe(true);
+ });
+
+ it('renders issuable-body component', () => {
+ const issuableBody = wrapper.find(IssuableBody);
+
+ expect(issuableBody.exists()).toBe(true);
+ expect(issuableBody.props()).toMatchObject({
+ issuable: mockIssuable,
+ statusBadgeClass,
+ statusIcon,
+ enableEdit,
+ enableAutocomplete,
+ editFormVisible,
+ descriptionPreviewPath,
+ descriptionHelpPath,
+ });
+ });
+
+ it('renders issuable-sidebar component', () => {
+ const issuableSidebar = wrapper.find(IssuableSidebar);
+
+ expect(issuableSidebar.exists()).toBe(true);
+ });
+
+ describe('events', () => {
+ it('component emits `edit-issuable` event bubbled via issuable-body', () => {
+ const issuableBody = wrapper.find(IssuableBody);
+
+ issuableBody.vm.$emit('edit-issuable');
+
+ expect(wrapper.emitted('edit-issuable')).toBeTruthy();
+ });
+
+ it('component emits `sidebar-toggle` event bubbled via issuable-sidebar', () => {
+ const issuableSidebar = wrapper.find(IssuableSidebar);
+
+ issuableSidebar.vm.$emit('sidebar-toggle', true);
+
+ expect(wrapper.emitted('sidebar-toggle')).toBeTruthy();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/issuable_show/components/issuable_title_spec.js b/spec/frontend/issuable_show/components/issuable_title_spec.js
new file mode 100644
index 00000000000..e8621c763b3
--- /dev/null
+++ b/spec/frontend/issuable_show/components/issuable_title_spec.js
@@ -0,0 +1,100 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon, GlButton, GlIntersectionObserver } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+
+import IssuableTitle from '~/issuable_show/components/issuable_title.vue';
+
+import { mockIssuableShowProps, mockIssuable } from '../mock_data';
+
+const issuableTitleProps = {
+ issuable: mockIssuable,
+ ...mockIssuableShowProps,
+};
+
+const createComponent = (propsData = issuableTitleProps) =>
+ shallowMount(IssuableTitle, {
+ propsData,
+ stubs: {
+ transition: true,
+ },
+ slots: {
+ 'status-badge': 'Open',
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+
+describe('IssuableTitle', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('methods', () => {
+ describe('handleTitleAppear', () => {
+ it('sets value of `stickyTitleVisible` prop to false', () => {
+ wrapper.find(GlIntersectionObserver).vm.$emit('appear');
+
+ expect(wrapper.vm.stickyTitleVisible).toBe(false);
+ });
+ });
+
+ describe('handleTitleDisappear', () => {
+ it('sets value of `stickyTitleVisible` prop to true', () => {
+ wrapper.find(GlIntersectionObserver).vm.$emit('disappear');
+
+ expect(wrapper.vm.stickyTitleVisible).toBe(true);
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders issuable title', async () => {
+ const wrapperWithTitle = createComponent({
+ ...mockIssuableShowProps,
+ issuable: {
+ ...mockIssuable,
+ titleHtml: '<b>Sample</b> title',
+ },
+ });
+
+ await wrapperWithTitle.vm.$nextTick();
+ const titleEl = wrapperWithTitle.find('h2');
+
+ expect(titleEl.exists()).toBe(true);
+ expect(titleEl.html()).toBe('<h2 dir="auto" class="title qa-title"><b>Sample</b> title</h2>');
+
+ wrapperWithTitle.destroy();
+ });
+
+ it('renders edit button', () => {
+ const editButtonEl = wrapper.find(GlButton);
+ const tooltip = getBinding(editButtonEl.element, 'gl-tooltip');
+
+ expect(editButtonEl.exists()).toBe(true);
+ expect(editButtonEl.props('icon')).toBe('pencil');
+ expect(editButtonEl.attributes('title')).toBe('Edit title and description');
+ expect(tooltip).toBeDefined();
+ });
+
+ it('renders sticky header when `stickyTitleVisible` prop is true', async () => {
+ wrapper.setData({
+ stickyTitleVisible: true,
+ });
+
+ await wrapper.vm.$nextTick();
+ const stickyHeaderEl = wrapper.find('[data-testid="header"]');
+
+ expect(stickyHeaderEl.exists()).toBe(true);
+ expect(stickyHeaderEl.find(GlIcon).props('name')).toBe(issuableTitleProps.statusIcon);
+ expect(stickyHeaderEl.text()).toContain('Open');
+ expect(stickyHeaderEl.text()).toContain(issuableTitleProps.issuable.title);
+ });
+ });
+});
diff --git a/spec/frontend/issuable_show/mock_data.js b/spec/frontend/issuable_show/mock_data.js
new file mode 100644
index 00000000000..14e5febdc6b
--- /dev/null
+++ b/spec/frontend/issuable_show/mock_data.js
@@ -0,0 +1,34 @@
+import { mockIssuable as issuable } from '../issuable_list/mock_data';
+
+export const mockIssuable = {
+ ...issuable,
+ id: 'gid://gitlab/Issue/30',
+ title: 'Sample title',
+ titleHtml: 'Sample title',
+ description: '# Summary',
+ descriptionHtml:
+ '<h1 data-sourcepos="1:1-1:25" dir="auto">&#x000A;<a id="user-content-magnoque-it-lurida-deus" class="anchor" href="#magnoque-it-lurida-deus" aria-hidden="true"></a>Summary</h1>',
+ state: 'opened',
+ blocked: false,
+ confidential: false,
+ updatedBy: issuable.author,
+ currentUserTodos: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Todo/489',
+ state: 'done',
+ },
+ ],
+ },
+};
+
+export const mockIssuableShowProps = {
+ issuable: mockIssuable,
+ descriptionHelpPath: '/help/user/markdown',
+ descriptionPreviewPath: '/gitlab-org/gitlab-shell/preview_markdown',
+ editFormVisible: false,
+ enableAutocomplete: true,
+ enableEdit: true,
+ statusBadgeClass: 'status-box-open',
+ statusIcon: 'issue-open-m',
+};
diff --git a/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js b/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js
new file mode 100644
index 00000000000..7686dad4644
--- /dev/null
+++ b/spec/frontend/issuable_sidebar/components/issuable_sidebar_root_spec.js
@@ -0,0 +1,199 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
+import Cookies from 'js-cookie';
+
+import IssuableSidebarRoot from '~/issuable_sidebar/components/issuable_sidebar_root.vue';
+
+const createComponent = (expanded = true) =>
+ shallowMount(IssuableSidebarRoot, {
+ propsData: {
+ expanded,
+ },
+ slots: {
+ 'right-sidebar-items': `
+ <button class="js-todo">Todo</button>
+ `,
+ },
+ });
+
+describe('IssuableSidebarRoot', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('watch', () => {
+ describe('isExpanded', () => {
+ it('emits `sidebar-toggle` event on component', async () => {
+ wrapper.setData({
+ isExpanded: false,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted('sidebar-toggle')).toBeTruthy();
+ expect(wrapper.emitted('sidebar-toggle')[0]).toEqual([
+ {
+ expanded: false,
+ },
+ ]);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('updatePageContainerClass', () => {
+ beforeEach(() => {
+ setFixtures('<div class="layout-page"></div>');
+ });
+
+ it.each`
+ isExpanded | layoutPageClass
+ ${true} | ${'right-sidebar-expanded'}
+ ${false} | ${'right-sidebar-collapsed'}
+ `(
+ 'sets class $layoutPageClass on container element when `isExpanded` is $isExpanded',
+ async ({ isExpanded, layoutPageClass }) => {
+ wrapper.setData({
+ isExpanded,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ wrapper.vm.updatePageContainerClass();
+
+ expect(document.querySelector('.layout-page').classList.contains(layoutPageClass)).toBe(
+ true,
+ );
+ },
+ );
+ });
+
+ describe('handleWindowResize', () => {
+ beforeEach(async () => {
+ wrapper.setData({
+ userExpanded: true,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it.each`
+ breakpoint | isExpandedValue
+ ${'xs'} | ${false}
+ ${'sm'} | ${false}
+ ${'md'} | ${false}
+ ${'lg'} | ${true}
+ ${'xl'} | ${true}
+ `(
+ 'sets `isExpanded` to $isExpandedValue when current screen size is $breakpoint',
+ async ({ breakpoint, isExpandedValue }) => {
+ jest.spyOn(bp, 'isDesktop').mockReturnValue(breakpoint === 'lg' || breakpoint === 'xl');
+
+ wrapper.vm.handleWindowResize();
+
+ expect(wrapper.vm.isExpanded).toBe(isExpandedValue);
+ },
+ );
+
+ it('calls `updatePageContainerClass` method', () => {
+ jest.spyOn(wrapper.vm, 'updatePageContainerClass');
+
+ wrapper.vm.handleWindowResize();
+
+ expect(wrapper.vm.updatePageContainerClass).toHaveBeenCalled();
+ });
+ });
+
+ describe('handleToggleSidebarClick', () => {
+ beforeEach(async () => {
+ jest.spyOn(Cookies, 'set').mockImplementation(jest.fn());
+ wrapper.setData({
+ isExpanded: true,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('flips value of `isExpanded`', () => {
+ wrapper.vm.handleToggleSidebarClick();
+
+ expect(wrapper.vm.isExpanded).toBe(false);
+ expect(wrapper.vm.userExpanded).toBe(false);
+ });
+
+ it('updates "collapsed_gutter" cookie value', () => {
+ wrapper.vm.handleToggleSidebarClick();
+
+ expect(Cookies.set).toHaveBeenCalledWith('collapsed_gutter', true);
+ });
+
+ it('calls `updatePageContainerClass` method', () => {
+ jest.spyOn(wrapper.vm, 'updatePageContainerClass');
+
+ wrapper.vm.handleToggleSidebarClick();
+
+ expect(wrapper.vm.updatePageContainerClass).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('template', () => {
+ describe('sidebar expanded', () => {
+ beforeEach(async () => {
+ wrapper.setData({
+ isExpanded: true,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('renders component container element with class `right-sidebar-expanded` when `isExpanded` prop is true', () => {
+ expect(wrapper.classes()).toContain('right-sidebar-expanded');
+ });
+
+ it('renders sidebar toggle button with text and icon', () => {
+ const buttonEl = wrapper.find('button');
+
+ expect(buttonEl.exists()).toBe(true);
+ expect(buttonEl.attributes('title')).toBe('Toggle sidebar');
+ expect(buttonEl.find('span').text()).toBe('Collapse sidebar');
+ expect(buttonEl.find('[data-testid="icon-collapse"]').isVisible()).toBe(true);
+ });
+ });
+
+ describe('sidebar collapsed', () => {
+ beforeEach(async () => {
+ wrapper.setData({
+ isExpanded: false,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('renders component container element with class `right-sidebar-collapsed` when `isExpanded` prop is false', () => {
+ expect(wrapper.classes()).toContain('right-sidebar-collapsed');
+ });
+
+ it('renders sidebar toggle button with text and icon', () => {
+ const buttonEl = wrapper.find('button');
+
+ expect(buttonEl.exists()).toBe(true);
+ expect(buttonEl.attributes('title')).toBe('Toggle sidebar');
+ expect(buttonEl.find('[data-testid="icon-expand"]').isVisible()).toBe(true);
+ });
+ });
+
+ it('renders sidebar items', () => {
+ const sidebarItemsEl = wrapper.find('[data-testid="sidebar-items"]');
+
+ expect(sidebarItemsEl.exists()).toBe(true);
+ expect(sidebarItemsEl.find('button.js-todo').exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js b/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
index 8d50df5e406..c1ab4433761 100644
--- a/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
+++ b/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import merge from 'lodash/merge';
import { GlLink } from '@gitlab/ui';
import HighlightBar from '~/issue_show/components/incidents/highlight_bar.vue';
import { formatDate } from '~/lib/utils/datetime_utility';
@@ -9,18 +10,24 @@ describe('Highlight Bar', () => {
let wrapper;
const alert = {
+ iid: 1,
startedAt: '2020-05-29T10:39:22Z',
detailsUrl: 'http://127.0.0.1:3000/root/unique-alerts/-/alert_management/1/details',
eventCount: 1,
title: 'Alert 1',
};
- const mountComponent = () => {
- wrapper = shallowMount(HighlightBar, {
- propsData: {
- alert,
- },
- });
+ const mountComponent = options => {
+ wrapper = shallowMount(
+ HighlightBar,
+ merge(
+ {
+ propsData: { alert },
+ provide: { fullPath: 'test', iid: 1, slaFeatureAvailable: true },
+ },
+ options,
+ ),
+ );
};
beforeEach(() => {
@@ -36,21 +43,52 @@ describe('Highlight Bar', () => {
const findLink = () => wrapper.find(GlLink);
- it('renders a link to the alert page', () => {
- expect(findLink().exists()).toBe(true);
- expect(findLink().attributes('href')).toBe(alert.detailsUrl);
- expect(findLink().text()).toContain(alert.title);
+ describe('empty state', () => {
+ beforeEach(() => {
+ mountComponent({ propsData: { alert: null } });
+ });
+
+ it('renders an empty component', () => {
+ expect(wrapper.isVisible()).toBe(false);
+ });
});
- it('renders formatted start time of the alert', () => {
- const formattedDate = '2020-05-29 UTC';
- formatDate.mockReturnValueOnce(formattedDate);
- mountComponent();
- expect(formatDate).toHaveBeenCalledWith(alert.startedAt, 'yyyy-mm-dd Z');
- expect(wrapper.text()).toContain(formattedDate);
+ describe('alert present', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('renders a link to the alert page', () => {
+ expect(findLink().exists()).toBe(true);
+ expect(findLink().attributes('href')).toBe(alert.detailsUrl);
+ expect(findLink().attributes('title')).toBe(alert.title);
+ expect(findLink().text()).toBe(`#${alert.iid}`);
+ });
+
+ it('renders formatted start time of the alert', () => {
+ const formattedDate = '2020-05-29 UTC';
+ formatDate.mockReturnValueOnce(formattedDate);
+ mountComponent();
+ expect(formatDate).toHaveBeenCalledWith(alert.startedAt, 'yyyy-mm-dd Z');
+ expect(wrapper.text()).toContain(formattedDate);
+ });
+
+ it('renders the number of alert events', () => {
+ expect(wrapper.text()).toContain(alert.eventCount);
+ });
});
- it('renders a number of alert events', () => {
- expect(wrapper.text()).toContain(alert.eventCount);
+ describe('when child data is present', () => {
+ beforeEach(() => {
+ mountComponent({
+ data() {
+ return { hasChildData: true };
+ },
+ });
+ });
+
+ it('renders the highlight bar component', () => {
+ expect(wrapper.isVisible()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js b/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
index a51b497cd79..c6200fd69bf 100644
--- a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
+++ b/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
@@ -6,6 +6,8 @@ import { descriptionProps } from '../../mock_data';
import DescriptionComponent from '~/issue_show/components/description.vue';
import HighlightBar from '~/issue_show/components/incidents/highlight_bar.vue';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
+import Tracking from '~/tracking';
+import { trackIncidentDetailsViewsOptions } from '~/incidents/constants';
const mockAlert = {
__typename: 'AlertManagementAlert',
@@ -57,7 +59,6 @@ describe('Incident Tabs component', () => {
it('does not show the alert details tab', () => {
expect(findAlertDetailsComponent().exists()).toBe(false);
- expect(findHighlightBarComponent().exists()).toBe(false);
});
});
@@ -79,7 +80,7 @@ describe('Incident Tabs component', () => {
it('renders the alert details table with the correct props', () => {
const alert = { iid: mockAlert.iid };
- expect(findAlertDetailsComponent().props('alert')).toEqual(alert);
+ expect(findAlertDetailsComponent().props('alert')).toMatchObject(alert);
expect(findAlertDetailsComponent().props('loading')).toBe(true);
});
@@ -98,4 +99,16 @@ describe('Incident Tabs component', () => {
expect(findDescriptionComponent().props()).toMatchObject(descriptionProps);
});
});
+
+ describe('Snowplow tracking', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ mountComponent();
+ });
+
+ it('should track incident details views', () => {
+ const { category, action } = trackIncidentDetailsViewsOptions;
+ expect(Tracking.event).toHaveBeenCalledWith(category, action);
+ });
+ });
});
diff --git a/spec/frontend/issue_show/issue_spec.js b/spec/frontend/issue_show/issue_spec.js
index befb670c6cd..c0175e774a2 100644
--- a/spec/frontend/issue_show/issue_spec.js
+++ b/spec/frontend/issue_show/issue_spec.js
@@ -14,12 +14,8 @@ useMockIntersectionObserver();
jest.mock('~/lib/utils/poll');
const setupHTML = initialData => {
- document.body.innerHTML = `
- <div id="js-issuable-app"></div>
- <script id="js-issuable-app-initial-data" type="application/json">
- ${JSON.stringify(initialData)}
- </script>
- `;
+ document.body.innerHTML = `<div id="js-issuable-app"></div>`;
+ document.getElementById('js-issuable-app').dataset.initial = JSON.stringify(initialData);
};
describe('Issue show index', () => {
diff --git a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
index eede5426f42..cd0266068aa 100644
--- a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
+++ b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
@@ -98,7 +98,7 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
</span>
<svg
- class="dropdown-chevron gl-icon s16"
+ class="gl-button-icon dropdown-chevron gl-icon s16"
data-testid="chevron-down-icon"
>
<use
@@ -114,7 +114,7 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<!---->
<div
- class="gl-search-box-by-type gl-m-3"
+ class="gl-search-box-by-type"
>
<svg
class="gl-search-box-by-type-search-icon gl-icon s16"
@@ -209,7 +209,7 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
</span>
<svg
- class="dropdown-chevron gl-icon s16"
+ class="gl-button-icon dropdown-chevron gl-icon s16"
data-testid="chevron-down-icon"
>
<use
@@ -225,7 +225,7 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<!---->
<div
- class="gl-search-box-by-type gl-m-3"
+ class="gl-search-box-by-type"
>
<svg
class="gl-search-box-by-type-search-icon gl-icon s16"
diff --git a/spec/frontend/jobs/components/job_container_item_spec.js b/spec/frontend/jobs/components/job_container_item_spec.js
index 9019504d22d..41b399fa32b 100644
--- a/spec/frontend/jobs/components/job_container_item_spec.js
+++ b/spec/frontend/jobs/components/job_container_item_spec.js
@@ -90,7 +90,7 @@ describe('JobContainerItem', () => {
Vue.nextTick()
.then(() => {
- expect(vm.$el.querySelector('.js-job-link').getAttribute('data-original-title')).toEqual(
+ expect(vm.$el.querySelector('.js-job-link').getAttribute('title')).toEqual(
'delayed job - delayed manual action (00:22:17)',
);
})
diff --git a/spec/frontend/jobs/components/log/line_spec.js b/spec/frontend/jobs/components/log/line_spec.js
index c2412a807c3..1a30921fece 100644
--- a/spec/frontend/jobs/components/log/line_spec.js
+++ b/spec/frontend/jobs/components/log/line_spec.js
@@ -2,21 +2,25 @@ import { shallowMount } from '@vue/test-utils';
import Line from '~/jobs/components/log/line.vue';
import LineNumber from '~/jobs/components/log/line_number.vue';
+const httpUrl = 'http://example.com';
+const httpsUrl = 'https://example.com';
+
+const mockProps = ({ text = 'Running with gitlab-runner 12.1.0 (de7731dd)' } = {}) => ({
+ line: {
+ content: [
+ {
+ text,
+ style: 'term-fg-l-green',
+ },
+ ],
+ lineNumber: 0,
+ },
+ path: '/jashkenas/underscore/-/jobs/335',
+});
+
describe('Job Log Line', () => {
let wrapper;
-
- const data = {
- line: {
- content: [
- {
- text: 'Running with gitlab-runner 12.1.0 (de7731dd)',
- style: 'term-fg-l-green',
- },
- ],
- lineNumber: 0,
- },
- path: '/jashkenas/underscore/-/jobs/335',
- };
+ let data;
const createComponent = (props = {}) => {
wrapper = shallowMount(Line, {
@@ -27,6 +31,7 @@ describe('Job Log Line', () => {
};
beforeEach(() => {
+ data = mockProps();
createComponent(data);
});
@@ -45,4 +50,38 @@ describe('Job Log Line', () => {
it('renders the provided style as a class attribute', () => {
expect(wrapper.find('span').classes()).toContain(data.line.content[0].style);
});
+
+ describe('when the line contains a link', () => {
+ const findLink = () => wrapper.find('span a');
+
+ it('renders an http link', () => {
+ createComponent(mockProps({ text: httpUrl }));
+
+ expect(findLink().text()).toBe(httpUrl);
+ expect(findLink().attributes().href).toEqual(httpUrl);
+ });
+
+ it('renders an https link', () => {
+ createComponent(mockProps({ text: httpsUrl }));
+
+ expect(findLink().text()).toBe(httpsUrl);
+ expect(findLink().attributes().href).toEqual(httpsUrl);
+ });
+
+ it('renders a link with rel nofollow and noopener', () => {
+ createComponent(mockProps({ text: httpsUrl }));
+
+ expect(findLink().attributes().rel).toBe('nofollow noopener');
+ });
+
+ test.each`
+ type | text
+ ${'ftp'} | ${'ftp://example.com/file'}
+ ${'email'} | ${'email@example.com'}
+ ${'no scheme'} | ${'example.com/page'}
+ `('does not render a $type link', ({ text }) => {
+ createComponent(mockProps({ text }));
+ expect(findLink().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/jobs/store/utils_spec.js b/spec/frontend/jobs/store/utils_spec.js
index 294f88bbc74..e50d304bb08 100644
--- a/spec/frontend/jobs/store/utils_spec.js
+++ b/spec/frontend/jobs/store/utils_spec.js
@@ -35,6 +35,14 @@ describe('Jobs Store Utils', () => {
lines: [],
});
});
+
+ it('pre-closes a section when specified in options', () => {
+ const headerLine = { content: [{ text: 'foo' }], section_options: { collapsed: 'true' } };
+
+ const parsedHeaderLine = parseHeaderLine(headerLine, 2);
+
+ expect(parsedHeaderLine.isClosed).toBe(true);
+ });
});
describe('parseLine', () => {
diff --git a/spec/frontend/labels_issue_sidebar_spec.js b/spec/frontend/labels_issue_sidebar_spec.js
deleted file mode 100644
index f74547c0554..00000000000
--- a/spec/frontend/labels_issue_sidebar_spec.js
+++ /dev/null
@@ -1,98 +0,0 @@
-/* eslint-disable no-new */
-
-import $ from 'jquery';
-import MockAdapter from 'axios-mock-adapter';
-import { shuffle } from 'lodash';
-import axios from '~/lib/utils/axios_utils';
-import IssuableContext from '~/issuable_context';
-import LabelsSelect from '~/labels_select';
-
-import 'select2';
-import '~/api';
-import '~/create_label';
-import '~/users_select';
-
-let saveLabelCount = 0;
-let mock;
-
-function testLabelClicks(labelOrder, done) {
- $('.edit-link')
- .get(0)
- .click();
-
- jest.runOnlyPendingTimers();
-
- setImmediate(() => {
- const labelsInDropdown = $('.dropdown-content a');
-
- expect(labelsInDropdown.length).toBe(10);
-
- const arrayOfLabels = labelsInDropdown.get();
- const randomArrayOfLabels = shuffle(arrayOfLabels);
- randomArrayOfLabels.forEach((label, i) => {
- if (i < saveLabelCount) {
- $(label).click();
- }
- });
-
- $('.edit-link')
- .get(0)
- .click();
-
- setImmediate(() => {
- expect($('.sidebar-collapsed-icon').attr('data-original-title')).toBe(labelOrder);
- done();
- });
- });
-}
-
-describe('Issue dropdown sidebar', () => {
- preloadFixtures('static/issue_sidebar_label.html');
-
- beforeEach(() => {
- loadFixtures('static/issue_sidebar_label.html');
-
- mock = new MockAdapter(axios);
-
- new IssuableContext('{"id":1,"name":"Administrator","username":"root"}');
- new LabelsSelect();
-
- mock.onGet('/root/test/labels.json').reply(() => {
- const labels = Array(10)
- .fill()
- .map((_val, i) => ({
- id: i,
- title: `test ${i}`,
- color: '#5CB85C',
- }));
-
- return [200, labels];
- });
-
- mock.onPut('/root/test/issues/2.json').reply(() => {
- const labels = Array(saveLabelCount)
- .fill()
- .map((_val, i) => ({
- id: i,
- title: `test ${i}`,
- color: '#5CB85C',
- }));
-
- return [200, { labels }];
- });
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('changes collapsed tooltip when changing labels when less than 5', done => {
- saveLabelCount = 5;
- testLabelClicks('test 0, test 1, test 2, test 3, test 4', done);
- });
-
- it('changes collapsed tooltip when changing labels when more than 5', done => {
- saveLabelCount = 6;
- testLabelClicks('test 0, test 1, test 2, test 3, test 4, and 1 more', done);
- });
-});
diff --git a/spec/frontend/lib/dompurify_spec.js b/spec/frontend/lib/dompurify_spec.js
new file mode 100644
index 00000000000..ee1971a4931
--- /dev/null
+++ b/spec/frontend/lib/dompurify_spec.js
@@ -0,0 +1,98 @@
+import { sanitize } from '~/lib/dompurify';
+
+// GDK
+const rootGon = {
+ sprite_file_icons: '/assets/icons-123a.svg',
+ sprite_icons: '/assets/icons-456b.svg',
+};
+
+// Production
+const absoluteGon = {
+ sprite_file_icons: `${window.location.protocol}//${window.location.hostname}/assets/icons-123a.svg`,
+ sprite_icons: `${window.location.protocol}//${window.location.hostname}/assets/icons-456b.svg`,
+};
+
+const expectedSanitized = '<svg><use></use></svg>';
+
+const safeUrls = {
+ root: Object.values(rootGon).map(url => `${url}#ellipsis_h`),
+ absolute: Object.values(absoluteGon).map(url => `${url}#ellipsis_h`),
+};
+
+const unsafeUrls = [
+ '/an/evil/url',
+ '../../../evil/url',
+ 'https://evil.url/assets/icons-123a.svg',
+ 'https://evil.url/assets/icons-456b.svg',
+ `https://evil.url/${rootGon.sprite_icons}`,
+ `https://evil.url/${rootGon.sprite_file_icons}`,
+ `https://evil.url/${absoluteGon.sprite_icons}`,
+ `https://evil.url/${absoluteGon.sprite_file_icons}`,
+];
+
+describe('~/lib/dompurify', () => {
+ let originalGon;
+
+ it('uses local configuration when given', () => {
+ // Because dompurify uses a "Persistent Configuration", it might
+ // ignore the config passed here. This check verifies that we respect
+ // https://github.com/cure53/DOMPurify#persistent-configuration
+ expect(sanitize('<br>', { ALLOWED_TAGS: [] })).toBe('');
+ expect(sanitize('<strong></strong>', { ALLOWED_TAGS: [] })).toBe('');
+ });
+
+ describe.each`
+ type | gon
+ ${'root'} | ${rootGon}
+ ${'absolute'} | ${absoluteGon}
+ `('when gon contains $type icon urls', ({ type, gon }) => {
+ beforeAll(() => {
+ originalGon = window.gon;
+ window.gon = gon;
+ });
+
+ afterAll(() => {
+ window.gon = originalGon;
+ });
+
+ it('allows elements without href attrs', () => {
+ const htmlHref = `<svg><use></use></svg>`;
+ expect(sanitize(htmlHref)).toBe(htmlHref);
+ });
+
+ it.each(safeUrls[type])('allows safe URL %s', url => {
+ const htmlHref = `<svg><use href="${url}"></use></svg>`;
+ expect(sanitize(htmlHref)).toBe(htmlHref);
+
+ const htmlXlink = `<svg><use xlink:href="${url}"></use></svg>`;
+ expect(sanitize(htmlXlink)).toBe(htmlXlink);
+ });
+
+ it.each(unsafeUrls)('sanitizes unsafe URL %s', url => {
+ const htmlHref = `<svg><use href="${url}"></use></svg>`;
+ const htmlXlink = `<svg><use xlink:href="${url}"></use></svg>`;
+
+ expect(sanitize(htmlHref)).toBe(expectedSanitized);
+ expect(sanitize(htmlXlink)).toBe(expectedSanitized);
+ });
+ });
+
+ describe('when gon does not contain icon urls', () => {
+ beforeAll(() => {
+ originalGon = window.gon;
+ window.gon = {};
+ });
+
+ afterAll(() => {
+ window.gon = originalGon;
+ });
+
+ it.each([...safeUrls.root, ...safeUrls.absolute, ...unsafeUrls])('sanitizes URL %s', url => {
+ const htmlHref = `<svg><use href="${url}"></use></svg>`;
+ const htmlXlink = `<svg><use xlink:href="${url}"></use></svg>`;
+
+ expect(sanitize(htmlHref)).toBe(expectedSanitized);
+ expect(sanitize(htmlXlink)).toBe(expectedSanitized);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/axios_startup_calls_spec.js b/spec/frontend/lib/utils/axios_startup_calls_spec.js
index e804cae7914..e12bf725560 100644
--- a/spec/frontend/lib/utils/axios_startup_calls_spec.js
+++ b/spec/frontend/lib/utils/axios_startup_calls_spec.js
@@ -111,21 +111,44 @@ describe('setupAxiosStartupCalls', () => {
});
});
- it('removes GitLab Base URL from startup call', async () => {
- const oldGon = window.gon;
- window.gon = { gitlab_url: 'https://example.org/gitlab' };
-
- window.gl.startup_calls = {
- '/startup': {
- fetchCall: mockFetchCall(200),
- },
- };
- setupAxiosStartupCalls(axios);
+ describe('startup call', () => {
+ let oldGon;
+
+ beforeEach(() => {
+ oldGon = window.gon;
+ window.gon = { gitlab_url: 'https://example.org/gitlab' };
+ });
+
+ afterEach(() => {
+ window.gon = oldGon;
+ });
- const { data } = await axios.get('https://example.org/gitlab/startup');
+ it('removes GitLab Base URL from startup call', async () => {
+ window.gl.startup_calls = {
+ '/startup': {
+ fetchCall: mockFetchCall(200),
+ },
+ };
+ setupAxiosStartupCalls(axios);
- expect(data).toEqual(STARTUP_JS_RESPONSE);
+ const { data } = await axios.get('https://example.org/gitlab/startup');
- window.gon = oldGon;
+ expect(data).toEqual(STARTUP_JS_RESPONSE);
+ });
+
+ it('sorts the params in the requested API url', async () => {
+ window.gl.startup_calls = {
+ '/startup?alpha=true&bravo=true': {
+ fetchCall: mockFetchCall(200),
+ },
+ };
+ setupAxiosStartupCalls(axios);
+
+ // Use a full url instead of passing options = { params: { ... } } to axios.get
+ // to ensure the params are listed in the specified order.
+ const { data } = await axios.get('https://example.org/gitlab/startup?bravo=true&alpha=true');
+
+ expect(data).toEqual(STARTUP_JS_RESPONSE);
+ });
});
});
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index 5b1fdea058b..b0b0b028761 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -69,6 +69,34 @@ describe('Date time utils', () => {
});
});
+ describe('formatDateAsMonth', () => {
+ it('should format dash-separated date properly', () => {
+ const formattedMonth = datetimeUtility.formatDateAsMonth(new Date('2020-06-28'));
+
+ expect(formattedMonth).toBe('Jun');
+ });
+
+ it('should return the non-abbreviated month', () => {
+ const formattedMonth = datetimeUtility.formatDateAsMonth(new Date('2020-07-28'), {
+ abbreviated: false,
+ });
+
+ expect(formattedMonth).toBe('July');
+ });
+
+ it('should format date with slashes properly', () => {
+ const formattedMonth = datetimeUtility.formatDateAsMonth(new Date('07/23/2016'));
+
+ expect(formattedMonth).toBe('Jul');
+ });
+
+ it('should format ISO date properly', () => {
+ const formattedMonth = datetimeUtility.formatDateAsMonth('2016-07-23T00:00:00.559Z');
+
+ expect(formattedMonth).toBe('Jul');
+ });
+ });
+
describe('formatDate', () => {
it('should format date properly', () => {
const formattedDate = datetimeUtility.formatDate(new Date('07/23/2016'));
@@ -654,6 +682,20 @@ describe('differenceInSeconds', () => {
});
});
+describe('differenceInMonths', () => {
+ const startDateTime = new Date('2019-07-17T00:00:00.000Z');
+
+ it.each`
+ startDate | endDate | expected
+ ${startDateTime} | ${startDateTime} | ${0}
+ ${startDateTime} | ${new Date('2019-12-17T12:00:00.000Z')} | ${5}
+ ${startDateTime} | ${new Date('2021-02-18T00:00:00.000Z')} | ${19}
+ ${new Date('2021-02-18T00:00:00.000Z')} | ${startDateTime} | ${-19}
+ `('returns $expected for $endDate - $startDate', ({ startDate, endDate, expected }) => {
+ expect(datetimeUtility.differenceInMonths(startDate, endDate)).toBe(expected);
+ });
+});
+
describe('differenceInMilliseconds', () => {
const startDateTime = new Date('2019-07-17T00:00:00.000Z');
@@ -667,3 +709,26 @@ describe('differenceInMilliseconds', () => {
expect(datetimeUtility.differenceInMilliseconds(startDate, endDate)).toBe(expected);
});
});
+
+describe('dateAtFirstDayOfMonth', () => {
+ const date = new Date('2019-07-16T12:00:00.000Z');
+
+ it('returns the date at the first day of the month', () => {
+ const startDate = datetimeUtility.dateAtFirstDayOfMonth(date);
+ const expectedStartDate = new Date('2019-07-01T12:00:00.000Z');
+
+ expect(startDate).toStrictEqual(expectedStartDate);
+ });
+});
+
+describe('datesMatch', () => {
+ const date = new Date('2019-07-17T00:00:00.000Z');
+
+ it.each`
+ date1 | date2 | expected
+ ${date} | ${new Date('2019-07-17T00:00:00.000Z')} | ${true}
+ ${date} | ${new Date('2019-07-17T12:00:00.000Z')} | ${false}
+ `('returns $expected when checking if $date1 matches $date2', ({ date1, date2, expected }) => {
+ expect(datetimeUtility.datesMatch(date1, date2)).toBe(expected);
+ });
+});
diff --git a/spec/frontend/lib/utils/experimentation_spec.js b/spec/frontend/lib/utils/experimentation_spec.js
new file mode 100644
index 00000000000..2c5d2f89297
--- /dev/null
+++ b/spec/frontend/lib/utils/experimentation_spec.js
@@ -0,0 +1,20 @@
+import * as experimentUtils from '~/lib/utils/experimentation';
+
+const TEST_KEY = 'abc';
+
+describe('experiment Utilities', () => {
+ describe('isExperimentEnabled', () => {
+ it.each`
+ experiments | value
+ ${{ [TEST_KEY]: true }} | ${true}
+ ${{ [TEST_KEY]: false }} | ${false}
+ ${{ def: true }} | ${false}
+ ${{}} | ${false}
+ ${null} | ${false}
+ `('returns correct value of $value for experiments=$experiments', ({ experiments, value }) => {
+ window.gon = { experiments };
+
+ expect(experimentUtils.isExperimentEnabled(TEST_KEY)).toEqual(value);
+ });
+ });
+});
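// A minimal sketch of the utility these cases describe (hypothetical implementation; the spec
// only pins down that the flag is read from window.gon.experiments and coerced to a boolean):
export function isExperimentEnabled(experimentKey) {
  // `gon` may be missing and `gon.experiments` may be null — all of those cases resolve to false
  return Boolean(window.gon?.experiments?.[experimentKey]);
}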
diff --git a/spec/frontend/lib/utils/number_utility_spec.js b/spec/frontend/lib/utils/number_utility_spec.js
index 2f8f1092612..f600f2bcd55 100644
--- a/spec/frontend/lib/utils/number_utility_spec.js
+++ b/spec/frontend/lib/utils/number_utility_spec.js
@@ -1,5 +1,6 @@
import {
formatRelevantDigits,
+ bytesToKB,
bytesToKiB,
bytesToMiB,
bytesToGiB,
@@ -54,6 +55,16 @@ describe('Number Utils', () => {
});
});
+ describe('bytesToKB', () => {
+ it.each`
+ input | output
+ ${1000} | ${1}
+ ${1024} | ${1.024}
+ `('returns $output KB for $input bytes', ({ input, output }) => {
+ expect(bytesToKB(input)).toBe(output);
+ });
+ });
+
describe('bytesToKiB', () => {
it('calculates KiB for the given bytes', () => {
expect(bytesToKiB(1024)).toEqual(1);
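// bytesToKB differs from the *iB helpers only in its divisor; a sketch matching the table above
// (decimal kilobytes, so 1000 bytes -> 1 and 1024 bytes -> 1.024):
export const bytesToKB = bytes => bytes / 1000;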
diff --git a/spec/frontend/lib/utils/text_markdown_spec.js b/spec/frontend/lib/utils/text_markdown_spec.js
index 1aaae80dcdf..43de195c702 100644
--- a/spec/frontend/lib/utils/text_markdown_spec.js
+++ b/spec/frontend/lib/utils/text_markdown_spec.js
@@ -13,6 +13,23 @@ describe('init markdown', () => {
textArea.parentNode.removeChild(textArea);
});
+ describe('insertMarkdownText', () => {
+ it('will not error if selected text is a number', () => {
+ const selected = 2;
+
+ insertMarkdownText({
+ textArea,
+ text: '',
+ tag: '',
+ blockTag: null,
+ selected,
+ wrap: false,
+ });
+
+ expect(textArea.value).toBe(selected.toString());
+ });
+ });
+
describe('textArea', () => {
describe('without selection', () => {
it('inserts the tag on an empty line', () => {
@@ -251,88 +268,10 @@ describe('init markdown', () => {
});
});
- describe('Ace Editor', () => {
- let editor;
-
- beforeEach(() => {
- editor = {
- getSelectionRange: jest.fn().mockReturnValue({
- start: 0,
- end: 0,
- }),
- getValue: jest.fn().mockReturnValue('this is text \n in two lines'),
- insert: jest.fn(),
- navigateLeft: jest.fn(),
- };
- });
-
- it('uses ace editor insert text when editor is passed in', () => {
- insertMarkdownText({
- text: editor.getValue,
- tag: '*',
- blockTag: null,
- selected: '',
- wrap: false,
- editor,
- });
-
- expect(editor.insert).toHaveBeenCalled();
- });
-
- it('adds block tags on line above and below selection', () => {
- const selected = 'this text \n is multiple \n lines';
- const text = `before \n ${selected} \n after`;
-
- insertMarkdownText({
- text,
- tag: '',
- blockTag: '***',
- selected,
- wrap: true,
- editor,
- });
-
- expect(editor.insert).toHaveBeenCalledWith(`***\n${selected}\n***`);
- });
-
- it('uses ace editor to navigate back tag length when nothing is selected', () => {
- insertMarkdownText({
- text: editor.getValue,
- tag: '*',
- blockTag: null,
- selected: '',
- wrap: true,
- editor,
- });
-
- expect(editor.navigateLeft).toHaveBeenCalledWith(1);
- });
-
- it('ace editor does not navigate back when there is selected text', () => {
- insertMarkdownText({
- text: editor.getValue,
- tag: '*',
- blockTag: null,
- selected: 'foobar',
- wrap: true,
- editor,
- });
-
- expect(editor.navigateLeft).not.toHaveBeenCalled();
- });
- });
-
describe('Editor Lite', () => {
let editor;
- let origGon;
beforeEach(() => {
- origGon = window.gon;
- window.gon = {
- features: {
- monacoBlobs: true,
- },
- };
editor = {
getSelection: jest.fn().mockReturnValue({
startLineNumber: 1,
@@ -347,10 +286,6 @@ describe('init markdown', () => {
};
});
- afterEach(() => {
- window.gon = origGon;
- });
-
it('replaces selected text', () => {
insertMarkdownText({
text: editor.getValue,
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 869ae274a3f..0f9290e36b5 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -509,6 +509,20 @@ describe('URL utility', () => {
});
});
+ describe('isBlobUrl', () => {
+ it.each`
+ url | valid
+ ${undefined} | ${false}
+ ${'blob:http://gitlab.com/abcd'} | ${true}
+ ${'data:image/png;base64,abcdef'} | ${false}
+ ${'notaurl'} | ${false}
+ ${'../relative_url'} | ${false}
+ ${'<a></a>'} | ${false}
+ `('returns $valid for $url', ({ url, valid }) => {
+ expect(urlUtils.isBlobUrl(url)).toBe(valid);
+ });
+ });
+
describe('relativePathToAbsolute', () => {
it.each`
path | base | result
@@ -664,6 +678,19 @@ describe('URL utility', () => {
});
});
+ describe('cleanLeadingSeparator', () => {
+ it.each`
+ path | expected
+ ${'/foo/bar'} | ${'foo/bar'}
+ ${'foo/bar'} | ${'foo/bar'}
+ ${'//foo/bar'} | ${'foo/bar'}
+ ${'/./foo/bar'} | ${'./foo/bar'}
+ ${''} | ${''}
+ `('$path becomes $expected', ({ path, expected }) => {
+ expect(urlUtils.cleanLeadingSeparator(path)).toBe(expected);
+ });
+ });
+
describe('joinPaths', () => {
it.each`
paths | expected
@@ -688,6 +715,18 @@ describe('URL utility', () => {
});
});
+ describe('stripFinalUrlSegment', () => {
+ it.each`
+ path | expected
+ ${'http://fake.domain/twitter/typeahead-js/-/tags/v0.11.0'} | ${'http://fake.domain/twitter/typeahead-js/-/tags/'}
+ ${'http://fake.domain/bar/cool/-/nested/content'} | ${'http://fake.domain/bar/cool/-/nested/'}
+ ${'http://fake.domain/bar/cool?q="search"'} | ${'http://fake.domain/bar/'}
+ ${'http://fake.domain/bar/cool#link-to-something'} | ${'http://fake.domain/bar/'}
+ `('stripFinalUrlSegment $path => $expected', ({ path, expected }) => {
+ expect(urlUtils.stripFinalUrlSegment(path)).toBe(expected);
+ });
+ });
+
describe('escapeFileUrl', () => {
it('encodes URL excluding the slashes', () => {
expect(urlUtils.escapeFileUrl('/foo-bar/file.md')).toBe('/foo-bar/file.md');
@@ -787,4 +826,36 @@ describe('URL utility', () => {
expect(urlUtils.getHTTPProtocol(url)).toBe(expectation);
});
});
+
+ describe('stripPathTail', () => {
+ it.each`
+ path | expected
+ ${''} | ${''}
+ ${'index.html'} | ${''}
+ ${'/'} | ${'/'}
+ ${'/foo/bar'} | ${'/foo/'}
+ ${'/foo/bar/'} | ${'/foo/bar/'}
+ ${'/foo/bar/index.html'} | ${'/foo/bar/'}
+ `('strips the filename from $path => $expected', ({ path, expected }) => {
+ expect(urlUtils.stripPathTail(path)).toBe(expected);
+ });
+ });
+
+ describe('getURLOrigin', () => {
+    it('returns the origin from window.location when no url is passed', () => {
+ const origin = 'https://foo.bar';
+
+ setWindowLocation({ origin });
+ expect(urlUtils.getURLOrigin()).toBe(origin);
+ });
+
+ it.each`
+ url | expectation
+ ${'not-a-url'} | ${null}
+ ${'wss://example.com'} | ${'wss://example.com'}
+ ${'https://foo.bar/foo/bar'} | ${'https://foo.bar'}
+ `('returns correct origin for $url', ({ url, expectation }) => {
+ expect(urlUtils.getURLOrigin(url)).toBe(expectation);
+ });
+ });
});
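// Hypothetical sketches consistent with the new URL utility cases above, shown only to make the
// expected behaviour concrete; the real ~/lib/utils/url_utility implementations may differ.
export const cleanLeadingSeparator = path => path.replace(/^\/+/, ''); // strips leading slashes only

export const stripPathTail = path => path.replace(/[^/]*$/, ''); // drops a trailing filename, keeps the directory part

export function getURLOrigin(url) {
  // with no argument, fall back to the current window origin; invalid URLs yield null
  if (!url) return window.location.origin;
  try {
    return new URL(url).origin;
  } catch (e) {
    return null;
  }
}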
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
index 559ce4f9414..e32deaea993 100644
--- a/spec/frontend/logs/components/environment_logs_spec.js
+++ b/spec/frontend/logs/components/environment_logs_spec.js
@@ -1,4 +1,4 @@
-import { GlSprintf, GlIcon, GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
+import { GlSprintf, GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import EnvironmentLogs from '~/logs/components/environment_logs.vue';
@@ -121,7 +121,7 @@ describe('EnvironmentLogs', () => {
it('displays UI elements', () => {
initWrapper();
- expect(findEnvironmentsDropdown().is(GlDeprecatedDropdown)).toBe(true);
+ expect(findEnvironmentsDropdown().is(GlDropdown)).toBe(true);
expect(findSimpleFilters().exists()).toBe(true);
expect(findLogControlButtons().exists()).toBe(true);
@@ -164,7 +164,7 @@ describe('EnvironmentLogs', () => {
it('displays a disabled environments dropdown', () => {
expect(findEnvironmentsDropdown().attributes('disabled')).toBe('true');
- expect(findEnvironmentsDropdown().findAll(GlDeprecatedDropdownItem).length).toBe(0);
+ expect(findEnvironmentsDropdown().findAll(GlDropdownItem).length).toBe(0);
});
it('does not update buttons state', () => {
@@ -241,7 +241,7 @@ describe('EnvironmentLogs', () => {
});
it('populates environments dropdown', () => {
- const items = findEnvironmentsDropdown().findAll(GlDeprecatedDropdownItem);
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
expect(findEnvironmentsDropdown().props('text')).toBe(mockEnvName);
expect(items.length).toBe(mockEnvironments.length);
mockEnvironments.forEach((env, i) => {
@@ -251,14 +251,14 @@ describe('EnvironmentLogs', () => {
});
it('dropdown has one environment selected', () => {
- const items = findEnvironmentsDropdown().findAll(GlDeprecatedDropdownItem);
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
mockEnvironments.forEach((env, i) => {
const item = items.at(i);
if (item.text() !== mockEnvName) {
- expect(item.find(GlIcon).classes('invisible')).toBe(true);
+ expect(item.find(GlDropdownItem).attributes('ischecked')).toBeFalsy();
} else {
- expect(item.find(GlIcon).classes('invisible')).toBe(false);
+ expect(item.find(GlDropdownItem).attributes('ischecked')).toBeTruthy();
}
});
});
@@ -286,7 +286,7 @@ describe('EnvironmentLogs', () => {
describe('when user clicks', () => {
it('environment name, trace is refreshed', () => {
- const items = findEnvironmentsDropdown().findAll(GlDeprecatedDropdownItem);
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
const index = 1; // any env
expect(dispatch).not.toHaveBeenCalledWith(`${module}/showEnvironment`, expect.anything());
diff --git a/spec/frontend/logs/components/log_simple_filters_spec.js b/spec/frontend/logs/components/log_simple_filters_spec.js
index 1e30a7df559..b819f0d25a8 100644
--- a/spec/frontend/logs/components/log_simple_filters_spec.js
+++ b/spec/frontend/logs/components/log_simple_filters_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon, GlDeprecatedDropdownItem } from '@gitlab/ui';
+import { GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { createStore } from '~/logs/stores';
import { mockPods, mockPodName } from '../mock_data';
@@ -17,7 +17,7 @@ describe('LogSimpleFilters', () => {
const findPodsNoPodsText = () => wrapper.find({ ref: 'noPodsMsg' });
const findPodsDropdownItems = () =>
findPodsDropdown()
- .findAll(GlDeprecatedDropdownItem)
+ .findAll(GlDropdownItem)
.filter(item => !('disabled' in item.attributes()));
const mockPodsLoading = () => {
@@ -114,9 +114,9 @@ describe('LogSimpleFilters', () => {
mockPods.forEach((pod, i) => {
const item = items.at(i);
if (item.text() !== mockPodName) {
- expect(item.find(GlIcon).classes('invisible')).toBe(true);
+ expect(item.find(GlDropdownItem).attributes('ischecked')).toBeFalsy();
} else {
- expect(item.find(GlIcon).classes('invisible')).toBe(false);
+ expect(item.find(GlDropdownItem).attributes('ischecked')).toBeTruthy();
}
});
});
diff --git a/spec/frontend/merge_request_spec.js b/spec/frontend/merge_request_spec.js
index 16f04d032fd..37509f77f71 100644
--- a/spec/frontend/merge_request_spec.js
+++ b/spec/frontend/merge_request_spec.js
@@ -3,8 +3,6 @@ import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'spec/test_constants';
import axios from '~/lib/utils/axios_utils';
import MergeRequest from '~/merge_request';
-import CloseReopenReportToggle from '~/close_reopen_report_toggle';
-import IssuablesHelper from '~/helpers/issuables_helper';
describe('MergeRequest', () => {
const test = {};
@@ -112,66 +110,7 @@ describe('MergeRequest', () => {
});
});
- describe('class constructor', () => {
- beforeEach(() => {
- jest.spyOn($, 'ajax').mockImplementation();
- });
-
- it('calls .initCloseReopenReport', () => {
- jest.spyOn(IssuablesHelper, 'initCloseReopenReport').mockImplementation(() => {});
-
- new MergeRequest(); // eslint-disable-line no-new
-
- expect(IssuablesHelper.initCloseReopenReport).toHaveBeenCalled();
- });
-
- it('calls .initDroplab', () => {
- const container = {
- querySelector: jest.fn().mockName('container.querySelector'),
- };
- const dropdownTrigger = {};
- const dropdownList = {};
- const button = {};
-
- jest.spyOn(CloseReopenReportToggle.prototype, 'initDroplab').mockImplementation(() => {});
- jest.spyOn(document, 'querySelector').mockReturnValue(container);
-
- container.querySelector
- .mockReturnValueOnce(dropdownTrigger)
- .mockReturnValueOnce(dropdownList)
- .mockReturnValueOnce(button);
-
- new MergeRequest(); // eslint-disable-line no-new
-
- expect(document.querySelector).toHaveBeenCalledWith('.js-issuable-close-dropdown');
- expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-toggle');
- expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-menu');
- expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-button');
- expect(CloseReopenReportToggle.prototype.initDroplab).toHaveBeenCalled();
- });
- });
-
describe('hideCloseButton', () => {
- describe('merge request of another user', () => {
- beforeEach(() => {
- loadFixtures('merge_requests/merge_request_with_task_list.html');
- test.el = document.querySelector('.js-issuable-actions');
- new MergeRequest(); // eslint-disable-line no-new
- MergeRequest.hideCloseButton();
- });
-
- it('hides the dropdown close item and selects the next item', () => {
- const closeItem = test.el.querySelector('li.close-item');
- const smallCloseItem = test.el.querySelector('.js-close-item');
- const reportItem = test.el.querySelector('li.report-item');
-
- expect(closeItem).toHaveClass('hidden');
- expect(smallCloseItem).toHaveClass('hidden');
- expect(reportItem).toHaveClass('droplab-item-selected');
- expect(reportItem).not.toHaveClass('hidden');
- });
- });
-
describe('merge request of current_user', () => {
beforeEach(() => {
loadFixtures('merge_requests/merge_request_of_current_user.html');
@@ -180,10 +119,8 @@ describe('MergeRequest', () => {
});
it('hides the close button', () => {
- const closeButton = test.el.querySelector('.btn-close');
const smallCloseItem = test.el.querySelector('.js-close-item');
- expect(closeButton).toHaveClass('hidden');
expect(smallCloseItem).toHaveClass('hidden');
});
});
diff --git a/spec/frontend/milestones/stores/actions_spec.js b/spec/frontend/milestones/stores/actions_spec.js
new file mode 100644
index 00000000000..ad73d0e4238
--- /dev/null
+++ b/spec/frontend/milestones/stores/actions_spec.js
@@ -0,0 +1,140 @@
+import testAction from 'helpers/vuex_action_helper';
+import createState from '~/milestones/stores/state';
+import * as actions from '~/milestones/stores/actions';
+import * as types from '~/milestones/stores/mutation_types';
+
+let mockProjectMilestonesReturnValue;
+let mockProjectSearchReturnValue;
+
+jest.mock('~/api', () => ({
+ // `__esModule: true` is required when mocking modules with default exports:
+ // https://jestjs.io/docs/en/jest-object#jestmockmodulename-factory-options
+ __esModule: true,
+ default: {
+ projectMilestones: () => mockProjectMilestonesReturnValue,
+ projectSearch: () => mockProjectSearchReturnValue,
+ },
+}));
+
+describe('Milestone combobox Vuex store actions', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState();
+ });
+
+ describe('setProjectId', () => {
+ it(`commits ${types.SET_PROJECT_ID} with the new project ID`, () => {
+ const projectId = '4';
+ testAction(actions.setProjectId, projectId, state, [
+ { type: types.SET_PROJECT_ID, payload: projectId },
+ ]);
+ });
+ });
+
+ describe('setSelectedMilestones', () => {
+    it(`commits ${types.SET_SELECTED_MILESTONES} with the newly selected milestones`, () => {
+ const selectedMilestones = ['v1.2.3'];
+ testAction(actions.setSelectedMilestones, selectedMilestones, state, [
+ { type: types.SET_SELECTED_MILESTONES, payload: selectedMilestones },
+ ]);
+ });
+ });
+
+ describe('toggleMilestones', () => {
+ const selectedMilestone = 'v1.2.3';
+ it(`commits ${types.ADD_SELECTED_MILESTONE} with the new selected milestone name`, () => {
+ testAction(actions.toggleMilestones, selectedMilestone, state, [
+ { type: types.ADD_SELECTED_MILESTONE, payload: selectedMilestone },
+ ]);
+ });
+
+ it(`commits ${types.REMOVE_SELECTED_MILESTONE} with the new selected milestone name`, () => {
+ state.selectedMilestones = [selectedMilestone];
+ testAction(actions.toggleMilestones, selectedMilestone, state, [
+ { type: types.REMOVE_SELECTED_MILESTONE, payload: selectedMilestone },
+ ]);
+ });
+ });
+
+ describe('search', () => {
+ it(`commits ${types.SET_QUERY} with the new search query`, () => {
+ const query = 'v1.0';
+ testAction(
+ actions.search,
+ query,
+ state,
+ [{ type: types.SET_QUERY, payload: query }],
+ [{ type: 'searchMilestones' }],
+ );
+ });
+ });
+
+ describe('searchMilestones', () => {
+ describe('when the search is successful', () => {
+ const projectSearchApiResponse = { data: [{ title: 'v1.0' }] };
+
+ beforeEach(() => {
+ mockProjectSearchReturnValue = Promise.resolve(projectSearchApiResponse);
+ });
+
+ it(`commits ${types.REQUEST_START}, ${types.RECEIVE_PROJECT_MILESTONES_SUCCESS} with the response from the API, and ${types.REQUEST_FINISH}`, () => {
+ return testAction(actions.searchMilestones, undefined, state, [
+ { type: types.REQUEST_START },
+ { type: types.RECEIVE_PROJECT_MILESTONES_SUCCESS, payload: projectSearchApiResponse },
+ { type: types.REQUEST_FINISH },
+ ]);
+ });
+ });
+
+ describe('when the search fails', () => {
+ const error = new Error('Something went wrong!');
+
+ beforeEach(() => {
+ mockProjectSearchReturnValue = Promise.reject(error);
+ });
+
+ it(`commits ${types.REQUEST_START}, ${types.RECEIVE_PROJECT_MILESTONES_ERROR} with the error object, and ${types.REQUEST_FINISH}`, () => {
+ return testAction(actions.searchMilestones, undefined, state, [
+ { type: types.REQUEST_START },
+ { type: types.RECEIVE_PROJECT_MILESTONES_ERROR, payload: error },
+ { type: types.REQUEST_FINISH },
+ ]);
+ });
+ });
+ });
+
+ describe('fetchMilestones', () => {
+ describe('when the fetch is successful', () => {
+ const projectMilestonesApiResponse = { data: [{ title: 'v1.0' }] };
+
+ beforeEach(() => {
+ mockProjectMilestonesReturnValue = Promise.resolve(projectMilestonesApiResponse);
+ });
+
+ it(`commits ${types.REQUEST_START}, ${types.RECEIVE_PROJECT_MILESTONES_SUCCESS} with the response from the API, and ${types.REQUEST_FINISH}`, () => {
+ return testAction(actions.fetchMilestones, undefined, state, [
+ { type: types.REQUEST_START },
+ { type: types.RECEIVE_PROJECT_MILESTONES_SUCCESS, payload: projectMilestonesApiResponse },
+ { type: types.REQUEST_FINISH },
+ ]);
+ });
+ });
+
+ describe('when the fetch fails', () => {
+ const error = new Error('Something went wrong!');
+
+ beforeEach(() => {
+ mockProjectMilestonesReturnValue = Promise.reject(error);
+ });
+
+ it(`commits ${types.REQUEST_START}, ${types.RECEIVE_PROJECT_MILESTONES_ERROR} with the error object, and ${types.REQUEST_FINISH}`, () => {
+ return testAction(actions.fetchMilestones, undefined, state, [
+ { type: types.REQUEST_START },
+ { type: types.RECEIVE_PROJECT_MILESTONES_ERROR, payload: error },
+ { type: types.REQUEST_FINISH },
+ ]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/milestones/stores/getter_spec.js b/spec/frontend/milestones/stores/getter_spec.js
new file mode 100644
index 00000000000..df7c3d28e67
--- /dev/null
+++ b/spec/frontend/milestones/stores/getter_spec.js
@@ -0,0 +1,15 @@
+import * as getters from '~/milestones/stores/getters';
+
+describe('Milestone combobox Vuex store getters', () => {
+ describe('isLoading', () => {
+ it.each`
+ requestCount | isLoading
+ ${2} | ${true}
+ ${1} | ${true}
+ ${0} | ${false}
+ ${-1} | ${false}
+    `('returns $isLoading when requestCount is $requestCount', ({ requestCount, isLoading }) => {
+ expect(getters.isLoading({ requestCount })).toBe(isLoading);
+ });
+ });
+});
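// The getter under test can be as small as this (hypothetical sketch): any in-flight request
// means the combobox is loading.
export const isLoading = ({ requestCount }) => requestCount > 0;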
diff --git a/spec/frontend/milestones/stores/mutations_spec.js b/spec/frontend/milestones/stores/mutations_spec.js
new file mode 100644
index 00000000000..8f8ce3c87ad
--- /dev/null
+++ b/spec/frontend/milestones/stores/mutations_spec.js
@@ -0,0 +1,159 @@
+import createState from '~/milestones/stores/state';
+import mutations from '~/milestones/stores/mutations';
+import * as types from '~/milestones/stores/mutation_types';
+
+describe('Milestones combobox Vuex store mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState();
+ });
+
+ describe('initial state', () => {
+ it('is created with the correct structure and initial values', () => {
+ expect(state).toEqual({
+ projectId: null,
+ groupId: null,
+ query: '',
+ matches: {
+ projectMilestones: {
+ list: [],
+ totalCount: 0,
+ error: null,
+ },
+ },
+ selectedMilestones: [],
+ requestCount: 0,
+ });
+ });
+ });
+
+ describe(`${types.SET_PROJECT_ID}`, () => {
+ it('updates the project ID', () => {
+ const newProjectId = '4';
+ mutations[types.SET_PROJECT_ID](state, newProjectId);
+
+ expect(state.projectId).toBe(newProjectId);
+ });
+ });
+
+ describe(`${types.SET_SELECTED_MILESTONES}`, () => {
+ it('sets the selected milestones', () => {
+ const selectedMilestones = ['v1.2.3'];
+ mutations[types.SET_SELECTED_MILESTONES](state, selectedMilestones);
+
+ expect(state.selectedMilestones).toEqual(['v1.2.3']);
+ });
+ });
+
+  describe(`${types.ADD_SELECTED_MILESTONE}`, () => {
+    it('adds the selected milestone', () => {
+ const selectedMilestone = 'v1.2.3';
+ mutations[types.ADD_SELECTED_MILESTONE](state, selectedMilestone);
+
+ expect(state.selectedMilestones).toEqual(['v1.2.3']);
+ });
+ });
+
+  describe(`${types.REMOVE_SELECTED_MILESTONE}`, () => {
+    it('removes the selected milestone', () => {
+ const selectedMilestone = 'v1.2.3';
+
+ mutations[types.SET_SELECTED_MILESTONES](state, [selectedMilestone]);
+ expect(state.selectedMilestones).toEqual(['v1.2.3']);
+
+ mutations[types.REMOVE_SELECTED_MILESTONE](state, selectedMilestone);
+ expect(state.selectedMilestones).toEqual([]);
+ });
+ });
+
+ describe(`${types.SET_QUERY}`, () => {
+ it('updates the search query', () => {
+ const newQuery = 'hello';
+ mutations[types.SET_QUERY](state, newQuery);
+
+ expect(state.query).toBe(newQuery);
+ });
+ });
+
+ describe(`${types.REQUEST_START}`, () => {
+ it('increments requestCount by 1', () => {
+ mutations[types.REQUEST_START](state);
+ expect(state.requestCount).toBe(1);
+
+ mutations[types.REQUEST_START](state);
+ expect(state.requestCount).toBe(2);
+
+ mutations[types.REQUEST_START](state);
+ expect(state.requestCount).toBe(3);
+ });
+ });
+
+ describe(`${types.REQUEST_FINISH}`, () => {
+ it('decrements requestCount by 1', () => {
+ state.requestCount = 3;
+
+ mutations[types.REQUEST_FINISH](state);
+ expect(state.requestCount).toBe(2);
+
+ mutations[types.REQUEST_FINISH](state);
+ expect(state.requestCount).toBe(1);
+
+ mutations[types.REQUEST_FINISH](state);
+ expect(state.requestCount).toBe(0);
+ });
+ });
+
+ describe(`${types.RECEIVE_PROJECT_MILESTONES_SUCCESS}`, () => {
+ it('updates state.matches.projectMilestones based on the provided API response', () => {
+ const response = {
+ data: [
+ {
+ title: 'v0.1',
+ },
+ {
+ title: 'v0.2',
+ },
+ ],
+ headers: {
+ 'x-total': 2,
+ },
+ };
+
+ mutations[types.RECEIVE_PROJECT_MILESTONES_SUCCESS](state, response);
+
+ expect(state.matches.projectMilestones).toEqual({
+ list: [
+ {
+ title: 'v0.1',
+ },
+ {
+ title: 'v0.2',
+ },
+ ],
+ error: null,
+ totalCount: 2,
+ });
+    });
+  });
+
+ describe(`${types.RECEIVE_PROJECT_MILESTONES_ERROR}`, () => {
+ it('updates state.matches.projectMilestones to an empty state with the error object', () => {
+ const error = new Error('Something went wrong!');
+
+ state.matches.projectMilestones = {
+ list: [{ title: 'v0.1' }],
+ totalCount: 1,
+ error: null,
+ };
+
+ mutations[types.RECEIVE_PROJECT_MILESTONES_ERROR](state, error);
+
+ expect(state.matches.projectMilestones).toEqual({
+ list: [],
+ totalCount: 0,
+ error,
+ });
+ });
+ });
+});
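// A sketch of the request-tracking mutations exercised above (hypothetical shape): REQUEST_START
// and REQUEST_FINISH keep a counter so overlapping requests can share a single loading state.
import * as types from '~/milestones/stores/mutation_types';

export default {
  [types.REQUEST_START](state) {
    state.requestCount += 1;
  },
  [types.REQUEST_FINISH](state) {
    state.requestCount -= 1;
  },
};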
diff --git a/spec/frontend/mini_pipeline_graph_dropdown_spec.js b/spec/frontend/mini_pipeline_graph_dropdown_spec.js
index 506290834c8..1ecf01894af 100644
--- a/spec/frontend/mini_pipeline_graph_dropdown_spec.js
+++ b/spec/frontend/mini_pipeline_graph_dropdown_spec.js
@@ -69,7 +69,7 @@ describe('Mini Pipeline Graph Dropdown', () => {
html: `<li>
<a class="mini-pipeline-graph-dropdown-item" href="#">
<span class="ci-status-icon ci-status-icon-failed"></span>
- <span class="ci-build-text">build</span>
+ <span>build</span>
</a>
<a class="ci-action-icon-wrapper js-ci-action-icon" href="#"></a>
</li>`,
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index a28ecac00fd..645aca0b157 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -52,7 +52,6 @@ exports[`Dashboard template matches the default snapshot 1`] = `
</gl-dropdown-section-header-stub>
<gl-search-box-by-type-stub
- class="gl-m-3"
clearbuttontitle="Clear"
value=""
/>
diff --git a/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
index c30fb572826..9b2aa3a5b5b 100644
--- a/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
@@ -1,79 +1,146 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`GroupEmptyState Renders an empty state for BAD_QUERY 1`] = `
-<gl-empty-state-stub
- compact="true"
- primarybuttonlink="/path/to/settings"
- primarybuttontext="Verify configuration"
- svgpath="/path/to/empty-group-illustration.svg"
- title="Query cannot be processed"
-/>
+exports[`GroupEmptyState given state BAD_QUERY passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": null,
+ "primaryButtonLink": "/path/to/settings",
+ "primaryButtonText": "Verify configuration",
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "Query cannot be processed",
+}
`;
-exports[`GroupEmptyState Renders an empty state for BAD_QUERY 2`] = `"The Prometheus server responded with \\"bad request\\". Please check your queries are correct and are supported in your Prometheus version. <a href=\\"/path/to/docs\\">More information</a>"`;
+exports[`GroupEmptyState given state BAD_QUERY renders the slotted content 1`] = `
+<div>
+ <div>
+ The Prometheus server responded with "bad request". Please check your queries are correct and are supported in your Prometheus version.
+ <a
+ href="/path/to/docs"
+ >
+ More information
+ </a>
+ </div>
+</div>
+`;
-exports[`GroupEmptyState Renders an empty state for CONNECTION_FAILED 1`] = `
-<gl-empty-state-stub
- compact="true"
- description="We couldn't reach the Prometheus server. Either the server no longer exists or the configuration details need updating."
- primarybuttonlink="/path/to/settings"
- primarybuttontext="Verify configuration"
- svgpath="/path/to/empty-group-illustration.svg"
- title="Connection failed"
-/>
+exports[`GroupEmptyState given state CONNECTION_FAILED passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": "We couldn't reach the Prometheus server. Either the server no longer exists or the configuration details need updating.",
+ "primaryButtonLink": "/path/to/settings",
+ "primaryButtonText": "Verify configuration",
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "Connection failed",
+}
`;
-exports[`GroupEmptyState Renders an empty state for CONNECTION_FAILED 2`] = `undefined`;
+exports[`GroupEmptyState given state CONNECTION_FAILED renders the slotted content 1`] = `<div />`;
-exports[`GroupEmptyState Renders an empty state for FOO STATE 1`] = `
-<gl-empty-state-stub
- compact="true"
- description="An error occurred while loading the data. Please try again."
- svgpath="/path/to/empty-group-illustration.svg"
- title="An error has occurred"
-/>
+exports[`GroupEmptyState given state FOO STATE passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": "An error occurred while loading the data. Please try again.",
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "An error has occurred",
+}
`;
-exports[`GroupEmptyState Renders an empty state for FOO STATE 2`] = `undefined`;
+exports[`GroupEmptyState given state FOO STATE renders the slotted content 1`] = `<div />`;
-exports[`GroupEmptyState Renders an empty state for LOADING 1`] = `
-<gl-empty-state-stub
- compact="true"
- description="Creating graphs uses the data from the Prometheus server. If this takes a long time, ensure that data is available."
- svgpath="/path/to/empty-group-illustration.svg"
- title="Waiting for performance data"
-/>
+exports[`GroupEmptyState given state LOADING passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": "Creating graphs uses the data from the Prometheus server. If this takes a long time, ensure that data is available.",
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "Waiting for performance data",
+}
`;
-exports[`GroupEmptyState Renders an empty state for LOADING 2`] = `undefined`;
+exports[`GroupEmptyState given state LOADING renders the slotted content 1`] = `<div />`;
-exports[`GroupEmptyState Renders an empty state for NO_DATA 1`] = `
-<gl-empty-state-stub
- compact="true"
- svgpath="/path/to/empty-group-illustration.svg"
- title="No data to display"
-/>
+exports[`GroupEmptyState given state NO_DATA passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": null,
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "No data to display",
+}
`;
-exports[`GroupEmptyState Renders an empty state for NO_DATA 2`] = `"The data source is connected, but there is no data to display. <a href=\\"/path/to/docs\\">More information</a>"`;
+exports[`GroupEmptyState given state NO_DATA renders the slotted content 1`] = `
+<div>
+ <div>
+ The data source is connected, but there is no data to display.
+ <a
+ href="/path/to/docs"
+ >
+ More information
+ </a>
+ </div>
+</div>
+`;
-exports[`GroupEmptyState Renders an empty state for TIMEOUT 1`] = `
-<gl-empty-state-stub
- compact="true"
- svgpath="/path/to/empty-group-illustration.svg"
- title="Connection timed out"
-/>
+exports[`GroupEmptyState given state TIMEOUT passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": null,
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "Connection timed out",
+}
`;
-exports[`GroupEmptyState Renders an empty state for TIMEOUT 2`] = `"Charts can't be displayed as the request for data has timed out. <a href=\\"/path/to/docs\\">More information</a>"`;
+exports[`GroupEmptyState given state TIMEOUT renders the slotted content 1`] = `
+<div>
+ <div>
+ Charts can't be displayed as the request for data has timed out.
+ <a
+ href="/path/to/docs"
+ >
+ More information
+ </a>
+ </div>
+</div>
+`;
-exports[`GroupEmptyState Renders an empty state for UNKNOWN_ERROR 1`] = `
-<gl-empty-state-stub
- compact="true"
- description="An error occurred while loading the data. Please try again."
- svgpath="/path/to/empty-group-illustration.svg"
- title="An error has occurred"
-/>
+exports[`GroupEmptyState given state UNKNOWN_ERROR passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": "An error occurred while loading the data. Please try again.",
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "An error has occurred",
+}
`;
-exports[`GroupEmptyState Renders an empty state for UNKNOWN_ERROR 2`] = `undefined`;
+exports[`GroupEmptyState given state UNKNOWN_ERROR renders the slotted content 1`] = `<div />`;
diff --git a/spec/frontend/monitoring/components/dashboard_panel_spec.js b/spec/frontend/monitoring/components/dashboard_panel_spec.js
index 8947a6c1570..ee0e1fd3176 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_spec.js
@@ -38,8 +38,6 @@ import MonitorStackedColumnChart from '~/monitoring/components/charts/stacked_co
import { createStore, monitoringDashboard } from '~/monitoring/stores';
import { createStore as createEmbedGroupStore } from '~/monitoring/stores/embed_group';
-global.URL.createObjectURL = jest.fn();
-
const mocks = {
$toast: {
show: jest.fn(),
@@ -94,6 +92,8 @@ describe('Dashboard Panel', () => {
state = store.state.monitoringDashboard;
axiosMock = new AxiosMockAdapter(axios);
+
+ jest.spyOn(URL, 'createObjectURL');
});
afterEach(() => {
diff --git a/spec/frontend/monitoring/components/group_empty_state_spec.js b/spec/frontend/monitoring/components/group_empty_state_spec.js
index 90bd6f67196..3b94c4c6806 100644
--- a/spec/frontend/monitoring/components/group_empty_state_spec.js
+++ b/spec/frontend/monitoring/components/group_empty_state_spec.js
@@ -1,7 +1,13 @@
+import { GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
import { metricStates } from '~/monitoring/constants';
+const MockGlEmptyState = {
+ props: GlEmptyState.props,
+ template: '<div><slot name="description"></slot></div>',
+};
+
function createComponent(props) {
return shallowMount(GroupEmptyState, {
propsData: {
@@ -10,11 +16,20 @@ function createComponent(props) {
settingsPath: '/path/to/settings',
svgPath: '/path/to/empty-group-illustration.svg',
},
+ stubs: {
+ GlEmptyState: MockGlEmptyState,
+ },
});
}
describe('GroupEmptyState', () => {
- const supportedStates = [
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe.each([
metricStates.NO_DATA,
metricStates.TIMEOUT,
metricStates.CONNECTION_FAILED,
@@ -22,13 +37,17 @@ describe('GroupEmptyState', () => {
metricStates.LOADING,
metricStates.UNKNOWN_ERROR,
'FOO STATE', // does not fail with unknown states
- ];
+ ])('given state %s', selectedState => {
+ beforeEach(() => {
+ wrapper = createComponent({ selectedState });
+ });
- it.each(supportedStates)('Renders an empty state for %s', selectedState => {
- const wrapper = createComponent({ selectedState });
+ it('renders the slotted content', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
- expect(wrapper.element).toMatchSnapshot();
- // slot is not rendered by the stub, test it separately
- expect(wrapper.vm.currentState.slottedDescription).toMatchSnapshot();
+ it('passes the expected props to GlEmptyState', () => {
+ expect(wrapper.find(MockGlEmptyState).props()).toMatchSnapshot();
+ });
});
});
diff --git a/spec/frontend/monitoring/router_spec.js b/spec/frontend/monitoring/router_spec.js
index 8b97c8ed125..2bf2065b178 100644
--- a/spec/frontend/monitoring/router_spec.js
+++ b/spec/frontend/monitoring/router_spec.js
@@ -105,8 +105,7 @@ describe('Monitoring router', () => {
path | currentDashboard
${'/panel/new'} | ${undefined}
${'/dashboard.yml/panel/new'} | ${'dashboard.yml'}
- ${'/config/prometheus/common_metrics.yml/panel/new'} | ${'config/prometheus/common_metrics.yml'}
- ${'/config%2Fprometheus%2Fcommon_metrics.yml/panel/new'} | ${'config/prometheus/common_metrics.yml'}
+ ${'/config%2Fprometheus%2Fcommon_metrics.yml/panel/new'} | ${'config%2Fprometheus%2Fcommon_metrics.yml'}
`('"$path" renders page with dashboard "$currentDashboard"', ({ path, currentDashboard }) => {
const wrapper = createWrapper(BASE_PATH, path);
diff --git a/spec/frontend/notes/components/discussion_counter_spec.js b/spec/frontend/notes/components/discussion_counter_spec.js
index affd6c1d1d2..d82590c7e9e 100644
--- a/spec/frontend/notes/components/discussion_counter_spec.js
+++ b/spec/frontend/notes/components/discussion_counter_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlIcon } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import notesModule from '~/notes/stores/modules';
import DiscussionCounter from '~/notes/components/discussion_counter.vue';
import { noteableDataMock, discussionMock, notesDataMock, userDataMock } from '../mock_data';
@@ -9,6 +9,7 @@ import * as types from '~/notes/stores/mutation_types';
describe('DiscussionCounter component', () => {
let store;
let wrapper;
+ let setExpandDiscussionsFn;
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -16,6 +17,7 @@ describe('DiscussionCounter component', () => {
beforeEach(() => {
window.mrTabs = {};
const { state, getters, mutations, actions } = notesModule();
+ setExpandDiscussionsFn = jest.fn().mockImplementation(actions.setExpandDiscussions);
store = new Vuex.Store({
state: {
@@ -24,7 +26,10 @@ describe('DiscussionCounter component', () => {
},
getters,
mutations,
- actions,
+ actions: {
+ ...actions,
+ setExpandDiscussions: setExpandDiscussionsFn,
+ },
});
store.dispatch('setNoteableData', {
...noteableDataMock,
@@ -84,7 +89,7 @@ describe('DiscussionCounter component', () => {
wrapper = shallowMount(DiscussionCounter, { store, localVue });
expect(wrapper.find(`.is-active`).exists()).toBe(isActive);
- expect(wrapper.findAll('[role="group"').length).toBe(groupLength);
+ expect(wrapper.findAll(GlButton)).toHaveLength(groupLength);
});
});
@@ -103,23 +108,22 @@ describe('DiscussionCounter component', () => {
it('calls button handler when clicked', () => {
updateStoreWithExpanded(true);
- wrapper.setMethods({ handleExpandDiscussions: jest.fn() });
- toggleAllButton.trigger('click');
+ toggleAllButton.vm.$emit('click');
- expect(wrapper.vm.handleExpandDiscussions).toHaveBeenCalledTimes(1);
+ expect(setExpandDiscussionsFn).toHaveBeenCalledTimes(1);
});
it('collapses all discussions if expanded', () => {
updateStoreWithExpanded(true);
expect(wrapper.vm.allExpanded).toBe(true);
- expect(toggleAllButton.find(GlIcon).props().name).toBe('angle-up');
+ expect(toggleAllButton.props('icon')).toBe('angle-up');
- toggleAllButton.trigger('click');
+ toggleAllButton.vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.allExpanded).toBe(false);
- expect(toggleAllButton.find(GlIcon).props().name).toBe('angle-down');
+ expect(toggleAllButton.props('icon')).toBe('angle-down');
});
});
@@ -127,13 +131,13 @@ describe('DiscussionCounter component', () => {
updateStoreWithExpanded(false);
expect(wrapper.vm.allExpanded).toBe(false);
- expect(toggleAllButton.find(GlIcon).props().name).toBe('angle-down');
+ expect(toggleAllButton.props('icon')).toBe('angle-down');
- toggleAllButton.trigger('click');
+ toggleAllButton.vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.allExpanded).toBe(true);
- expect(toggleAllButton.find(GlIcon).props().name).toBe('angle-up');
+ expect(toggleAllButton.props('icon')).toBe('angle-up');
});
});
});
diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js
index 91ff796b9de..9f3655c53b9 100644
--- a/spec/frontend/notes/components/discussion_filter_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_spec.js
@@ -25,6 +25,8 @@ describe('DiscussionFilter component', () => {
const filterDiscussion = jest.fn();
+ const findFilter = filterType => wrapper.find(`.dropdown-item[data-filter-type="${filterType}"]`);
+
const mountComponent = () => {
const discussions = [
{
@@ -74,22 +76,22 @@ describe('DiscussionFilter component', () => {
});
it('renders the all filters', () => {
- expect(wrapper.findAll('.dropdown-menu li').length).toBe(discussionFiltersMock.length);
+ expect(wrapper.findAll('.discussion-filter-container .dropdown-item').length).toBe(
+ discussionFiltersMock.length,
+ );
});
it('renders the default selected item', () => {
expect(
wrapper
- .find('#discussion-filter-dropdown')
+ .find('#discussion-filter-dropdown .dropdown-item')
.text()
.trim(),
).toBe(discussionFiltersMock[0].title);
});
it('updates to the selected item', () => {
- const filterItem = wrapper.find(
- `.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.HISTORY}"] button`,
- );
+ const filterItem = findFilter(DISCUSSION_FILTER_TYPES.ALL);
filterItem.trigger('click');
@@ -97,37 +99,37 @@ describe('DiscussionFilter component', () => {
});
it('only updates when selected filter changes', () => {
- wrapper
- .find(`.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.ALL}"] button`)
- .trigger('click');
+ findFilter(DISCUSSION_FILTER_TYPES.ALL).trigger('click');
expect(filterDiscussion).not.toHaveBeenCalled();
});
+ it('disables timeline view if it was enabled', () => {
+ store.state.isTimelineEnabled = true;
+
+ findFilter(DISCUSSION_FILTER_TYPES.HISTORY).trigger('click');
+
+ expect(wrapper.vm.$store.state.isTimelineEnabled).toBe(false);
+ });
+
it('disables commenting when "Show history only" filter is applied', () => {
- const filterItem = wrapper.find(
- `.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.HISTORY}"] button`,
- );
- filterItem.trigger('click');
+ findFilter(DISCUSSION_FILTER_TYPES.HISTORY).trigger('click');
expect(wrapper.vm.$store.state.commentsDisabled).toBe(true);
});
it('enables commenting when "Show history only" filter is not applied', () => {
- const filterItem = wrapper.find(
- `.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.ALL}"] button`,
- );
- filterItem.trigger('click');
+ findFilter(DISCUSSION_FILTER_TYPES.ALL).trigger('click');
expect(wrapper.vm.$store.state.commentsDisabled).toBe(false);
});
it('renders a dropdown divider for the default filter', () => {
const defaultFilter = wrapper.findAll(
- `.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.ALL}"] > *`,
+ `.discussion-filter-container .dropdown-item-wrapper > *`,
);
- expect(defaultFilter.at(defaultFilter.length - 1).classes('dropdown-divider')).toBe(true);
+ expect(defaultFilter.at(1).classes('gl-new-dropdown-divider')).toBe(true);
});
describe('Merge request tabs', () => {
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index c6034639a4a..e905a12919e 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -174,6 +174,23 @@ describe('note_app', () => {
});
});
+ describe('timeline view', () => {
+ beforeEach(() => {
+ setFixtures('<div class="js-discussions-count"></div>');
+
+ axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
+ store.state.commentsDisabled = false;
+ store.state.isTimelineEnabled = true;
+
+ wrapper = mountComponent();
+ return waitForDiscussionsRequest();
+ });
+
+ it('should not render comments form', () => {
+ expect(wrapper.find('.js-main-target-form').exists()).toBe(false);
+ });
+ });
+
describe('while fetching data', () => {
beforeEach(() => {
setFixtures('<div class="js-discussions-count"></div>');
diff --git a/spec/frontend/notes/components/sort_discussion_spec.js b/spec/frontend/notes/components/sort_discussion_spec.js
index 575f1057db2..739e247735d 100644
--- a/spec/frontend/notes/components/sort_discussion_spec.js
+++ b/spec/frontend/notes/components/sort_discussion_spec.js
@@ -46,7 +46,7 @@ describe('Sort Discussion component', () => {
it('calls setDiscussionSortDirection when update is emitted', () => {
findLocalStorageSync().vm.$emit('input', ASC);
- expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', ASC);
+ expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', { direction: ASC });
});
});
@@ -55,9 +55,11 @@ describe('Sort Discussion component', () => {
it('calls the right actions', () => {
createComponent();
- wrapper.find('.js-newest-first').trigger('click');
+ wrapper.find('.js-newest-first').vm.$emit('click');
- expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', DESC);
+ expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', {
+ direction: DESC,
+ });
expect(Tracking.event).toHaveBeenCalledWith(undefined, 'change_discussion_sort_direction', {
property: DESC,
});
@@ -67,7 +69,7 @@ describe('Sort Discussion component', () => {
it('shows the "Oldest First" as the dropdown', () => {
createComponent();
- expect(wrapper.find('.js-dropdown-text').text()).toBe('Oldest first');
+ expect(wrapper.find('.js-dropdown-text').props('text')).toBe('Oldest first');
});
});
@@ -79,21 +81,23 @@ describe('Sort Discussion component', () => {
describe('when the dropdown item is clicked', () => {
it('calls the right actions', () => {
- wrapper.find('.js-oldest-first').trigger('click');
+ wrapper.find('.js-oldest-first').vm.$emit('click');
- expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', ASC);
+ expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', {
+ direction: ASC,
+ });
expect(Tracking.event).toHaveBeenCalledWith(undefined, 'change_discussion_sort_direction', {
property: ASC,
});
});
- it('applies the active class to the correct button in the dropdown', () => {
- expect(wrapper.find('.js-newest-first').classes()).toContain('is-active');
+ it('sets is-checked to true on the active button in the dropdown', () => {
+ expect(wrapper.find('.js-newest-first').props('isChecked')).toBe(true);
});
});
it('shows the "Newest First" as the dropdown', () => {
- expect(wrapper.find('.js-dropdown-text').text()).toBe('Newest first');
+ expect(wrapper.find('.js-dropdown-text').props('text')).toBe('Newest first');
});
});
});
diff --git a/spec/frontend/notes/components/timeline_toggle_spec.js b/spec/frontend/notes/components/timeline_toggle_spec.js
new file mode 100644
index 00000000000..b8df6fc7996
--- /dev/null
+++ b/spec/frontend/notes/components/timeline_toggle_spec.js
@@ -0,0 +1,117 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import Vuex from 'vuex';
+import TimelineToggle, {
+ timelineEnabledTooltip,
+ timelineDisabledTooltip,
+} from '~/notes/components/timeline_toggle.vue';
+import createStore from '~/notes/stores';
+import { ASC, DESC } from '~/notes/constants';
+import { trackToggleTimelineView } from '~/notes/utils';
+import Tracking from '~/tracking';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Timeline toggle', () => {
+ let wrapper;
+ let store;
+ const mockEvent = { currentTarget: { blur: jest.fn() } };
+
+ const createComponent = () => {
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ jest.spyOn(Tracking, 'event').mockImplementation();
+
+ wrapper = shallowMount(TimelineToggle, {
+ localVue,
+ store,
+ });
+ };
+
+ const findGlButton = () => wrapper.find(GlButton);
+
+ beforeEach(() => {
+ store = createStore();
+ createComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ store.dispatch.mockReset();
+ mockEvent.currentTarget.blur.mockReset();
+ Tracking.event.mockReset();
+ });
+
+ describe('ON state', () => {
+ it('should update timeline flag in the store', () => {
+ store.state.isTimelineEnabled = false;
+ findGlButton().vm.$emit('click', mockEvent);
+ expect(store.dispatch).toHaveBeenCalledWith('setTimelineView', true);
+ });
+
+ it('should set sort direction to DESC if not set', () => {
+ store.state.isTimelineEnabled = true;
+ store.state.sortDirection = ASC;
+ findGlButton().vm.$emit('click', mockEvent);
+ expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', {
+ direction: DESC,
+ persist: false,
+ });
+ });
+
+ it('should set correct UI state', async () => {
+ store.state.isTimelineEnabled = true;
+ findGlButton().vm.$emit('click', mockEvent);
+ await wrapper.vm.$nextTick();
+ expect(findGlButton().attributes('title')).toBe(timelineEnabledTooltip);
+ expect(findGlButton().attributes('selected')).toBe('true');
+ expect(mockEvent.currentTarget.blur).toHaveBeenCalled();
+ });
+
+ it('should track Snowplow event', async () => {
+ store.state.isTimelineEnabled = true;
+ await wrapper.vm.$nextTick();
+
+ findGlButton().trigger('click');
+
+ const { category, action, label, property, value } = trackToggleTimelineView(true);
+ expect(Tracking.event).toHaveBeenCalledWith(category, action, { label, property, value });
+ });
+ });
+
+ describe('OFF state', () => {
+ it('should update timeline flag in the store', () => {
+ store.state.isTimelineEnabled = true;
+ findGlButton().vm.$emit('click', mockEvent);
+ expect(store.dispatch).toHaveBeenCalledWith('setTimelineView', false);
+ });
+
+ it('should NOT update sort direction', () => {
+ store.state.isTimelineEnabled = false;
+ findGlButton().vm.$emit('click', mockEvent);
+      expect(store.dispatch).not.toHaveBeenCalledWith('setDiscussionSortDirection', expect.anything());
+ });
+
+ it('should set correct UI state', async () => {
+ store.state.isTimelineEnabled = false;
+ findGlButton().vm.$emit('click', mockEvent);
+ await wrapper.vm.$nextTick();
+ expect(findGlButton().attributes('title')).toBe(timelineDisabledTooltip);
+ expect(findGlButton().attributes('selected')).toBe(undefined);
+ expect(mockEvent.currentTarget.blur).toHaveBeenCalled();
+ });
+
+ it('should track Snowplow event', async () => {
+ store.state.isTimelineEnabled = false;
+ await wrapper.vm.$nextTick();
+
+ findGlButton().trigger('click');
+
+ const { category, action, label, property, value } = trackToggleTimelineView(false);
+ expect(Tracking.event).toHaveBeenCalledWith(category, action, { label, property, value });
+ });
+ });
+});
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 4681f3aa429..920959f41e7 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -1144,9 +1144,14 @@ describe('Actions Notes Store', () => {
it('calls the correct mutation with the correct args', done => {
testAction(
actions.setDiscussionSortDirection,
- notesConstants.DESC,
+ { direction: notesConstants.DESC, persist: false },
{},
- [{ type: mutationTypes.SET_DISCUSSIONS_SORT, payload: notesConstants.DESC }],
+ [
+ {
+ type: mutationTypes.SET_DISCUSSIONS_SORT,
+ payload: { direction: notesConstants.DESC, persist: false },
+ },
+ ],
[],
done,
);
diff --git a/spec/frontend/notes/stores/getters_spec.js b/spec/frontend/notes/stores/getters_spec.js
index a07aa45d812..1a369caee49 100644
--- a/spec/frontend/notes/stores/getters_spec.js
+++ b/spec/frontend/notes/stores/getters_spec.js
@@ -6,6 +6,7 @@ import {
noteableDataMock,
individualNote,
collapseNotesMock,
+ discussionMock,
discussion1,
discussion2,
discussion3,
@@ -65,6 +66,18 @@ describe('Getters Notes Store', () => {
it('should return all discussions in the store', () => {
expect(getters.discussions(state)).toEqual([individualNote]);
});
+
+ it('should transform discussion to individual notes in timeline view', () => {
+ state.discussions = [discussionMock];
+ state.isTimelineEnabled = true;
+
+ expect(getters.discussions(state).length).toEqual(discussionMock.notes.length);
+ getters.discussions(state).forEach(discussion => {
+ expect(discussion.individual_note).toBe(true);
+ expect(discussion.id).toBe(discussion.notes[0].id);
+ expect(discussion.created_at).toBe(discussion.notes[0].created_at);
+ });
+ });
});
describe('resolvedDiscussionsById', () => {
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index b953bffc4fe..2618c3a53b8 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -680,9 +680,10 @@ describe('Notes Store mutations', () => {
});
it('sets sort order', () => {
- mutations.SET_DISCUSSIONS_SORT(state, DESC);
+ mutations.SET_DISCUSSIONS_SORT(state, { direction: DESC, persist: false });
expect(state.discussionSortOrder).toBe(DESC);
+ expect(state.persistSortOrder).toBe(false);
});
});
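// Consistent with the assertion above, SET_DISCUSSIONS_SORT now receives an object payload; a
// hypothetical sketch of the mutation (the `persistSortOrder` name is taken from the expectation):
export default {
  SET_DISCUSSIONS_SORT(state, { direction, persist }) {
    state.discussionSortOrder = direction;
    state.persistSortOrder = persist;
  },
};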
diff --git a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
index 4d9e0af1545..d317264bdae 100644
--- a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
+++ b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
@@ -2,151 +2,163 @@
exports[`PackageTitle renders with tags 1`] = `
<div
- class="gl-display-flex gl-justify-content-space-between gl-py-3"
+ class="gl-display-flex gl-flex-direction-column"
data-qa-selector="package_title"
>
<div
- class="gl-flex-direction-column"
+ class="gl-display-flex gl-justify-content-space-between gl-py-3"
>
<div
- class="gl-display-flex"
+ class="gl-flex-direction-column"
>
- <!---->
-
<div
- class="gl-display-flex gl-flex-direction-column"
+ class="gl-display-flex"
>
- <h1
- class="gl-font-size-h1 gl-mt-3 gl-mb-2"
- data-testid="title"
- >
- Test package
- </h1>
+ <!---->
<div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ class="gl-display-flex gl-flex-direction-column"
>
- <gl-icon-stub
- class="gl-mr-3"
- name="eye"
- size="16"
- />
+ <h1
+ class="gl-font-size-h1 gl-mt-3 gl-mb-2"
+ data-testid="title"
+ >
+ Test package
+ </h1>
- <gl-sprintf-stub
- message="v%{version} published %{timeAgo}"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ >
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="eye"
+ size="16"
+ />
+
+ <gl-sprintf-stub
+ message="v%{version} published %{timeAgo}"
+ />
+ </div>
</div>
</div>
- </div>
-
- <div
- class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
- >
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-type"
- icon="package"
- link=""
- size="s"
- text="maven"
- />
- </div>
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-size"
- icon="disk"
- link=""
- size="s"
- text="300 bytes"
- />
- </div>
+
<div
- class="gl-display-flex gl-align-items-center gl-mr-5"
+ class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
>
- <package-tags-stub
- hidelabel="true"
- tagdisplaylimit="2"
- tags="[object Object],[object Object],[object Object],[object Object]"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-type"
+ icon="package"
+ link=""
+ size="s"
+ text="maven"
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-size"
+ icon="disk"
+ link=""
+ size="s"
+ text="300 bytes"
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <package-tags-stub
+ hidelabel="true"
+ tagdisplaylimit="2"
+ tags="[object Object],[object Object],[object Object],[object Object]"
+ />
+ </div>
</div>
</div>
+
+ <!---->
</div>
- <!---->
+ <p />
</div>
`;
exports[`PackageTitle renders without tags 1`] = `
<div
- class="gl-display-flex gl-justify-content-space-between gl-py-3"
+ class="gl-display-flex gl-flex-direction-column"
data-qa-selector="package_title"
>
<div
- class="gl-flex-direction-column"
+ class="gl-display-flex gl-justify-content-space-between gl-py-3"
>
<div
- class="gl-display-flex"
+ class="gl-flex-direction-column"
>
- <!---->
-
<div
- class="gl-display-flex gl-flex-direction-column"
+ class="gl-display-flex"
>
- <h1
- class="gl-font-size-h1 gl-mt-3 gl-mb-2"
- data-testid="title"
- >
- Test package
- </h1>
+ <!---->
<div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ class="gl-display-flex gl-flex-direction-column"
>
- <gl-icon-stub
- class="gl-mr-3"
- name="eye"
- size="16"
- />
+ <h1
+ class="gl-font-size-h1 gl-mt-3 gl-mb-2"
+ data-testid="title"
+ >
+ Test package
+ </h1>
- <gl-sprintf-stub
- message="v%{version} published %{timeAgo}"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ >
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="eye"
+ size="16"
+ />
+
+ <gl-sprintf-stub
+ message="v%{version} published %{timeAgo}"
+ />
+ </div>
</div>
</div>
- </div>
-
- <div
- class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
- >
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-type"
- icon="package"
- link=""
- size="s"
- text="maven"
- />
- </div>
+
<div
- class="gl-display-flex gl-align-items-center gl-mr-5"
+ class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
>
- <metadata-item-stub
- data-testid="package-size"
- icon="disk"
- link=""
- size="s"
- text="300 bytes"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-type"
+ icon="package"
+ link=""
+ size="s"
+ text="maven"
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-size"
+ icon="disk"
+ link=""
+ size="s"
+ text="300 bytes"
+ />
+ </div>
</div>
</div>
+
+ <!---->
</div>
- <!---->
+ <p />
</div>
`;
diff --git a/spec/frontend/packages/details/components/composer_installation_spec.js b/spec/frontend/packages/details/components/composer_installation_spec.js
index c13981fbb87..b44609e8ae7 100644
--- a/spec/frontend/packages/details/components/composer_installation_spec.js
+++ b/spec/frontend/packages/details/components/composer_installation_spec.js
@@ -12,21 +12,23 @@ localVue.use(Vuex);
describe('ComposerInstallation', () => {
let wrapper;
+ let store;
const composerRegistryIncludeStr = 'foo/registry';
const composerPackageIncludeStr = 'foo/package';
- const store = new Vuex.Store({
- state: {
- packageEntity,
- composerHelpPath,
- },
- getters: {
- composerRegistryInclude: () => composerRegistryIncludeStr,
- composerPackageInclude: () => composerPackageIncludeStr,
- },
- });
+ const createStore = (groupExists = true) => {
+ store = new Vuex.Store({
+ state: { packageEntity, composerHelpPath },
+ getters: {
+ composerRegistryInclude: () => composerRegistryIncludeStr,
+ composerPackageInclude: () => composerPackageIncludeStr,
+ groupExists: () => groupExists,
+ },
+ });
+ };
+ const findRootNode = () => wrapper.find('[data-testid="root-node"]');
const findRegistryInclude = () => wrapper.find('[data-testid="registry-include"]');
const findPackageInclude = () => wrapper.find('[data-testid="package-include"]');
const findHelpText = () => wrapper.find('[data-testid="help-text"]');
@@ -42,15 +44,16 @@ describe('ComposerInstallation', () => {
});
}
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
});
describe('registry include command', () => {
+ beforeEach(() => {
+ createStore();
+ createComponent();
+ });
+
it('uses code_instructions', () => {
const registryIncludeCommand = findRegistryInclude();
expect(registryIncludeCommand.exists()).toBe(true);
@@ -62,11 +65,16 @@ describe('ComposerInstallation', () => {
});
it('has the correct title', () => {
- expect(findRegistryInclude().props('label')).toBe('composer.json registry include');
+ expect(findRegistryInclude().props('label')).toBe('Add composer registry');
});
});
describe('package include command', () => {
+ beforeEach(() => {
+ createStore();
+ createComponent();
+ });
+
it('uses code_instructions', () => {
const registryIncludeCommand = findPackageInclude();
expect(registryIncludeCommand.exists()).toBe(true);
@@ -78,7 +86,7 @@ describe('ComposerInstallation', () => {
});
it('has the correct title', () => {
- expect(findPackageInclude().props('label')).toBe('composer.json require package include');
+ expect(findPackageInclude().props('label')).toBe('Install package version');
});
it('has the correct help text', () => {
@@ -91,4 +99,20 @@ describe('ComposerInstallation', () => {
});
});
});
+
+ describe('root node', () => {
+ it('is normally rendered', () => {
+ createStore();
+ createComponent();
+
+ expect(findRootNode().exists()).toBe(true);
+ });
+
+ it('is not rendered when the group does not exist', () => {
+ createStore(false);
+ createComponent();
+
+ expect(findRootNode().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/packages/details/store/getters_spec.js b/spec/frontend/packages/details/store/getters_spec.js
index 0e95ee4cfd3..b8c2138e7f5 100644
--- a/spec/frontend/packages/details/store/getters_spec.js
+++ b/spec/frontend/packages/details/store/getters_spec.js
@@ -15,6 +15,7 @@ import {
pypiSetupCommand,
composerRegistryInclude,
composerPackageInclude,
+ groupExists,
} from '~/packages/details/store/getters';
import {
conanPackage,
@@ -31,7 +32,6 @@ import {
registryUrl,
pypiSetupCommandStr,
} from '../mock_data';
-import { generateConanRecipe } from '~/packages/details/utils';
import { NpmManager } from '~/packages/details/constants';
describe('Getters PackageDetails Store', () => {
@@ -53,8 +53,7 @@ describe('Getters PackageDetails Store', () => {
};
};
- const recipe = generateConanRecipe(conanPackage);
- const conanInstallationCommandStr = `conan install ${recipe} --remote=gitlab`;
+ const conanInstallationCommandStr = `conan install ${conanPackage.name} --remote=gitlab`;
const conanSetupCommandStr = `conan remote add gitlab ${registryUrl}`;
const mavenCommandStr = generateMavenCommand(packageWithoutBuildInfo.maven_metadatum);
@@ -69,11 +68,12 @@ describe('Getters PackageDetails Store', () => {
const nugetInstallationCommandStr = `nuget install ${nugetPackage.name} -Source "GitLab"`;
const nugetSetupCommandStr = `nuget source Add -Name "GitLab" -Source "${registryUrl}" -UserName <your_username> -Password <your_token>`;
- const pypiPipCommandStr = `pip install ${pypiPackage.name} --index-url ${registryUrl}`;
- const composerRegistryIncludeStr = '{"type":"composer","url":"foo"}';
- const composerPackageIncludeStr = JSON.stringify({
- [packageWithoutBuildInfo.name]: packageWithoutBuildInfo.version,
- });
+ const pypiPipCommandStr = `pip install ${pypiPackage.name} --extra-index-url ${registryUrl}`;
+ const composerRegistryIncludeStr =
+ 'composer config repositories.gitlab.com/123 \'{"type": "composer", "url": "foo"}\'';
+ const composerPackageIncludeStr = `composer req ${[packageWithoutBuildInfo.name]}:${
+ packageWithoutBuildInfo.version
+ }`;
describe('packagePipeline', () => {
it('should return the pipeline info when pipeline exists', () => {
@@ -101,7 +101,7 @@ describe('Getters PackageDetails Store', () => {
${packageWithoutBuildInfo} | ${'Maven'}
${npmPackage} | ${'NPM'}
${nugetPackage} | ${'NuGet'}
- ${pypiPackage} | ${'PyPi'}
+ ${pypiPackage} | ${'PyPI'}
`(`package type`, ({ packageEntity, expectedResult }) => {
beforeEach(() => setupState({ packageEntity }));
@@ -223,7 +223,7 @@ describe('Getters PackageDetails Store', () => {
describe('composer string getters', () => {
it('gets the correct composerRegistryInclude command', () => {
- setupState({ composerPath: 'foo' });
+ setupState({ composerPath: 'foo', composerConfigRepositoryName: 'gitlab.com/123' });
expect(composerRegistryInclude(state)).toBe(composerRegistryIncludeStr);
});
@@ -234,4 +234,18 @@ describe('Getters PackageDetails Store', () => {
expect(composerPackageInclude(state)).toBe(composerPackageIncludeStr);
});
});
+
+ describe('check if group', () => {
+ it('is set', () => {
+ setupState({ groupListUrl: '/groups/composer/-/packages' });
+
+ expect(groupExists(state)).toBe(true);
+ });
+
+ it('is not set', () => {
+ setupState({ groupListUrl: '' });
+
+ expect(groupExists(state)).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/packages/details/utils_spec.js b/spec/frontend/packages/details/utils_spec.js
deleted file mode 100644
index 087888016ee..00000000000
--- a/spec/frontend/packages/details/utils_spec.js
+++ /dev/null
@@ -1,24 +0,0 @@
-import { generateConanRecipe } from '~/packages/details/utils';
-import { conanPackage } from '../mock_data';
-
-describe('Package detail utils', () => {
- describe('generateConanRecipe', () => {
- it('correctly generates the conan recipe', () => {
- const recipe = generateConanRecipe(conanPackage);
-
- expect(recipe).toEqual(conanPackage.recipe);
- });
-
- it('returns an empty recipe when no information is supplied', () => {
- const recipe = generateConanRecipe({});
-
- expect(recipe).toEqual('/@/');
- });
-
- it('recipe returns empty strings for missing metadata', () => {
- const recipe = generateConanRecipe({ name: 'foo', version: '0.0.1' });
-
- expect(recipe).toBe('foo/0.0.1@/');
- });
- });
-});
diff --git a/spec/frontend/packages/list/coming_soon/helpers_spec.js b/spec/frontend/packages/list/coming_soon/helpers_spec.js
deleted file mode 100644
index 4a996bfad76..00000000000
--- a/spec/frontend/packages/list/coming_soon/helpers_spec.js
+++ /dev/null
@@ -1,36 +0,0 @@
-import * as comingSoon from '~/packages/list/coming_soon/helpers';
-import { fakeIssues, asGraphQLResponse, asViewModel } from './mock_data';
-
-jest.mock('~/api.js');
-
-describe('Coming Soon Helpers', () => {
- const [noLabels, acceptingMergeRequestLabel, workflowLabel] = fakeIssues;
-
- describe('toViewModel', () => {
- it('formats a GraphQL response correctly', () => {
- expect(comingSoon.toViewModel(asGraphQLResponse)).toEqual(asViewModel);
- });
- });
-
- describe('findWorkflowLabel', () => {
- it('finds a workflow label', () => {
- expect(comingSoon.findWorkflowLabel(workflowLabel.labels)).toEqual(workflowLabel.labels[0]);
- });
-
- it("returns undefined when there isn't one", () => {
- expect(comingSoon.findWorkflowLabel(noLabels.labels)).toBeUndefined();
- });
- });
-
- describe('findAcceptingContributionsLabel', () => {
- it('finds the correct label when it exists', () => {
- expect(comingSoon.findAcceptingContributionsLabel(acceptingMergeRequestLabel.labels)).toEqual(
- acceptingMergeRequestLabel.labels[0],
- );
- });
-
- it("returns undefined when there isn't one", () => {
- expect(comingSoon.findAcceptingContributionsLabel(noLabels.labels)).toBeUndefined();
- });
- });
-});
diff --git a/spec/frontend/packages/list/coming_soon/mock_data.js b/spec/frontend/packages/list/coming_soon/mock_data.js
deleted file mode 100644
index bb4568e4bd5..00000000000
--- a/spec/frontend/packages/list/coming_soon/mock_data.js
+++ /dev/null
@@ -1,90 +0,0 @@
-export const fakeIssues = [
- {
- id: 1,
- iid: 1,
- title: 'issue one',
- webUrl: 'foo',
- },
- {
- id: 2,
- iid: 2,
- title: 'issue two',
- labels: [{ title: 'Accepting merge requests', color: '#69d100' }],
- milestone: {
- title: '12.10',
- },
- webUrl: 'foo',
- },
- {
- id: 3,
- iid: 3,
- title: 'issue three',
- labels: [{ title: 'workflow::In dev', color: '#428bca' }],
- webUrl: 'foo',
- },
- {
- id: 4,
- iid: 4,
- title: 'issue four',
- labels: [
- { title: 'Accepting merge requests', color: '#69d100' },
- { title: 'workflow::In dev', color: '#428bca' },
- ],
- webUrl: 'foo',
- },
-];
-
-export const asGraphQLResponse = {
- project: {
- issues: {
- nodes: fakeIssues.map(x => ({
- ...x,
- labels: {
- nodes: x.labels,
- },
- })),
- },
- },
-};
-
-export const asViewModel = [
- {
- ...fakeIssues[0],
- labels: [],
- },
- {
- ...fakeIssues[1],
- labels: [
- {
- title: 'Accepting merge requests',
- color: '#69d100',
- scoped: false,
- },
- ],
- },
- {
- ...fakeIssues[2],
- labels: [
- {
- title: 'workflow::In dev',
- color: '#428bca',
- scoped: true,
- },
- ],
- },
- {
- ...fakeIssues[3],
- labels: [
- {
- title: 'workflow::In dev',
- color: '#428bca',
- scoped: true,
- },
- {
- title: 'Accepting merge requests',
- color: '#69d100',
- scoped: false,
- },
- ],
- },
-];
diff --git a/spec/frontend/packages/list/coming_soon/packages_coming_soon_spec.js b/spec/frontend/packages/list/coming_soon/packages_coming_soon_spec.js
deleted file mode 100644
index c4cdadc45e6..00000000000
--- a/spec/frontend/packages/list/coming_soon/packages_coming_soon_spec.js
+++ /dev/null
@@ -1,138 +0,0 @@
-import { GlEmptyState, GlSkeletonLoader, GlLabel } from '@gitlab/ui';
-import { mount, createLocalVue } from '@vue/test-utils';
-import VueApollo, { ApolloQuery } from 'vue-apollo';
-import ComingSoon from '~/packages/list/coming_soon/packages_coming_soon.vue';
-import { TrackingActions } from '~/packages/shared/constants';
-import { asViewModel } from './mock_data';
-import Tracking from '~/tracking';
-
-jest.mock('~/packages/list/coming_soon/helpers.js');
-
-const localVue = createLocalVue();
-localVue.use(VueApollo);
-
-describe('packages_coming_soon', () => {
- let wrapper;
-
- const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
- const findAllIssues = () => wrapper.findAll('[data-testid="issue-row"]');
- const findIssuesData = () =>
- findAllIssues().wrappers.map(x => {
- const titleLink = x.find('[data-testid="issue-title-link"]');
- const milestone = x.find('[data-testid="milestone"]');
- const issueIdLink = x.find('[data-testid="issue-id-link"]');
- const labels = x.findAll(GlLabel);
-
- const issueId = Number(issueIdLink.text().substr(1));
-
- return {
- id: issueId,
- iid: issueId,
- title: titleLink.text(),
- webUrl: titleLink.attributes('href'),
- labels: labels.wrappers.map(label => ({
- color: label.props('backgroundColor'),
- title: label.props('title'),
- scoped: label.props('scoped'),
- })),
- ...(milestone.exists() ? { milestone: { title: milestone.text() } } : {}),
- };
- });
- const findIssueTitleLink = () => wrapper.find('[data-testid="issue-title-link"]');
- const findIssueIdLink = () => wrapper.find('[data-testid="issue-id-link"]');
- const findEmptyState = () => wrapper.find(GlEmptyState);
-
- const mountComponent = (testParams = {}) => {
- const $apolloData = {
- loading: testParams.isLoading || false,
- };
-
- wrapper = mount(ComingSoon, {
- localVue,
- propsData: {
- illustration: 'foo',
- projectPath: 'foo',
- suggestedContributionsPath: 'foo',
- },
- stubs: {
- ApolloQuery,
- GlLink: true,
- },
- mocks: {
- $apolloData,
- },
- });
-
- // Mock the GraphQL query result
- wrapper.find(ApolloQuery).setData({
- result: {
- data: testParams.issues || asViewModel,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('when loading', () => {
- beforeEach(() => mountComponent({ isLoading: true }));
-
- it('renders the skeleton loader', () => {
- expect(findSkeletonLoader().exists()).toBe(true);
- });
- });
-
- describe('when there are no issues', () => {
- beforeEach(() => mountComponent({ issues: [] }));
-
- it('renders the empty state', () => {
- expect(findEmptyState().exists()).toBe(true);
- });
- });
-
- describe('when there are issues', () => {
- beforeEach(() => mountComponent());
-
- it('renders each issue', () => {
- expect(findIssuesData()).toEqual(asViewModel);
- });
- });
-
- describe('tracking', () => {
- const firstIssue = asViewModel[0];
- let eventSpy;
-
- beforeEach(() => {
- eventSpy = jest.spyOn(Tracking, 'event');
- mountComponent();
- });
-
- it('tracks when mounted', () => {
- expect(eventSpy).toHaveBeenCalledWith(undefined, TrackingActions.COMING_SOON_REQUESTED, {});
- });
-
- it('tracks when an issue title link is clicked', () => {
- eventSpy.mockClear();
-
- findIssueTitleLink().vm.$emit('click');
-
- expect(eventSpy).toHaveBeenCalledWith(undefined, TrackingActions.COMING_SOON_LIST, {
- label: firstIssue.title,
- value: firstIssue.iid,
- });
- });
-
- it('tracks when an issue id link is clicked', () => {
- eventSpy.mockClear();
-
- findIssueIdLink().vm.$emit('click');
-
- expect(eventSpy).toHaveBeenCalledWith(undefined, TrackingActions.COMING_SOON_LIST, {
- label: firstIssue.title,
- value: firstIssue.iid,
- });
- });
- });
-});
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
index 6ff9376565a..ce3a58c856d 100644
--- a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
+++ b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -1,457 +1,461 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`packages_list_app renders 1`] = `
-<b-tabs-stub
- activenavitemclass="gl-tab-nav-item-active gl-tab-nav-item-active-indigo"
- class="gl-tabs"
- contentclass=",gl-tab-content"
- navclass="gl-tabs-nav"
- nofade="true"
- nonavstyle="true"
- tag="div"
->
- <template>
-
- <b-tab-stub
- tag="div"
- title="All"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+<div>
+ <package-title-stub
+ packagehelpurl="foo"
+ />
+
+ <b-tabs-stub
+ activenavitemclass="gl-tab-nav-item-active gl-tab-nav-item-active-indigo"
+ class="gl-tabs"
+ contentclass=",gl-tab-content"
+ navclass="gl-tabs-nav"
+ nofade="true"
+ nonavstyle="true"
+ tag="div"
+ >
+ <template>
+
+ <b-tab-stub
+ tag="div"
+ title="All"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no packages yet
+ </h1>
- <!---->
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Composer"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Composer"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no Composer packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Composer packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no Composer packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no Composer packages yet
+ </h1>
- <!---->
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Conan"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Conan"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no Conan packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Conan packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no Conan packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no Conan packages yet
+ </h1>
- <!---->
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Maven"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Maven"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no Maven packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Maven packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no Maven packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no Maven packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
- <!---->
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="NPM"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="NPM"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no NPM packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no NPM packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no NPM packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no NPM packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
- <!---->
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="NuGet"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="NuGet"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no NuGet packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no NuGet packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no NuGet packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no NuGet packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
- <!---->
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="PyPi"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="PyPI"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no PyPi packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no PyPI packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no PyPi packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no PyPI packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
- <!---->
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
-
- <!---->
- </template>
- <template>
- <div
- class="gl-display-flex gl-align-self-center gl-py-2 gl-flex-grow-1 gl-justify-content-end"
- >
- <package-filter-stub
- class="mr-1"
- />
-
- <package-sort-stub />
- </div>
- </template>
-</b-tabs-stub>
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ </template>
+ <template>
+ <div
+ class="gl-display-flex gl-align-self-center gl-py-2 gl-flex-grow-1 gl-justify-content-end"
+ >
+ <package-filter-stub
+ class="gl-mr-2"
+ />
+
+ <package-sort-stub />
+ </div>
+ </template>
+ </b-tabs-stub>
+</div>
`;
diff --git a/spec/frontend/packages/list/components/packages_list_app_spec.js b/spec/frontend/packages/list/components/packages_list_app_spec.js
index 19ff4290f50..217096f822a 100644
--- a/spec/frontend/packages/list/components/packages_list_app_spec.js
+++ b/spec/frontend/packages/list/components/packages_list_app_spec.js
@@ -36,6 +36,7 @@ describe('packages_list_app', () => {
resourceId: 'project_id',
emptyListIllustration: 'helpSvg',
emptyListHelpUrl,
+ packageHelpUrl: 'foo',
},
filterQuery,
},
diff --git a/spec/frontend/packages/list/components/packages_title_spec.js b/spec/frontend/packages/list/components/packages_title_spec.js
new file mode 100644
index 00000000000..5e9ebd8ecb0
--- /dev/null
+++ b/spec/frontend/packages/list/components/packages_title_spec.js
@@ -0,0 +1,71 @@
+import { shallowMount } from '@vue/test-utils';
+import PackageTitle from '~/packages/list/components/package_title.vue';
+import TitleArea from '~/vue_shared/components/registry/title_area.vue';
+import MetadataItem from '~/vue_shared/components/registry/metadata_item.vue';
+import { LIST_INTRO_TEXT, LIST_TITLE_TEXT } from '~/packages/list/constants';
+
+describe('PackageTitle', () => {
+ let wrapper;
+ let store;
+
+ const findTitleArea = () => wrapper.find(TitleArea);
+ const findMetadataItem = () => wrapper.find(MetadataItem);
+
+ const mountComponent = (propsData = { packageHelpUrl: 'foo' }) => {
+ wrapper = shallowMount(PackageTitle, {
+ store,
+ propsData,
+ stubs: {
+ TitleArea,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('title area', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findTitleArea().exists()).toBe(true);
+ });
+
+ it('has the correct props', () => {
+ mountComponent();
+
+ expect(findTitleArea().props()).toMatchObject({
+ title: LIST_TITLE_TEXT,
+ infoMessages: [{ text: LIST_INTRO_TEXT, link: 'foo' }],
+ });
+ });
+ });
+
+ describe.each`
+ packagesCount | exist | text
+ ${null} | ${false} | ${''}
+ ${undefined} | ${false} | ${''}
+ ${0} | ${true} | ${'0 Packages'}
+ ${1} | ${true} | ${'1 Package'}
+ ${2} | ${true} | ${'2 Packages'}
+ `('when packagesCount is $packagesCount metadata item', ({ packagesCount, exist, text }) => {
+ beforeEach(() => {
+ mountComponent({ packagesCount, packageHelpUrl: 'foo' });
+ });
+
+ it(`is ${exist} that it exists`, () => {
+ expect(findMetadataItem().exists()).toBe(exist);
+ });
+
+ if (exist) {
+ it('has the correct props', () => {
+ expect(findMetadataItem().props()).toMatchObject({
+ icon: 'package',
+ text,
+ });
+ });
+ }
+ });
+});
diff --git a/spec/frontend/packages/list/stores/mutations_spec.js b/spec/frontend/packages/list/stores/mutations_spec.js
index 563a3dabbb3..0d424a0c011 100644
--- a/spec/frontend/packages/list/stores/mutations_spec.js
+++ b/spec/frontend/packages/list/stores/mutations_spec.js
@@ -18,7 +18,6 @@ describe('Mutations Registry Store', () => {
userCanDelete: '',
emptyListIllustration: 'foo',
emptyListHelpUrl: 'baz',
- comingSoonJson: '{ "project_path": "gitlab-org/gitlab-test" }',
};
const expectedState = {
diff --git a/spec/frontend/packages/mock_data.js b/spec/frontend/packages/mock_data.js
index b95d06428ff..d7494bf85d0 100644
--- a/spec/frontend/packages/mock_data.js
+++ b/spec/frontend/packages/mock_data.js
@@ -84,15 +84,15 @@ export const conanPackage = {
package_channel: 'stable',
package_username: 'conan+conan-package',
},
+ conan_package_name: 'conan-package',
created_at: '2015-12-10',
id: 3,
- name: 'conan-package',
+ name: 'conan-package/1.0.0@conan+conan-package/stable',
project_path: 'foo/bar/baz',
projectPathName: 'foo/bar/baz',
package_files: [],
package_type: 'conan',
project_id: 1,
- recipe: 'conan-package/1.0.0@conan+conan-package/stable',
updated_at: '2015-12-10',
version: '1.0.0',
_links,
diff --git a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
index 6aaefed92d0..5faae5690db 100644
--- a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
@@ -52,27 +52,6 @@ exports[`packages_list_row renders 1`] = `
<!---->
<div
- class="gl-display-flex gl-align-items-center"
- >
- <gl-icon-stub
- class="gl-ml-3 gl-mr-2 gl-min-w-0"
- name="review-list"
- size="16"
- />
-
- <gl-link-stub
- class="gl-text-body gl-min-w-0"
- data-testid="packages-row-project"
- href="/foo/bar/baz"
- >
- <gl-truncate-stub
- position="end"
- text="foo/bar/baz"
- />
- </gl-link-stub>
- </div>
-
- <div
class="d-flex align-items-center"
data-testid="package-type"
>
@@ -86,6 +65,10 @@ exports[`packages_list_row renders 1`] = `
Maven
</span>
</div>
+
+ <package-path-stub
+ path="foo/bar/baz"
+ />
</div>
</div>
</div>
@@ -118,6 +101,7 @@ exports[`packages_list_row renders 1`] = `
>
<gl-button-stub
aria-label="Remove package"
+ buttontextclasses=""
category="primary"
data-testid="action-delete"
icon="remove"
diff --git a/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap
index 9a0c52cee47..acdf7c49ebd 100644
--- a/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap
+++ b/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap
@@ -32,7 +32,8 @@ exports[`publish_method renders 1`] = `
</gl-link-stub>
<clipboard-button-stub
- cssclass="gl-border-0 gl-py-0 gl-px-2"
+ category="tertiary"
+ size="small"
text="sha-baz"
title="Copy commit SHA"
tooltipplacement="top"
diff --git a/spec/frontend/packages/shared/components/package_list_row_spec.js b/spec/frontend/packages/shared/components/package_list_row_spec.js
index f4eabf7bb67..0d0ea4e2122 100644
--- a/spec/frontend/packages/shared/components/package_list_row_spec.js
+++ b/spec/frontend/packages/shared/components/package_list_row_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
import PackageTags from '~/packages/shared/components/package_tags.vue';
+import PackagePath from '~/packages/shared/components/package_path.vue';
import ListItem from '~/vue_shared/components/registry/list_item.vue';
import { packageList } from '../../mock_data';
@@ -11,7 +12,7 @@ describe('packages_list_row', () => {
const [packageWithoutTags, packageWithTags] = packageList;
const findPackageTags = () => wrapper.find(PackageTags);
- const findProjectLink = () => wrapper.find('[data-testid="packages-row-project"]');
+ const findPackagePath = () => wrapper.find(PackagePath);
const findDeleteButton = () => wrapper.find('[data-testid="action-delete"]');
const findPackageType = () => wrapper.find('[data-testid="package-type"]');
@@ -63,8 +64,9 @@ describe('packages_list_row', () => {
mountComponent({ isGroup: true });
});
- it('has project field', () => {
- expect(findProjectLink().exists()).toBe(true);
+ it('has a package path component', () => {
+ expect(findPackagePath().exists()).toBe(true);
+ expect(findPackagePath().props()).toMatchObject({ path: 'foo/bar/baz' });
});
});
diff --git a/spec/frontend/packages/shared/components/package_path_spec.js b/spec/frontend/packages/shared/components/package_path_spec.js
new file mode 100644
index 00000000000..40d455ac77c
--- /dev/null
+++ b/spec/frontend/packages/shared/components/package_path_spec.js
@@ -0,0 +1,86 @@
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import PackagePath from '~/packages/shared/components/package_path.vue';
+
+describe('PackagePath', () => {
+ let wrapper;
+
+ const mountComponent = (propsData = { path: 'foo' }) => {
+ wrapper = shallowMount(PackagePath, {
+ propsData,
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const BASE_ICON = 'base-icon';
+ const ROOT_LINK = 'root-link';
+ const ROOT_CHEVRON = 'root-chevron';
+ const ELLIPSIS_ICON = 'ellipsis-icon';
+ const ELLIPSIS_CHEVRON = 'ellipsis-chevron';
+ const LEAF_LINK = 'leaf-link';
+
+ const findItem = name => wrapper.find(`[data-testid="${name}"]`);
+ const findTooltip = w => getBinding(w.element, 'gl-tooltip');
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe.each`
+ path | rootUrl | shouldExist | shouldNotExist
+ ${'foo/bar'} | ${'/foo/bar'} | ${[]} | ${[ROOT_CHEVRON, ELLIPSIS_ICON, ELLIPSIS_CHEVRON, LEAF_LINK]}
+ ${'foo/bar/baz'} | ${'/foo/bar'} | ${[ROOT_CHEVRON, LEAF_LINK]} | ${[ELLIPSIS_ICON, ELLIPSIS_CHEVRON]}
+ ${'foo/bar/baz/baz2'} | ${'/foo/bar'} | ${[ROOT_CHEVRON, LEAF_LINK, ELLIPSIS_ICON, ELLIPSIS_CHEVRON]} | ${[]}
+ ${'foo/bar/baz/baz2/bar2'} | ${'/foo/bar'} | ${[ROOT_CHEVRON, LEAF_LINK, ELLIPSIS_ICON, ELLIPSIS_CHEVRON]} | ${[]}
+ `('given path $path', ({ path, shouldExist, shouldNotExist, rootUrl }) => {
+ const pathPieces = path.split('/').slice(1);
+ const hasTooltip = shouldExist.includes(ELLIPSIS_ICON);
+
+ beforeEach(() => {
+ mountComponent({ path });
+ });
+
+ it('should have a base icon', () => {
+ expect(findItem(BASE_ICON).exists()).toBe(true);
+ });
+
+ it('should have a root link', () => {
+ const root = findItem(ROOT_LINK);
+ expect(root.exists()).toBe(true);
+ expect(root.attributes('href')).toBe(rootUrl);
+ });
+
+ if (hasTooltip) {
+ it('should have a tooltip', () => {
+ const tooltip = findTooltip(findItem(ELLIPSIS_ICON));
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toMatchObject({
+ title: path,
+ });
+ });
+ }
+
+ if (shouldExist.length) {
+ it.each(shouldExist)(`should have %s`, element => {
+ expect(findItem(element).exists()).toBe(true);
+ });
+ }
+
+ if (shouldNotExist.length) {
+ it.each(shouldNotExist)(`should not have %s`, element => {
+ expect(findItem(element).exists()).toBe(false);
+ });
+ }
+
+ if (shouldExist.includes(LEAF_LINK)) {
+ it('the last link should be the last piece of the path', () => {
+ const leaf = findItem(LEAF_LINK);
+ expect(leaf.attributes('href')).toBe(`/${path}`);
+ expect(leaf.text()).toBe(pathPieces[pathPieces.length - 1]);
+ });
+ }
+ });
+});
diff --git a/spec/frontend/packages/shared/utils_spec.js b/spec/frontend/packages/shared/utils_spec.js
index 1fe90a4827f..3e4ce8eb323 100644
--- a/spec/frontend/packages/shared/utils_spec.js
+++ b/spec/frontend/packages/shared/utils_spec.js
@@ -37,7 +37,7 @@ describe('Packages shared utils', () => {
${'maven'} | ${'Maven'}
${'npm'} | ${'NPM'}
${'nuget'} | ${'NuGet'}
- ${'pypi'} | ${'PyPi'}
+ ${'pypi'} | ${'PyPI'}
${'composer'} | ${'Composer'}
${'foo'} | ${null}
`(`package type`, ({ packageType, expectedResult }) => {
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
index 2fbc700d4f5..ddeaa2a79db 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
@@ -39,6 +39,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
/>
</form>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
size="medium"
@@ -48,6 +49,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
</gl-button-stub>
<gl-button-stub
+ buttontextclasses=""
category="primary"
disabled="true"
icon=""
@@ -60,6 +62,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
</gl-button-stub>
<gl-button-stub
+ buttontextclasses=""
category="primary"
disabled="true"
icon=""
diff --git a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
index 211f4ea20f5..8ccad7d5c22 100644
--- a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
+++ b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
@@ -9,65 +9,54 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
<!---->
- <gl-deprecated-dropdown-stub
+ <gl-dropdown-stub
+ category="tertiary"
+ headertext=""
+ size="medium"
text="rspec"
+ variant="default"
>
- <gl-deprecated-dropdown-item-stub
+ <gl-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischecked="true"
+ ischeckitem="true"
+ secondarytext=""
value="rspec"
>
- <div
- class="gl-display-flex"
- >
- <gl-icon-stub
- class="gl-absolute"
- name="mobile-issue-close"
- size="16"
- />
-
- <span
- class="gl-display-flex align-items-center ml-4"
- >
-
- rspec
-
- </span>
- </div>
- </gl-deprecated-dropdown-item-stub>
- <gl-deprecated-dropdown-item-stub
+
+ rspec
+
+ </gl-dropdown-item-stub>
+ <gl-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischeckitem="true"
+ secondarytext=""
value="cypress"
>
- <div
- class="gl-display-flex"
- >
- <!---->
-
- <span
- class="gl-display-flex align-items-center ml-4"
- >
-
- cypress
-
- </span>
- </div>
- </gl-deprecated-dropdown-item-stub>
- <gl-deprecated-dropdown-item-stub
+
+ cypress
+
+ </gl-dropdown-item-stub>
+ <gl-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischeckitem="true"
+ secondarytext=""
value="karma"
>
- <div
- class="gl-display-flex"
- >
- <!---->
-
- <span
- class="gl-display-flex align-items-center ml-4"
- >
-
- karma
-
- </span>
- </div>
- </gl-deprecated-dropdown-item-stub>
- </gl-deprecated-dropdown-stub>
+
+ karma
+
+ </gl-dropdown-item-stub>
+ </gl-dropdown-stub>
</div>
<gl-area-chart-stub
diff --git a/spec/frontend/pages/projects/graphs/code_coverage_spec.js b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
index 8884f7815ab..4a60c7fd509 100644
--- a/spec/frontend/pages/projects/graphs/code_coverage_spec.js
+++ b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import { shallowMount } from '@vue/test-utils';
-import { GlAlert, GlIcon, GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
+import { GlAlert, GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { GlAreaChart } from '@gitlab/ui/dist/charts';
import waitForPromises from 'helpers/wait_for_promises';
@@ -17,7 +17,7 @@ describe('Code Coverage', () => {
const findAlert = () => wrapper.find(GlAlert);
const findAreaChart = () => wrapper.find(GlAreaChart);
- const findAllDropdownItems = () => wrapper.findAll(GlDeprecatedDropdownItem);
+ const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
const findFirstDropdownItem = () => findAllDropdownItems().at(0);
const findSecondDropdownItem = () => findAllDropdownItems().at(1);
@@ -124,7 +124,7 @@ describe('Code Coverage', () => {
});
it('renders the dropdown with all custom names as options', () => {
- expect(wrapper.find(GlDeprecatedDropdown).exists()).toBeDefined();
+ expect(wrapper.find(GlDropdown).exists()).toBeDefined();
expect(findAllDropdownItems()).toHaveLength(codeCoverageMockData.length);
expect(findFirstDropdownItem().text()).toBe(codeCoverageMockData[0].group_name);
});
@@ -145,16 +145,8 @@ describe('Code Coverage', () => {
await wrapper.vm.$nextTick();
- expect(
- findFirstDropdownItem()
- .find(GlIcon)
- .exists(),
- ).toBe(false);
- expect(
- findSecondDropdownItem()
- .find(GlIcon)
- .exists(),
- ).toBe(true);
+ expect(findFirstDropdownItem().attributes('ischecked')).toBeFalsy();
+ expect(findSecondDropdownItem().attributes('ischecked')).toBeTruthy();
});
it('updates the graph data when selecting a different option in dropdown', async () => {
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
index 5a61f9fca69..5da998d9d2d 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
@@ -1,23 +1,18 @@
import Vue from 'vue';
import Cookies from 'js-cookie';
import PipelineSchedulesCallout from '~/pages/projects/pipeline_schedules/shared/components/pipeline_schedules_callout.vue';
-import '~/pages/projects/pipeline_schedules/shared/icons/intro_illustration.svg';
-
-jest.mock(
- '~/pages/projects/pipeline_schedules/shared/icons/intro_illustration.svg',
- () => '<svg></svg>',
-);
const PipelineSchedulesCalloutComponent = Vue.extend(PipelineSchedulesCallout);
const cookieKey = 'pipeline_schedules_callout_dismissed';
const docsUrl = 'help/ci/scheduled_pipelines';
+const imageUrl = 'pages/projects/pipeline_schedules/shared/icons/intro_illustration.svg';
describe('Pipeline Schedule Callout', () => {
let calloutComponent;
beforeEach(() => {
setFixtures(`
- <div id='pipeline-schedules-callout' data-docs-url=${docsUrl}></div>
+ <div id='pipeline-schedules-callout' data-docs-url=${docsUrl} data-image-url=${imageUrl}></div>
`);
});
@@ -30,13 +25,13 @@ describe('Pipeline Schedule Callout', () => {
expect(calloutComponent).toBeDefined();
});
- it('correctly sets illustrationSvg', () => {
- expect(calloutComponent.illustrationSvg).toContain('<svg');
- });
-
it('correctly sets docsUrl', () => {
expect(calloutComponent.docsUrl).toContain(docsUrl);
});
+
+ it('correctly sets imageUrl', () => {
+ expect(calloutComponent.imageUrl).toContain(imageUrl);
+ });
});
describe(`when ${cookieKey} cookie is set`, () => {
@@ -68,8 +63,8 @@ describe('Pipeline Schedule Callout', () => {
expect(calloutComponent.$el.querySelector('.bordered-box')).not.toBeNull();
});
- it('renders the callout svg', () => {
- expect(calloutComponent.$el.outerHTML).toContain('<svg');
+ it('renders the callout img', () => {
+ expect(calloutComponent.$el.outerHTML).toContain('<img');
});
it('renders the callout title', () => {
diff --git a/spec/frontend/performance_bar/index_spec.js b/spec/frontend/performance_bar/index_spec.js
index 1517142c21e..bcd2cbbd530 100644
--- a/spec/frontend/performance_bar/index_spec.js
+++ b/spec/frontend/performance_bar/index_spec.js
@@ -9,7 +9,6 @@ describe('performance bar wrapper', () => {
let vm;
beforeEach(() => {
- URL.createObjectURL = jest.fn();
performance.getEntriesByType = jest.fn().mockReturnValue([]);
// clear html so that elements from previous tests don't mess with this test
diff --git a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
index 97a92778f1a..040c0fbecc5 100644
--- a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
+++ b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
@@ -2,6 +2,7 @@ import { mount, shallowMount } from '@vue/test-utils';
import { GlDropdown, GlDropdownItem, GlForm, GlSprintf } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
+import httpStatusCodes from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
import PipelineNewForm from '~/pipeline_new/components/pipeline_new_form.vue';
import { mockRefs, mockParams, mockPostParams, mockProjectId, mockError } from '../mock_data';
@@ -11,7 +12,8 @@ jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
}));
-const pipelinesPath = '/root/project/-/pipleines';
+const pipelinesPath = '/root/project/-/pipelines';
+const configVariablesPath = '/root/project/-/pipelines/config_variables';
const postResponse = { id: 1 };
describe('Pipeline New Form', () => {
@@ -28,6 +30,7 @@ describe('Pipeline New Form', () => {
const findVariableRows = () => wrapper.findAll('[data-testid="ci-variable-row"]');
const findRemoveIcons = () => wrapper.findAll('[data-testid="remove-ci-variable-row"]');
const findKeyInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-key"]');
+ const findValueInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-value"]');
const findErrorAlert = () => wrapper.find('[data-testid="run-pipeline-error-alert"]');
const findWarningAlert = () => wrapper.find('[data-testid="run-pipeline-warning-alert"]');
const findWarningAlertSummary = () => findWarningAlert().find(GlSprintf);
@@ -39,6 +42,7 @@ describe('Pipeline New Form', () => {
propsData: {
projectId: mockProjectId,
pipelinesPath,
+ configVariablesPath,
refs: mockRefs,
defaultBranch: 'master',
settingsLink: '',
@@ -55,6 +59,7 @@ describe('Pipeline New Form', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
+ mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {});
});
afterEach(() => {
@@ -66,7 +71,7 @@ describe('Pipeline New Form', () => {
describe('Dropdown with branches and tags', () => {
beforeEach(() => {
- mock.onPost(pipelinesPath).reply(200, postResponse);
+ mock.onPost(pipelinesPath).reply(httpStatusCodes.OK, postResponse);
});
it('displays dropdown with all branches and tags', () => {
@@ -87,17 +92,27 @@ describe('Pipeline New Form', () => {
});
describe('Form', () => {
- beforeEach(() => {
+ beforeEach(async () => {
createComponent('', mockParams, mount);
- mock.onPost(pipelinesPath).reply(200, postResponse);
+ mock.onPost(pipelinesPath).reply(httpStatusCodes.OK, postResponse);
+
+ await waitForPromises();
});
+
it('displays the correct values for the provided query params', async () => {
expect(findDropdown().props('text')).toBe('tag-1');
+ expect(findVariableRows()).toHaveLength(3);
+ });
- await wrapper.vm.$nextTick();
+ it('displays a variable from provided query params', () => {
+ expect(findKeyInputs().at(0).element.value).toBe('test_var');
+ expect(findValueInputs().at(0).element.value).toBe('test_var_val');
+ });
- expect(findVariableRows()).toHaveLength(3);
+ it('displays an empty variable for the user to fill out', async () => {
+ expect(findKeyInputs().at(2).element.value).toBe('');
+ expect(findValueInputs().at(2).element.value).toBe('');
});
it('does not display remove icon for last row', () => {
@@ -124,13 +139,143 @@ describe('Pipeline New Form', () => {
});
it('creates blank variable on input change event', async () => {
- findKeyInputs()
- .at(2)
- .trigger('change');
+ const input = findKeyInputs().at(2);
+ input.element.value = 'test_var_2';
+ input.trigger('change');
await wrapper.vm.$nextTick();
expect(findVariableRows()).toHaveLength(4);
+ expect(findKeyInputs().at(3).element.value).toBe('');
+ expect(findValueInputs().at(3).element.value).toBe('');
+ });
+
+ describe('when the form has been modified', () => {
+ const selectRef = i =>
+ findDropdownItems()
+ .at(i)
+ .vm.$emit('click');
+
+ beforeEach(async () => {
+ const input = findKeyInputs().at(0);
+ input.element.value = 'test_var_2';
+ input.trigger('change');
+
+ findRemoveIcons()
+ .at(1)
+ .trigger('click');
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('form values are restored when the ref changes', async () => {
+ expect(findVariableRows()).toHaveLength(2);
+
+ selectRef(1);
+ await waitForPromises();
+
+ expect(findVariableRows()).toHaveLength(3);
+ expect(findKeyInputs().at(0).element.value).toBe('test_var');
+ });
+
+ it('form values are restored again when the ref is reverted', async () => {
+ selectRef(1);
+ await waitForPromises();
+
+ selectRef(2);
+ await waitForPromises();
+
+ expect(findVariableRows()).toHaveLength(2);
+ expect(findKeyInputs().at(0).element.value).toBe('test_var_2');
+ });
+ });
+ });
+
+ describe('when feature flag new_pipeline_form_prefilled_vars is enabled', () => {
+ let origGon;
+
+ const mockYmlKey = 'yml_var';
+ const mockYmlValue = 'yml_var_val';
+ const mockYmlDesc = 'A var from yml.';
+
+ beforeAll(() => {
+ origGon = window.gon;
+ window.gon = { features: { newPipelineFormPrefilledVars: true } };
+ });
+
+ afterAll(() => {
+ window.gon = origGon;
+ });
+
+ describe('when yml defines a variable with description', () => {
+ beforeEach(async () => {
+ createComponent('', mockParams, mount);
+
+ mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {
+ [mockYmlKey]: {
+ value: mockYmlValue,
+ description: mockYmlDesc,
+ },
+ });
+
+ await waitForPromises();
+ });
+
+ it('displays all the variables', async () => {
+ expect(findVariableRows()).toHaveLength(4);
+ });
+
+ it('displays a variable from yml', () => {
+ expect(findKeyInputs().at(0).element.value).toBe(mockYmlKey);
+ expect(findValueInputs().at(0).element.value).toBe(mockYmlValue);
+ });
+
+ it('displays a variable from provided query params', () => {
+ expect(findKeyInputs().at(1).element.value).toBe('test_var');
+ expect(findValueInputs().at(1).element.value).toBe('test_var_val');
+ });
+
+ it('adds a description to the first variable from yml', () => {
+ expect(
+ findVariableRows()
+ .at(0)
+ .text(),
+ ).toContain(mockYmlDesc);
+ });
+
+ it('removes the description when a variable key changes', async () => {
+ findKeyInputs().at(0).element.value = 'yml_var_modified';
+ findKeyInputs()
+ .at(0)
+ .trigger('change');
+
+ await wrapper.vm.$nextTick();
+
+ expect(
+ findVariableRows()
+ .at(0)
+ .text(),
+ ).not.toContain(mockYmlDesc);
+ });
+ });
+
+ describe('when yml defines a variable without description', () => {
+ beforeEach(async () => {
+ createComponent('', mockParams, mount);
+
+ mock.onGet(configVariablesPath).reply(httpStatusCodes.OK, {
+ [mockYmlKey]: {
+ value: mockYmlValue,
+ description: null,
+ },
+ });
+
+ await waitForPromises();
+ });
+
+ it('displays all the variables', async () => {
+ expect(findVariableRows()).toHaveLength(3);
+ });
});
});
@@ -138,7 +283,7 @@ describe('Pipeline New Form', () => {
beforeEach(() => {
createComponent();
- mock.onPost(pipelinesPath).reply(400, mockError);
+ mock.onPost(pipelinesPath).reply(httpStatusCodes.BAD_REQUEST, mockError);
findForm().vm.$emit('submit', dummySubmitEvent);
diff --git a/spec/frontend/pipeline_new/mock_data.js b/spec/frontend/pipeline_new/mock_data.js
index 55286e0ec7e..cdbd6d4437e 100644
--- a/spec/frontend/pipeline_new/mock_data.js
+++ b/spec/frontend/pipeline_new/mock_data.js
@@ -14,9 +14,9 @@ export const mockProjectId = '21';
export const mockPostParams = {
ref: 'tag-1',
- variables: [
- { key: 'test_var', value: 'test_var_val', variable_type: 'env_var' },
- { key: 'test_file', value: 'test_file_val', variable_type: 'file' },
+ variables_attributes: [
+ { key: 'test_var', secret_value: 'test_var_val', variable_type: 'env_var' },
+ { key: 'test_file', secret_value: 'test_file_val', variable_type: 'file' },
],
};
diff --git a/spec/frontend/pipelines/components/dag/dag_graph_spec.js b/spec/frontend/pipelines/components/dag/dag_graph_spec.js
index e312791b01f..7786212cb69 100644
--- a/spec/frontend/pipelines/components/dag/dag_graph_spec.js
+++ b/spec/frontend/pipelines/components/dag/dag_graph_spec.js
@@ -3,7 +3,7 @@ import DagGraph from '~/pipelines/components/dag/dag_graph.vue';
import { IS_HIGHLIGHTED, LINK_SELECTOR, NODE_SELECTOR } from '~/pipelines/components/dag/constants';
import { highlightIn, highlightOut } from '~/pipelines/components/dag/interactions';
import { createSankey } from '~/pipelines/components/dag/drawing_utils';
-import { removeOrphanNodes } from '~/pipelines/components/dag/parsing_utils';
+import { removeOrphanNodes } from '~/pipelines/components/parsing_utils';
import { parsedData } from './mock_data';
describe('The DAG graph', () => {
diff --git a/spec/frontend/pipelines/components/dag/dag_spec.js b/spec/frontend/pipelines/components/dag/dag_spec.js
index 989f6c17197..08a43199594 100644
--- a/spec/frontend/pipelines/components/dag/dag_spec.js
+++ b/spec/frontend/pipelines/components/dag/dag_spec.js
@@ -4,13 +4,8 @@ import Dag from '~/pipelines/components/dag/dag.vue';
import DagGraph from '~/pipelines/components/dag/dag_graph.vue';
import DagAnnotations from '~/pipelines/components/dag/dag_annotations.vue';
-import {
- ADD_NOTE,
- REMOVE_NOTE,
- REPLACE_NOTES,
- PARSE_FAILURE,
- UNSUPPORTED_DATA,
-} from '~/pipelines/components/dag//constants';
+import { ADD_NOTE, REMOVE_NOTE, REPLACE_NOTES } from '~/pipelines/components/dag/constants';
+import { PARSE_FAILURE, UNSUPPORTED_DATA } from '~/pipelines/constants';
import {
mockParsedGraphQLNodes,
tooSmallGraph,
diff --git a/spec/frontend/pipelines/components/dag/drawing_utils_spec.js b/spec/frontend/pipelines/components/dag/drawing_utils_spec.js
index 37a7d07485b..095ded01298 100644
--- a/spec/frontend/pipelines/components/dag/drawing_utils_spec.js
+++ b/spec/frontend/pipelines/components/dag/drawing_utils_spec.js
@@ -1,5 +1,5 @@
import { createSankey } from '~/pipelines/components/dag/drawing_utils';
-import { parseData } from '~/pipelines/components/dag/parsing_utils';
+import { parseData } from '~/pipelines/components/parsing_utils';
import { mockParsedGraphQLNodes } from './mock_data';
describe('DAG visualization drawing utilities', () => {
diff --git a/spec/frontend/pipelines/components/dag/parsing_utils_spec.js b/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
index e93fa8e6760..ceb6b64d4ad 100644
--- a/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
+++ b/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
@@ -5,7 +5,7 @@ import {
parseData,
removeOrphanNodes,
getMaxNodes,
-} from '~/pipelines/components/dag/parsing_utils';
+} from '~/pipelines/components/parsing_utils';
import { createSankey } from '~/pipelines/components/dag/drawing_utils';
import { mockParsedGraphQLNodes } from './mock_data';
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index d977db58a0e..062c9759a65 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -3,23 +3,27 @@ import { mount } from '@vue/test-utils';
import { setHTMLFixture } from 'helpers/fixtures';
import PipelineStore from '~/pipelines/stores/pipeline_store';
import graphComponent from '~/pipelines/components/graph/graph_component.vue';
-import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
import linkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
import graphJSON from './mock_data';
import linkedPipelineJSON from './linked_pipelines_mock_data';
import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
describe('graph component', () => {
- const store = new PipelineStore();
- store.storePipeline(linkedPipelineJSON);
- const mediator = new PipelinesMediator({ endpoint: '' });
-
+ let store;
+ let mediator;
let wrapper;
const findExpandPipelineBtn = () => wrapper.find('[data-testid="expandPipelineButton"]');
const findAllExpandPipelineBtns = () => wrapper.findAll('[data-testid="expandPipelineButton"]');
+ const findStageColumns = () => wrapper.findAll(StageColumnComponent);
+ const findStageColumnAt = i => findStageColumns().at(i);
beforeEach(() => {
+ mediator = new PipelinesMediator({ endpoint: '' });
+ store = new PipelineStore();
+ store.storePipeline(linkedPipelineJSON);
+
setHTMLFixture('<div class="layout-page"></div>');
});
@@ -43,7 +47,7 @@ describe('graph component', () => {
});
describe('with data', () => {
- it('should render the graph', () => {
+ beforeEach(() => {
wrapper = mount(graphComponent, {
propsData: {
isLoading: false,
@@ -51,26 +55,17 @@ describe('graph component', () => {
mediator,
},
});
+ });
+ it('renders the graph', () => {
expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
-
- expect(wrapper.find(stageColumnComponent).classes()).toContain('no-margin');
-
- expect(
- wrapper
- .findAll(stageColumnComponent)
- .at(1)
- .classes(),
- ).toContain('left-margin');
-
- expect(wrapper.find('.stage-column:nth-child(2) .build:nth-child(1)').classes()).toContain(
- 'left-connector',
- );
-
expect(wrapper.find('.loading-icon').exists()).toBe(false);
-
expect(wrapper.find('.stage-column-list').exists()).toBe(true);
});
+
+ it('renders columns in the graph', () => {
+ expect(findStageColumns()).toHaveLength(graphJSON.details.stages.length);
+ });
});
describe('when linked pipelines are present', () => {
@@ -93,26 +88,26 @@ describe('graph component', () => {
expect(wrapper.find('.fa-spinner').exists()).toBe(false);
});
- it('should include the stage column list', () => {
- expect(wrapper.find(stageColumnComponent).exists()).toBe(true);
- });
-
- it('should include the no-margin class on the first child if there is only one job', () => {
- const firstStageColumnElement = wrapper.find(stageColumnComponent);
-
- expect(firstStageColumnElement.classes()).toContain('no-margin');
+ it('should include the stage column', () => {
+ expect(findStageColumnAt(0).exists()).toBe(true);
});
- it('should include the has-only-one-job class on the first child', () => {
- const firstStageColumnElement = wrapper.find('.stage-column-list .stage-column');
-
- expect(firstStageColumnElement.classes()).toContain('has-only-one-job');
+ it('stage column should have no-margin, gl-mr-26, has-only-one-job classes if there is only one job', () => {
+ expect(findStageColumnAt(0).classes()).toEqual(
+ expect.arrayContaining(['no-margin', 'gl-mr-26', 'has-only-one-job']),
+ );
});
it('should include the left-margin class on the second child', () => {
- const firstStageColumnElement = wrapper.find('.stage-column-list .stage-column:last-child');
+ expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
+ });
- expect(firstStageColumnElement.classes()).toContain('left-margin');
+ it('should include the left-connector class in the build of the second child', () => {
+ expect(
+ findStageColumnAt(1)
+ .find('.build:nth-child(1)')
+ .classes('left-connector'),
+ ).toBe(true);
});
it('should include the js-has-linked-pipelines flag', () => {
@@ -134,12 +129,7 @@ describe('graph component', () => {
describe('stageConnectorClass', () => {
it('it returns left-margin when there is a triggerer', () => {
- expect(
- wrapper
- .findAll(stageColumnComponent)
- .at(1)
- .classes(),
- ).toContain('left-margin');
+ expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
});
});
});
@@ -248,6 +238,16 @@ describe('graph component', () => {
.catch(done.fail);
});
});
+
+ describe('when column requests a refresh', () => {
+ beforeEach(() => {
+ findStageColumnAt(0).vm.$emit('refreshPipelineGraph');
+ });
+
+ it('refreshPipelineGraph is emitted', () => {
+ expect(wrapper.emitted().refreshPipelineGraph).toHaveLength(1);
+ });
+ });
});
});
});
@@ -268,7 +268,7 @@ describe('graph component', () => {
it('should include the first column with a no margin', () => {
const firstColumn = wrapper.find('.stage-column');
- expect(firstColumn.classes()).toContain('no-margin');
+ expect(firstColumn.classes('no-margin')).toBe(true);
});
it('should not render a linked pipelines column', () => {
@@ -278,16 +278,11 @@ describe('graph component', () => {
describe('stageConnectorClass', () => {
it('it returns no-margin when no triggerer and there is one job', () => {
- expect(wrapper.find(stageColumnComponent).classes()).toContain('no-margin');
+ expect(findStageColumnAt(0).classes('no-margin')).toBe(true);
});
it('it returns left-margin when no triggerer and not the first stage', () => {
- expect(
- wrapper
- .findAll(stageColumnComponent)
- .at(1)
- .classes(),
- ).toContain('left-margin');
+ expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
});
});
});
@@ -302,12 +297,9 @@ describe('graph component', () => {
},
});
- expect(
- wrapper
- .find('.stage-column:nth-child(2) .stage-name')
- .text()
- .trim(),
- ).toEqual('Deploy &lt;img src=x onerror=alert(document.domain)&gt;');
+ expect(findStageColumnAt(1).props('title')).toEqual(
+ 'Deploy &lt;img src=x onerror=alert(document.domain)&gt;',
+ );
});
});
});
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/pipelines/graph/job_item_spec.js
index e844cbc5bf8..8aabb2f9cdd 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/pipelines/graph/job_item_spec.js
@@ -1,5 +1,4 @@
import { mount } from '@vue/test-utils';
-import { trimText } from 'helpers/text_helper';
import JobItem from '~/pipelines/components/graph/job_item.vue';
describe('pipeline graph job item', () => {
@@ -65,7 +64,7 @@ describe('pipeline graph job item', () => {
expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
- expect(trimText(wrapper.find('.ci-status-text').text())).toBe(mockJob.name);
+ expect(wrapper.text()).toBe(mockJob.name);
done();
});
@@ -85,7 +84,7 @@ describe('pipeline graph job item', () => {
expect(wrapper.find('.ci-status-icon-success').exists()).toBe(true);
expect(wrapper.find('a').exists()).toBe(false);
- expect(trimText(wrapper.find('.ci-status-text').text())).toBe(mockJobWithoutDetails.name);
+ expect(wrapper.text()).toBe(mockJobWithoutDetails.name);
});
it('should apply hover class and provided class name', () => {
diff --git a/spec/frontend/pipelines/graph/job_name_component_spec.js b/spec/frontend/pipelines/graph/job_name_component_spec.js
index 3574b66403e..f0aa646b8d7 100644
--- a/spec/frontend/pipelines/graph/job_name_component_spec.js
+++ b/spec/frontend/pipelines/graph/job_name_component_spec.js
@@ -21,12 +21,7 @@ describe('job name component', () => {
});
it('should render the provided name', () => {
- expect(
- wrapper
- .find('.ci-status-text')
- .text()
- .trim(),
- ).toBe(propsData.name);
+ expect(wrapper.text()).toBe(propsData.name);
});
it('should render an icon with the provided status', () => {
diff --git a/spec/frontend/pipelines/header_component_spec.js b/spec/frontend/pipelines/header_component_spec.js
index 5388d624d3c..2e10b0f068c 100644
--- a/spec/frontend/pipelines/header_component_spec.js
+++ b/spec/frontend/pipelines/header_component_spec.js
@@ -1,115 +1,164 @@
import { shallowMount } from '@vue/test-utils';
-import { GlModal } from '@gitlab/ui';
+import { GlModal, GlLoadingIcon } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import {
+ mockCancelledPipelineHeader,
+ mockFailedPipelineHeader,
+ mockRunningPipelineHeader,
+ mockSuccessfulPipelineHeader,
+} from './mock_data';
+import axios from '~/lib/utils/axios_utils';
import HeaderComponent from '~/pipelines/components/header_component.vue';
-import CiHeader from '~/vue_shared/components/header_ci_component.vue';
-import eventHub from '~/pipelines/event_hub';
describe('Pipeline details header', () => {
let wrapper;
let glModalDirective;
-
- const threeWeeksAgo = new Date();
- threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+ let mockAxios;
const findDeleteModal = () => wrapper.find(GlModal);
-
- const defaultProps = {
- pipeline: {
- details: {
- status: {
- group: 'failed',
- icon: 'status_failed',
- label: 'failed',
- text: 'failed',
- details_path: 'path',
- },
- },
- id: 123,
- created_at: threeWeeksAgo.toISOString(),
- user: {
- web_url: 'path',
- name: 'Foo',
- username: 'foobar',
- email: 'foo@bar.com',
- avatar_url: 'link',
- },
- retry_path: 'retry',
- cancel_path: 'cancel',
- delete_path: 'delete',
+ const findRetryButton = () => wrapper.find('[data-testid="retryPipeline"]');
+ const findCancelButton = () => wrapper.find('[data-testid="cancelPipeline"]');
+ const findDeleteButton = () => wrapper.find('[data-testid="deletePipeline"]');
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
+ const defaultProvideOptions = {
+ pipelineId: 14,
+ pipelineIid: 1,
+ paths: {
+ retry: '/retry',
+ cancel: '/cancel',
+ delete: '/delete',
+ fullProject: '/namespace/my-project',
},
- isLoading: false,
};
- const createComponent = (props = {}) => {
+  const createComponent = (pipelineMock = mockRunningPipelineHeader, { isLoading } = {}) => {
glModalDirective = jest.fn();
- wrapper = shallowMount(HeaderComponent, {
- propsData: {
- ...props,
+ const $apollo = {
+ queries: {
+ pipeline: {
+ loading: isLoading,
+ stopPolling: jest.fn(),
+ startPolling: jest.fn(),
+ },
+ },
+ };
+
+ return shallowMount(HeaderComponent, {
+ data() {
+ return {
+ pipeline: pipelineMock,
+ };
+ },
+ provide: {
+ ...defaultProvideOptions,
},
directives: {
glModal: {
- bind(el, { value }) {
+ bind(_, { value }) {
glModalDirective(value);
},
},
},
+ mocks: { $apollo },
});
};
beforeEach(() => {
- jest.spyOn(eventHub, '$emit');
-
- createComponent(defaultProps);
+ mockAxios = new MockAdapter(axios);
+ mockAxios.onGet('*').replyOnce(200);
});
afterEach(() => {
- eventHub.$off();
-
wrapper.destroy();
wrapper = null;
+
+ mockAxios.restore();
});
- it('should render provided pipeline info', () => {
- expect(wrapper.find(CiHeader).props()).toMatchObject({
- status: defaultProps.pipeline.details.status,
- itemId: defaultProps.pipeline.id,
- time: defaultProps.pipeline.created_at,
- user: defaultProps.pipeline.user,
+ describe('initial loading', () => {
+ beforeEach(() => {
+ wrapper = createComponent(null, { isLoading: true });
});
- });
- describe('action buttons', () => {
- it('should not trigger eventHub when nothing happens', () => {
- expect(eventHub.$emit).not.toHaveBeenCalled();
+    it('shows a loading state while GraphQL is fetching initial data', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
});
+ });
+
+ describe('visible state', () => {
+ it.each`
+ state | pipelineData | retryValue | cancelValue
+ ${'cancelled'} | ${mockCancelledPipelineHeader} | ${true} | ${false}
+ ${'failed'} | ${mockFailedPipelineHeader} | ${true} | ${false}
+ ${'running'} | ${mockRunningPipelineHeader} | ${false} | ${true}
+ ${'successful'} | ${mockSuccessfulPipelineHeader} | ${false} | ${false}
+ `(
+ 'with a $state pipeline, it will show actions: retry $retryValue and cancel $cancelValue',
+ ({ pipelineData, retryValue, cancelValue }) => {
+ wrapper = createComponent(pipelineData);
+
+ expect(findRetryButton().exists()).toBe(retryValue);
+ expect(findCancelButton().exists()).toBe(cancelValue);
+ },
+ );
+ });
- it('should call postAction when retry button action is clicked', () => {
- wrapper.find('[data-testid="retryButton"]').vm.$emit('click');
+ describe('actions', () => {
+ describe('Retry action', () => {
+ beforeEach(() => {
+ wrapper = createComponent(mockCancelledPipelineHeader);
+ });
- expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'retry');
- });
+ it('should call axios with the right path when retry button is clicked', async () => {
+ jest.spyOn(axios, 'post');
+ findRetryButton().vm.$emit('click');
- it('should call postAction when cancel button action is clicked', () => {
- wrapper.find('[data-testid="cancelPipeline"]').vm.$emit('click');
+ await wrapper.vm.$nextTick();
- expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'cancel');
+ expect(axios.post).toHaveBeenCalledWith(defaultProvideOptions.paths.retry);
+ });
});
- it('does not show delete modal', () => {
- expect(findDeleteModal()).not.toBeVisible();
+ describe('Cancel action', () => {
+ beforeEach(() => {
+ wrapper = createComponent(mockRunningPipelineHeader);
+ });
+
+ it('should call axios with the right path when cancel button is clicked', async () => {
+ jest.spyOn(axios, 'post');
+ findCancelButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(axios.post).toHaveBeenCalledWith(defaultProvideOptions.paths.cancel);
+ });
});
- describe('when delete button action is clicked', () => {
- it('displays delete modal', () => {
+ describe('Delete action', () => {
+ beforeEach(() => {
+ wrapper = createComponent(mockFailedPipelineHeader);
+ });
+
+ it('displays delete modal when clicking on delete and does not call the delete action', async () => {
+ jest.spyOn(axios, 'delete');
+ findDeleteButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
expect(findDeleteModal().props('modalId')).toBe(wrapper.vm.$options.DELETE_MODAL_ID);
expect(glModalDirective).toHaveBeenCalledWith(wrapper.vm.$options.DELETE_MODAL_ID);
+ expect(axios.delete).not.toHaveBeenCalled();
});
- it('should call delete when modal is submitted', () => {
+ it('should call delete path when modal is submitted', async () => {
+ jest.spyOn(axios, 'delete');
findDeleteModal().vm.$emit('ok');
- expect(eventHub.$emit).toHaveBeenCalledWith('headerDeleteAction', 'delete');
+ await wrapper.vm.$nextTick();
+
+ expect(axios.delete).toHaveBeenCalledWith(defaultProvideOptions.paths.delete);
});
});
});
diff --git a/spec/frontend/pipelines/legacy_header_component_spec.js b/spec/frontend/pipelines/legacy_header_component_spec.js
new file mode 100644
index 00000000000..fb7feb8898a
--- /dev/null
+++ b/spec/frontend/pipelines/legacy_header_component_spec.js
@@ -0,0 +1,116 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import LegacyHeaderComponent from '~/pipelines/components/legacy_header_component.vue';
+import CiHeader from '~/vue_shared/components/header_ci_component.vue';
+import eventHub from '~/pipelines/event_hub';
+
+describe('Pipeline details header', () => {
+ let wrapper;
+ let glModalDirective;
+
+ const threeWeeksAgo = new Date();
+ threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+
+ const findDeleteModal = () => wrapper.find(GlModal);
+
+ const defaultProps = {
+ pipeline: {
+ details: {
+ status: {
+ group: 'failed',
+ icon: 'status_failed',
+ label: 'failed',
+ text: 'failed',
+ details_path: 'path',
+ },
+ },
+ id: 123,
+ created_at: threeWeeksAgo.toISOString(),
+ user: {
+ web_url: 'path',
+ name: 'Foo',
+ username: 'foobar',
+ email: 'foo@bar.com',
+ avatar_url: 'link',
+ },
+ retry_path: 'retry',
+ cancel_path: 'cancel',
+ delete_path: 'delete',
+ },
+ isLoading: false,
+ };
+
+ const createComponent = (props = {}) => {
+ glModalDirective = jest.fn();
+
+ wrapper = shallowMount(LegacyHeaderComponent, {
+ propsData: {
+ ...props,
+ },
+ directives: {
+ glModal: {
+ bind(el, { value }) {
+ glModalDirective(value);
+ },
+ },
+ },
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(eventHub, '$emit');
+
+ createComponent(defaultProps);
+ });
+
+ afterEach(() => {
+ eventHub.$off();
+
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('should render provided pipeline info', () => {
+ expect(wrapper.find(CiHeader).props()).toMatchObject({
+ status: defaultProps.pipeline.details.status,
+ itemId: defaultProps.pipeline.id,
+ time: defaultProps.pipeline.created_at,
+ user: defaultProps.pipeline.user,
+ });
+ });
+
+ describe('action buttons', () => {
+ it('should not trigger eventHub when nothing happens', () => {
+ expect(eventHub.$emit).not.toHaveBeenCalled();
+ });
+
+ it('should call postAction when retry button action is clicked', () => {
+ wrapper.find('[data-testid="retryButton"]').vm.$emit('click');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'retry');
+ });
+
+ it('should call postAction when cancel button action is clicked', () => {
+ wrapper.find('[data-testid="cancelPipeline"]').vm.$emit('click');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'cancel');
+ });
+
+ it('does not show delete modal', () => {
+ expect(findDeleteModal()).not.toBeVisible();
+ });
+
+ describe('when delete button action is clicked', () => {
+ it('displays delete modal', () => {
+ expect(findDeleteModal().props('modalId')).toBe(wrapper.vm.$options.DELETE_MODAL_ID);
+ expect(glModalDirective).toHaveBeenCalledWith(wrapper.vm.$options.DELETE_MODAL_ID);
+ });
+
+ it('should call delete when modal is submitted', () => {
+ findDeleteModal().vm.$emit('ok');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerDeleteAction', 'delete');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/pipelines/mock_data.js
index e63efc543f1..2afdbb05107 100644
--- a/spec/frontend/pipelines/mock_data.js
+++ b/spec/frontend/pipelines/mock_data.js
@@ -1,3 +1,7 @@
+const PIPELINE_RUNNING = 'RUNNING';
+const PIPELINE_CANCELED = 'CANCELED';
+const PIPELINE_FAILED = 'FAILED';
+
export const pipelineWithStages = {
id: 20333396,
user: {
@@ -320,6 +324,80 @@ export const pipelineWithStages = {
triggered: [],
};
+const threeWeeksAgo = new Date();
+threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+
+export const mockPipelineHeader = {
+ detailedStatus: {},
+ id: 123,
+ userPermissions: {
+ destroyPipeline: true,
+ },
+ createdAt: threeWeeksAgo.toISOString(),
+ user: {
+ name: 'Foo',
+ username: 'foobar',
+ email: 'foo@bar.com',
+ avatarUrl: 'link',
+ },
+};
+
+export const mockFailedPipelineHeader = {
+ ...mockPipelineHeader,
+ status: PIPELINE_FAILED,
+ retryable: true,
+ cancelable: false,
+ detailedStatus: {
+ group: 'failed',
+ icon: 'status_failed',
+ label: 'failed',
+ text: 'failed',
+ detailsPath: 'path',
+ },
+};
+
+export const mockRunningPipelineHeader = {
+ ...mockPipelineHeader,
+ status: PIPELINE_RUNNING,
+ retryable: false,
+ cancelable: true,
+ detailedStatus: {
+ group: 'running',
+ icon: 'status_running',
+ label: 'running',
+ text: 'running',
+ detailsPath: 'path',
+ },
+};
+
+export const mockCancelledPipelineHeader = {
+ ...mockPipelineHeader,
+ status: PIPELINE_CANCELED,
+ retryable: true,
+ cancelable: false,
+ detailedStatus: {
+ group: 'cancelled',
+ icon: 'status_cancelled',
+ label: 'cancelled',
+ text: 'cancelled',
+ detailsPath: 'path',
+ },
+};
+
+export const mockSuccessfulPipelineHeader = {
+ ...mockPipelineHeader,
+ status: 'SUCCESS',
+ retryable: false,
+ cancelable: false,
+ detailedStatus: {
+ group: 'success',
+ icon: 'status_success',
+ label: 'success',
+ text: 'success',
+ detailsPath: 'path',
+ },
+};
+
export const stageReply = {
name: 'deploy',
title: 'deploy: running',
diff --git a/spec/frontend/pipelines/pipeline_graph/mock_data.js b/spec/frontend/pipelines/pipeline_graph/mock_data.js
index 5a5d6c021a6..b50932deec6 100644
--- a/spec/frontend/pipelines/pipeline_graph/mock_data.js
+++ b/spec/frontend/pipelines/pipeline_graph/mock_data.js
@@ -1,3 +1,5 @@
+import { createUniqueJobId } from '~/pipelines/utils';
+
export const yamlString = `stages:
- empty
- build
@@ -39,18 +41,20 @@ deploy_a:
script: echo hello
`;
+const jobId1 = createUniqueJobId('build', 'build_1');
+const jobId2 = createUniqueJobId('test', 'test_1');
+const jobId3 = createUniqueJobId('test', 'test_2');
+const jobId4 = createUniqueJobId('deploy', 'deploy_1');
+
export const pipelineData = {
stages: [
{
name: 'build',
- groups: [],
- },
- {
- name: 'build',
groups: [
{
name: 'build_1',
jobs: [{ script: 'echo hello', stage: 'build' }],
+ id: jobId1,
},
],
},
@@ -60,10 +64,12 @@ export const pipelineData = {
{
name: 'test_1',
jobs: [{ script: 'yarn test', stage: 'test' }],
+ id: jobId2,
},
{
name: 'test_2',
jobs: [{ script: 'yarn karma', stage: 'test' }],
+ id: jobId3,
},
],
},
@@ -73,8 +79,15 @@ export const pipelineData = {
{
name: 'deploy_1',
jobs: [{ script: 'yarn magick', stage: 'deploy' }],
+ id: jobId4,
},
],
},
],
+ jobs: {
+ [jobId1]: {},
+ [jobId2]: {},
+ [jobId3]: {},
+ [jobId4]: {},
+ },
};
diff --git a/spec/frontend/pipelines/pipeline_graph/utils_spec.js b/spec/frontend/pipelines/pipeline_graph/utils_spec.js
index dd85c8c2bd0..ade026c7053 100644
--- a/spec/frontend/pipelines/pipeline_graph/utils_spec.js
+++ b/spec/frontend/pipelines/pipeline_graph/utils_spec.js
@@ -1,150 +1,211 @@
-import { preparePipelineGraphData } from '~/pipelines/utils';
+import {
+ preparePipelineGraphData,
+ createUniqueJobId,
+ generateJobNeedsDict,
+} from '~/pipelines/utils';
-describe('preparePipelineGraphData', () => {
- const emptyResponse = { stages: [] };
+describe('utils functions', () => {
+ const emptyResponse = { stages: [], jobs: {} };
const jobName1 = 'build_1';
const jobName2 = 'build_2';
const jobName3 = 'test_1';
const jobName4 = 'deploy_1';
- const job1 = { [jobName1]: { script: 'echo hello', stage: 'build' } };
- const job2 = { [jobName2]: { script: 'echo build', stage: 'build' } };
- const job3 = { [jobName3]: { script: 'echo test', stage: 'test' } };
- const job4 = { [jobName4]: { script: 'echo deploy', stage: 'deploy' } };
-
- describe('returns an object with an empty array of stages if', () => {
- it('no data is passed', () => {
- expect(preparePipelineGraphData({})).toEqual(emptyResponse);
- });
+ const job1 = { script: 'echo hello', stage: 'build' };
+ const job2 = { script: 'echo build', stage: 'build' };
+ const job3 = { script: 'echo test', stage: 'test', needs: [jobName1, jobName2] };
+ const job4 = { script: 'echo deploy', stage: 'deploy', needs: [jobName3] };
+ const userDefinedStage = 'myStage';
- it('no stages are found', () => {
- expect(preparePipelineGraphData({ includes: 'template/myTemplate.gitlab-ci.yml' })).toEqual(
- emptyResponse,
- );
- });
- });
-
- describe('returns the correct array of stages', () => {
- it('when multiple jobs are in the same stage', () => {
- const expectedData = {
- stages: [
+ const pipelineGraphData = {
+ stages: [
+ {
+ name: userDefinedStage,
+ groups: [],
+ },
+ {
+ name: job4.stage,
+ groups: [
{
- name: job1[jobName1].stage,
- groups: [
- {
- name: jobName1,
- jobs: [{ script: job1[jobName1].script, stage: job1[jobName1].stage }],
- },
- {
- name: jobName2,
- jobs: [{ script: job2[jobName2].script, stage: job2[jobName2].stage }],
- },
- ],
+ name: jobName4,
+ jobs: [{ ...job4 }],
+ id: createUniqueJobId(job4.stage, jobName4),
},
],
- };
-
- expect(preparePipelineGraphData({ ...job1, ...job2 })).toEqual(expectedData);
- });
-
- it('when stages are defined by the user', () => {
- const userDefinedStage = 'myStage';
- const userDefinedStage2 = 'myStage2';
-
- const expectedData = {
- stages: [
+ },
+ {
+ name: job1.stage,
+ groups: [
{
- name: userDefinedStage,
- groups: [],
+ name: jobName1,
+ jobs: [{ ...job1 }],
+ id: createUniqueJobId(job1.stage, jobName1),
},
{
- name: userDefinedStage2,
- groups: [],
+ name: jobName2,
+ jobs: [{ ...job2 }],
+ id: createUniqueJobId(job2.stage, jobName2),
},
],
- };
+ },
+ {
+ name: job3.stage,
+ groups: [
+ {
+ name: jobName3,
+ jobs: [{ ...job3 }],
+ id: createUniqueJobId(job3.stage, jobName3),
+ },
+ ],
+ },
+ ],
+ jobs: {
+ [jobName1]: { ...job1, id: createUniqueJobId(job1.stage, jobName1) },
+ [jobName2]: { ...job2, id: createUniqueJobId(job2.stage, jobName2) },
+ [jobName3]: { ...job3, id: createUniqueJobId(job3.stage, jobName3) },
+ [jobName4]: { ...job4, id: createUniqueJobId(job4.stage, jobName4) },
+ },
+ };
- expect(preparePipelineGraphData({ stages: [userDefinedStage, userDefinedStage2] })).toEqual(
- expectedData,
- );
- });
+ describe('preparePipelineGraphData', () => {
+ describe('returns an empty array of stages and empty job objects if', () => {
+ it('no data is passed', () => {
+ expect(preparePipelineGraphData({})).toEqual(emptyResponse);
+ });
- it('by combining user defined stage and job stages, it preserves user defined order', () => {
- const userDefinedStage = 'myStage';
- const userDefinedStageThatOverlaps = 'deploy';
+ it('no stages are found', () => {
+ expect(preparePipelineGraphData({ includes: 'template/myTemplate.gitlab-ci.yml' })).toEqual(
+ emptyResponse,
+ );
+ });
+ });
- const expectedData = {
- stages: [
- {
- name: userDefinedStage,
- groups: [],
- },
- {
- name: job4[jobName4].stage,
- groups: [
- {
- name: jobName4,
- jobs: [{ script: job4[jobName4].script, stage: job4[jobName4].stage }],
- },
- ],
- },
- {
- name: job1[jobName1].stage,
- groups: [
- {
- name: jobName1,
- jobs: [{ script: job1[jobName1].script, stage: job1[jobName1].stage }],
- },
- {
- name: jobName2,
- jobs: [{ script: job2[jobName2].script, stage: job2[jobName2].stage }],
- },
- ],
+ describe('returns the correct array of stages and object of jobs', () => {
+ it('when multiple jobs are in the same stage', () => {
+ const expectedData = {
+ stages: [
+ {
+ name: job1.stage,
+ groups: [
+ {
+ name: jobName1,
+ jobs: [{ ...job1 }],
+ id: createUniqueJobId(job1.stage, jobName1),
+ },
+ {
+ name: jobName2,
+ jobs: [{ ...job2 }],
+ id: createUniqueJobId(job2.stage, jobName2),
+ },
+ ],
+ },
+ ],
+ jobs: {
+ [jobName1]: { ...job1, id: createUniqueJobId(job1.stage, jobName1) },
+ [jobName2]: { ...job2, id: createUniqueJobId(job2.stage, jobName2) },
},
- {
- name: job3[jobName3].stage,
- groups: [
- {
- name: jobName3,
- jobs: [{ script: job3[jobName3].script, stage: job3[jobName3].stage }],
- },
- ],
+ };
+ expect(
+ preparePipelineGraphData({ [jobName1]: { ...job1 }, [jobName2]: { ...job2 } }),
+ ).toEqual(expectedData);
+ });
+
+ it('when stages are defined by the user', () => {
+ const userDefinedStage2 = 'myStage2';
+
+ const expectedData = {
+ stages: [
+ {
+ name: userDefinedStage,
+ groups: [],
+ },
+ {
+ name: userDefinedStage2,
+ groups: [],
+ },
+ ],
+ jobs: {},
+ };
+
+ expect(preparePipelineGraphData({ stages: [userDefinedStage, userDefinedStage2] })).toEqual(
+ expectedData,
+ );
+ });
+
+ it('by combining user defined stage and job stages, it preserves user defined order', () => {
+ const userDefinedStageThatOverlaps = 'deploy';
+
+ expect(
+ preparePipelineGraphData({
+ stages: [userDefinedStage, userDefinedStageThatOverlaps],
+ [jobName1]: { ...job1 },
+ [jobName2]: { ...job2 },
+ [jobName3]: { ...job3 },
+ [jobName4]: { ...job4 },
+ }),
+ ).toEqual(pipelineGraphData);
+ });
+
+ it('with only unique values', () => {
+ const expectedData = {
+ stages: [
+ {
+ name: job1.stage,
+ groups: [
+ {
+ name: jobName1,
+ jobs: [{ ...job1 }],
+ id: createUniqueJobId(job1.stage, jobName1),
+ },
+ ],
+ },
+ ],
+ jobs: {
+ [jobName1]: { ...job1, id: createUniqueJobId(job1.stage, jobName1) },
},
- ],
- };
+ };
- expect(
- preparePipelineGraphData({
- stages: [userDefinedStage, userDefinedStageThatOverlaps],
- ...job1,
- ...job2,
- ...job3,
- ...job4,
- }),
- ).toEqual(expectedData);
+ expect(
+ preparePipelineGraphData({
+ stages: ['build'],
+ [jobName1]: { ...job1 },
+ [jobName1]: { ...job1 },
+ }),
+ ).toEqual(expectedData);
+ });
});
+ });
- it('with only unique values', () => {
- const expectedData = {
- stages: [
- {
- name: job1[jobName1].stage,
- groups: [
- {
- name: jobName1,
- jobs: [{ script: job1[jobName1].script, stage: job1[jobName1].stage }],
- },
- ],
- },
- ],
+ describe('generateJobNeedsDict', () => {
+ it('generates an empty object if it receives no jobs', () => {
+ expect(generateJobNeedsDict({ jobs: {} })).toEqual({});
+ });
+
+ it('generates a dict with empty needs if there are no dependencies', () => {
+ const smallGraph = {
+ jobs: {
+ [jobName1]: { ...job1, id: createUniqueJobId(job1.stage, jobName1) },
+ [jobName2]: { ...job2, id: createUniqueJobId(job2.stage, jobName2) },
+ },
};
- expect(
- preparePipelineGraphData({
- stages: ['build'],
- ...job1,
- ...job1,
- }),
- ).toEqual(expectedData);
+ expect(generateJobNeedsDict(smallGraph)).toEqual({
+ [pipelineGraphData.jobs[jobName1].id]: [],
+ [pipelineGraphData.jobs[jobName2].id]: [],
+ });
+ });
+
+    it('generates a dict where the key is a job and its value is an array of all its needs', () => {
+ const uniqueJobName1 = pipelineGraphData.jobs[jobName1].id;
+ const uniqueJobName2 = pipelineGraphData.jobs[jobName2].id;
+ const uniqueJobName3 = pipelineGraphData.jobs[jobName3].id;
+ const uniqueJobName4 = pipelineGraphData.jobs[jobName4].id;
+
+ expect(generateJobNeedsDict(pipelineGraphData)).toEqual({
+ [uniqueJobName1]: [],
+ [uniqueJobName2]: [],
+ [uniqueJobName3]: [uniqueJobName1, uniqueJobName2],
+ [uniqueJobName4]: [uniqueJobName3, uniqueJobName1, uniqueJobName2],
+ });
});
});
});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index b0ad6bbd228..1298a2a1524 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -1,9 +1,17 @@
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
-import { GlFilteredSearch } from '@gitlab/ui';
+import { GlFilteredSearch, GlButton, GlLoadingIcon } from '@gitlab/ui';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
+import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue';
+import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
+
+import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
+import EmptyState from '~/pipelines/components/pipelines_list/empty_state.vue';
+import BlankState from '~/pipelines/components/pipelines_list/blank_state.vue';
+import PipelinesTableComponent from '~/pipelines/components/pipelines_list/pipelines_table.vue';
+
import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue';
import Store from '~/pipelines/stores/pipelines_store';
import { pipelineWithStages, stageReply, users, mockSearch, branches } from './mock_data';
@@ -49,6 +57,20 @@ describe('Pipelines', () => {
};
const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
+ const findByTestId = id => wrapper.find(`[data-testid="${id}"]`);
+ const findNavigationTabs = () => wrapper.find(NavigationTabs);
+ const findNavigationControls = () => wrapper.find(NavigationControls);
+ const findTab = tab => findByTestId(`pipelines-tab-${tab}`);
+
+ const findRunPipelineButton = () => findByTestId('run-pipeline-button');
+ const findCiLintButton = () => findByTestId('ci-lint-button');
+ const findCleanCacheButton = () => findByTestId('clear-cache-button');
+
+ const findEmptyState = () => wrapper.find(EmptyState);
+ const findBlankState = () => wrapper.find(BlankState);
+ const findStagesDropdown = () => wrapper.find('.js-builds-dropdown-button');
+
+ const findTablePagination = () => wrapper.find(TablePagination);
const createComponent = (props = defaultProps, methods) => {
wrapper = mount(PipelinesComponent, {
@@ -87,19 +109,19 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('renders Run Pipeline link', () => {
- expect(wrapper.find('.js-run-pipeline').attributes('href')).toBe(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
});
it('renders CI Lint link', () => {
- expect(wrapper.find('.js-ci-lint').attributes('href')).toBe(paths.ciLintPath);
+ expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
});
it('renders Clear Runner Cache button', () => {
- expect(wrapper.find('.js-clear-cache').text()).toBe('Clear Runner Caches');
+ expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
});
it('renders pipelines table', () => {
@@ -127,23 +149,31 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('renders Run Pipeline link', () => {
- expect(wrapper.find('.js-run-pipeline').attributes('href')).toEqual(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
});
it('renders CI Lint link', () => {
- expect(wrapper.find('.js-ci-lint').attributes('href')).toEqual(paths.ciLintPath);
+ expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
});
it('renders Clear Runner Cache button', () => {
- expect(wrapper.find('.js-clear-cache').text()).toEqual('Clear Runner Caches');
+ expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
});
it('renders tab empty state', () => {
- expect(wrapper.find('.empty-state h4').text()).toEqual('There are currently no pipelines.');
+ expect(findBlankState().text()).toBe('There are currently no pipelines.');
+ });
+
+      it('renders tab empty state for the finished scope', () => {
+ wrapper.vm.scope = 'finished';
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findBlankState().text()).toBe('There are currently no finished pipelines.');
+ });
});
});
@@ -165,18 +195,23 @@ describe('Pipelines', () => {
});
it('renders empty state', () => {
- expect(wrapper.find('.js-empty-state h4').text()).toEqual('Build with confidence');
-
- expect(wrapper.find('.js-get-started-pipelines').attributes('href')).toEqual(
- paths.helpPagePath,
- );
+ expect(
+ findEmptyState()
+ .find('h4')
+ .text(),
+ ).toBe('Build with confidence');
+ expect(
+ findEmptyState()
+ .find(GlButton)
+ .attributes('href'),
+ ).toBe(paths.helpPagePath);
});
it('does not render tabs nor buttons', () => {
- expect(wrapper.find('.js-pipelines-tab-all').exists()).toBeFalsy();
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findTab('all').exists()).toBe(false);
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
});
@@ -189,20 +224,18 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('renders buttons', () => {
- expect(wrapper.find('.js-run-pipeline').attributes('href')).toEqual(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
- expect(wrapper.find('.js-ci-lint').attributes('href')).toEqual(paths.ciLintPath);
- expect(wrapper.find('.js-clear-cache').text()).toEqual('Clear Runner Caches');
+ expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
+ expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
});
it('renders error state', () => {
- expect(wrapper.find('.empty-state').text()).toContain(
- 'There was an error fetching the pipelines.',
- );
+ expect(findBlankState().text()).toContain('There was an error fetching the pipelines.');
});
});
});
@@ -218,13 +251,13 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('does not render buttons', () => {
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
it('renders pipelines table', () => {
@@ -252,17 +285,17 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('does not render buttons', () => {
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
it('renders tab empty state', () => {
- expect(wrapper.find('.empty-state h4').text()).toEqual('There are currently no pipelines.');
+ expect(wrapper.find('.empty-state h4').text()).toBe('There are currently no pipelines.');
});
});
@@ -284,18 +317,22 @@ describe('Pipelines', () => {
});
it('renders empty state without button to set CI', () => {
- expect(wrapper.find('.js-empty-state').text()).toEqual(
+ expect(findEmptyState().text()).toBe(
'This project is not currently set up to run pipelines.',
);
- expect(wrapper.find('.js-get-started-pipelines').exists()).toBeFalsy();
+ expect(
+ findEmptyState()
+ .find(GlButton)
+ .exists(),
+ ).toBeFalsy();
});
it('does not render tabs or buttons', () => {
- expect(wrapper.find('.js-pipelines-tab-all').exists()).toBeFalsy();
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findTab('all').exists()).toBe(false);
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
});
@@ -309,13 +346,13 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('does not renders buttons', () => {
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
it('renders error state', () => {
@@ -342,14 +379,20 @@ describe('Pipelines', () => {
);
});
- it('should render navigation tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
-
- expect(wrapper.find('.js-pipelines-tab-finished').text()).toContain('Finished');
-
- expect(wrapper.find('.js-pipelines-tab-branches').text()).toContain('Branches');
+ it('should set up navigation tabs', () => {
+ expect(findNavigationTabs().props('tabs')).toEqual([
+ { name: 'All', scope: 'all', count: '3', isActive: true },
+ { name: 'Finished', scope: 'finished', count: undefined, isActive: false },
+ { name: 'Branches', scope: 'branches', isActive: false },
+ { name: 'Tags', scope: 'tags', isActive: false },
+ ]);
+ });
- expect(wrapper.find('.js-pipelines-tab-tags').text()).toContain('Tags');
+ it('should render navigation tabs', () => {
+ expect(findTab('all').html()).toContain('All');
+ expect(findTab('finished').text()).toContain('Finished');
+ expect(findTab('branches').text()).toContain('Branches');
+ expect(findTab('tags').text()).toContain('Tags');
});
it('should make an API request when using tabs', () => {
@@ -362,7 +405,7 @@ describe('Pipelines', () => {
);
return waitForPromises().then(() => {
- wrapper.find('.js-pipelines-tab-finished').trigger('click');
+ findTab('finished').trigger('click');
expect(updateContentMock).toHaveBeenCalledWith({ scope: 'finished', page: '1' });
});
@@ -401,133 +444,172 @@ describe('Pipelines', () => {
});
});
- describe('methods', () => {
+ describe('User Interaction', () => {
+ let updateContentMock;
+
beforeEach(() => {
jest.spyOn(window.history, 'pushState').mockImplementation(() => null);
});
- describe('onChangeTab', () => {
- it('should set page to 1', () => {
- const updateContentMock = jest.fn(() => {});
- createComponent(
- { hasGitlabCi: true, canCreatePipeline: true, ...paths },
- {
- updateContent: updateContentMock,
- },
- );
+ beforeEach(() => {
+ mock.onGet(paths.endpoint).reply(200, pipelines);
+ createComponent();
- wrapper.vm.onChangeTab('running');
+ updateContentMock = jest.spyOn(wrapper.vm, 'updateContent');
+
+ return waitForPromises();
+ });
+
+ describe('when user changes tabs', () => {
+ it('should set page to 1', () => {
+ findNavigationTabs().vm.$emit('onChangeTab', 'running');
expect(updateContentMock).toHaveBeenCalledWith({ scope: 'running', page: '1' });
});
});
- describe('onChangePage', () => {
+ describe('when user changes page', () => {
it('should update page and keep scope', () => {
- const updateContentMock = jest.fn(() => {});
- createComponent(
- { hasGitlabCi: true, canCreatePipeline: true, ...paths },
- {
- updateContent: updateContentMock,
- },
- );
-
- wrapper.vm.onChangePage(4);
+ findTablePagination().vm.change(4);
expect(updateContentMock).toHaveBeenCalledWith({ scope: wrapper.vm.scope, page: '4' });
});
});
- });
- describe('computed properties', () => {
- beforeEach(() => {
- createComponent();
- });
+    describe('updates results when a stage is clicked', () => {
+ beforeEach(() => {
+ const copyPipeline = { ...pipelineWithStages };
+ copyPipeline.id += 1;
+ mock
+ .onGet('twitter/flight/pipelines.json')
+ .reply(
+ 200,
+ {
+ pipelines: [pipelineWithStages],
+ count: {
+ all: 1,
+ finished: 1,
+ pending: 0,
+ running: 0,
+ },
+ },
+ {
+ 'POLL-INTERVAL': 100,
+ },
+ )
+ .onGet(pipelineWithStages.details.stages[0].dropdown_path)
+ .reply(200, stageReply);
- describe('tabs', () => {
- it('returns default tabs', () => {
- expect(wrapper.vm.tabs).toEqual([
- { name: 'All', scope: 'all', count: undefined, isActive: true },
- { name: 'Finished', scope: 'finished', count: undefined, isActive: false },
- { name: 'Branches', scope: 'branches', isActive: false },
- { name: 'Tags', scope: 'tags', isActive: false },
- ]);
+ createComponent();
});
- });
- describe('emptyTabMessage', () => {
- it('returns message with finished scope', () => {
- wrapper.vm.scope = 'finished';
+ describe('when a request is being made', () => {
+ it('stops polling, cancels the request, & restarts polling', () => {
+ const stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
+ const restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
+ const cancelMock = jest.spyOn(wrapper.vm.service.cancelationSource, 'cancel');
+ mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines);
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.emptyTabMessage).toEqual('There are currently no finished pipelines.');
+ return waitForPromises()
+ .then(() => {
+ wrapper.vm.isMakingRequest = true;
+ findStagesDropdown().trigger('click');
+ })
+ .then(() => {
+ expect(cancelMock).toHaveBeenCalled();
+ expect(stopMock).toHaveBeenCalled();
+ expect(restartMock).toHaveBeenCalled();
+ });
});
});
- it('returns message without scope when scope is `all`', () => {
- expect(wrapper.vm.emptyTabMessage).toEqual('There are currently no pipelines.');
+ describe('when no request is being made', () => {
+ it('stops polling & restarts polling', () => {
+ const stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
+ const restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
+ mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines);
+
+ return waitForPromises()
+ .then(() => {
+ findStagesDropdown().trigger('click');
+ expect(stopMock).toHaveBeenCalled();
+ })
+ .then(() => {
+ expect(restartMock).toHaveBeenCalled();
+ });
+ });
});
});
+ });
- describe('stateToRender', () => {
- it('returns loading state when the app is loading', () => {
- expect(wrapper.vm.stateToRender).toEqual('loading');
+ describe('Rendered content', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('displays different content', () => {
+ it('shows loading state when the app is loading', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
- it('returns error state when app has error', () => {
+ it('shows error state when app has error', () => {
wrapper.vm.hasError = true;
wrapper.vm.isLoading = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('error');
+ expect(findBlankState().props('message')).toBe(
+ 'There was an error fetching the pipelines. Try again in a few moments or contact your support team.',
+ );
});
});
- it('returns table list when app has pipelines', () => {
+ it('shows table list when app has pipelines', () => {
wrapper.vm.isLoading = false;
wrapper.vm.hasError = false;
wrapper.vm.state.pipelines = pipelines.pipelines;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('tableList');
+ expect(wrapper.find(PipelinesTableComponent).exists()).toBe(true);
});
});
- it('returns empty tab when app does not have pipelines but project has pipelines', () => {
+ it('shows empty tab when app does not have pipelines but project has pipelines', () => {
wrapper.vm.state.count.all = 10;
wrapper.vm.isLoading = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('emptyTab');
+ expect(findBlankState().exists()).toBe(true);
+ expect(findBlankState().props('message')).toBe('There are currently no pipelines.');
});
});
- it('returns empty tab when project has CI', () => {
+ it('shows empty tab when project has CI', () => {
wrapper.vm.isLoading = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('emptyTab');
+ expect(findBlankState().exists()).toBe(true);
+ expect(findBlankState().props('message')).toBe('There are currently no pipelines.');
});
});
- it('returns empty state when project does not have pipelines nor CI', () => {
+ it('shows empty state when project does not have pipelines nor CI', () => {
createComponent({ hasGitlabCi: false, canCreatePipeline: true, ...paths });
wrapper.vm.isLoading = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('emptyState');
+ expect(wrapper.find(EmptyState).exists()).toBe(true);
});
});
});
- describe('shouldRenderTabs', () => {
+ describe('displays tabs', () => {
it('returns true when state is loading & has already made the first request', () => {
wrapper.vm.isLoading = true;
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(true);
+ expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -537,7 +619,7 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(true);
+ expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -547,7 +629,7 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(true);
+ expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -557,7 +639,7 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(true);
+ expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -565,7 +647,7 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(false);
+ expect(findNavigationTabs().exists()).toBe(false);
});
});
@@ -576,17 +658,17 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(false);
+ expect(findNavigationTabs().exists()).toBe(false);
});
});
});
- describe('shouldRenderButtons', () => {
+ describe('displays buttons', () => {
it('returns true when it has paths & has made the first request', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderButtons).toEqual(true);
+ expect(findNavigationControls().exists()).toBe(true);
});
});
@@ -594,77 +676,12 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderButtons).toEqual(false);
+ expect(findNavigationControls().exists()).toBe(false);
});
});
});
});
- describe('updates results when a staged is clicked', () => {
- beforeEach(() => {
- const copyPipeline = { ...pipelineWithStages };
- copyPipeline.id += 1;
- mock
- .onGet('twitter/flight/pipelines.json')
- .reply(
- 200,
- {
- pipelines: [pipelineWithStages],
- count: {
- all: 1,
- finished: 1,
- pending: 0,
- running: 0,
- },
- },
- {
- 'POLL-INTERVAL': 100,
- },
- )
- .onGet(pipelineWithStages.details.stages[0].dropdown_path)
- .reply(200, stageReply);
-
- createComponent();
- });
-
- describe('when a request is being made', () => {
- it('stops polling, cancels the request, & restarts polling', () => {
- const stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
- const restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
- const cancelMock = jest.spyOn(wrapper.vm.service.cancelationSource, 'cancel');
- mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines);
-
- return waitForPromises()
- .then(() => {
- wrapper.vm.isMakingRequest = true;
- wrapper.find('.js-builds-dropdown-button').trigger('click');
- })
- .then(() => {
- expect(cancelMock).toHaveBeenCalled();
- expect(stopMock).toHaveBeenCalled();
- expect(restartMock).toHaveBeenCalled();
- });
- });
- });
-
- describe('when no request is being made', () => {
- it('stops polling & restarts polling', () => {
- const stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
- const restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
- mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines);
-
- return waitForPromises()
- .then(() => {
- wrapper.find('.js-builds-dropdown-button').trigger('click');
- expect(stopMock).toHaveBeenCalled();
- })
- .then(() => {
- expect(restartMock).toHaveBeenCalled();
- });
- });
- });
- });
-
describe('Pipeline filters', () => {
let updateContentMock;
diff --git a/spec/frontend/pipelines/pipelines_table_row_spec.js b/spec/frontend/pipelines/pipelines_table_row_spec.js
index 9901f476f1b..32d53c0f1f8 100644
--- a/spec/frontend/pipelines/pipelines_table_row_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_row_spec.js
@@ -181,7 +181,9 @@ describe('Pipelines Table Row', () => {
it('should render the provided actions', () => {
expect(wrapper.find('.js-pipelines-retry-button').exists()).toBe(true);
+ expect(wrapper.find('.js-pipelines-retry-button').attributes('title')).toMatch('Retry');
expect(wrapper.find('.js-pipelines-cancel-button').exists()).toBe(true);
+ expect(wrapper.find('.js-pipelines-cancel-button').attributes('title')).toMatch('Cancel');
const dropdownMenu = wrapper.find('.dropdown-menu');
expect(dropdownMenu.text()).toContain(scheduledJobAction.name);
diff --git a/spec/frontend/pipelines/test_reports/mock_data.js b/spec/frontend/pipelines/test_reports/mock_data.js
index 1d03f0b655f..c3ca1429842 100644
--- a/spec/frontend/pipelines/test_reports/mock_data.js
+++ b/spec/frontend/pipelines/test_reports/mock_data.js
@@ -3,10 +3,29 @@ import { TestStatus } from '~/pipelines/constants';
export default [
{
classname: 'spec.test_spec',
+ file: 'spec/trace_spec.rb',
execution_time: 0,
name: 'Test#skipped text',
stack_trace: null,
status: TestStatus.SKIPPED,
system_output: null,
},
+ {
+ classname: 'spec.test_spec',
+ file: 'spec/trace_spec.rb',
+ execution_time: 0,
+ name: 'Test#error text',
+ stack_trace: null,
+ status: TestStatus.ERROR,
+ system_output: null,
+ },
+ {
+ classname: 'spec.test_spec',
+ file: 'spec/trace_spec.rb',
+ execution_time: 0,
+ name: 'Test#unknown text',
+ stack_trace: null,
+ status: TestStatus.UNKNOWN,
+ system_output: null,
+ },
];
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index 2feb6aa5799..838e0606375 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -1,6 +1,7 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { getJSONFixture } from 'helpers/fixtures';
+import { GlButton } from '@gitlab/ui';
import SuiteTable from '~/pipelines/components/test_reports/test_suite_table.vue';
import * as getters from '~/pipelines/stores/test_reports/getters';
import { TestStatus } from '~/pipelines/constants';
@@ -61,18 +62,27 @@ describe('Test reports suite table', () => {
expect(allCaseRows().length).toBe(testCases.length);
});
- it('renders the correct icon for each status', () => {
- const failedTest = testCases.findIndex(x => x.status === TestStatus.FAILED);
- const skippedTest = testCases.findIndex(x => x.status === TestStatus.SKIPPED);
- const successTest = testCases.findIndex(x => x.status === TestStatus.SUCCESS);
+ it.each([
+ TestStatus.ERROR,
+ TestStatus.FAILED,
+ TestStatus.SKIPPED,
+ TestStatus.SUCCESS,
+ 'unknown',
+ ])('renders the correct icon for test case with %s status', status => {
+ const test = testCases.findIndex(x => x.status === status);
+ const row = findCaseRowAtIndex(test);
- const failedRow = findCaseRowAtIndex(failedTest);
- const skippedRow = findCaseRowAtIndex(skippedTest);
- const successRow = findCaseRowAtIndex(successTest);
+ expect(findIconForRow(row, status).exists()).toBe(true);
+ });
+
+ it('renders the file name for the test with a copy button', () => {
+ const { file } = testCases[0];
+ const row = findCaseRowAtIndex(0);
+ const button = row.find(GlButton);
- expect(findIconForRow(failedRow, TestStatus.FAILED).exists()).toBe(true);
- expect(findIconForRow(skippedRow, TestStatus.SKIPPED).exists()).toBe(true);
- expect(findIconForRow(successRow, TestStatus.SUCCESS).exists()).toBe(true);
+ expect(row.text()).toContain(file);
+ expect(button.exists()).toBe(true);
+ expect(button.attributes('data-clipboard-text')).toBe(file);
});
});
});
diff --git a/spec/frontend/project_find_file_spec.js b/spec/frontend/project_find_file_spec.js
index 757a02a04a3..6a50f68a4e9 100644
--- a/spec/frontend/project_find_file_spec.js
+++ b/spec/frontend/project_find_file_spec.js
@@ -1,11 +1,12 @@
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { TEST_HOST } from 'helpers/test_constants';
-import { sanitize } from 'dompurify';
+import { sanitize } from '~/lib/dompurify';
import ProjectFindFile from '~/project_find_file';
import axios from '~/lib/utils/axios_utils';
-jest.mock('dompurify', () => ({
+jest.mock('~/lib/dompurify', () => ({
+ addHook: jest.fn(),
sanitize: jest.fn(val => val),
}));
diff --git a/spec/frontend/projects/commit_box/info/load_branches_spec.js b/spec/frontend/projects/commit_box/info/load_branches_spec.js
new file mode 100644
index 00000000000..ebd4ee45dab
--- /dev/null
+++ b/spec/frontend/projects/commit_box/info/load_branches_spec.js
@@ -0,0 +1,68 @@
+import axios from 'axios';
+import waitForPromises from 'helpers/wait_for_promises';
+import MockAdapter from 'axios-mock-adapter';
+import { loadBranches } from '~/projects/commit_box/info/load_branches';
+
+const mockCommitPath = '/commit/abcd/branches';
+const mockBranchesRes =
+ '<a href="/-/commits/master">master</a><span><a href="/-/commits/my-branch">my-branch</a></span>';
+
+describe('~/projects/commit_box/info/load_branches', () => {
+ let mock;
+ let el;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onGet(mockCommitPath).reply(200, mockBranchesRes);
+
+ el = document.createElement('div');
+ el.dataset.commitPath = mockCommitPath;
+ el.innerHTML = '<div class="commit-info branches"><span class="spinner"/></div>';
+ });
+
+ it('loads and renders branches info', async () => {
+ loadBranches(el);
+ await waitForPromises();
+
+ expect(el.innerHTML).toBe(`<div class="commit-info branches">${mockBranchesRes}</div>`);
+ });
+
+ it('does not load when no container is provided', async () => {
+ loadBranches(null);
+ await waitForPromises();
+
+ expect(mock.history.get).toHaveLength(0);
+ });
+
+  describe('when branches request returns unsafe content', () => {
+ beforeEach(() => {
+ mock
+ .onGet(mockCommitPath)
+ .reply(200, '<a onload="alert(\'xss!\');" href="/-/commits/master">master</a>');
+ });
+
+ it('displays sanitized html', async () => {
+ loadBranches(el);
+ await waitForPromises();
+
+ expect(el.innerHTML).toBe(
+ '<div class="commit-info branches"><a href="/-/commits/master">master</a></div>',
+ );
+ });
+ });
+
+  describe('when branches request fails', () => {
+ beforeEach(() => {
+ mock.onGet(mockCommitPath).reply(500, 'Error!');
+ });
+
+ it('attempts to load and renders an error', async () => {
+ loadBranches(el);
+ await waitForPromises();
+
+ expect(el.innerHTML).toBe(
+ '<div class="commit-info branches">Failed to load branches. Please try again.</div>',
+ );
+ });
+ });
+});
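Taken together, the new spec pins down the observable contract of loadBranches: do nothing when no container is passed, fetch the URL from data-commit-path, sanitize the response before injecting it, and fall back to an error message when the request fails. A minimal sketch that satisfies those expectations (illustrative only, not the shipped ~/projects/commit_box/info/load_branches module):

    import axios from '~/lib/utils/axios_utils';
    import { sanitize } from '~/lib/dompurify';

    export const loadBranches = containerEl => {
      if (!containerEl) {
        return Promise.resolve();
      }

      const { commitPath } = containerEl.dataset;
      const branchesEl = containerEl.querySelector('.commit-info.branches');

      return axios
        .get(commitPath)
        .then(({ data }) => {
          // Sanitizing strips inline handlers such as onload before injection.
          branchesEl.innerHTML = sanitize(data);
        })
        .catch(() => {
          branchesEl.textContent = 'Failed to load branches. Please try again.';
        });
    };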
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
index 455467e7b29..a0fd6012546 100644
--- a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -17,6 +17,7 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
role="button"
diff --git a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
index 692b8f6cf52..4630415f61c 100644
--- a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
@@ -18,6 +18,7 @@ exports[`Project remove modal intialized matches the snapshot 1`] = `
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
role="button"
@@ -84,6 +85,7 @@ exports[`Project remove modal intialized matches the snapshot 1`] = `
<template>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-modal-action-cancel"
icon=""
@@ -98,6 +100,7 @@ exports[`Project remove modal intialized matches the snapshot 1`] = `
<!---->
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-modal-action-primary"
disabled="true"
diff --git a/spec/frontend/projects/settings/access_dropdown_spec.js b/spec/frontend/projects/settings/access_dropdown_spec.js
index 3b375c5610f..41b9c0c3763 100644
--- a/spec/frontend/projects/settings/access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/access_dropdown_spec.js
@@ -14,6 +14,7 @@ describe('AccessDropdown', () => {
`);
const $dropdown = $('#dummy-dropdown');
$dropdown.data('defaultLabel', defaultLabel);
+ gon.features = { deployKeysOnProtectedBranches: true };
const options = {
$dropdown,
accessLevelsData: {
@@ -37,6 +38,9 @@ describe('AccessDropdown', () => {
{ type: LEVEL_TYPES.GROUP },
{ type: LEVEL_TYPES.GROUP },
{ type: LEVEL_TYPES.GROUP },
+ { type: LEVEL_TYPES.DEPLOY_KEY },
+ { type: LEVEL_TYPES.DEPLOY_KEY },
+ { type: LEVEL_TYPES.DEPLOY_KEY },
];
beforeEach(() => {
@@ -49,7 +53,7 @@ describe('AccessDropdown', () => {
const label = dropdown.toggleLabel();
- expect(label).toBe('1 role, 2 users, 3 groups');
+ expect(label).toBe('1 role, 2 users, 3 deploy keys, 3 groups');
expect($dropdownToggleText).not.toHaveClass('is-default');
});
@@ -122,6 +126,21 @@ describe('AccessDropdown', () => {
expect($dropdownToggleText).not.toHaveClass('is-default');
});
});
+
+ describe('with users and deploy keys', () => {
+ beforeEach(() => {
+ const selectedTypes = [LEVEL_TYPES.DEPLOY_KEY, LEVEL_TYPES.USER];
+ dropdown.setSelectedItems(dummyItems.filter(item => selectedTypes.includes(item.type)));
+ $dropdownToggleText.addClass('is-default');
+ });
+
+ it('displays number of deploy keys', () => {
+ const label = dropdown.toggleLabel();
+
+ expect(label).toBe('2 users, 3 deploy keys');
+ expect($dropdownToggleText).not.toHaveClass('is-default');
+ });
+ });
});
describe('userRowHtml', () => {
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index 0f3b699f6b2..62aeb4ddee5 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -218,9 +218,7 @@ describe('ServiceDeskRoot', () => {
.$nextTick()
.then(waitForPromises)
.then(() => {
- expect(wrapper.html()).toContain(
- 'An error occurred while saving the template. Please check if the template exists.',
- );
+ expect(wrapper.html()).toContain('An error occured while making the changes:');
});
});
});
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index 00b1d5cfbe2..5eee22f479e 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -313,7 +313,7 @@ describe('Ref selector component', () => {
findBranchesSection()
.find('[data-testid="section-header"]')
.text(),
- ).toBe('Branches 123');
+ ).toMatchInterpolatedText('Branches 123');
});
it("does not render an error message in the branches section's body", () => {
@@ -392,7 +392,7 @@ describe('Ref selector component', () => {
findTagsSection()
.find('[data-testid="section-header"]')
.text(),
- ).toBe('Tags 456');
+ ).toMatchInterpolatedText('Tags 456');
});
it("does not render an error message in the tags section's body", () => {
@@ -460,7 +460,7 @@ describe('Ref selector component', () => {
findCommitsSection()
.find('[data-testid="section-header"]')
.text(),
- ).toBe('Commits 1');
+ ).toMatchInterpolatedText('Commits 1');
});
it("does not render an error message in the comits section's body", () => {
diff --git a/spec/frontend/registry/explorer/components/details_page/partial_cleanup_alert_spec.js b/spec/frontend/registry/explorer/components/details_page/partial_cleanup_alert_spec.js
new file mode 100644
index 00000000000..17821d8be31
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/details_page/partial_cleanup_alert_spec.js
@@ -0,0 +1,71 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlSprintf } from '@gitlab/ui';
+import component from '~/registry/explorer/components/details_page/partial_cleanup_alert.vue';
+import { DELETE_ALERT_TITLE, DELETE_ALERT_LINK_TEXT } from '~/registry/explorer/constants';
+
+describe('Partial Cleanup alert', () => {
+ let wrapper;
+
+ const findAlert = () => wrapper.find(GlAlert);
+  const findRunLink = () => wrapper.find('[data-testid="run-link"]');
+  const findHelpLink = () => wrapper.find('[data-testid="help-link"]');
+
+ const mountComponent = () => {
+ wrapper = shallowMount(component, {
+ stubs: { GlSprintf },
+ propsData: {
+ runCleanupPoliciesHelpPagePath: 'foo',
+ cleanupPoliciesHelpPagePath: 'bar',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it(`gl-alert has the correct properties`, () => {
+ mountComponent();
+
+ expect(findAlert().props()).toMatchObject({
+ title: DELETE_ALERT_TITLE,
+ variant: 'warning',
+ });
+ });
+
+ it('has the right text', () => {
+ mountComponent();
+
+ expect(wrapper.text()).toMatchInterpolatedText(DELETE_ALERT_LINK_TEXT);
+ });
+
+ it('contains run link', () => {
+ mountComponent();
+
+ const link = findRunLink();
+ expect(link.exists()).toBe(true);
+ expect(link.attributes()).toMatchObject({
+ href: 'foo',
+ target: '_blank',
+ });
+ });
+
+ it('contains help link', () => {
+ mountComponent();
+
+ const link = findHelpLink();
+ expect(link.exists()).toBe(true);
+ expect(link.attributes()).toMatchObject({
+ href: 'bar',
+ target: '_blank',
+ });
+ });
+
+ it('GlAlert dismiss event triggers a dismiss event', () => {
+ mountComponent();
+
+ findAlert().vm.$emit('dismiss');
+ expect(wrapper.emitted('dismiss')).toEqual([[]]);
+ });
+});
diff --git a/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
index c5b4b3fa5d8..ce446e6d93e 100644
--- a/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
@@ -9,6 +9,8 @@ import {
ROW_SCHEDULED_FOR_DELETION,
LIST_DELETE_BUTTON_DISABLED,
REMOVE_REPOSITORY_LABEL,
+ ASYNC_DELETE_IMAGE_ERROR_MESSAGE,
+ CLEANUP_TIMED_OUT_ERROR_MESSAGE,
} from '~/registry/explorer/constants';
import { RouterLink } from '../../stubs';
import { imagesListResponse } from '../../mock_data';
@@ -21,6 +23,7 @@ describe('Image List Row', () => {
const findTagsCount = () => wrapper.find('[data-testid="tagsCount"]');
const findDeleteBtn = () => wrapper.find(DeleteButton);
const findClipboardButton = () => wrapper.find(ClipboardButton);
+ const findWarningIcon = () => wrapper.find('[data-testid="warning-icon"]');
const mountComponent = props => {
wrapper = shallowMount(Component, {
@@ -74,6 +77,26 @@ describe('Image List Row', () => {
expect(button.props('text')).toBe(item.location);
expect(button.props('title')).toBe(item.location);
});
+
+ describe('warning icon', () => {
+ it.each`
+ failedDelete | cleanup_policy_started_at | shown | title
+ ${true} | ${true} | ${true} | ${ASYNC_DELETE_IMAGE_ERROR_MESSAGE}
+ ${false} | ${true} | ${true} | ${CLEANUP_TIMED_OUT_ERROR_MESSAGE}
+ ${false} | ${false} | ${false} | ${''}
+ `(
+ 'when failedDelete is $failedDelete and cleanup_policy_started_at is $cleanup_policy_started_at',
+ ({ cleanup_policy_started_at, failedDelete, shown, title }) => {
+ mountComponent({ item: { ...item, failedDelete, cleanup_policy_started_at } });
+ const icon = findWarningIcon();
+ expect(icon.exists()).toBe(shown);
+ if (shown) {
+ const tooltip = getBinding(icon.element, 'gl-tooltip');
+ expect(tooltip.value.title).toBe(title);
+ }
+ },
+ );
+ });
});
describe('delete button', () => {
diff --git a/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js b/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
index 7a27f8fa431..3c997093d46 100644
--- a/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlSprintf, GlLink } from '@gitlab/ui';
+import { GlSprintf } from '@gitlab/ui';
import Component from '~/registry/explorer/components/list_page/registry_header.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import {
@@ -19,12 +19,8 @@ describe('registry_header', () => {
const findTitleArea = () => wrapper.find(TitleArea);
const findCommandsSlot = () => wrapper.find('[data-testid="commands-slot"]');
- const findInfoArea = () => wrapper.find('[data-testid="info-area"]');
- const findIntroText = () => wrapper.find('[data-testid="default-intro"]');
const findImagesCountSubHeader = () => wrapper.find('[data-testid="images-count"]');
const findExpirationPolicySubHeader = () => wrapper.find('[data-testid="expiration-policy"]');
- const findDisabledExpirationPolicyMessage = () =>
- wrapper.find('[data-testid="expiration-disabled-message"]');
const mountComponent = (propsData, slots) => {
wrapper = shallowMount(Component, {
@@ -123,44 +119,18 @@ describe('registry_header', () => {
});
});
- describe('info area', () => {
- it('exists', () => {
- mountComponent();
-
- expect(findInfoArea().exists()).toBe(true);
- });
-
+ describe('info messages', () => {
describe('default message', () => {
- beforeEach(() => {
- return mountComponent({ helpPagePath: 'bar' });
- });
-
- it('exists', () => {
- expect(findIntroText().exists()).toBe(true);
- });
-
- it('has the correct copy', () => {
- expect(findIntroText().text()).toMatchInterpolatedText(LIST_INTRO_TEXT);
- });
+ it('is correctly bound to title_area props', () => {
+ mountComponent({ helpPagePath: 'foo' });
- it('has the correct link', () => {
- expect(
- findIntroText()
- .find(GlLink)
- .attributes('href'),
- ).toBe('bar');
+ expect(findTitleArea().props('infoMessages')).toEqual([
+ { text: LIST_INTRO_TEXT, link: 'foo' },
+ ]);
});
});
describe('expiration policy info message', () => {
- describe('when there are no images', () => {
- it('is hidden', () => {
- mountComponent();
-
- expect(findDisabledExpirationPolicyMessage().exists()).toBe(false);
- });
- });
-
describe('when there are images', () => {
describe('when expiration policy is disabled', () => {
beforeEach(() => {
@@ -170,43 +140,27 @@ describe('registry_header', () => {
imagesCount: 1,
});
});
- it('message exist', () => {
- expect(findDisabledExpirationPolicyMessage().exists()).toBe(true);
- });
- it('has the correct copy', () => {
- expect(findDisabledExpirationPolicyMessage().text()).toMatchInterpolatedText(
- EXPIRATION_POLICY_DISABLED_MESSAGE,
- );
- });
- it('has the correct link', () => {
- expect(
- findDisabledExpirationPolicyMessage()
- .find(GlLink)
- .attributes('href'),
- ).toBe('foo');
+ it('the prop is correctly bound', () => {
+ expect(findTitleArea().props('infoMessages')).toEqual([
+ { text: LIST_INTRO_TEXT, link: '' },
+ { text: EXPIRATION_POLICY_DISABLED_MESSAGE, link: 'foo' },
+ ]);
});
});
- describe('when expiration policy is enabled', () => {
+ describe.each`
+ desc | props
+ ${'when there are no images'} | ${{ expirationPolicy: { enabled: false }, imagesCount: 0 }}
+ ${'when expiration policy is enabled'} | ${{ expirationPolicy: { enabled: true }, imagesCount: 1 }}
+ ${'when the expiration policy is completely disabled'} | ${{ expirationPolicy: { enabled: false }, imagesCount: 1, hideExpirationPolicyData: true }}
+ `('$desc', ({ props }) => {
it('message does not exist', () => {
- mountComponent({
- expirationPolicy: { enabled: true },
- imagesCount: 1,
- });
-
- expect(findDisabledExpirationPolicyMessage().exists()).toBe(false);
- });
- });
- describe('when the expiration policy is completely disabled', () => {
- it('message does not exist', () => {
- mountComponent({
- expirationPolicy: { enabled: true },
- imagesCount: 1,
- hideExpirationPolicyData: true,
- });
+ mountComponent(props);
- expect(findDisabledExpirationPolicyMessage().exists()).toBe(false);
+ expect(findTitleArea().props('infoMessages')).toEqual([
+ { text: LIST_INTRO_TEXT, link: '' },
+ ]);
});
});
});
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
index 66e8a4aea0d..86b52c4f06a 100644
--- a/spec/frontend/registry/explorer/pages/details_spec.js
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -3,6 +3,7 @@ import { GlPagination } from '@gitlab/ui';
import Tracking from '~/tracking';
import component from '~/registry/explorer/pages/details.vue';
import DeleteAlert from '~/registry/explorer/components/details_page/delete_alert.vue';
+import PartialCleanupAlert from '~/registry/explorer/components/details_page/partial_cleanup_alert.vue';
import DetailsHeader from '~/registry/explorer/components/details_page/details_header.vue';
import TagsLoader from '~/registry/explorer/components/details_page/tags_loader.vue';
import TagsList from '~/registry/explorer/components/details_page/tags_list.vue';
@@ -30,8 +31,10 @@ describe('Details Page', () => {
const findDeleteAlert = () => wrapper.find(DeleteAlert);
const findDetailsHeader = () => wrapper.find(DetailsHeader);
const findEmptyTagsState = () => wrapper.find(EmptyTagsState);
+ const findPartialCleanupAlert = () => wrapper.find(PartialCleanupAlert);
- const routeId = window.btoa(JSON.stringify({ name: 'foo', tags_path: 'bar' }));
+ const routeIdGenerator = override =>
+ window.btoa(JSON.stringify({ name: 'foo', tags_path: 'bar', ...override }));
const tagsArrayToSelectedTags = tags =>
tags.reduce((acc, c) => {
@@ -39,7 +42,7 @@ describe('Details Page', () => {
return acc;
}, {});
- const mountComponent = options => {
+ const mountComponent = ({ options, routeParams } = {}) => {
wrapper = shallowMount(component, {
store,
stubs: {
@@ -48,7 +51,7 @@ describe('Details Page', () => {
mocks: {
$route: {
params: {
- id: routeId,
+ id: routeIdGenerator(routeParams),
},
},
},
@@ -224,7 +227,7 @@ describe('Details Page', () => {
findDeleteModal().vm.$emit('confirmDelete');
expect(dispatchSpy).toHaveBeenCalledWith('requestDeleteTag', {
tag: store.state.tags[0],
- params: routeId,
+ params: routeIdGenerator(),
});
});
});
@@ -239,7 +242,7 @@ describe('Details Page', () => {
findDeleteModal().vm.$emit('confirmDelete');
expect(dispatchSpy).toHaveBeenCalledWith('requestDeleteTags', {
ids: store.state.tags.map(t => t.name),
- params: routeId,
+ params: routeIdGenerator(),
});
});
});
@@ -273,11 +276,57 @@ describe('Details Page', () => {
it('has the correct props', () => {
store.commit(SET_INITIAL_STATE, { ...config });
mountComponent({
- data: () => ({
- deleteAlertType,
- }),
+ options: {
+ data: () => ({
+ deleteAlertType,
+ }),
+ },
});
expect(findDeleteAlert().props()).toEqual({ ...config, deleteAlertType });
});
});
+
+ describe('Partial Cleanup Alert', () => {
+ const config = {
+ runCleanupPoliciesHelpPagePath: 'foo',
+ cleanupPoliciesHelpPagePath: 'bar',
+ };
+
+ describe('when expiration_policy_started is not null', () => {
+ const routeParams = { cleanup_policy_started_at: Date.now().toString() };
+
+ it('exists', () => {
+ mountComponent({ routeParams });
+
+ expect(findPartialCleanupAlert().exists()).toBe(true);
+ });
+
+ it('has the correct props', () => {
+ store.commit(SET_INITIAL_STATE, { ...config });
+
+ mountComponent({ routeParams });
+
+ expect(findPartialCleanupAlert().props()).toEqual({ ...config });
+ });
+
+ it('dismiss hides the component', async () => {
+ mountComponent({ routeParams });
+
+ expect(findPartialCleanupAlert().exists()).toBe(true);
+ findPartialCleanupAlert().vm.$emit('dismiss');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findPartialCleanupAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('when expiration_policy_started is null', () => {
+ it('the component is hidden', () => {
+ mountComponent();
+
+ expect(findPartialCleanupAlert().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap b/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap
deleted file mode 100644
index 11393c89d06..00000000000
--- a/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap
+++ /dev/null
@@ -1,7 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Registry Settings App renders 1`] = `
-<div>
- <settings-form-stub />
-</div>
-`;
diff --git a/spec/frontend/registry/settings/components/registry_settings_app_spec.js b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
index 9551ee72e51..a784396f47a 100644
--- a/spec/frontend/registry/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
@@ -1,28 +1,35 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
+import createMockApollo from 'jest/helpers/mock_apollo_helper';
import component from '~/registry/settings/components/registry_settings_app.vue';
+import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';
import SettingsForm from '~/registry/settings/components/settings_form.vue';
-import { createStore } from '~/registry/settings/store/';
-import { SET_SETTINGS, SET_INITIAL_STATE } from '~/registry/settings/store/mutation_types';
import { FETCH_SETTINGS_ERROR_MESSAGE } from '~/registry/shared/constants';
import {
UNAVAILABLE_FEATURE_INTRO_TEXT,
UNAVAILABLE_USER_FEATURE_TEXT,
} from '~/registry/settings/constants';
-import { stringifiedFormOptions } from '../../shared/mock_data';
+import { expirationPolicyPayload, emptyExpirationPolicyPayload } from '../mock_data';
+
+const localVue = createLocalVue();
describe('Registry Settings App', () => {
let wrapper;
- let store;
+ let fakeApollo;
+
+ const defaultProvidedValues = {
+ projectPath: 'path',
+ isAdmin: false,
+ adminSettingsPath: 'settingsPath',
+ enableHistoricEntries: false,
+ };
const findSettingsComponent = () => wrapper.find(SettingsForm);
const findAlert = () => wrapper.find(GlAlert);
- const mountComponent = ({ dispatchMock = 'mockResolvedValue' } = {}) => {
- const dispatchSpy = jest.spyOn(store, 'dispatch');
- dispatchSpy[dispatchMock]();
-
+ const mountComponent = (provide = defaultProvidedValues, config) => {
wrapper = shallowMount(component, {
stubs: {
GlSprintf,
@@ -32,71 +39,72 @@ describe('Registry Settings App', () => {
show: jest.fn(),
},
},
- store,
+ provide,
+ ...config,
});
};
- beforeEach(() => {
- store = createStore();
- });
+ const mountComponentWithApollo = ({ provide = defaultProvidedValues, resolver } = {}) => {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [[expirationPolicyQuery, resolver]];
+
+ fakeApollo = createMockApollo(requestHandlers);
+ mountComponent(provide, {
+ localVue,
+ apolloProvider: fakeApollo,
+ });
+
+ return requestHandlers.map(request => request[1]);
+ };
afterEach(() => {
wrapper.destroy();
});
- it('renders', () => {
- store.commit(SET_SETTINGS, { foo: 'bar' });
- mountComponent();
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('call the store function to load the data on mount', () => {
- mountComponent();
- expect(store.dispatch).toHaveBeenCalledWith('fetchSettings');
- });
+ it('renders the setting form', async () => {
+ const requests = mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyPayload()),
+ });
+ await Promise.all(requests);
- it('renders the setting form', () => {
- store.commit(SET_SETTINGS, { foo: 'bar' });
- mountComponent();
expect(findSettingsComponent().exists()).toBe(true);
});
describe('the form is disabled', () => {
- beforeEach(() => {
- store.commit(SET_SETTINGS, undefined);
+ it('the form is hidden', () => {
mountComponent();
- });
- it('the form is hidden', () => {
expect(findSettingsComponent().exists()).toBe(false);
});
it('shows an alert', () => {
+ mountComponent();
+
const text = findAlert().text();
expect(text).toContain(UNAVAILABLE_FEATURE_INTRO_TEXT);
expect(text).toContain(UNAVAILABLE_USER_FEATURE_TEXT);
});
describe('an admin is visiting the page', () => {
- beforeEach(() => {
- store.commit(SET_INITIAL_STATE, {
- ...stringifiedFormOptions,
- isAdmin: true,
- adminSettingsPath: 'foo',
- });
- });
-
it('shows the admin part of the alert message', () => {
+ mountComponent({ ...defaultProvidedValues, isAdmin: true });
+
const sprintf = findAlert().find(GlSprintf);
expect(sprintf.text()).toBe('administration settings');
- expect(sprintf.find(GlLink).attributes('href')).toBe('foo');
+ expect(sprintf.find(GlLink).attributes('href')).toBe(
+ defaultProvidedValues.adminSettingsPath,
+ );
});
});
});
describe('fetchSettingsError', () => {
beforeEach(() => {
- mountComponent({ dispatchMock: 'mockRejectedValue' });
+ const requests = mountComponentWithApollo({
+ resolver: jest.fn().mockRejectedValue(new Error('GraphQL error')),
+ });
+ return Promise.all(requests);
});
it('the form is hidden', () => {
@@ -107,4 +115,23 @@ describe('Registry Settings App', () => {
expect(findAlert().html()).toContain(FETCH_SETTINGS_ERROR_MESSAGE);
});
});
+
+ describe('empty API response', () => {
+ it.each`
+ enableHistoricEntries | isShown
+ ${true} | ${true}
+ ${false} | ${false}
+    `('form visibility is $isShown when enableHistoricEntries is $enableHistoricEntries', async ({ enableHistoricEntries, isShown }) => {
+ const requests = mountComponentWithApollo({
+ provide: {
+ ...defaultProvidedValues,
+ enableHistoricEntries,
+ },
+ resolver: jest.fn().mockResolvedValue(emptyExpirationPolicyPayload()),
+ });
+ await Promise.all(requests);
+
+ expect(findSettingsComponent().exists()).toBe(isShown);
+ });
+ });
});
diff --git a/spec/frontend/registry/settings/components/settings_form_spec.js b/spec/frontend/registry/settings/components/settings_form_spec.js
index 6f9518808db..4346cfadcc8 100644
--- a/spec/frontend/registry/settings/components/settings_form_spec.js
+++ b/spec/frontend/registry/settings/components/settings_form_spec.js
@@ -1,30 +1,37 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'jest/helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Tracking from '~/tracking';
import component from '~/registry/settings/components/settings_form.vue';
import expirationPolicyFields from '~/registry/shared/components/expiration_policy_fields.vue';
-import { createStore } from '~/registry/settings/store/';
+import updateContainerExpirationPolicyMutation from '~/registry/settings/graphql/mutations/update_container_expiration_policy.graphql';
+import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';
import {
UPDATE_SETTINGS_ERROR_MESSAGE,
UPDATE_SETTINGS_SUCCESS_MESSAGE,
} from '~/registry/shared/constants';
-import { stringifiedFormOptions } from '../../shared/mock_data';
+import { GlCard, GlLoadingIcon } from '../../shared/stubs';
+import { expirationPolicyPayload, expirationPolicyMutationPayload } from '../mock_data';
+
+const localVue = createLocalVue();
describe('Settings Form', () => {
let wrapper;
- let store;
- let dispatchSpy;
-
- const GlLoadingIcon = { name: 'gl-loading-icon-stub', template: '<svg></svg>' };
- const GlCard = {
- name: 'gl-card-stub',
- template: `
- <div>
- <slot name="header"></slot>
- <slot></slot>
- <slot name="footer"></slot>
- </div>
- `,
+ let fakeApollo;
+
+ const defaultProvidedValues = {
+ projectPath: 'path',
+ };
+
+ const {
+ data: {
+ project: { containerExpirationPolicy },
+ },
+ } = expirationPolicyPayload();
+
+ const defaultProps = {
+ value: { ...containerExpirationPolicy },
};
const trackingPayload = {
@@ -35,14 +42,21 @@ describe('Settings Form', () => {
const findFields = () => wrapper.find(expirationPolicyFields);
const findCancelButton = () => wrapper.find({ ref: 'cancel-button' });
const findSaveButton = () => wrapper.find({ ref: 'save-button' });
- const findLoadingIcon = (parent = wrapper) => parent.find(GlLoadingIcon);
- const mountComponent = (data = {}) => {
+ const mountComponent = ({
+ props = defaultProps,
+ data,
+ config,
+ provide = defaultProvidedValues,
+ mocks,
+ } = {}) => {
wrapper = shallowMount(component, {
stubs: {
GlCard,
GlLoadingIcon,
},
+ propsData: { ...props },
+ provide,
data() {
return {
...data,
@@ -52,15 +66,42 @@ describe('Settings Form', () => {
$toast: {
show: jest.fn(),
},
+ ...mocks,
+ },
+ ...config,
+ });
+ };
+
+ const mountComponentWithApollo = ({ provide = defaultProvidedValues, resolver } = {}) => {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [
+ [updateContainerExpirationPolicyMutation, resolver],
+ [expirationPolicyQuery, jest.fn().mockResolvedValue(expirationPolicyPayload())],
+ ];
+
+ fakeApollo = createMockApollo(requestHandlers);
+
+ fakeApollo.defaultClient.cache.writeQuery({
+ query: expirationPolicyQuery,
+ variables: {
+ projectPath: provide.projectPath,
},
- store,
+ ...expirationPolicyPayload(),
});
+
+ mountComponent({
+ provide,
+ config: {
+ localVue,
+ apolloProvider: fakeApollo,
+ },
+ });
+
+ return requestHandlers.map(resolvers => resolvers[1]);
};
beforeEach(() => {
- store = createStore();
- store.dispatch('setInitialState', stringifiedFormOptions);
- dispatchSpy = jest.spyOn(store, 'dispatch');
jest.spyOn(Tracking, 'event');
});
@@ -72,32 +113,36 @@ describe('Settings Form', () => {
it('v-model change update the settings property', () => {
mountComponent();
findFields().vm.$emit('input', { newValue: 'foo' });
- expect(dispatchSpy).toHaveBeenCalledWith('updateSettings', { settings: 'foo' });
+ expect(wrapper.emitted('input')).toEqual([['foo']]);
});
it('v-model change update the api error property', () => {
const apiErrors = { baz: 'bar' };
- mountComponent({ apiErrors });
+ mountComponent({ data: { apiErrors } });
expect(findFields().props('apiErrors')).toEqual(apiErrors);
findFields().vm.$emit('input', { newValue: 'foo', modified: 'baz' });
expect(findFields().props('apiErrors')).toEqual({});
});
- });
- describe('form', () => {
- let form;
- beforeEach(() => {
- mountComponent();
- form = findForm();
- dispatchSpy.mockReturnValue();
+ it('shows the default option when none are selected', () => {
+ mountComponent({ props: { value: {} } });
+ expect(findFields().props('value')).toEqual({
+ cadence: 'EVERY_DAY',
+ keepN: 'TEN_TAGS',
+ olderThan: 'NINETY_DAYS',
+ });
});
+ });
+ describe('form', () => {
describe('form reset event', () => {
beforeEach(() => {
- form.trigger('reset');
+ mountComponent();
+
+ findForm().trigger('reset');
});
it('calls the appropriate function', () => {
- expect(dispatchSpy).toHaveBeenCalledWith('resetSettings');
+ expect(wrapper.emitted('reset')).toEqual([[]]);
});
it('tracks the reset event', () => {
@@ -108,54 +153,96 @@ describe('Settings Form', () => {
describe('form submit event ', () => {
it('save has type submit', () => {
mountComponent();
+
expect(findSaveButton().attributes('type')).toBe('submit');
});
- it('dispatches the saveSettings action', () => {
- dispatchSpy.mockResolvedValue();
- form.trigger('submit');
- expect(dispatchSpy).toHaveBeenCalledWith('saveSettings');
+ it('dispatches the correct apollo mutation', async () => {
+ const [expirationPolicyMutationResolver] = mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ });
+
+ findForm().trigger('submit');
+ await expirationPolicyMutationResolver();
+ expect(expirationPolicyMutationResolver).toHaveBeenCalled();
});
it('tracks the submit event', () => {
- dispatchSpy.mockResolvedValue();
- form.trigger('submit');
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ });
+
+ findForm().trigger('submit');
+
expect(Tracking.event).toHaveBeenCalledWith(undefined, 'submit_form', trackingPayload);
});
it('show a success toast when submit succeed', async () => {
- dispatchSpy.mockResolvedValue();
- form.trigger('submit');
- await waitForPromises();
+ const handlers = mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ });
+
+ findForm().trigger('submit');
+ await Promise.all(handlers);
+ await wrapper.vm.$nextTick();
+
expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, {
type: 'success',
});
});
describe('when submit fails', () => {
- it('shows an error', async () => {
- dispatchSpy.mockRejectedValue({ response: {} });
- form.trigger('submit');
- await waitForPromises();
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE, {
- type: 'error',
+ describe('user recoverable errors', () => {
+          it('shows the error message from the mutation payload in a toast', async () => {
+ const handlers = mountComponentWithApollo({
+ resolver: jest
+ .fn()
+ .mockResolvedValue(expirationPolicyMutationPayload({ errors: ['foo'] })),
+ });
+
+ findForm().trigger('submit');
+ await Promise.all(handlers);
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('foo', {
+ type: 'error',
+ });
});
});
+ describe('global errors', () => {
+ it('shows an error', async () => {
+ const handlers = mountComponentWithApollo({
+ resolver: jest.fn().mockRejectedValue(expirationPolicyMutationPayload()),
+ });
- it('parses the error messages', async () => {
- dispatchSpy.mockRejectedValue({
- response: {
- data: {
- message: {
- foo: 'bar',
- 'container_expiration_policy.name': ['baz'],
+ findForm().trigger('submit');
+ await Promise.all(handlers);
+ await wrapper.vm.$nextTick();
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE, {
+ type: 'error',
+ });
+ });
+
+ it('parses the error messages', async () => {
+ const mutate = jest.fn().mockRejectedValue({
+ graphQLErrors: [
+ {
+ extensions: {
+ problems: [{ path: ['name'], message: 'baz' }],
+ },
},
- },
- },
+ ],
+ });
+ mountComponent({ mocks: { $apollo: { mutate } } });
+
+ findForm().trigger('submit');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+
+ expect(findFields().props('apiErrors')).toEqual({ name: 'baz' });
});
- form.trigger('submit');
- await waitForPromises();
- expect(findFields().props('apiErrors')).toEqual({ name: 'baz' });
});
});
});
@@ -163,51 +250,78 @@ describe('Settings Form', () => {
describe('form actions', () => {
describe('cancel button', () => {
- beforeEach(() => {
- store.commit('SET_SETTINGS', { foo: 'bar' });
+ it('has type reset', () => {
mountComponent();
- });
- it('has type reset', () => {
expect(findCancelButton().attributes('type')).toBe('reset');
});
- it('is disabled when isEdited is false', () =>
- wrapper.vm.$nextTick().then(() => {
- expect(findCancelButton().attributes('disabled')).toBe('true');
- }));
-
- it('is disabled isLoading is true', () => {
- store.commit('TOGGLE_LOADING');
- store.commit('UPDATE_SETTINGS', { settings: { foo: 'baz' } });
- return wrapper.vm.$nextTick().then(() => {
- expect(findCancelButton().attributes('disabled')).toBe('true');
- store.commit('TOGGLE_LOADING');
- });
- });
+ it.each`
+ isLoading | isEdited | mutationLoading | isDisabled
+ ${true} | ${true} | ${true} | ${true}
+ ${false} | ${true} | ${true} | ${true}
+ ${false} | ${false} | ${true} | ${true}
+ ${true} | ${false} | ${false} | ${true}
+ ${false} | ${false} | ${false} | ${true}
+ ${false} | ${true} | ${false} | ${false}
+ `(
+        'disabled is $isDisabled when isLoading is $isLoading, isEdited is $isEdited and mutationLoading is $mutationLoading',
+ ({ isEdited, isLoading, mutationLoading, isDisabled }) => {
+ mountComponent({
+ props: { ...defaultProps, isEdited, isLoading },
+ data: { mutationLoading },
+ });
- it('is enabled when isLoading is false and isEdited is true', () => {
- store.commit('UPDATE_SETTINGS', { settings: { foo: 'baz' } });
- return wrapper.vm.$nextTick().then(() => {
- expect(findCancelButton().attributes('disabled')).toBe(undefined);
- });
- });
+ const expectation = isDisabled ? 'true' : undefined;
+ expect(findCancelButton().attributes('disabled')).toBe(expectation);
+ },
+ );
});
- describe('when isLoading is true', () => {
- beforeEach(() => {
- store.commit('TOGGLE_LOADING');
+ describe('submit button', () => {
+ it('has type submit', () => {
mountComponent();
- });
- afterEach(() => {
- store.commit('TOGGLE_LOADING');
- });
- it('submit button is disabled and shows a spinner', () => {
- const button = findSaveButton();
- expect(button.attributes('disabled')).toBeTruthy();
- expect(findLoadingIcon(button).exists()).toBe(true);
+ expect(findSaveButton().attributes('type')).toBe('submit');
});
+ it.each`
+ isLoading | fieldsAreValid | mutationLoading | isDisabled
+ ${true} | ${true} | ${true} | ${true}
+ ${false} | ${true} | ${true} | ${true}
+ ${false} | ${false} | ${true} | ${true}
+ ${true} | ${false} | ${false} | ${true}
+ ${false} | ${false} | ${false} | ${true}
+ ${false} | ${true} | ${false} | ${false}
+ `(
+        'disabled is $isDisabled when isLoading is $isLoading, fieldsAreValid is $fieldsAreValid and mutationLoading is $mutationLoading',
+ ({ fieldsAreValid, isLoading, mutationLoading, isDisabled }) => {
+ mountComponent({
+ props: { ...defaultProps, isLoading },
+ data: { mutationLoading, fieldsAreValid },
+ });
+
+ const expectation = isDisabled ? 'true' : undefined;
+ expect(findSaveButton().attributes('disabled')).toBe(expectation);
+ },
+ );
+
+ it.each`
+ isLoading | mutationLoading | showLoading
+ ${true} | ${true} | ${true}
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${true}
+ ${false} | ${false} | ${false}
+ `(
+        'the loading icon visibility is $showLoading when isLoading is $isLoading and mutationLoading is $mutationLoading',
+ ({ isLoading, mutationLoading, showLoading }) => {
+ mountComponent({
+ props: { ...defaultProps, isLoading },
+ data: { mutationLoading },
+ });
+
+ expect(findSaveButton().props('loading')).toBe(showLoading);
+ },
+ );
});
});
});
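The disabled-state checks above use the tagged-template form of it.each: a header row names the columns, each ${...} row binds those names, and the bound values can be referenced in the test title with $name. A standalone sketch of the mechanism (isDisabled here is a stand-in, not the component's computed property):

    const isDisabled = (isLoading, mutationLoading) => isLoading || mutationLoading;

    it.each`
      isLoading | mutationLoading | expected
      ${true}   | ${false}        | ${true}
      ${false}  | ${true}         | ${true}
      ${false}  | ${false}        | ${false}
    `('disabled is $expected when isLoading=$isLoading and mutationLoading=$mutationLoading', ({ isLoading, mutationLoading, expected }) => {
      expect(isDisabled(isLoading, mutationLoading)).toBe(expected);
    });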
diff --git a/spec/frontend/registry/settings/graphql/cache_updated_spec.js b/spec/frontend/registry/settings/graphql/cache_updated_spec.js
new file mode 100644
index 00000000000..e5f69a08285
--- /dev/null
+++ b/spec/frontend/registry/settings/graphql/cache_updated_spec.js
@@ -0,0 +1,56 @@
+import { updateContainerExpirationPolicy } from '~/registry/settings/graphql/utils/cache_update';
+import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';
+
+describe('Registry settings cache update', () => {
+ let client;
+
+ const payload = {
+ data: {
+ updateContainerExpirationPolicy: {
+ containerExpirationPolicy: {
+ enabled: true,
+ },
+ },
+ },
+ };
+
+ const cacheMock = {
+ project: {
+ containerExpirationPolicy: {
+ enabled: false,
+ },
+ },
+ };
+
+ const queryAndVariables = {
+ query: expirationPolicyQuery,
+ variables: { projectPath: 'foo' },
+ };
+
+ beforeEach(() => {
+ client = {
+ readQuery: jest.fn().mockReturnValue(cacheMock),
+ writeQuery: jest.fn(),
+ };
+ });
+ describe('Registry settings cache update', () => {
+ it('calls readQuery', () => {
+ updateContainerExpirationPolicy('foo')(client, payload);
+ expect(client.readQuery).toHaveBeenCalledWith(queryAndVariables);
+ });
+
+ it('writes the correct result in the cache', () => {
+ updateContainerExpirationPolicy('foo')(client, payload);
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ ...queryAndVariables,
+ data: {
+ project: {
+ containerExpirationPolicy: {
+ enabled: true,
+ },
+ },
+ },
+ });
+ });
+ });
+});
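These cache tests describe a curried updater: given a project path it returns a (client, payload) handler that reads the expiration-policy query from the Apollo cache, swaps in the mutation result, and writes the merged data back. A sketch consistent with the assertions above (illustrative, not the shipped cache_update utility):

    import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';

    export const updateContainerExpirationPolicy = projectPath => (client, mutationPayload) => {
      const queryAndParams = { query: expirationPolicyQuery, variables: { projectPath } };
      const sourceData = client.readQuery(queryAndParams);

      // Replace only the containerExpirationPolicy branch with the mutation result.
      const { containerExpirationPolicy } =
        mutationPayload.data.updateContainerExpirationPolicy;

      client.writeQuery({
        ...queryAndParams,
        data: {
          ...sourceData,
          project: {
            ...sourceData.project,
            containerExpirationPolicy,
          },
        },
      });
    };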
diff --git a/spec/frontend/registry/settings/mock_data.js b/spec/frontend/registry/settings/mock_data.js
new file mode 100644
index 00000000000..7f3772ce7fe
--- /dev/null
+++ b/spec/frontend/registry/settings/mock_data.js
@@ -0,0 +1,40 @@
+export const expirationPolicyPayload = override => ({
+ data: {
+ project: {
+ containerExpirationPolicy: {
+ cadence: 'EVERY_DAY',
+ enabled: true,
+ keepN: 'TEN_TAGS',
+ nameRegex: 'asdasdssssdfdf',
+ nameRegexKeep: 'sss',
+ olderThan: 'FOURTEEN_DAYS',
+ ...override,
+ },
+ },
+ },
+});
+
+export const emptyExpirationPolicyPayload = () => ({
+ data: {
+ project: {
+ containerExpirationPolicy: {},
+ },
+ },
+});
+
+export const expirationPolicyMutationPayload = ({ override, errors = [] } = {}) => ({
+ data: {
+ updateContainerExpirationPolicy: {
+ containerExpirationPolicy: {
+ cadence: 'EVERY_DAY',
+ enabled: true,
+ keepN: 'TEN_TAGS',
+ nameRegex: 'asdasdssssdfdf',
+ nameRegexKeep: 'sss',
+ olderThan: 'FOURTEEN_DAYS',
+ ...override,
+ },
+ errors,
+ },
+ },
+});
diff --git a/spec/frontend/registry/settings/store/actions_spec.js b/spec/frontend/registry/settings/store/actions_spec.js
deleted file mode 100644
index 51b89f96ef2..00000000000
--- a/spec/frontend/registry/settings/store/actions_spec.js
+++ /dev/null
@@ -1,90 +0,0 @@
-import testAction from 'helpers/vuex_action_helper';
-import Api from '~/api';
-import * as actions from '~/registry/settings/store/actions';
-import * as types from '~/registry/settings/store/mutation_types';
-
-describe('Actions Registry Store', () => {
- describe.each`
- actionName | mutationName | payload
- ${'setInitialState'} | ${types.SET_INITIAL_STATE} | ${'foo'}
- ${'updateSettings'} | ${types.UPDATE_SETTINGS} | ${'foo'}
- ${'toggleLoading'} | ${types.TOGGLE_LOADING} | ${undefined}
- ${'resetSettings'} | ${types.RESET_SETTINGS} | ${undefined}
- `(
- '$actionName invokes $mutationName with payload $payload',
- ({ actionName, mutationName, payload }) => {
- it('should set state', done => {
- testAction(actions[actionName], payload, {}, [{ type: mutationName, payload }], [], done);
- });
- },
- );
-
- describe('receiveSettingsSuccess', () => {
- it('calls SET_SETTINGS', () => {
- testAction(
- actions.receiveSettingsSuccess,
- 'foo',
- {},
- [{ type: types.SET_SETTINGS, payload: 'foo' }],
- [],
- );
- });
- });
-
- describe('fetchSettings', () => {
- const state = {
- projectId: 'bar',
- };
-
- const payload = {
- data: {
- container_expiration_policy: 'foo',
- },
- };
-
- it('should fetch the data from the API', done => {
- Api.project = jest.fn().mockResolvedValue(payload);
- testAction(
- actions.fetchSettings,
- null,
- state,
- [],
- [
- { type: 'toggleLoading' },
- { type: 'receiveSettingsSuccess', payload: payload.data.container_expiration_policy },
- { type: 'toggleLoading' },
- ],
- done,
- );
- });
- });
-
- describe('saveSettings', () => {
- const state = {
- projectId: 'bar',
- settings: 'baz',
- };
-
- const payload = {
- data: {
- tag_expiration_policies: 'foo',
- },
- };
-
- it('should fetch the data from the API', done => {
- Api.updateProject = jest.fn().mockResolvedValue(payload);
- testAction(
- actions.saveSettings,
- null,
- state,
- [],
- [
- { type: 'toggleLoading' },
- { type: 'receiveSettingsSuccess', payload: payload.data.container_expiration_policy },
- { type: 'toggleLoading' },
- ],
- done,
- );
- });
- });
-});
diff --git a/spec/frontend/registry/settings/store/getters_spec.js b/spec/frontend/registry/settings/store/getters_spec.js
deleted file mode 100644
index b781d09466c..00000000000
--- a/spec/frontend/registry/settings/store/getters_spec.js
+++ /dev/null
@@ -1,72 +0,0 @@
-import * as getters from '~/registry/settings/store/getters';
-import * as utils from '~/registry/shared/utils';
-import { formOptions } from '../../shared/mock_data';
-
-describe('Getters registry settings store', () => {
- const settings = {
- enabled: true,
- cadence: 'foo',
- keep_n: 'bar',
- older_than: 'baz',
- name_regex: 'name-foo',
- name_regex_keep: 'name-keep-bar',
- };
-
- describe.each`
- getter | variable | formOption
- ${'getCadence'} | ${'cadence'} | ${'cadence'}
- ${'getKeepN'} | ${'keep_n'} | ${'keepN'}
- ${'getOlderThan'} | ${'older_than'} | ${'olderThan'}
- `('Options getter', ({ getter, variable, formOption }) => {
- beforeEach(() => {
- utils.findDefaultOption = jest.fn();
- });
-
- it(`${getter} returns ${variable} when ${variable} exists in settings`, () => {
- expect(getters[getter]({ settings })).toBe(settings[variable]);
- });
-
- it(`${getter} calls findDefaultOption when ${variable} does not exists in settings`, () => {
- getters[getter]({ settings: {}, formOptions });
- expect(utils.findDefaultOption).toHaveBeenCalledWith(formOptions[formOption]);
- });
- });
-
- describe('getSettings', () => {
- it('returns the content of settings', () => {
- const computedGetters = {
- getCadence: settings.cadence,
- getOlderThan: settings.older_than,
- getKeepN: settings.keep_n,
- };
- expect(getters.getSettings({ settings }, computedGetters)).toEqual(settings);
- });
- });
-
- describe('getIsEdited', () => {
- it('returns false when original is equal to settings', () => {
- const same = { foo: 'bar' };
- expect(getters.getIsEdited({ original: same, settings: same })).toBe(false);
- });
-
- it('returns true when original is different from settings', () => {
- expect(getters.getIsEdited({ original: { foo: 'bar' }, settings: { foo: 'baz' } })).toBe(
- true,
- );
- });
- });
-
- describe('getIsDisabled', () => {
- it.each`
- original | enableHistoricEntries | result
- ${undefined} | ${false} | ${true}
- ${{ foo: 'bar' }} | ${undefined} | ${false}
- ${{}} | ${false} | ${false}
- `(
- 'returns $result when original is $original and enableHistoricEntries is $enableHistoricEntries',
- ({ original, enableHistoricEntries, result }) => {
- expect(getters.getIsDisabled({ original, enableHistoricEntries })).toBe(result);
- },
- );
- });
-});
diff --git a/spec/frontend/registry/settings/store/mutations_spec.js b/spec/frontend/registry/settings/store/mutations_spec.js
deleted file mode 100644
index 1d85e38eb36..00000000000
--- a/spec/frontend/registry/settings/store/mutations_spec.js
+++ /dev/null
@@ -1,80 +0,0 @@
-import mutations from '~/registry/settings/store/mutations';
-import * as types from '~/registry/settings/store/mutation_types';
-import createState from '~/registry/settings/store/state';
-import { formOptions, stringifiedFormOptions } from '../../shared/mock_data';
-
-describe('Mutations Registry Store', () => {
- let mockState;
-
- beforeEach(() => {
- mockState = createState();
- });
-
- describe('SET_INITIAL_STATE', () => {
- it('should set the initial state', () => {
- const payload = {
- projectId: 'foo',
- enableHistoricEntries: false,
- adminSettingsPath: 'foo',
- isAdmin: true,
- };
- const expectedState = { ...mockState, ...payload, formOptions };
- mutations[types.SET_INITIAL_STATE](mockState, {
- ...payload,
- ...stringifiedFormOptions,
- });
-
- expect(mockState).toEqual(expectedState);
- });
- });
-
- describe('UPDATE_SETTINGS', () => {
- it('should update the settings', () => {
- mockState.settings = { foo: 'bar' };
- const payload = { foo: 'baz' };
- const expectedState = { ...mockState, settings: payload };
- mutations[types.UPDATE_SETTINGS](mockState, { settings: payload });
- expect(mockState.settings).toEqual(expectedState.settings);
- });
- });
-
- describe('SET_SETTINGS', () => {
- it('should set the settings and original', () => {
- const payload = { foo: 'baz' };
- const expectedState = { ...mockState, settings: payload };
- mutations[types.SET_SETTINGS](mockState, payload);
- expect(mockState.settings).toEqual(expectedState.settings);
- expect(mockState.original).toEqual(expectedState.settings);
- });
-
- it('should keep the default state when settings is not present', () => {
- const originalSettings = { ...mockState.settings };
- mutations[types.SET_SETTINGS](mockState);
- expect(mockState.settings).toEqual(originalSettings);
- expect(mockState.original).toEqual(undefined);
- });
- });
-
- describe('RESET_SETTINGS', () => {
- it('should copy original over settings', () => {
- mockState.settings = { foo: 'bar' };
- mockState.original = { foo: 'baz' };
- mutations[types.RESET_SETTINGS](mockState);
- expect(mockState.settings).toEqual(mockState.original);
- });
-
- it('if original is undefined it should initialize to empty object', () => {
- mockState.settings = { foo: 'bar' };
- mockState.original = undefined;
- mutations[types.RESET_SETTINGS](mockState);
- expect(mockState.settings).toEqual({});
- });
- });
-
- describe('TOGGLE_LOADING', () => {
- it('should toggle the loading', () => {
- mutations[types.TOGGLE_LOADING](mockState);
- expect(mockState.isLoading).toEqual(true);
- });
- });
-});
diff --git a/spec/frontend/registry/shared/__snapshots__/utils_spec.js.snap b/spec/frontend/registry/shared/__snapshots__/utils_spec.js.snap
new file mode 100644
index 00000000000..032007bba51
--- /dev/null
+++ b/spec/frontend/registry/shared/__snapshots__/utils_spec.js.snap
@@ -0,0 +1,101 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Utils formOptionsGenerator returns an object containing cadence 1`] = `
+Array [
+ Object {
+ "default": true,
+ "key": "EVERY_DAY",
+ "label": "Every day",
+ },
+ Object {
+ "default": false,
+ "key": "EVERY_WEEK",
+ "label": "Every week",
+ },
+ Object {
+ "default": false,
+ "key": "EVERY_TWO_WEEKS",
+ "label": "Every two weeks",
+ },
+ Object {
+ "default": false,
+ "key": "EVERY_MONTH",
+ "label": "Every month",
+ },
+ Object {
+ "default": false,
+ "key": "EVERY_THREE_MONTHS",
+ "label": "Every three months",
+ },
+]
+`;
+
+exports[`Utils formOptionsGenerator returns an object containing keepN 1`] = `
+Array [
+ Object {
+ "default": false,
+ "key": "ONE_TAG",
+ "label": "1 tag per image name",
+ "variable": 1,
+ },
+ Object {
+ "default": false,
+ "key": "FIVE_TAGS",
+ "label": "5 tags per image name",
+ "variable": 5,
+ },
+ Object {
+ "default": true,
+ "key": "TEN_TAGS",
+ "label": "10 tags per image name",
+ "variable": 10,
+ },
+ Object {
+ "default": false,
+ "key": "TWENTY_FIVE_TAGS",
+ "label": "25 tags per image name",
+ "variable": 25,
+ },
+ Object {
+ "default": false,
+ "key": "FIFTY_TAGS",
+ "label": "50 tags per image name",
+ "variable": 50,
+ },
+ Object {
+ "default": false,
+ "key": "ONE_HUNDRED_TAGS",
+ "label": "100 tags per image name",
+ "variable": 100,
+ },
+]
+`;
+
+exports[`Utils formOptionsGenerator returns an object containing olderThan 1`] = `
+Array [
+ Object {
+ "default": false,
+ "key": "SEVEN_DAYS",
+ "label": "7 days until tags are automatically removed",
+ "variable": 7,
+ },
+ Object {
+ "default": false,
+ "key": "FOURTEEN_DAYS",
+ "label": "14 days until tags are automatically removed",
+ "variable": 14,
+ },
+ Object {
+ "default": false,
+ "key": "THIRTY_DAYS",
+ "label": "30 days until tags are automatically removed",
+ "variable": 30,
+ },
+ Object {
+ "default": true,
+ "key": "NINETY_DAYS",
+ "label": "90 days until tags are automatically removed",
+ "variable": 90,
+ },
+]
+`;
diff --git a/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js b/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js
index ee765ffd1c0..bee9bca5369 100644
--- a/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js
+++ b/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js
@@ -40,13 +40,13 @@ describe('Expiration Policy Form', () => {
});
describe.each`
- elementName | modelName | value | disabledByToggle
- ${'toggle'} | ${'enabled'} | ${true} | ${'not disabled'}
- ${'interval'} | ${'older_than'} | ${'foo'} | ${'disabled'}
- ${'schedule'} | ${'cadence'} | ${'foo'} | ${'disabled'}
- ${'latest'} | ${'keep_n'} | ${'foo'} | ${'disabled'}
- ${'name-matching'} | ${'name_regex'} | ${'foo'} | ${'disabled'}
- ${'keep-name'} | ${'name_regex_keep'} | ${'bar'} | ${'disabled'}
+ elementName | modelName | value | disabledByToggle
+ ${'toggle'} | ${'enabled'} | ${true} | ${'not disabled'}
+ ${'interval'} | ${'olderThan'} | ${'foo'} | ${'disabled'}
+ ${'schedule'} | ${'cadence'} | ${'foo'} | ${'disabled'}
+ ${'latest'} | ${'keepN'} | ${'foo'} | ${'disabled'}
+ ${'name-matching'} | ${'nameRegex'} | ${'foo'} | ${'disabled'}
+ ${'keep-name'} | ${'nameRegexKeep'} | ${'bar'} | ${'disabled'}
`(
`${FORM_ELEMENTS_ID_PREFIX}-$elementName form element`,
({ elementName, modelName, value, disabledByToggle }) => {
@@ -128,9 +128,9 @@ describe('Expiration Policy Form', () => {
});
describe.each`
- modelName | elementName
- ${'name_regex'} | ${'name-matching'}
- ${'name_regex_keep'} | ${'keep-name'}
+ modelName | elementName
+ ${'nameRegex'} | ${'name-matching'}
+ ${'nameRegexKeep'} | ${'keep-name'}
`('regex textarea validation', ({ modelName, elementName }) => {
const invalidString = new Array(NAME_REGEX_LENGTH + 2).join(',');
diff --git a/spec/frontend/registry/shared/stubs.js b/spec/frontend/registry/shared/stubs.js
new file mode 100644
index 00000000000..f6b88d70e49
--- /dev/null
+++ b/spec/frontend/registry/shared/stubs.js
@@ -0,0 +1,11 @@
+export const GlLoadingIcon = { name: 'gl-loading-icon-stub', template: '<svg></svg>' };
+export const GlCard = {
+ name: 'gl-card-stub',
+ template: `
+<div>
+ <slot name="header"></slot>
+ <slot></slot>
+ <slot name="footer"></slot>
+</div>
+`,
+};
diff --git a/spec/frontend/registry/shared/utils_spec.js b/spec/frontend/registry/shared/utils_spec.js
new file mode 100644
index 00000000000..edb0c3261be
--- /dev/null
+++ b/spec/frontend/registry/shared/utils_spec.js
@@ -0,0 +1,37 @@
+import {
+ formOptionsGenerator,
+ optionLabelGenerator,
+ olderThanTranslationGenerator,
+} from '~/registry/shared/utils';
+
+describe('Utils', () => {
+ describe('optionLabelGenerator', () => {
+ it('returns an array with a set label', () => {
+ const result = optionLabelGenerator(
+ [{ variable: 1 }, { variable: 2 }],
+ olderThanTranslationGenerator,
+ );
+ expect(result).toEqual([
+ { variable: 1, label: '1 day until tags are automatically removed' },
+ { variable: 2, label: '2 days until tags are automatically removed' },
+ ]);
+ });
+ });
+
+ describe('formOptionsGenerator', () => {
+ it('returns an object containing olderThan', () => {
+ expect(formOptionsGenerator().olderThan).toBeDefined();
+ expect(formOptionsGenerator().olderThan).toMatchSnapshot();
+ });
+
+ it('returns an object containing cadence', () => {
+ expect(formOptionsGenerator().cadence).toBeDefined();
+ expect(formOptionsGenerator().cadence).toMatchSnapshot();
+ });
+
+ it('returns an object containing keepN', () => {
+ expect(formOptionsGenerator().keepN).toBeDefined();
+ expect(formOptionsGenerator().keepN).toMatchSnapshot();
+ });
+ });
+});
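The utils spec above fixes the shape produced by the option helpers: optionLabelGenerator decorates each option with a label built from its variable, and formOptionsGenerator groups the cadence/keepN/olderThan option lists captured in the new snapshot. A self-contained sketch of the labelling behaviour the first test asserts (a reimplementation for illustration, not the shipped ~/registry/shared/utils):

    // Pluralizing translator assumed by the spec's expected labels.
    const olderThanTranslationGenerator = days =>
      `${days} ${days === 1 ? 'day' : 'days'} until tags are automatically removed`;

    const optionLabelGenerator = (options, translationGenerator) =>
      options.map(option => ({
        ...option,
        label: translationGenerator(option.variable),
      }));

    // optionLabelGenerator([{ variable: 1 }, { variable: 2 }], olderThanTranslationGenerator)
    //   => [{ variable: 1, label: '1 day until tags are automatically removed' },
    //       { variable: 2, label: '2 days until tags are automatically removed' }]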
diff --git a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js b/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
index 1b938c93df8..db33a9cdce1 100644
--- a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
+++ b/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
@@ -19,9 +19,8 @@ describe('RelatedMergeRequests', () => {
mockData = getJSONFixture(FIXTURE_PATH);
// put the fixture in DOM as the component expects
- document.body.innerHTML = `<div id="js-issuable-app-initial-data">${JSON.stringify(
- mockData,
- )}</div>`;
+ document.body.innerHTML = `<div id="js-issuable-app"></div>`;
+ document.getElementById('js-issuable-app').dataset.initial = JSON.stringify(mockData);
mock = new MockAdapter(axios);
mock.onGet(`${API_ENDPOINT}?per_page=100`).reply(200, mockData, { 'x-total': 2 });
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index f56e296d106..25c108e45bc 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -1,113 +1,245 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`releases/util.js convertGraphQLResponse matches snapshot 1`] = `
+exports[`releases/util.js convertAllReleasesGraphQLResponse matches snapshot 1`] = `
Object {
"data": Array [
Object {
"_links": Object {
- "editUrl": "http://0.0.0.0:3000/root/release-test/-/releases/v5.10/edit",
- "issuesUrl": null,
- "mergeRequestsUrl": null,
- "self": "http://0.0.0.0:3000/root/release-test/-/releases/v5.10",
- "selfUrl": "http://0.0.0.0:3000/root/release-test/-/releases/v5.10",
+ "editUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/edit",
+ "issuesUrl": "http://localhost/releases-namespace/releases-project/-/issues?release_tag=v1.1&scope=all&state=opened",
+ "mergeRequestsUrl": "http://localhost/releases-namespace/releases-project/-/merge_requests?release_tag=v1.1&scope=all&state=opened",
+ "self": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
+ "selfUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
},
"assets": Object {
- "count": 7,
+ "count": 8,
"links": Array [
Object {
- "directAssetUrl": "http://0.0.0.0:3000/root/release-test/-/releases/v5.32/permanent/path/to/runbook",
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-3",
"external": true,
- "id": "gid://gitlab/Releases::Link/69",
- "linkType": "other",
- "name": "An example link",
- "url": "https://example.com/link",
+ "id": "gid://gitlab/Releases::Link/13",
+ "linkType": "image",
+ "name": "Image",
+ "url": "https://example.com/image",
},
Object {
- "directAssetUrl": "https://example.com/package",
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-2",
"external": true,
- "id": "gid://gitlab/Releases::Link/68",
+ "id": "gid://gitlab/Releases::Link/12",
"linkType": "package",
- "name": "An example package link",
+ "name": "Package",
"url": "https://example.com/package",
},
Object {
- "directAssetUrl": "https://example.com/image",
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-1",
+ "external": false,
+ "id": "gid://gitlab/Releases::Link/11",
+ "linkType": "runbook",
+ "name": "Runbook",
+ "url": "http://localhost/releases-namespace/releases-project/runbook",
+ },
+ Object {
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/linux-amd64",
"external": true,
- "id": "gid://gitlab/Releases::Link/67",
- "linkType": "image",
- "name": "An example image",
- "url": "https://example.com/image",
+ "id": "gid://gitlab/Releases::Link/10",
+ "linkType": "other",
+ "name": "linux-amd64 binaries",
+ "url": "https://downloads.example.com/bin/gitlab-linux-amd64",
},
],
"sources": Array [
Object {
"format": "zip",
- "url": "http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.zip",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.zip",
},
Object {
"format": "tar.gz",
- "url": "http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar.gz",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.gz",
},
Object {
"format": "tar.bz2",
- "url": "http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar.bz2",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.bz2",
},
Object {
"format": "tar",
- "url": "http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar",
},
],
},
"author": Object {
- "avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png",
- "username": "root",
- "webUrl": "http://0.0.0.0:3000/root",
+ "avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon",
+ "username": "administrator",
+ "webUrl": "http://localhost/administrator",
},
"commit": Object {
- "shortId": "92e7ea2e",
- "title": "Testing a change.",
+ "shortId": "b83d6e39",
+ "title": "Merge branch 'branch-merged' into 'master'",
},
- "commitPath": "http://0.0.0.0:3000/root/release-test/-/commit/92e7ea2ee4496fe0d00ff69830ba0564d3d1e5a7",
- "descriptionHtml": "<p data-sourcepos=\\"1:1-1:24\\" dir=\\"auto\\">This is version <strong>1.0</strong>!</p>",
+ "commitPath": "http://localhost/releases-namespace/releases-project/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0",
+ "descriptionHtml": "<p data-sourcepos=\\"1:1-1:33\\" dir=\\"auto\\">Best. Release. <strong>Ever.</strong> <gl-emoji title=\\"rocket\\" data-name=\\"rocket\\" data-unicode-version=\\"6.0\\">🚀</gl-emoji></p>",
"evidences": Array [
Object {
- "collectedAt": "2020-08-21T20:15:19Z",
- "filepath": "http://0.0.0.0:3000/root/release-test/-/releases/v5.10/evidences/34.json",
- "sha": "22bde8e8b93d870a29ddc339287a1fbb598f45d1396d",
+ "collectedAt": "2018-12-03T00:00:00Z",
+ "filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json",
+ "sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
},
],
"milestones": Array [
Object {
- "description": "",
- "id": "gid://gitlab/Milestone/60",
+ "description": "The 12.4 milestone",
+ "id": "gid://gitlab/Milestone/124",
"issueStats": Object {
- "closed": 0,
- "total": 0,
+ "closed": 1,
+ "total": 4,
},
"stats": undefined,
"title": "12.4",
"webPath": undefined,
- "webUrl": "/root/release-test/-/milestones/2",
+ "webUrl": "/releases-namespace/releases-project/-/milestones/2",
},
Object {
- "description": "Milestone 12.3",
- "id": "gid://gitlab/Milestone/59",
+ "description": "The 12.3 milestone",
+ "id": "gid://gitlab/Milestone/123",
"issueStats": Object {
- "closed": 1,
- "total": 2,
+ "closed": 3,
+ "total": 5,
},
"stats": undefined,
"title": "12.3",
"webPath": undefined,
- "webUrl": "/root/release-test/-/milestones/1",
+ "webUrl": "/releases-namespace/releases-project/-/milestones/1",
},
],
- "name": "Release 1.0",
- "releasedAt": "2020-08-21T20:15:18Z",
- "tagName": "v5.10",
- "tagPath": "/root/release-test/-/tags/v5.10",
- "upcomingRelease": false,
+ "name": "The first release",
+ "releasedAt": "2018-12-10T00:00:00Z",
+ "tagName": "v1.1",
+ "tagPath": "/releases-namespace/releases-project/-/tags/v1.1",
+ "upcomingRelease": true,
},
],
+ "paginationInfo": Object {
+ "endCursor": "eyJpZCI6IjEifQ",
+ "hasNextPage": false,
+ "hasPreviousPage": false,
+ "startCursor": "eyJpZCI6IjEifQ",
+ },
+}
+`;
+
+exports[`releases/util.js convertOneReleaseGraphQLResponse matches snapshot 1`] = `
+Object {
+ "data": Object {
+ "_links": Object {
+ "editUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/edit",
+ "issuesUrl": "http://localhost/releases-namespace/releases-project/-/issues?release_tag=v1.1&scope=all&state=opened",
+ "mergeRequestsUrl": "http://localhost/releases-namespace/releases-project/-/merge_requests?release_tag=v1.1&scope=all&state=opened",
+ "self": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
+ "selfUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
+ },
+ "assets": Object {
+ "count": 8,
+ "links": Array [
+ Object {
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-3",
+ "external": true,
+ "id": "gid://gitlab/Releases::Link/13",
+ "linkType": "image",
+ "name": "Image",
+ "url": "https://example.com/image",
+ },
+ Object {
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-2",
+ "external": true,
+ "id": "gid://gitlab/Releases::Link/12",
+ "linkType": "package",
+ "name": "Package",
+ "url": "https://example.com/package",
+ },
+ Object {
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-1",
+ "external": false,
+ "id": "gid://gitlab/Releases::Link/11",
+ "linkType": "runbook",
+ "name": "Runbook",
+ "url": "http://localhost/releases-namespace/releases-project/runbook",
+ },
+ Object {
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/linux-amd64",
+ "external": true,
+ "id": "gid://gitlab/Releases::Link/10",
+ "linkType": "other",
+ "name": "linux-amd64 binaries",
+ "url": "https://downloads.example.com/bin/gitlab-linux-amd64",
+ },
+ ],
+ "sources": Array [
+ Object {
+ "format": "zip",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.zip",
+ },
+ Object {
+ "format": "tar.gz",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.gz",
+ },
+ Object {
+ "format": "tar.bz2",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.bz2",
+ },
+ Object {
+ "format": "tar",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar",
+ },
+ ],
+ },
+ "author": Object {
+ "avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon",
+ "username": "administrator",
+ "webUrl": "http://localhost/administrator",
+ },
+ "commit": Object {
+ "shortId": "b83d6e39",
+ "title": "Merge branch 'branch-merged' into 'master'",
+ },
+ "commitPath": "http://localhost/releases-namespace/releases-project/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0",
+ "descriptionHtml": "<p data-sourcepos=\\"1:1-1:33\\" dir=\\"auto\\">Best. Release. <strong>Ever.</strong> <gl-emoji title=\\"rocket\\" data-name=\\"rocket\\" data-unicode-version=\\"6.0\\">🚀</gl-emoji></p>",
+ "evidences": Array [
+ Object {
+ "collectedAt": "2018-12-03T00:00:00Z",
+ "filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json",
+ "sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
+ },
+ ],
+ "milestones": Array [
+ Object {
+ "description": "The 12.4 milestone",
+ "id": "gid://gitlab/Milestone/124",
+ "issueStats": Object {
+ "closed": 1,
+ "total": 4,
+ },
+ "stats": undefined,
+ "title": "12.4",
+ "webPath": undefined,
+ "webUrl": "/releases-namespace/releases-project/-/milestones/2",
+ },
+ Object {
+ "description": "The 12.3 milestone",
+ "id": "gid://gitlab/Milestone/123",
+ "issueStats": Object {
+ "closed": 3,
+ "total": 5,
+ },
+ "stats": undefined,
+ "title": "12.3",
+ "webPath": undefined,
+ "webUrl": "/releases-namespace/releases-project/-/milestones/1",
+ },
+ ],
+ "name": "The first release",
+ "releasedAt": "2018-12-10T00:00:00Z",
+ "tagName": "v1.1",
+ "tagPath": "/releases-namespace/releases-project/-/tags/v1.1",
+ "upcomingRelease": true,
+ },
}
`;
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index e9727801c1a..d92bdc3b99a 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -3,12 +3,15 @@ import { mount } from '@vue/test-utils';
import { merge } from 'lodash';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleaseEditNewApp from '~/releases/components/app_edit_new.vue';
-import { release as originalRelease, milestones as originalMilestones } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
import { BACK_URL_PARAM } from '~/releases/constants';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
+const originalRelease = getJSONFixture('api/releases/release.json');
+const originalMilestones = originalRelease.milestones;
+
describe('Release edit/new component', () => {
let wrapper;
let release;
@@ -17,7 +20,7 @@ describe('Release edit/new component', () => {
let state;
let mock;
- const factory = ({ featureFlags = {}, store: storeUpdates = {} } = {}) => {
+ const factory = async ({ featureFlags = {}, store: storeUpdates = {} } = {}) => {
state = {
release,
markdownDocsPath: 'path/to/markdown/docs',
@@ -65,6 +68,8 @@ describe('Release edit/new component', () => {
},
});
+ await wrapper.vm.$nextTick();
+
wrapper.element.querySelectorAll('input').forEach(input => jest.spyOn(input, 'focus'));
};
@@ -86,7 +91,9 @@ describe('Release edit/new component', () => {
const findForm = () => wrapper.find('form');
describe(`basic functionality tests: all tests unrelated to the "${BACK_URL_PARAM}" parameter`, () => {
- beforeEach(factory);
+ beforeEach(async () => {
+ await factory();
+ });
it('calls initializeRelease when the component is created', () => {
expect(actions.initializeRelease).toHaveBeenCalledTimes(1);
@@ -128,7 +135,9 @@ describe('Release edit/new component', () => {
});
describe(`when the URL does not contain a "${BACK_URL_PARAM}" parameter`, () => {
- beforeEach(factory);
+ beforeEach(async () => {
+ await factory();
+ });
it(`renders a "Cancel" button with an href pointing to "${BACK_URL_PARAM}"`, () => {
const cancelButton = wrapper.find('.js-cancel-button');
@@ -139,12 +148,12 @@ describe('Release edit/new component', () => {
describe(`when the URL contains a "${BACK_URL_PARAM}" parameter`, () => {
const backUrl = 'https://example.gitlab.com/back/url';
- beforeEach(() => {
+ beforeEach(async () => {
commonUtils.getParameterByName = jest
.fn()
.mockImplementation(paramToGet => ({ [BACK_URL_PARAM]: backUrl }[paramToGet]));
- factory();
+ await factory();
});
it('renders a "Cancel" button with an href pointing to the main Releases page', () => {
@@ -154,8 +163,8 @@ describe('Release edit/new component', () => {
});
describe('when creating a new release', () => {
- beforeEach(() => {
- factory({
+ beforeEach(async () => {
+ await factory({
store: {
modules: {
detail: {
@@ -174,7 +183,9 @@ describe('Release edit/new component', () => {
});
describe('when editing an existing release', () => {
- beforeEach(factory);
+ beforeEach(async () => {
+ await factory();
+ });
it('renders the submit button with the text "Save changes"', () => {
expect(findSubmitButton().text()).toBe('Save changes');
@@ -182,33 +193,17 @@ describe('Release edit/new component', () => {
});
describe('asset links form', () => {
- const findAssetLinksForm = () => wrapper.find(AssetLinksForm);
-
- describe('when the release_asset_link_editing feature flag is disabled', () => {
- beforeEach(() => {
- factory({ featureFlags: { releaseAssetLinkEditing: false } });
- });
-
- it('does not render the asset links portion of the form', () => {
- expect(findAssetLinksForm().exists()).toBe(false);
- });
- });
-
- describe('when the release_asset_link_editing feature flag is enabled', () => {
- beforeEach(() => {
- factory({ featureFlags: { releaseAssetLinkEditing: true } });
- });
+ beforeEach(factory);
- it('renders the asset links portion of the form', () => {
- expect(findAssetLinksForm().exists()).toBe(true);
- });
+ it('renders the asset links portion of the form', () => {
+ expect(wrapper.find(AssetLinksForm).exists()).toBe(true);
});
});
describe('validation', () => {
describe('when the form is valid', () => {
- beforeEach(() => {
- factory({
+ beforeEach(async () => {
+ await factory({
store: {
modules: {
detail: {
@@ -227,8 +222,8 @@ describe('Release edit/new component', () => {
});
describe('when the form is invalid', () => {
- beforeEach(() => {
- factory({
+ beforeEach(async () => {
+ await factory({
store: {
modules: {
detail: {
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index bcb87509cc3..9f1577c2f1e 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -2,27 +2,33 @@ import { range as rge } from 'lodash';
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleasesApp from '~/releases/components/app_index.vue';
import createStore from '~/releases/stores';
import createListModule from '~/releases/stores/modules/list';
import api from '~/api';
-import {
- pageInfoHeadersWithoutPagination,
- pageInfoHeadersWithPagination,
- release2 as release,
- releases,
-} from '../mock_data';
+import { pageInfoHeadersWithoutPagination, pageInfoHeadersWithPagination } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
+import ReleasesPagination from '~/releases/components/releases_pagination.vue';
+
+jest.mock('~/lib/utils/common_utils', () => ({
+ ...jest.requireActual('~/lib/utils/common_utils'),
+ getParameterByName: jest.fn().mockImplementation(paramName => {
+ return `${paramName}_param_value`;
+ }),
+}));
const localVue = createLocalVue();
localVue.use(Vuex);
+const release = getJSONFixture('api/releases/release.json');
+const releases = [release];
+
describe('Releases App ', () => {
let wrapper;
let fetchReleaseSpy;
- const releasesPagination = rge(21).map(index => ({
+ const paginatedReleases = rge(21).map(index => ({
...convertObjectPropsToCamelCase(release, { deep: true }),
tagName: `${index}.00`,
}));
@@ -70,9 +76,13 @@ describe('Releases App ', () => {
createComponent();
});
- it('calls fetchRelease with the page parameter', () => {
+ it('calls fetchRelease with the page, before, and after parameters', () => {
expect(fetchReleaseSpy).toHaveBeenCalledTimes(1);
- expect(fetchReleaseSpy).toHaveBeenCalledWith(expect.anything(), { page: null });
+ expect(fetchReleaseSpy).toHaveBeenCalledWith(expect.anything(), {
+ page: 'page_param_value',
+ before: 'before_param_value',
+ after: 'after_param_value',
+ });
});
});
@@ -91,7 +101,7 @@ describe('Releases App ', () => {
expect(wrapper.contains('.js-loading')).toBe(true);
expect(wrapper.contains('.js-empty-state')).toBe(false);
expect(wrapper.contains('.js-success-state')).toBe(false);
- expect(wrapper.contains(TablePagination)).toBe(false);
+ expect(wrapper.contains(ReleasesPagination)).toBe(false);
});
});
@@ -108,7 +118,7 @@ describe('Releases App ', () => {
expect(wrapper.contains('.js-loading')).toBe(false);
expect(wrapper.contains('.js-empty-state')).toBe(false);
expect(wrapper.contains('.js-success-state')).toBe(true);
- expect(wrapper.contains(TablePagination)).toBe(true);
+ expect(wrapper.contains(ReleasesPagination)).toBe(true);
});
});
@@ -116,7 +126,7 @@ describe('Releases App ', () => {
beforeEach(() => {
jest
.spyOn(api, 'releases')
- .mockResolvedValue({ data: releasesPagination, headers: pageInfoHeadersWithPagination });
+ .mockResolvedValue({ data: paginatedReleases, headers: pageInfoHeadersWithPagination });
createComponent();
});
@@ -125,7 +135,7 @@ describe('Releases App ', () => {
expect(wrapper.contains('.js-loading')).toBe(false);
expect(wrapper.contains('.js-empty-state')).toBe(false);
expect(wrapper.contains('.js-success-state')).toBe(true);
- expect(wrapper.contains(TablePagination)).toBe(true);
+ expect(wrapper.contains(ReleasesPagination)).toBe(true);
});
});
@@ -154,7 +164,7 @@ describe('Releases App ', () => {
const newReleasePath = 'path/to/new/release';
beforeEach(() => {
- createComponent({ ...defaultInitialState, newReleasePath });
+ createComponent({ newReleasePath });
});
it('renders the "New release" button', () => {
@@ -174,4 +184,27 @@ describe('Releases App ', () => {
});
});
});
+
+ describe('when the back button is pressed', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(api, 'releases')
+ .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
+
+ createComponent();
+
+ fetchReleaseSpy.mockClear();
+
+ window.dispatchEvent(new PopStateEvent('popstate'));
+ });
+
+ it('calls fetchRelease with the page parameter', () => {
+ expect(fetchReleaseSpy).toHaveBeenCalledTimes(1);
+ expect(fetchReleaseSpy).toHaveBeenCalledWith(expect.anything(), {
+ page: 'page_param_value',
+ before: 'before_param_value',
+ after: 'after_param_value',
+ });
+ });
+ });
});
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
index 502a1053663..181fa0150f1 100644
--- a/spec/frontend/releases/components/app_show_spec.js
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -1,11 +1,13 @@
import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleaseShowApp from '~/releases/components/app_show.vue';
-import { release as originalRelease } from '../mock_data';
+import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release show component', () => {
let wrapper;
let release;
@@ -33,7 +35,7 @@ describe('Release show component', () => {
wrapper = shallowMount(ReleaseShowApp, { store });
};
- const findLoadingSkeleton = () => wrapper.find(GlSkeletonLoading);
+ const findLoadingSkeleton = () => wrapper.find(ReleaseSkeletonLoader);
const findReleaseBlock = () => wrapper.find(ReleaseBlock);
it('calls fetchRelease when the component is created', () => {
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
index 582c0b32716..6794a56debc 100644
--- a/spec/frontend/releases/components/asset_links_form_spec.js
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -1,7 +1,7 @@
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
+import { getJSONFixture } from 'helpers/fixtures';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
-import { release as originalRelease } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
import { ENTER_KEY } from '~/lib/utils/keys';
import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
@@ -9,6 +9,8 @@ import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release edit component', () => {
let wrapper;
let release;
@@ -54,11 +56,6 @@ describe('Release edit component', () => {
wrapper = mount(AssetLinksForm, {
localVue,
store,
- provide: {
- glFeatures: {
- releaseAssetLinkType: true,
- },
- },
});
};
@@ -223,10 +220,18 @@ describe('Release edit component', () => {
});
});
- it('selects the default asset type if no type was provided by the backend', () => {
- const selected = wrapper.find({ ref: 'typeSelect' }).element.value;
+ describe('when no link type was provided by the backend', () => {
+ beforeEach(() => {
+ delete release.assets.links[0].linkType;
+
+ factory({ release });
+ });
+
+ it('selects the default asset type', () => {
+ const selected = wrapper.find({ ref: 'typeSelect' }).element.value;
- expect(selected).toBe(DEFAULT_ASSET_LINK_TYPE);
+ expect(selected).toBe(DEFAULT_ASSET_LINK_TYPE);
+ });
});
});
diff --git a/spec/frontend/releases/components/evidence_block_spec.js b/spec/frontend/releases/components/evidence_block_spec.js
index ba60a79e464..b8c78f90fc2 100644
--- a/spec/frontend/releases/components/evidence_block_spec.js
+++ b/spec/frontend/releases/components/evidence_block_spec.js
@@ -1,11 +1,13 @@
import { mount } from '@vue/test-utils';
import { GlLink, GlIcon } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import { truncateSha } from '~/lib/utils/text_utility';
-import { release as originalRelease } from '../mock_data';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Evidence Block', () => {
let wrapper;
let release;
@@ -35,7 +37,7 @@ describe('Evidence Block', () => {
});
   it('renders the title for the download link', () => {
- expect(wrapper.find(GlLink).text()).toBe('v1.1.2-evidences-1.json');
+ expect(wrapper.find(GlLink).text()).toBe(`v1.1-evidences-1.json`);
});
it('renders the correct hover text for the download', () => {
@@ -43,7 +45,7 @@ describe('Evidence Block', () => {
});
it('renders the correct file link for download', () => {
- expect(wrapper.find(GlLink).attributes().download).toBe('v1.1.2-evidences-1.json');
+ expect(wrapper.find(GlLink).attributes().download).toBe(`v1.1-evidences-1.json`);
});
describe('sha text', () => {
diff --git a/spec/frontend/releases/components/release_block_assets_spec.js b/spec/frontend/releases/components/release_block_assets_spec.js
index 3453ecbf8ab..126ca27e8a6 100644
--- a/spec/frontend/releases/components/release_block_assets_spec.js
+++ b/spec/frontend/releases/components/release_block_assets_spec.js
@@ -1,10 +1,12 @@
import { mount } from '@vue/test-utils';
import { GlCollapse } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
-import { cloneDeep } from 'lodash';
+import { getJSONFixture } from 'helpers/fixtures';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockAssets from '~/releases/components/release_block_assets.vue';
import { ASSET_LINK_TYPE } from '~/releases/constants';
-import { assets } from '../mock_data';
+
+const { assets } = getJSONFixture('api/releases/release.json');
describe('Release block assets', () => {
let wrapper;
@@ -20,9 +22,6 @@ describe('Release block assets', () => {
const createComponent = (propsData = defaultProps) => {
wrapper = mount(ReleaseBlockAssets, {
- provide: {
- glFeatures: { releaseAssetLinkType: true },
- },
propsData,
});
};
@@ -31,7 +30,7 @@ describe('Release block assets', () => {
wrapper.findAll('h5').filter(h5 => h5.text() === sections[type]);
beforeEach(() => {
- defaultProps = { assets: cloneDeep(assets) };
+ defaultProps = { assets: convertObjectPropsToCamelCase(assets, { deep: true }) };
});
describe('with default props', () => {
@@ -43,7 +42,7 @@ describe('Release block assets', () => {
const accordionButton = findAccordionButton();
expect(accordionButton.exists()).toBe(true);
- expect(trimText(accordionButton.text())).toBe('Assets 5');
+ expect(trimText(accordionButton.text())).toBe('Assets 8');
});
it('renders the accordion as expanded by default', () => {
diff --git a/spec/frontend/releases/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js
index bde01cc0e00..f1c0c24f8ca 100644
--- a/spec/frontend/releases/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/components/release_block_footer_spec.js
@@ -1,11 +1,13 @@
import { mount } from '@vue/test-utils';
import { GlLink, GlIcon } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import { cloneDeep } from 'lodash';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
-import { release as originalRelease } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
const mockFutureDate = new Date(9999, 0, 0).toISOString();
let mockIsFutureRelease = false;
diff --git a/spec/frontend/releases/components/release_block_header_spec.js b/spec/frontend/releases/components/release_block_header_spec.js
index 9c6cbc86d3c..f2159871395 100644
--- a/spec/frontend/releases/components/release_block_header_spec.js
+++ b/spec/frontend/releases/components/release_block_header_spec.js
@@ -1,11 +1,13 @@
import { shallowMount } from '@vue/test-utils';
import { merge } from 'lodash';
import { GlLink } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleaseBlockHeader from '~/releases/components/release_block_header.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import { release as originalRelease } from '../mock_data';
import { BACK_URL_PARAM } from '~/releases/constants';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release block header', () => {
let wrapper;
let release;
@@ -49,7 +51,7 @@ describe('Release block header', () => {
});
it('renders the title as text', () => {
- expect(findHeader().text()).toBe(release.name);
+ expect(findHeader().text()).toContain(release.name);
expect(findHeaderLink().exists()).toBe(false);
});
});
diff --git a/spec/frontend/releases/components/release_block_metadata_spec.js b/spec/frontend/releases/components/release_block_metadata_spec.js
deleted file mode 100644
index 6f184e45600..00000000000
--- a/spec/frontend/releases/components/release_block_metadata_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-import { mount } from '@vue/test-utils';
-import { trimText } from 'helpers/text_helper';
-import { cloneDeep } from 'lodash';
-import ReleaseBlockMetadata from '~/releases/components/release_block_metadata.vue';
-import { release as originalRelease } from '../mock_data';
-import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-
-const mockFutureDate = new Date(9999, 0, 0).toISOString();
-let mockIsFutureRelease = false;
-
-jest.mock('~/vue_shared/mixins/timeago', () => ({
- methods: {
- timeFormatted() {
- return mockIsFutureRelease ? 'in 1 month' : '7 fortnights ago';
- },
- tooltipTitle() {
- return 'February 30, 2401';
- },
- },
-}));
-
-describe('Release block metadata', () => {
- let wrapper;
- let release;
-
- const factory = (releaseUpdates = {}) => {
- wrapper = mount(ReleaseBlockMetadata, {
- propsData: {
- release: {
- ...convertObjectPropsToCamelCase(release, { deep: true }),
- ...releaseUpdates,
- },
- },
- });
- };
-
- beforeEach(() => {
- release = cloneDeep(originalRelease);
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- mockIsFutureRelease = false;
- });
-
- const findReleaseDateInfo = () => wrapper.find('.js-release-date-info');
-
- describe('with all props provided', () => {
- beforeEach(() => factory());
-
- it('renders the release time info', () => {
- expect(trimText(findReleaseDateInfo().text())).toBe(`released 7 fortnights ago`);
- });
- });
-
- describe('with a future release date', () => {
- beforeEach(() => {
- mockIsFutureRelease = true;
- factory({ releasedAt: mockFutureDate });
- });
-
- it('renders the release date without the author name', () => {
- expect(trimText(findReleaseDateInfo().text())).toBe(`will be released in 1 month`);
- });
- });
-});
diff --git a/spec/frontend/releases/components/release_block_milestone_info_spec.js b/spec/frontend/releases/components/release_block_milestone_info_spec.js
index 0e79c45b337..45f4eaa01a9 100644
--- a/spec/frontend/releases/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/components/release_block_milestone_info_spec.js
@@ -1,11 +1,13 @@
import { mount } from '@vue/test-utils';
import { GlProgressBar, GlLink, GlBadge, GlButton } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleaseBlockMilestoneInfo from '~/releases/components/release_block_milestone_info.vue';
-import { milestones as originalMilestones } from '../mock_data';
import { MAX_MILESTONES_TO_DISPLAY } from '~/releases/constants';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const { milestones: originalMilestones } = getJSONFixture('api/releases/release.json');
+
describe('Release block milestone info', () => {
let wrapper;
let milestones;
@@ -35,7 +37,7 @@ describe('Release block milestone info', () => {
beforeEach(() => factory({ milestones }));
it('renders the correct percentage', () => {
- expect(milestoneProgressBarContainer().text()).toContain('41% complete');
+ expect(milestoneProgressBarContainer().text()).toContain('44% complete');
});
it('renders a progress bar that displays the correct percentage', () => {
@@ -44,14 +46,24 @@ describe('Release block milestone info', () => {
expect(progressBar.exists()).toBe(true);
expect(progressBar.attributes()).toEqual(
expect.objectContaining({
- value: '22',
- max: '54',
+ value: '4',
+ max: '9',
}),
);
});
it('renders a list of links to all associated milestones', () => {
- expect(trimText(milestoneListContainer().text())).toContain('Milestones 13.6 • 13.5');
+ // The API currently returns the milestones in a non-deterministic order,
+ // which causes the frontend fixture used by this test to return the
+ // milestones in one order locally and a different order in the CI pipeline.
+ // This is a bug and is tracked here: https://gitlab.com/gitlab-org/gitlab/-/issues/259012
+      // When this bug is fixed, this expectation should be updated to
+      // assert the expected order.
+ const containerText = trimText(milestoneListContainer().text());
+ expect(
+ containerText.includes('Milestones 12.4 • 12.3') ||
+ containerText.includes('Milestones 12.3 • 12.4'),
+ ).toBe(true);
milestones.forEach((m, i) => {
const milestoneLink = milestoneListContainer()
@@ -65,7 +77,7 @@ describe('Release block milestone info', () => {
});
it('renders the "Issues" section with a total count of issues associated to the milestone(s)', () => {
- const totalIssueCount = 54;
+ const totalIssueCount = 9;
const issuesContainerText = trimText(issuesContainer().text());
expect(issuesContainerText).toContain(`Issues ${totalIssueCount}`);
@@ -73,7 +85,7 @@ describe('Release block milestone info', () => {
const badge = issuesContainer().find(GlBadge);
expect(badge.text()).toBe(totalIssueCount.toString());
- expect(issuesContainerText).toContain('Open: 32 • Closed: 22');
+ expect(issuesContainerText).toContain('Open: 5 • Closed: 4');
});
});
diff --git a/spec/frontend/releases/components/release_block_spec.js b/spec/frontend/releases/components/release_block_spec.js
index a7f1388664b..633c6690529 100644
--- a/spec/frontend/releases/components/release_block_spec.js
+++ b/spec/frontend/releases/components/release_block_spec.js
@@ -1,15 +1,16 @@
import $ from 'jquery';
import { mount } from '@vue/test-utils';
-import { GlIcon } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
import timeagoMixin from '~/vue_shared/mixins/timeago';
-import { release as originalRelease } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
import { BACK_URL_PARAM } from '~/releases/constants';
import * as urlUtility from '~/lib/utils/url_utility';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release block', () => {
let wrapper;
let release;
@@ -21,7 +22,6 @@ describe('Release block', () => {
},
provide: {
glFeatures: {
- releaseIssueSummary: true,
...featureFlags,
},
},
@@ -46,7 +46,7 @@ describe('Release block', () => {
beforeEach(() => factory(release));
it("renders the block with an id equal to the release's tag name", () => {
- expect(wrapper.attributes().id).toBe('v0.3');
+ expect(wrapper.attributes().id).toBe(release.tagName);
});
it(`renders an edit button that links to the "Edit release" page with a "${BACK_URL_PARAM}" parameter`, () => {
@@ -69,50 +69,10 @@ describe('Release block', () => {
expect(wrapper.text()).toContain(timeagoMixin.methods.timeFormatted(release.releasedAt));
});
- it('renders number of assets provided', () => {
- expect(wrapper.find('.js-assets-count').text()).toContain(release.assets.count);
- });
-
- it('renders dropdown with the sources', () => {
- expect(wrapper.findAll('.js-sources-dropdown li').length).toEqual(
- release.assets.sources.length,
- );
-
- expect(wrapper.find('.js-sources-dropdown li a').attributes().href).toEqual(
- release.assets.sources[0].url,
- );
-
- expect(wrapper.find('.js-sources-dropdown li a').text()).toContain(
- release.assets.sources[0].format,
- );
- });
-
- it('renders list with the links provided', () => {
- expect(wrapper.findAll('.js-assets-list li').length).toEqual(release.assets.links.length);
-
- expect(wrapper.find('.js-assets-list li a').attributes().href).toEqual(
- release.assets.links[0].directAssetUrl,
- );
-
- expect(wrapper.find('.js-assets-list li a').text()).toContain(release.assets.links[0].name);
- });
-
it('renders author avatar', () => {
expect(wrapper.find('.user-avatar-link').exists()).toBe(true);
});
- describe('external label', () => {
- it('renders external label when link is external', () => {
- expect(wrapper.find('.js-assets-list li a').text()).toContain('external source');
- });
-
- it('does not render external label when link is not external', () => {
- expect(wrapper.find('.js-assets-list li:nth-child(2) a').text()).not.toContain(
- 'external source',
- );
- });
- });
-
it('renders the footer', () => {
expect(wrapper.find(ReleaseBlockFooter).exists()).toBe(true);
});
@@ -171,18 +131,14 @@ describe('Release block', () => {
});
describe('evidence block', () => {
- it('renders the evidence block when the evidence is available and the feature flag is true', () =>
- factory(release, { releaseEvidenceCollection: true }).then(() =>
- expect(wrapper.find(EvidenceBlock).exists()).toBe(true),
- ));
-
- it('does not render the evidence block when the evidence is available but the feature flag is false', () =>
- factory(release, { releaseEvidenceCollection: true }).then(() =>
- expect(wrapper.find(EvidenceBlock).exists()).toBe(true),
- ));
+ it('renders the evidence block when the evidence is available', () => {
+ return factory(release).then(() => {
+ expect(wrapper.find(EvidenceBlock).exists()).toBe(true);
+ });
+ });
it('does not render the evidence block when there is no evidence', () => {
- release.evidenceSha = null;
+ release.evidences = [];
return factory(release).then(() => {
expect(wrapper.find(EvidenceBlock).exists()).toBe(false);
@@ -239,51 +195,4 @@ describe('Release block', () => {
});
});
});
-
- describe('when the releaseIssueSummary feature flag is disabled', () => {
- describe('with default props', () => {
- beforeEach(() => factory(release, { releaseIssueSummary: false }));
-
- it('renders the milestone icon', () => {
- expect(
- milestoneListLabel()
- .find(GlIcon)
- .exists(),
- ).toBe(true);
- });
-
- it('renders the label as "Milestones" if more than one milestone is passed in', () => {
- expect(
- milestoneListLabel()
- .find('.js-label-text')
- .text(),
- ).toEqual('Milestones');
- });
-
- it('renders a link to the milestone with a tooltip', () => {
- const milestone = release.milestones[0];
- const milestoneLink = wrapper.find('.js-milestone-link');
-
- expect(milestoneLink.exists()).toBe(true);
-
- expect(milestoneLink.text()).toBe(milestone.title);
-
- expect(milestoneLink.attributes('href')).toBe(milestone.webUrl);
-
- expect(milestoneLink.attributes('title')).toBe(milestone.description);
- });
- });
-
- it('renders the label as "Milestone" if only a single milestone is passed in', () => {
- release.milestones = release.milestones.slice(0, 1);
-
- return factory(release, { releaseIssueSummary: false }).then(() => {
- expect(
- milestoneListLabel()
- .find('.js-label-text')
- .text(),
- ).toEqual('Milestone');
- });
- });
- });
});
diff --git a/spec/frontend/releases/components/release_skeleton_loader_spec.js b/spec/frontend/releases/components/release_skeleton_loader_spec.js
new file mode 100644
index 00000000000..7fbf864568a
--- /dev/null
+++ b/spec/frontend/releases/components/release_skeleton_loader_spec.js
@@ -0,0 +1,15 @@
+import { mount } from '@vue/test-utils';
+import { GlSkeletonLoader } from '@gitlab/ui';
+import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
+
+describe('release_skeleton_loader.vue', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = mount(ReleaseSkeletonLoader);
+ });
+
+ it('renders a GlSkeletonLoader', () => {
+ expect(wrapper.find(GlSkeletonLoader).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/releases/components/releases_pagination_graphql_spec.js b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
index b01a28eb6c3..bba5e532e5e 100644
--- a/spec/frontend/releases/components/releases_pagination_graphql_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
@@ -29,7 +29,7 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
listModule.state.graphQlPageInfo = pageInfo;
- listModule.actions.fetchReleasesGraphQl = jest.fn();
+ listModule.actions.fetchReleases = jest.fn();
wrapper = mount(ReleasesPaginationGraphql, {
store: createStore({
@@ -141,8 +141,8 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
findNextButton().trigger('click');
});
- it('calls fetchReleasesGraphQl with the correct after cursor', () => {
- expect(listModule.actions.fetchReleasesGraphQl.mock.calls).toEqual([
+ it('calls fetchReleases with the correct after cursor', () => {
+ expect(listModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { after: cursors.endCursor }],
]);
});
@@ -159,8 +159,8 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
findPrevButton().trigger('click');
});
- it('calls fetchReleasesGraphQl with the correct before cursor', () => {
- expect(listModule.actions.fetchReleasesGraphQl.mock.calls).toEqual([
+ it('calls fetchReleases with the correct before cursor', () => {
+ expect(listModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { before: cursors.startCursor }],
]);
});
diff --git a/spec/frontend/releases/components/releases_pagination_rest_spec.js b/spec/frontend/releases/components/releases_pagination_rest_spec.js
index 4fd3e085fc9..59c0c31413a 100644
--- a/spec/frontend/releases/components/releases_pagination_rest_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_rest_spec.js
@@ -20,9 +20,9 @@ describe('~/releases/components/releases_pagination_rest.vue', () => {
const createComponent = pageInfo => {
listModule = createListModule({ projectId });
- listModule.state.pageInfo = pageInfo;
+ listModule.state.restPageInfo = pageInfo;
- listModule.actions.fetchReleasesRest = jest.fn();
+ listModule.actions.fetchReleases = jest.fn();
wrapper = mount(ReleasesPaginationRest, {
store: createStore({
@@ -57,8 +57,8 @@ describe('~/releases/components/releases_pagination_rest.vue', () => {
findGlPagination().vm.$emit('input', newPage);
});
- it('calls fetchReleasesRest with the correct page', () => {
- expect(listModule.actions.fetchReleasesRest.mock.calls).toEqual([
+ it('calls fetchReleases with the correct page', () => {
+ expect(listModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { page: newPage }],
]);
});
diff --git a/spec/frontend/releases/mock_data.js b/spec/frontend/releases/mock_data.js
index 58cd69a2f6a..c89182faa44 100644
--- a/spec/frontend/releases/mock_data.js
+++ b/spec/frontend/releases/mock_data.js
@@ -1,139 +1,3 @@
-import { ASSET_LINK_TYPE } from '~/releases/constants';
-
-export const milestones = [
- {
- id: 50,
- iid: 2,
- project_id: 18,
- title: '13.6',
- description: 'The 13.6 milestone!',
- state: 'active',
- created_at: '2019-08-27T17:22:38.280Z',
- updated_at: '2019-08-27T17:22:38.280Z',
- due_date: '2019-09-19',
- start_date: '2019-08-31',
- web_url: 'http://0.0.0.0:3001/root/release-test/-/milestones/2',
- issue_stats: {
- total: 33,
- closed: 19,
- },
- },
- {
- id: 49,
- iid: 1,
- project_id: 18,
- title: '13.5',
- description: 'The 13.5 milestone!',
- state: 'active',
- created_at: '2019-08-26T17:55:48.643Z',
- updated_at: '2019-08-26T17:55:48.643Z',
- due_date: '2019-10-11',
- start_date: '2019-08-19',
- web_url: 'http://0.0.0.0:3001/root/release-test/-/milestones/1',
- issue_stats: {
- total: 21,
- closed: 3,
- },
- },
-];
-
-export const release = {
- name: 'New release',
- tag_name: 'v0.3',
- tag_path: '/root/release-test/-/tags/v0.3',
- description: 'A super nice release!',
- description_html: '<p data-sourcepos="1:1-1:21" dir="auto">A super nice release!</p>',
- created_at: '2019-08-26T17:54:04.952Z',
- released_at: '2019-08-26T17:54:04.807Z',
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- web_url: 'http://0.0.0.0:3001/root',
- },
- commit: {
- id: 'c22b0728d1b465f82898c884d32b01aa642f96c1',
- short_id: 'c22b0728',
- created_at: '2019-08-26T17:47:07.000Z',
- parent_ids: [],
- title: 'Initial commit',
- message: 'Initial commit',
- author_name: 'Administrator',
- author_email: 'admin@example.com',
- authored_date: '2019-08-26T17:47:07.000Z',
- committer_name: 'Administrator',
- committer_email: 'admin@example.com',
- committed_date: '2019-08-26T17:47:07.000Z',
- },
- commit_path: '/root/release-test/commit/c22b0728d1b465f82898c884d32b01aa642f96c1',
- upcoming_release: false,
- milestones,
- evidences: [
- {
- filepath:
- 'https://20592.qa-tunnel.gitlab.info/root/test-deployments/-/releases/v1.1.2/evidences/1.json',
- sha: 'fb3a125fd69a0e5048ebfb0ba43eb32ce4911520dd8d',
- collected_at: '2018-10-19 15:43:20 +0200',
- },
- {
- filepath:
- 'https://20592.qa-tunnel.gitlab.info/root/test-deployments/-/releases/v1.1.2/evidences/2.json',
- sha: '6ebd17a66e6a861175735416e49cf677678029805712dd71bb805c609e2d9108',
- collected_at: '2018-10-19 15:43:20 +0200',
- },
- {
- filepath:
- 'https://20592.qa-tunnel.gitlab.info/root/test-deployments/-/releases/v1.1.2/evidences/3.json',
- sha: '2f65beaf275c3cb4b4e24fb01d481cc475d69c957830833f15338384816b5cba',
- collected_at: '2018-10-19 15:43:20 +0200',
- },
- ],
- assets: {
- count: 5,
- sources: [
- {
- format: 'zip',
- url: 'http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.zip',
- },
- {
- format: 'tar.gz',
- url: 'http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar.gz',
- },
- {
- format: 'tar.bz2',
- url: 'http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar.bz2',
- },
- {
- format: 'tar',
- url: 'http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar',
- },
- ],
- links: [
- {
- id: 1,
- name: 'my link',
- url: 'https://google.com',
- direct_asset_url: 'https://redirected.google.com',
- external: true,
- },
- {
- id: 2,
- name: 'my second link',
- url:
- 'https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/artifacts/v11.6.0-rc4/download?job=rspec-mysql+41%2F50',
- direct_asset_url: 'https://redirected.google.com',
- external: false,
- },
- ],
- },
- _links: {
- self: 'http://0.0.0.0:3001/root/release-test/-/releases/v0.3',
- edit_url: 'http://0.0.0.0:3001/root/release-test/-/releases/v0.3/edit',
- },
-};
-
export const pageInfoHeadersWithoutPagination = {
'X-NEXT-PAGE': '',
'X-PAGE': '1',
@@ -151,202 +15,3 @@ export const pageInfoHeadersWithPagination = {
'X-TOTAL': '21',
'X-TOTAL-PAGES': '2',
};
-
-export const assets = {
- count: 5,
- sources: [
- {
- format: 'zip',
- url: 'https://example.gitlab.com/path/to/zip',
- },
- ],
- links: [
- {
- linkType: ASSET_LINK_TYPE.IMAGE,
- url: 'https://example.gitlab.com/path/to/image',
- directAssetUrl: 'https://example.gitlab.com/path/to/image',
- name: 'Example image link',
- },
- {
- linkType: ASSET_LINK_TYPE.PACKAGE,
- url: 'https://example.gitlab.com/path/to/package',
- directAssetUrl: 'https://example.gitlab.com/path/to/package',
- name: 'Example package link',
- },
- {
- linkType: ASSET_LINK_TYPE.RUNBOOK,
- url: 'https://example.gitlab.com/path/to/runbook',
- directAssetUrl: 'https://example.gitlab.com/path/to/runbook',
- name: 'Example runbook link',
- },
- {
- linkType: ASSET_LINK_TYPE.OTHER,
- url: 'https://example.gitlab.com/path/to/link',
- directAssetUrl: 'https://example.gitlab.com/path/to/link',
- name: 'Example link',
- },
- ],
-};
-
-export const release2 = {
- name: 'Bionic Beaver',
- tag_name: '18.04',
- description: '## changelog\n\n* line 1\n* line2',
- description_html: '<div><h2>changelog</h2><ul><li>line1</li<li>line 2</li></ul></div>',
- author_name: 'Release bot',
- author_email: 'release-bot@example.com',
- created_at: '2012-05-28T05:00:00-07:00',
- commit: {
- id: '2695effb5807a22ff3d138d593fd856244e155e7',
- short_id: '2695effb',
- title: 'Initial commit',
- created_at: '2017-07-26T11:08:53.000+02:00',
- parent_ids: ['2a4b78934375d7f53875269ffd4f45fd83a84ebe'],
- message: 'Initial commit',
- author: {
- avatar_url: 'uploads/-/system/user/avatar/johndoe/avatar.png',
- id: 482476,
- name: 'John Doe',
- path: '/johndoe',
- state: 'active',
- status_tooltip_html: null,
- username: 'johndoe',
- web_url: 'https://gitlab.com/johndoe',
- },
- authored_date: '2012-05-28T04:42:42-07:00',
- committer_name: 'Jack Smith',
- committer_email: 'jack@example.com',
- committed_date: '2012-05-28T04:42:42-07:00',
- },
- assets,
-};
-
-export const releases = [release, release2];
-
-export const graphqlReleasesResponse = {
- data: {
- project: {
- releases: {
- count: 39,
- nodes: [
- {
- name: 'Release 1.0',
- tagName: 'v5.10',
- tagPath: '/root/release-test/-/tags/v5.10',
- descriptionHtml:
- '<p data-sourcepos="1:1-1:24" dir="auto">This is version <strong>1.0</strong>!</p>',
- releasedAt: '2020-08-21T20:15:18Z',
- upcomingRelease: false,
- assets: {
- count: 7,
- sources: {
- nodes: [
- {
- format: 'zip',
- url:
- 'http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.zip',
- },
- {
- format: 'tar.gz',
- url:
- 'http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar.gz',
- },
- {
- format: 'tar.bz2',
- url:
- 'http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar.bz2',
- },
- {
- format: 'tar',
- url:
- 'http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar',
- },
- ],
- },
- links: {
- nodes: [
- {
- id: 'gid://gitlab/Releases::Link/69',
- name: 'An example link',
- url: 'https://example.com/link',
- directAssetUrl:
- 'http://0.0.0.0:3000/root/release-test/-/releases/v5.32/permanent/path/to/runbook',
- linkType: 'OTHER',
- external: true,
- },
- {
- id: 'gid://gitlab/Releases::Link/68',
- name: 'An example package link',
- url: 'https://example.com/package',
- directAssetUrl: 'https://example.com/package',
- linkType: 'PACKAGE',
- external: true,
- },
- {
- id: 'gid://gitlab/Releases::Link/67',
- name: 'An example image',
- url: 'https://example.com/image',
- directAssetUrl: 'https://example.com/image',
- linkType: 'IMAGE',
- external: true,
- },
- ],
- },
- },
- evidences: {
- nodes: [
- {
- filepath:
- 'http://0.0.0.0:3000/root/release-test/-/releases/v5.10/evidences/34.json',
- collectedAt: '2020-08-21T20:15:19Z',
- sha: '22bde8e8b93d870a29ddc339287a1fbb598f45d1396d',
- },
- ],
- },
- links: {
- editUrl: 'http://0.0.0.0:3000/root/release-test/-/releases/v5.10/edit',
- issuesUrl: null,
- mergeRequestsUrl: null,
- selfUrl: 'http://0.0.0.0:3000/root/release-test/-/releases/v5.10',
- },
- commit: {
- sha: '92e7ea2ee4496fe0d00ff69830ba0564d3d1e5a7',
- webUrl:
- 'http://0.0.0.0:3000/root/release-test/-/commit/92e7ea2ee4496fe0d00ff69830ba0564d3d1e5a7',
- title: 'Testing a change.',
- },
- author: {
- webUrl: 'http://0.0.0.0:3000/root',
- avatarUrl: '/uploads/-/system/user/avatar/1/avatar.png',
- username: 'root',
- },
- milestones: {
- nodes: [
- {
- id: 'gid://gitlab/Milestone/60',
- title: '12.4',
- description: '',
- webPath: '/root/release-test/-/milestones/2',
- stats: {
- totalIssuesCount: 0,
- closedIssuesCount: 0,
- },
- },
- {
- id: 'gid://gitlab/Milestone/59',
- title: '12.3',
- description: 'Milestone 12.3',
- webPath: '/root/release-test/-/milestones/1',
- stats: {
- totalIssuesCount: 2,
- closedIssuesCount: 1,
- },
- },
- ],
- },
- },
- ],
- },
- },
- },
-};
diff --git a/spec/frontend/releases/stores/getters_spec.js b/spec/frontend/releases/stores/getters_spec.js
new file mode 100644
index 00000000000..01e10567cf0
--- /dev/null
+++ b/spec/frontend/releases/stores/getters_spec.js
@@ -0,0 +1,22 @@
+import * as getters from '~/releases/stores/getters';
+
+describe('~/releases/stores/getters.js', () => {
+ it.each`
+ graphqlReleaseData | graphqlReleasesPage | graphqlMilestoneStats | result
+ ${false} | ${false} | ${false} | ${false}
+ ${false} | ${false} | ${true} | ${false}
+ ${false} | ${true} | ${false} | ${false}
+ ${false} | ${true} | ${true} | ${false}
+ ${true} | ${false} | ${false} | ${false}
+ ${true} | ${false} | ${true} | ${false}
+ ${true} | ${true} | ${false} | ${false}
+ ${true} | ${true} | ${true} | ${true}
+ `(
+ 'returns $result with feature flag values graphqlReleaseData=$graphqlReleaseData, graphqlReleasesPage=$graphqlReleasesPage, and graphqlMilestoneStats=$graphqlMilestoneStats',
+ ({ result: expectedResult, ...featureFlags }) => {
+ const actualResult = getters.useGraphQLEndpoint({ featureFlags });
+
+ expect(actualResult).toBe(expectedResult);
+ },
+ );
+});
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 1b2a705e8f4..d38f6766d4e 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,10 +1,10 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import { cloneDeep } from 'lodash';
import * as actions from '~/releases/stores/modules/detail/actions';
import * as types from '~/releases/stores/modules/detail/mutation_types';
-import { release as originalRelease } from '../../../mock_data';
import createState from '~/releases/stores/modules/detail/state';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
@@ -21,6 +21,8 @@ jest.mock('~/lib/utils/url_utility', () => ({
joinPaths: jest.requireActual('~/lib/utils/url_utility').joinPaths,
}));
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release detail actions', () => {
let state;
let release;
@@ -32,6 +34,12 @@ describe('Release detail actions', () => {
isExistingRelease: true,
};
+ const rootState = {
+ featureFlags: {
+ graphqlIndividualReleasePage: false,
+ },
+ };
+
state = {
...createState({
projectId: '18',
@@ -42,6 +50,7 @@ describe('Release detail actions', () => {
updateReleaseApiDocsPath: 'path/to/api/docs',
}),
...getters,
+ ...rootState,
...updates,
};
};
@@ -152,7 +161,7 @@ describe('Release detail actions', () => {
});
it(`shows a flash message`, () => {
- return actions.fetchRelease({ commit: jest.fn(), state }).then(() => {
+ return actions.fetchRelease({ commit: jest.fn(), state, rootState: state }).then(() => {
expect(createFlash).toHaveBeenCalledTimes(1);
expect(createFlash).toHaveBeenCalledWith(
'Something went wrong while getting the release details',
@@ -207,6 +216,15 @@ describe('Release detail actions', () => {
});
});
+ describe('updateReleaseGroupMilestones', () => {
+ it(`commits ${types.UPDATE_RELEASE_GROUP_MILESTONES} with the updated release group milestones`, () => {
+ const newReleaseGroupMilestones = ['v0.0', 'v0.1'];
+ return testAction(actions.updateReleaseGroupMilestones, newReleaseGroupMilestones, state, [
+ { type: types.UPDATE_RELEASE_GROUP_MILESTONES, payload: newReleaseGroupMilestones },
+ ]);
+ });
+ });
+
describe('addEmptyAssetLink', () => {
it(`commits ${types.ADD_EMPTY_ASSET_LINK}`, () => {
return testAction(actions.addEmptyAssetLink, undefined, state, [
@@ -265,32 +283,14 @@ describe('Release detail actions', () => {
describe('receiveSaveReleaseSuccess', () => {
it(`commits ${types.RECEIVE_SAVE_RELEASE_SUCCESS}`, () =>
- testAction(actions.receiveSaveReleaseSuccess, undefined, { ...state, featureFlags: {} }, [
+ testAction(actions.receiveSaveReleaseSuccess, release, state, [
{ type: types.RECEIVE_SAVE_RELEASE_SUCCESS },
]));
- describe('when the releaseShowPage feature flag is enabled', () => {
- beforeEach(() => {
- const rootState = { featureFlags: { releaseShowPage: true } };
- actions.receiveSaveReleaseSuccess({ commit: jest.fn(), state, rootState }, release);
- });
-
- it("redirects to the release's dedicated page", () => {
- expect(redirectTo).toHaveBeenCalledTimes(1);
- expect(redirectTo).toHaveBeenCalledWith(release._links.self);
- });
- });
-
- describe('when the releaseShowPage feature flag is disabled', () => {
- beforeEach(() => {
- const rootState = { featureFlags: { releaseShowPage: false } };
- actions.receiveSaveReleaseSuccess({ commit: jest.fn(), state, rootState }, release);
- });
-
- it("redirects to the project's main Releases page", () => {
- expect(redirectTo).toHaveBeenCalledTimes(1);
- expect(redirectTo).toHaveBeenCalledWith(state.releasesPagePath);
- });
+ it("redirects to the release's dedicated page", () => {
+ actions.receiveSaveReleaseSuccess({ commit: jest.fn(), state }, release);
+ expect(redirectTo).toHaveBeenCalledTimes(1);
+ expect(redirectTo).toHaveBeenCalledWith(release._links.self);
});
});
diff --git a/spec/frontend/releases/stores/modules/detail/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index cd7c6b7d275..f3e84262754 100644
--- a/spec/frontend/releases/stores/modules/detail/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -1,10 +1,12 @@
+import { getJSONFixture } from 'helpers/fixtures';
import createState from '~/releases/stores/modules/detail/state';
import mutations from '~/releases/stores/modules/detail/mutations';
import * as types from '~/releases/stores/modules/detail/mutation_types';
-import { release as originalRelease } from '../../../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release detail mutations', () => {
let state;
let release;
@@ -30,6 +32,7 @@ describe('Release detail mutations', () => {
name: '',
description: '',
milestones: [],
+ groupMilestones: [],
assets: {
links: [],
},
@@ -112,6 +115,26 @@ describe('Release detail mutations', () => {
});
});
+ describe(`${types.UPDATE_RELEASE_MILESTONES}`, () => {
+ it("updates the release's milestones", () => {
+ state.release = release;
+ const newReleaseMilestones = ['v0.0', 'v0.1'];
+ mutations[types.UPDATE_RELEASE_MILESTONES](state, newReleaseMilestones);
+
+ expect(state.release.milestones).toBe(newReleaseMilestones);
+ });
+ });
+
+ describe(`${types.UPDATE_RELEASE_GROUP_MILESTONES}`, () => {
+ it("updates the release's group milestones", () => {
+ state.release = release;
+ const newReleaseGroupMilestones = ['v0.0', 'v0.1'];
+ mutations[types.UPDATE_RELEASE_GROUP_MILESTONES](state, newReleaseGroupMilestones);
+
+ expect(state.release.groupMilestones).toBe(newReleaseGroupMilestones);
+ });
+ });
+
describe(`${types.REQUEST_SAVE_RELEASE}`, () => {
it('set state.isUpdatingRelease to true', () => {
mutations[types.REQUEST_SAVE_RELEASE](state);
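
A minimal sketch of the two new milestone mutations asserted above, assuming the payload is stored directly on `state.release`; inferred from the expectations rather than from the real mutations module.

const UPDATE_RELEASE_MILESTONES = 'UPDATE_RELEASE_MILESTONES';
const UPDATE_RELEASE_GROUP_MILESTONES = 'UPDATE_RELEASE_GROUP_MILESTONES';

const detailMutationsSketch = {
  [UPDATE_RELEASE_MILESTONES](state, milestones) {
    // The spec's `toBe` assertion implies the payload array is stored by
    // reference rather than copied.
    state.release.milestones = milestones;
  },
  [UPDATE_RELEASE_GROUP_MILESTONES](state, groupMilestones) {
    state.release.groupMilestones = groupMilestones;
  },
};

// Example: detailMutationsSketch[UPDATE_RELEASE_MILESTONES]({ release: {} }, ['v0.0', 'v0.1']);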
diff --git a/spec/frontend/releases/stores/modules/list/actions_spec.js b/spec/frontend/releases/stores/modules/list/actions_spec.js
index 95e30659d6c..4e235e1d00f 100644
--- a/spec/frontend/releases/stores/modules/list/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/list/actions_spec.js
@@ -1,31 +1,42 @@
import { cloneDeep } from 'lodash';
import testAction from 'helpers/vuex_action_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import {
- requestReleases,
fetchReleases,
- receiveReleasesSuccess,
+ fetchReleasesGraphQl,
+ fetchReleasesRest,
receiveReleasesError,
} from '~/releases/stores/modules/list/actions';
import createState from '~/releases/stores/modules/list/state';
import * as types from '~/releases/stores/modules/list/mutation_types';
import api from '~/api';
-import { gqClient, convertGraphQLResponse } from '~/releases/util';
-import { parseIntPagination, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { gqClient, convertAllReleasesGraphQLResponse } from '~/releases/util';
import {
- pageInfoHeadersWithoutPagination,
- releases as originalReleases,
- graphqlReleasesResponse as originalGraphqlReleasesResponse,
-} from '../../../mock_data';
+ normalizeHeaders,
+ parseIntPagination,
+ convertObjectPropsToCamelCase,
+} from '~/lib/utils/common_utils';
+import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
import allReleasesQuery from '~/releases/queries/all_releases.query.graphql';
+import { PAGE_SIZE } from '~/releases/constants';
+
+const originalRelease = getJSONFixture('api/releases/release.json');
+const originalReleases = [originalRelease];
+
+const originalGraphqlReleasesResponse = getJSONFixture(
+ 'graphql/releases/queries/all_releases.query.graphql.json',
+);
describe('Releases State actions', () => {
let mockedState;
- let pageInfo;
let releases;
let graphqlReleasesResponse;
const projectPath = 'root/test-project';
const projectId = 19;
+ const before = 'testBeforeCursor';
+ const after = 'testAfterCursor';
+ const page = 2;
beforeEach(() => {
mockedState = {
@@ -33,178 +44,261 @@ describe('Releases State actions', () => {
projectId,
projectPath,
}),
- featureFlags: {
- graphqlReleaseData: true,
- graphqlReleasesPage: true,
- graphqlMilestoneStats: true,
- },
};
- pageInfo = parseIntPagination(pageInfoHeadersWithoutPagination);
releases = convertObjectPropsToCamelCase(originalReleases, { deep: true });
graphqlReleasesResponse = cloneDeep(originalGraphqlReleasesResponse);
});
- describe('requestReleases', () => {
- it('should commit REQUEST_RELEASES mutation', done => {
- testAction(requestReleases, null, mockedState, [{ type: types.REQUEST_RELEASES }], [], done);
+ describe('when all the necessary GraphQL feature flags are enabled', () => {
+ beforeEach(() => {
+ mockedState.useGraphQLEndpoint = true;
+ });
+
+ describe('fetchReleases', () => {
+ it('dispatches fetchReleasesGraphQl with before and after parameters', () => {
+ return testAction(
+ fetchReleases,
+ { before, after, page },
+ mockedState,
+ [],
+ [
+ {
+ type: 'fetchReleasesGraphQl',
+ payload: { before, after },
+ },
+ ],
+ );
+ });
});
});
- describe('fetchReleases', () => {
- describe('success', () => {
- it('dispatches requestReleases and receiveReleasesSuccess', done => {
- jest.spyOn(gqClient, 'query').mockImplementation(({ query, variables }) => {
- expect(query).toBe(allReleasesQuery);
- expect(variables).toEqual({
- fullPath: projectPath,
+ describe('when at least one of the GraphQL feature flags is disabled', () => {
+ beforeEach(() => {
+ mockedState.useGraphQLEndpoint = false;
+ });
+
+ describe('fetchReleases', () => {
+ it('dispatches fetchReleasesRest with a page parameter', () => {
+ return testAction(
+ fetchReleases,
+ { before, after, page },
+ mockedState,
+ [],
+ [
+ {
+ type: 'fetchReleasesRest',
+ payload: { page },
+ },
+ ],
+ );
+ });
+ });
+ });
+
+ describe('fetchReleasesGraphQl', () => {
+ describe('GraphQL query variables', () => {
+ let vuexParams;
+
+ beforeEach(() => {
+ jest.spyOn(gqClient, 'query');
+
+ vuexParams = { dispatch: jest.fn(), commit: jest.fn(), state: mockedState };
+ });
+
+ describe('when neither a before nor an after parameter is provided', () => {
+ beforeEach(() => {
+ fetchReleasesGraphQl(vuexParams, { before: undefined, after: undefined });
+ });
+
+ it('makes a GraphQl query with a first variable', () => {
+ expect(gqClient.query).toHaveBeenCalledWith({
+ query: allReleasesQuery,
+ variables: { fullPath: projectPath, first: PAGE_SIZE },
});
- return Promise.resolve(graphqlReleasesResponse);
});
+ });
- testAction(
- fetchReleases,
+ describe('when only a before parameter is provided', () => {
+ beforeEach(() => {
+ fetchReleasesGraphQl(vuexParams, { before, after: undefined });
+ });
+
+ it('makes a GraphQl query with last and before variables', () => {
+ expect(gqClient.query).toHaveBeenCalledWith({
+ query: allReleasesQuery,
+ variables: { fullPath: projectPath, last: PAGE_SIZE, before },
+ });
+ });
+ });
+
+ describe('when only an after parameter is provided', () => {
+ beforeEach(() => {
+ fetchReleasesGraphQl(vuexParams, { before: undefined, after });
+ });
+
+ it('makes a GraphQl query with first and after variables', () => {
+ expect(gqClient.query).toHaveBeenCalledWith({
+ query: allReleasesQuery,
+ variables: { fullPath: projectPath, first: PAGE_SIZE, after },
+ });
+ });
+ });
+
+ describe('when both before and after parameters are provided', () => {
+ it('throws an error', () => {
+ const callFetchReleasesGraphQl = () => {
+ fetchReleasesGraphQl(vuexParams, { before, after });
+ };
+
+ expect(callFetchReleasesGraphQl).toThrowError(
+ 'Both a `before` and an `after` parameter were provided to fetchReleasesGraphQl. These parameters cannot be used together.',
+ );
+ });
+ });
+ });
+
+ describe('when the request is successful', () => {
+ beforeEach(() => {
+ jest.spyOn(gqClient, 'query').mockResolvedValue(graphqlReleasesResponse);
+ });
+
+ it(`commits ${types.REQUEST_RELEASES} and ${types.RECEIVE_RELEASES_SUCCESS}`, () => {
+ const convertedResponse = convertAllReleasesGraphQLResponse(graphqlReleasesResponse);
+
+ return testAction(
+ fetchReleasesGraphQl,
{},
mockedState,
- [],
[
{
- type: 'requestReleases',
+ type: types.REQUEST_RELEASES,
},
{
- payload: convertGraphQLResponse(graphqlReleasesResponse),
- type: 'receiveReleasesSuccess',
+ type: types.RECEIVE_RELEASES_SUCCESS,
+ payload: {
+ data: convertedResponse.data,
+ graphQlPageInfo: convertedResponse.paginationInfo,
+ },
},
],
- done,
+ [],
);
});
});
- describe('error', () => {
- it('dispatches requestReleases and receiveReleasesError', done => {
- jest.spyOn(gqClient, 'query').mockRejectedValue();
+ describe('when the request fails', () => {
+ beforeEach(() => {
+ jest.spyOn(gqClient, 'query').mockRejectedValue(new Error('Something went wrong!'));
+ });
- testAction(
- fetchReleases,
+    it(`commits ${types.REQUEST_RELEASES} and dispatches receiveReleasesError`, () => {
+ return testAction(
+ fetchReleasesGraphQl,
{},
mockedState,
- [],
[
{
- type: 'requestReleases',
+ type: types.REQUEST_RELEASES,
},
+ ],
+ [
{
type: 'receiveReleasesError',
},
],
- done,
);
});
});
+ });
+
+ describe('fetchReleasesRest', () => {
+ describe('REST query parameters', () => {
+ let vuexParams;
- describe('when the graphqlReleaseData feature flag is disabled', () => {
beforeEach(() => {
- mockedState.featureFlags.graphqlReleasesPage = false;
- });
+ jest
+ .spyOn(api, 'releases')
+ .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
- describe('success', () => {
- it('dispatches requestReleases and receiveReleasesSuccess', done => {
- jest.spyOn(api, 'releases').mockImplementation((id, options) => {
- expect(id).toBe(projectId);
- expect(options.page).toBe('1');
- return Promise.resolve({ data: releases, headers: pageInfoHeadersWithoutPagination });
- });
+ vuexParams = { dispatch: jest.fn(), commit: jest.fn(), state: mockedState };
+ });
- testAction(
- fetchReleases,
- {},
- mockedState,
- [],
- [
- {
- type: 'requestReleases',
- },
- {
- payload: { data: releases, headers: pageInfoHeadersWithoutPagination },
- type: 'receiveReleasesSuccess',
- },
- ],
- done,
- );
+ describe('when a page parameter is provided', () => {
+ beforeEach(() => {
+ fetchReleasesRest(vuexParams, { page: 2 });
});
- it('dispatches requestReleases and receiveReleasesSuccess on page two', done => {
- jest.spyOn(api, 'releases').mockImplementation((_, options) => {
- expect(options.page).toBe('2');
- return Promise.resolve({ data: releases, headers: pageInfoHeadersWithoutPagination });
- });
-
- testAction(
- fetchReleases,
- { page: '2' },
- mockedState,
- [],
- [
- {
- type: 'requestReleases',
- },
- {
- payload: { data: releases, headers: pageInfoHeadersWithoutPagination },
- type: 'receiveReleasesSuccess',
- },
- ],
- done,
- );
+ it('makes a REST query with a page query parameter', () => {
+ expect(api.releases).toHaveBeenCalledWith(projectId, { page });
});
});
+ });
- describe('error', () => {
- it('dispatches requestReleases and receiveReleasesError', done => {
- jest.spyOn(api, 'releases').mockReturnValue(Promise.reject());
+ describe('when the request is successful', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(api, 'releases')
+ .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
+ });
- testAction(
- fetchReleases,
- {},
- mockedState,
- [],
- [
- {
- type: 'requestReleases',
- },
- {
- type: 'receiveReleasesError',
+ it(`commits ${types.REQUEST_RELEASES} and ${types.RECEIVE_RELEASES_SUCCESS}`, () => {
+ return testAction(
+ fetchReleasesRest,
+ {},
+ mockedState,
+ [
+ {
+ type: types.REQUEST_RELEASES,
+ },
+ {
+ type: types.RECEIVE_RELEASES_SUCCESS,
+ payload: {
+ data: convertObjectPropsToCamelCase(releases, { deep: true }),
+ restPageInfo: parseIntPagination(
+ normalizeHeaders(pageInfoHeadersWithoutPagination),
+ ),
},
- ],
- done,
- );
- });
+ },
+ ],
+ [],
+ );
});
});
- });
- describe('receiveReleasesSuccess', () => {
- it('should commit RECEIVE_RELEASES_SUCCESS mutation', done => {
- testAction(
- receiveReleasesSuccess,
- { data: releases, headers: pageInfoHeadersWithoutPagination },
- mockedState,
- [{ type: types.RECEIVE_RELEASES_SUCCESS, payload: { pageInfo, data: releases } }],
- [],
- done,
- );
+ describe('when the request fails', () => {
+ beforeEach(() => {
+ jest.spyOn(api, 'releases').mockRejectedValue(new Error('Something went wrong!'));
+ });
+
+      it(`commits ${types.REQUEST_RELEASES} and dispatches receiveReleasesError`, () => {
+ return testAction(
+ fetchReleasesRest,
+ {},
+ mockedState,
+ [
+ {
+ type: types.REQUEST_RELEASES,
+ },
+ ],
+ [
+ {
+ type: 'receiveReleasesError',
+ },
+ ],
+ );
+ });
});
});
describe('receiveReleasesError', () => {
- it('should commit RECEIVE_RELEASES_ERROR mutation', done => {
- testAction(
+ it('should commit RECEIVE_RELEASES_ERROR mutation', () => {
+ return testAction(
receiveReleasesError,
null,
mockedState,
[{ type: types.RECEIVE_RELEASES_ERROR }],
[],
- done,
);
});
});
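
A minimal sketch of the dispatch and pagination logic these specs describe, assuming `state.useGraphQLEndpoint` selects the backend and that GraphQL pagination uses first/after or last/before with a fixed PAGE_SIZE; names are taken from the tests, and the PAGE_SIZE value below is illustrative only.

const PAGE_SIZE = 20; // illustrative; the real constant lives in ~/releases/constants

const fetchReleasesSketch = ({ dispatch, state }, { page, before, after }) => {
  if (state.useGraphQLEndpoint) {
    dispatch('fetchReleasesGraphQl', { before, after });
  } else {
    dispatch('fetchReleasesRest', { page });
  }
};

const graphQlVariablesSketch = (fullPath, { before, after } = {}) => {
  if (before !== undefined && after !== undefined) {
    throw new Error(
      'Both a `before` and an `after` parameter were provided to fetchReleasesGraphQl. These parameters cannot be used together.',
    );
  }
  if (before !== undefined) return { fullPath, last: PAGE_SIZE, before };
  if (after !== undefined) return { fullPath, first: PAGE_SIZE, after };
  return { fullPath, first: PAGE_SIZE };
};

// graphQlVariablesSketch('root/test-project', { after: 'testAfterCursor' })
// -> { fullPath: 'root/test-project', first: 20, after: 'testAfterCursor' }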
diff --git a/spec/frontend/releases/stores/modules/list/mutations_spec.js b/spec/frontend/releases/stores/modules/list/mutations_spec.js
index 27ad05846e7..521418cbddb 100644
--- a/spec/frontend/releases/stores/modules/list/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/list/mutations_spec.js
@@ -1,16 +1,29 @@
+import { getJSONFixture } from 'helpers/fixtures';
import createState from '~/releases/stores/modules/list/state';
import mutations from '~/releases/stores/modules/list/mutations';
import * as types from '~/releases/stores/modules/list/mutation_types';
-import { parseIntPagination } from '~/lib/utils/common_utils';
-import { pageInfoHeadersWithoutPagination, releases } from '../../../mock_data';
+import { parseIntPagination, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
+import { convertAllReleasesGraphQLResponse } from '~/releases/util';
+
+const originalRelease = getJSONFixture('api/releases/release.json');
+const originalReleases = [originalRelease];
+
+const graphqlReleasesResponse = getJSONFixture(
+ 'graphql/releases/queries/all_releases.query.graphql.json',
+);
describe('Releases Store Mutations', () => {
let stateCopy;
- let pageInfo;
+ let restPageInfo;
+ let graphQlPageInfo;
+ let releases;
beforeEach(() => {
stateCopy = createState({});
- pageInfo = parseIntPagination(pageInfoHeadersWithoutPagination);
+ restPageInfo = parseIntPagination(pageInfoHeadersWithoutPagination);
+ graphQlPageInfo = convertAllReleasesGraphQLResponse(graphqlReleasesResponse).paginationInfo;
+ releases = convertObjectPropsToCamelCase(originalReleases, { deep: true });
});
describe('REQUEST_RELEASES', () => {
@@ -23,7 +36,11 @@ describe('Releases Store Mutations', () => {
describe('RECEIVE_RELEASES_SUCCESS', () => {
beforeEach(() => {
- mutations[types.RECEIVE_RELEASES_SUCCESS](stateCopy, { pageInfo, data: releases });
+ mutations[types.RECEIVE_RELEASES_SUCCESS](stateCopy, {
+ restPageInfo,
+ graphQlPageInfo,
+ data: releases,
+ });
});
it('sets is loading to false', () => {
@@ -38,18 +55,29 @@ describe('Releases Store Mutations', () => {
expect(stateCopy.releases).toEqual(releases);
});
- it('sets pageInfo', () => {
- expect(stateCopy.pageInfo).toEqual(pageInfo);
+ it('sets restPageInfo', () => {
+ expect(stateCopy.restPageInfo).toEqual(restPageInfo);
+ });
+
+ it('sets graphQlPageInfo', () => {
+ expect(stateCopy.graphQlPageInfo).toEqual(graphQlPageInfo);
});
});
describe('RECEIVE_RELEASES_ERROR', () => {
it('resets data', () => {
+ mutations[types.RECEIVE_RELEASES_SUCCESS](stateCopy, {
+ restPageInfo,
+ graphQlPageInfo,
+ data: releases,
+ });
+
mutations[types.RECEIVE_RELEASES_ERROR](stateCopy);
expect(stateCopy.isLoading).toEqual(false);
expect(stateCopy.releases).toEqual([]);
- expect(stateCopy.pageInfo).toEqual({});
+ expect(stateCopy.restPageInfo).toEqual({});
+ expect(stateCopy.graphQlPageInfo).toEqual({});
});
});
});
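
A minimal sketch of the success and error mutations asserted above, assuming the list store keeps REST and GraphQL pagination info side by side; field names come from the expectations, not from the real mutations module.

const RECEIVE_RELEASES_SUCCESS = 'RECEIVE_RELEASES_SUCCESS';
const RECEIVE_RELEASES_ERROR = 'RECEIVE_RELEASES_ERROR';

const listMutationsSketch = {
  [RECEIVE_RELEASES_SUCCESS](state, { data, restPageInfo, graphQlPageInfo }) {
    state.isLoading = false;
    state.releases = data;
    state.restPageInfo = restPageInfo;
    state.graphQlPageInfo = graphQlPageInfo;
  },
  [RECEIVE_RELEASES_ERROR](state) {
    // Resets everything RECEIVE_RELEASES_SUCCESS set, which is why the spec
    // applies the success mutation first and then asserts the empty values.
    state.isLoading = false;
    state.releases = [];
    state.restPageInfo = {};
    state.graphQlPageInfo = {};
  },
};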
diff --git a/spec/frontend/releases/util_spec.js b/spec/frontend/releases/util_spec.js
index f40e5729188..e7b7766c0d0 100644
--- a/spec/frontend/releases/util_spec.js
+++ b/spec/frontend/releases/util_spec.js
@@ -1,6 +1,19 @@
import { cloneDeep } from 'lodash';
-import { releaseToApiJson, apiJsonToRelease, convertGraphQLResponse } from '~/releases/util';
-import { graphqlReleasesResponse as originalGraphqlReleasesResponse } from './mock_data';
+import { getJSONFixture } from 'helpers/fixtures';
+import {
+ releaseToApiJson,
+ apiJsonToRelease,
+ convertGraphQLRelease,
+ convertAllReleasesGraphQLResponse,
+ convertOneReleaseGraphQLResponse,
+} from '~/releases/util';
+
+const originalAllReleasesQueryResponse = getJSONFixture(
+ 'graphql/releases/queries/all_releases.query.graphql.json',
+);
+const originalOneReleaseQueryResponse = getJSONFixture(
+ 'graphql/releases/queries/one_release.query.graphql.json',
+);
describe('releases/util.js', () => {
describe('releaseToApiJson', () => {
@@ -103,54 +116,61 @@ describe('releases/util.js', () => {
});
});
- describe('convertGraphQLResponse', () => {
- let graphqlReleasesResponse;
- let converted;
+ describe('convertGraphQLRelease', () => {
+ let releaseFromResponse;
+ let convertedRelease;
beforeEach(() => {
- graphqlReleasesResponse = cloneDeep(originalGraphqlReleasesResponse);
- converted = convertGraphQLResponse(graphqlReleasesResponse);
- });
-
- it('matches snapshot', () => {
- expect(converted).toMatchSnapshot();
+ releaseFromResponse = cloneDeep(originalOneReleaseQueryResponse).data.project.release;
+ convertedRelease = convertGraphQLRelease(releaseFromResponse);
});
describe('assets', () => {
it("handles asset links that don't have a linkType", () => {
- expect(converted.data[0].assets.links[0].linkType).not.toBeUndefined();
+ expect(convertedRelease.assets.links[0].linkType).not.toBeUndefined();
- delete graphqlReleasesResponse.data.project.releases.nodes[0].assets.links.nodes[0]
- .linkType;
+ delete releaseFromResponse.assets.links.nodes[0].linkType;
- converted = convertGraphQLResponse(graphqlReleasesResponse);
+ convertedRelease = convertGraphQLRelease(releaseFromResponse);
- expect(converted.data[0].assets.links[0].linkType).toBeUndefined();
+ expect(convertedRelease.assets.links[0].linkType).toBeUndefined();
});
});
describe('_links', () => {
it("handles releases that don't have any links", () => {
- expect(converted.data[0]._links.selfUrl).not.toBeUndefined();
+ expect(convertedRelease._links.selfUrl).not.toBeUndefined();
- delete graphqlReleasesResponse.data.project.releases.nodes[0].links;
+ delete releaseFromResponse.links;
- converted = convertGraphQLResponse(graphqlReleasesResponse);
+ convertedRelease = convertGraphQLRelease(releaseFromResponse);
- expect(converted.data[0]._links.selfUrl).toBeUndefined();
+ expect(convertedRelease._links.selfUrl).toBeUndefined();
});
});
describe('commit', () => {
it("handles releases that don't have any commit info", () => {
- expect(converted.data[0].commit).not.toBeUndefined();
+ expect(convertedRelease.commit).not.toBeUndefined();
- delete graphqlReleasesResponse.data.project.releases.nodes[0].commit;
+ delete releaseFromResponse.commit;
- converted = convertGraphQLResponse(graphqlReleasesResponse);
+ convertedRelease = convertGraphQLRelease(releaseFromResponse);
- expect(converted.data[0].commit).toBeUndefined();
+ expect(convertedRelease.commit).toBeUndefined();
});
});
});
+
+ describe('convertAllReleasesGraphQLResponse', () => {
+ it('matches snapshot', () => {
+ expect(convertAllReleasesGraphQLResponse(originalAllReleasesQueryResponse)).toMatchSnapshot();
+ });
+ });
+
+ describe('convertOneReleaseGraphQLResponse', () => {
+ it('matches snapshot', () => {
+ expect(convertOneReleaseGraphQLResponse(originalOneReleaseQueryResponse)).toMatchSnapshot();
+ });
+ });
});
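
A minimal sketch of how the two response-level helpers exercised above could wrap a per-release converter, assuming the GraphQL fixture shapes (data.project.releases.nodes and data.project.release); this illustrates the relationship the specs rely on and is not the real ~/releases/util.js.

const convertGraphQLReleaseSketch = (graphQlRelease) => ({
  ...graphQlRelease,
  // Asset links arrive as a GraphQL connection; the converter flattens `nodes`.
  assets: { ...graphQlRelease.assets, links: graphQlRelease.assets.links.nodes },
  // A missing release-level `links` object yields an undefined `_links.selfUrl`.
  _links: { selfUrl: graphQlRelease.links && graphQlRelease.links.selfUrl },
});

const convertAllReleasesGraphQLResponseSketch = (response) => {
  const { nodes, pageInfo } = response.data.project.releases;
  return { data: nodes.map(convertGraphQLReleaseSketch), paginationInfo: pageInfo };
};

const convertOneReleaseGraphQLResponseSketch = (response) => ({
  data: convertGraphQLReleaseSketch(response.data.project.release),
});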
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index cf2e6b00800..aaa8bf168f2 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -77,24 +77,31 @@ exports[`Repository last commit component renders commit widget 1`] = `
</gl-link-stub>
</div>
- <div
- class="commit-sha-group d-flex"
+ <gl-button-group-stub
+ class="gl-ml-4 js-commit-sha-group"
>
- <div
- class="label label-monospace monospace"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="gl-font-monospace"
+ data-testid="last-commit-id-label"
+ icon=""
+ label="true"
+ size="medium"
+ variant="default"
>
-
- 12345678
-
- </div>
+ 12345678
+ </gl-button-stub>
<clipboard-button-stub
- cssclass="btn-default"
+ category="secondary"
+ class="input-group-text"
+ size="medium"
text="123456789"
title="Copy commit SHA"
- tooltipplacement="bottom"
+ tooltipplacement="top"
/>
- </div>
+ </gl-button-group-stub>
</div>
</div>
</div>
@@ -181,24 +188,31 @@ exports[`Repository last commit component renders the signature HTML as returned
</gl-link-stub>
</div>
- <div
- class="commit-sha-group d-flex"
+ <gl-button-group-stub
+ class="gl-ml-4 js-commit-sha-group"
>
- <div
- class="label label-monospace monospace"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="gl-font-monospace"
+ data-testid="last-commit-id-label"
+ icon=""
+ label="true"
+ size="medium"
+ variant="default"
>
-
- 12345678
-
- </div>
+ 12345678
+ </gl-button-stub>
<clipboard-button-stub
- cssclass="btn-default"
+ category="secondary"
+ class="input-group-text"
+ size="medium"
text="123456789"
title="Copy commit SHA"
- tooltipplacement="bottom"
+ tooltipplacement="top"
/>
- </div>
+ </gl-button-group-stub>
</div>
</div>
</div>
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index ca4120576f5..38e5c9aaca5 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -1,5 +1,5 @@
import { shallowMount, RouterLinkStub } from '@vue/test-utils';
-import { GlDeprecatedDropdown } from '@gitlab/ui';
+import { GlDropdown } from '@gitlab/ui';
import Breadcrumbs from '~/repository/components/breadcrumbs.vue';
let vm;
@@ -61,7 +61,7 @@ describe('Repository breadcrumbs component', () => {
vm.setData({ userPermissions: { forkProject: false, createMergeRequestIn: false } });
return vm.vm.$nextTick(() => {
- expect(vm.find(GlDeprecatedDropdown).exists()).toBe(false);
+ expect(vm.find(GlDropdown).exists()).toBe(false);
});
});
@@ -71,7 +71,7 @@ describe('Repository breadcrumbs component', () => {
vm.setData({ userPermissions: { forkProject: true, createMergeRequestIn: true } });
return vm.vm.$nextTick(() => {
- expect(vm.find(GlDeprecatedDropdown).exists()).toBe(true);
+ expect(vm.find(GlDropdown).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index c14a7f0e061..ccba0982c26 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -78,7 +78,7 @@ describe('Repository last commit component', () => {
factory();
return vm.vm.$nextTick(() => {
- expect(vm.find('.label-monospace').text()).toEqual('12345678');
+ expect(vm.find('[data-testid="last-commit-id-label"]').text()).toEqual('12345678');
});
});
diff --git a/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap b/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap
index 69b7a3931f8..23c06dc5e68 100644
--- a/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/repository/components/preview/__snapshots__/index_spec.js.snap
@@ -10,9 +10,10 @@ exports[`Repository file preview component renders file HTML 1`] = `
<div
class="file-header-content"
>
- <i
+ <gl-icon-stub
aria-hidden="true"
- class="fa fa-file-text-o fa-fw"
+ name="doc-text"
+ size="16"
/>
<gl-link-stub
diff --git a/spec/frontend/repository/log_tree_spec.js b/spec/frontend/repository/log_tree_spec.js
index 954424b5c8a..ddc95feccd6 100644
--- a/spec/frontend/repository/log_tree_spec.js
+++ b/spec/frontend/repository/log_tree_spec.js
@@ -84,6 +84,14 @@ describe('fetchLogsTree', () => {
expect(axios.get.mock.calls.length).toEqual(1);
}));
+ it('calls axios for each path', () =>
+ Promise.all([
+ fetchLogsTree(client, '', '0', resolver),
+ fetchLogsTree(client, '/test', '0', resolver),
+ ]).then(() => {
+ expect(axios.get.mock.calls.length).toEqual(2);
+ }));
+
it('calls entry resolver', () =>
fetchLogsTree(client, '', '0', resolver).then(() => {
expect(resolver.resolve).toHaveBeenCalledWith(
diff --git a/spec/frontend/repository/utils/icon_spec.js b/spec/frontend/repository/utils/icon_spec.js
deleted file mode 100644
index 3d84705f7ea..00000000000
--- a/spec/frontend/repository/utils/icon_spec.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import { getIconName } from '~/repository/utils/icon';
-
-describe('getIconName', () => {
- // Tests the returning font awesome icon name
- // We only test one for each file type to save testing a lot of different
- // file types
- it.each`
- type | path | icon
- ${'tree'} | ${''} | ${'folder'}
- ${'commit'} | ${''} | ${'archive'}
- ${'file'} | ${'test.pdf'} | ${'file-pdf-o'}
- ${'file'} | ${'test.jpg'} | ${'file-image-o'}
- ${'file'} | ${'test.zip'} | ${'file-archive-o'}
- ${'file'} | ${'test.mp3'} | ${'file-audio-o'}
- ${'file'} | ${'test.flv'} | ${'file-video-o'}
- ${'file'} | ${'test.dotx'} | ${'file-word-o'}
- ${'file'} | ${'test.xlsb'} | ${'file-excel-o'}
- ${'file'} | ${'test.ppam'} | ${'file-powerpoint-o'}
- ${'file'} | ${'test.js'} | ${'file-text-o'}
- `('returns $icon for $type with path $path', ({ type, path, icon }) => {
- expect(getIconName(type, path)).toEqual(icon);
- });
-});
diff --git a/spec/frontend/right_sidebar_spec.js b/spec/frontend/right_sidebar_spec.js
index d80d80152a5..3490a99afb4 100644
--- a/spec/frontend/right_sidebar_spec.js
+++ b/spec/frontend/right_sidebar_spec.js
@@ -6,7 +6,9 @@ import Sidebar from '~/right_sidebar';
let $aside = null;
let $toggle = null;
-let $icon = null;
+let $toggleContainer = null;
+let $expandIcon = null;
+let $collapseIcon = null;
let $page = null;
let $labelsIcon = null;
@@ -15,10 +17,11 @@ const assertSidebarState = state => {
const shouldBeCollapsed = state === 'collapsed';
expect($aside.hasClass('right-sidebar-expanded')).toBe(shouldBeExpanded);
expect($page.hasClass('right-sidebar-expanded')).toBe(shouldBeExpanded);
- expect($icon.hasClass('fa-angle-double-right')).toBe(shouldBeExpanded);
+ expect($toggleContainer.data('is-expanded')).toBe(shouldBeExpanded);
+ expect($expandIcon.hasClass('hidden')).toBe(shouldBeExpanded);
expect($aside.hasClass('right-sidebar-collapsed')).toBe(shouldBeCollapsed);
expect($page.hasClass('right-sidebar-collapsed')).toBe(shouldBeCollapsed);
- expect($icon.hasClass('fa-angle-double-left')).toBe(shouldBeCollapsed);
+ expect($collapseIcon.hasClass('hidden')).toBe(shouldBeCollapsed);
};
describe('RightSidebar', () => {
@@ -33,7 +36,9 @@ describe('RightSidebar', () => {
new Sidebar(); // eslint-disable-line no-new
$aside = $('.right-sidebar');
$page = $('.layout-page');
- $icon = $aside.find('i');
+ $toggleContainer = $('.js-sidebar-toggle-container');
+ $expandIcon = $aside.find('.js-sidebar-expand');
+ $collapseIcon = $aside.find('.js-sidebar-collapse');
$toggle = $aside.find('.js-sidebar-toggle');
$labelsIcon = $aside.find('.sidebar-collapsed-icon');
});
diff --git a/spec/frontend/search/components/state_filter_spec.js b/spec/frontend/search/components/state_filter_spec.js
deleted file mode 100644
index 26344f2b592..00000000000
--- a/spec/frontend/search/components/state_filter_spec.js
+++ /dev/null
@@ -1,104 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import StateFilter from '~/search/state_filter/components/state_filter.vue';
-import {
- FILTER_STATES,
- SCOPES,
- FILTER_STATES_BY_SCOPE,
- FILTER_TEXT,
-} from '~/search/state_filter/constants';
-import * as urlUtils from '~/lib/utils/url_utility';
-
-jest.mock('~/lib/utils/url_utility', () => ({
- visitUrl: jest.fn(),
- setUrlParams: jest.fn(),
-}));
-
-function createComponent(props = { scope: 'issues' }) {
- return shallowMount(StateFilter, {
- propsData: {
- ...props,
- },
- });
-}
-
-describe('StateFilter', () => {
- let wrapper;
-
- beforeEach(() => {
- wrapper = createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findGlDropdown = () => wrapper.find(GlDropdown);
- const findGlDropdownItems = () => findGlDropdown().findAll(GlDropdownItem);
- const findDropdownItemsText = () => findGlDropdownItems().wrappers.map(w => w.text());
- const firstDropDownItem = () => findGlDropdownItems().at(0);
-
- describe('template', () => {
- describe.each`
- scope | showStateDropdown
- ${'issues'} | ${true}
- ${'merge_requests'} | ${true}
- ${'projects'} | ${false}
- ${'milestones'} | ${false}
- ${'users'} | ${false}
- ${'notes'} | ${false}
- ${'wiki_blobs'} | ${false}
- ${'blobs'} | ${false}
- `(`state dropdown`, ({ scope, showStateDropdown }) => {
- beforeEach(() => {
- wrapper = createComponent({ scope });
- });
-
- it(`does${showStateDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
- expect(findGlDropdown().exists()).toBe(showStateDropdown);
- });
- });
-
- describe.each`
- state | label
- ${FILTER_STATES.ANY.value} | ${FILTER_TEXT}
- ${FILTER_STATES.OPEN.value} | ${FILTER_STATES.OPEN.label}
- ${FILTER_STATES.CLOSED.value} | ${FILTER_STATES.CLOSED.label}
- ${FILTER_STATES.MERGED.value} | ${FILTER_STATES.MERGED.label}
- `(`filter text`, ({ state, label }) => {
- describe(`when state is ${state}`, () => {
- beforeEach(() => {
- wrapper = createComponent({ scope: 'issues', state });
- });
-
- it(`sets dropdown label to ${label}`, () => {
- expect(findGlDropdown().attributes('text')).toBe(label);
- });
- });
- });
-
- describe('Filter options', () => {
- it('renders a dropdown item for each filterOption', () => {
- expect(findDropdownItemsText()).toStrictEqual(
- FILTER_STATES_BY_SCOPE[SCOPES.ISSUES].map(v => {
- return v.label;
- }),
- );
- });
-
- it('clicking a dropdown item calls setUrlParams', () => {
- const state = FILTER_STATES[Object.keys(FILTER_STATES)[0]].value;
- firstDropDownItem().vm.$emit('click');
-
- expect(urlUtils.setUrlParams).toHaveBeenCalledWith({ state });
- });
-
- it('clicking a dropdown item calls visitUrl', () => {
- firstDropDownItem().vm.$emit('click');
-
- expect(urlUtils.visitUrl).toHaveBeenCalled();
- });
- });
- });
-});
diff --git a/spec/frontend/search/dropdown_filter/components/dropdown_filter_spec.js b/spec/frontend/search/dropdown_filter/components/dropdown_filter_spec.js
new file mode 100644
index 00000000000..4a6b5cebe1c
--- /dev/null
+++ b/spec/frontend/search/dropdown_filter/components/dropdown_filter_spec.js
@@ -0,0 +1,196 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import * as urlUtils from '~/lib/utils/url_utility';
+import initStore from '~/search/store';
+import DropdownFilter from '~/search/dropdown_filter/components/dropdown_filter.vue';
+import stateFilterData from '~/search/dropdown_filter/constants/state_filter_data';
+import confidentialFilterData from '~/search/dropdown_filter/constants/confidential_filter_data';
+import { MOCK_QUERY } from '../mock_data';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ visitUrl: jest.fn(),
+ setUrlParams: jest.fn(),
+}));
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('DropdownFilter', () => {
+ let wrapper;
+ let store;
+
+ const createStore = options => {
+ store = initStore({ query: MOCK_QUERY, ...options });
+ };
+
+ const createComponent = (props = { filterData: stateFilterData }) => {
+ wrapper = shallowMount(DropdownFilter, {
+ localVue,
+ store,
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ store = null;
+ });
+
+ const findGlDropdown = () => wrapper.find(GlDropdown);
+ const findGlDropdownItems = () => findGlDropdown().findAll(GlDropdownItem);
+ const findDropdownItemsText = () => findGlDropdownItems().wrappers.map(w => w.text());
+ const firstDropDownItem = () => findGlDropdownItems().at(0);
+
+ describe('StatusFilter', () => {
+ describe('template', () => {
+ describe.each`
+ scope | showDropdown
+ ${'issues'} | ${true}
+ ${'merge_requests'} | ${true}
+ ${'projects'} | ${false}
+ ${'milestones'} | ${false}
+ ${'users'} | ${false}
+ ${'notes'} | ${false}
+ ${'wiki_blobs'} | ${false}
+ ${'blobs'} | ${false}
+ `(`dropdown`, ({ scope, showDropdown }) => {
+ beforeEach(() => {
+ createStore({ query: { ...MOCK_QUERY, scope } });
+ createComponent();
+ });
+
+ it(`does${showDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
+ expect(findGlDropdown().exists()).toBe(showDropdown);
+ });
+ });
+
+ describe.each`
+ initialFilter | label
+ ${stateFilterData.filters.ANY.value} | ${`Any ${stateFilterData.header}`}
+ ${stateFilterData.filters.OPEN.value} | ${stateFilterData.filters.OPEN.label}
+ ${stateFilterData.filters.CLOSED.value} | ${stateFilterData.filters.CLOSED.label}
+ `(`filter text`, ({ initialFilter, label }) => {
+ describe(`when initialFilter is ${initialFilter}`, () => {
+ beforeEach(() => {
+ createStore({ query: { ...MOCK_QUERY, [stateFilterData.filterParam]: initialFilter } });
+ createComponent();
+ });
+
+ it(`sets dropdown label to ${label}`, () => {
+ expect(findGlDropdown().attributes('text')).toBe(label);
+ });
+ });
+ });
+ });
+
+ describe('Filter options', () => {
+ beforeEach(() => {
+ createStore();
+ createComponent();
+ });
+
+ it('renders a dropdown item for each filterOption', () => {
+ expect(findDropdownItemsText()).toStrictEqual(
+ stateFilterData.filterByScope[stateFilterData.scopes.ISSUES].map(v => {
+ return v.label;
+ }),
+ );
+ });
+
+ it('clicking a dropdown item calls setUrlParams', () => {
+ const filter = stateFilterData.filters[Object.keys(stateFilterData.filters)[0]].value;
+ firstDropDownItem().vm.$emit('click');
+
+ expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
+ [stateFilterData.filterParam]: filter,
+ });
+ });
+
+ it('clicking a dropdown item calls visitUrl', () => {
+ firstDropDownItem().vm.$emit('click');
+
+ expect(urlUtils.visitUrl).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('ConfidentialFilter', () => {
+ describe('template', () => {
+ describe.each`
+ scope | showDropdown
+ ${'issues'} | ${true}
+ ${'merge_requests'} | ${false}
+ ${'projects'} | ${false}
+ ${'milestones'} | ${false}
+ ${'users'} | ${false}
+ ${'notes'} | ${false}
+ ${'wiki_blobs'} | ${false}
+ ${'blobs'} | ${false}
+ `(`dropdown`, ({ scope, showDropdown }) => {
+ beforeEach(() => {
+ createStore({ query: { ...MOCK_QUERY, scope } });
+ createComponent({ filterData: confidentialFilterData });
+ });
+
+ it(`does${showDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
+ expect(findGlDropdown().exists()).toBe(showDropdown);
+ });
+ });
+
+ describe.each`
+ initialFilter | label
+ ${confidentialFilterData.filters.ANY.value} | ${`Any ${confidentialFilterData.header}`}
+ ${confidentialFilterData.filters.CONFIDENTIAL.value} | ${confidentialFilterData.filters.CONFIDENTIAL.label}
+ ${confidentialFilterData.filters.NOT_CONFIDENTIAL.value} | ${confidentialFilterData.filters.NOT_CONFIDENTIAL.label}
+ `(`filter text`, ({ initialFilter, label }) => {
+ describe(`when initialFilter is ${initialFilter}`, () => {
+ beforeEach(() => {
+ createStore({
+ query: { ...MOCK_QUERY, [confidentialFilterData.filterParam]: initialFilter },
+ });
+ createComponent({ filterData: confidentialFilterData });
+ });
+
+ it(`sets dropdown label to ${label}`, () => {
+ expect(findGlDropdown().attributes('text')).toBe(label);
+ });
+ });
+ });
+ });
+ });
+
+ describe('Filter options', () => {
+ beforeEach(() => {
+ createStore();
+ createComponent({ filterData: confidentialFilterData });
+ });
+
+ it('renders a dropdown item for each filterOption', () => {
+ expect(findDropdownItemsText()).toStrictEqual(
+ confidentialFilterData.filterByScope[confidentialFilterData.scopes.ISSUES].map(v => {
+ return v.label;
+ }),
+ );
+ });
+
+ it('clicking a dropdown item calls setUrlParams', () => {
+ const filter =
+ confidentialFilterData.filters[Object.keys(confidentialFilterData.filters)[0]].value;
+ firstDropDownItem().vm.$emit('click');
+
+ expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
+ [confidentialFilterData.filterParam]: filter,
+ });
+ });
+
+ it('clicking a dropdown item calls visitUrl', () => {
+ firstDropDownItem().vm.$emit('click');
+
+ expect(urlUtils.visitUrl).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/search/dropdown_filter/mock_data.js b/spec/frontend/search/dropdown_filter/mock_data.js
new file mode 100644
index 00000000000..f11ab3d9951
--- /dev/null
+++ b/spec/frontend/search/dropdown_filter/mock_data.js
@@ -0,0 +1,5 @@
+export const MOCK_QUERY = {
+ scope: 'issues',
+ state: 'all',
+ confidential: null,
+};
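
A minimal sketch of the `filterData` shape the DropdownFilter specs consume (header, filterParam, scopes, filters, filterByScope); the key names are inferred from the tests and the concrete values below are placeholders, not the real state_filter_data constants.

const ANY = { value: 'all', label: 'Any' };
const OPEN = { value: 'opened', label: 'Open' };
const CLOSED = { value: 'closed', label: 'Closed' };

const exampleStateFilterData = {
  header: 'Status',
  filterParam: 'state',
  scopes: { ISSUES: 'issues' },
  filters: { ANY, OPEN, CLOSED },
  // Which filters are offered for each search scope; only issue-like scopes
  // get a status dropdown in the specs above.
  filterByScope: { issues: [ANY, OPEN, CLOSED] },
};

// For the ANY filter, the dropdown label is composed as `Any ${exampleStateFilterData.header}`.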
diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
index f4ac2f57261..02d5ca6bdb3 100644
--- a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
+++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
@@ -15,13 +15,16 @@ exports[`self monitor component When the self monitor project has not been creat
</h4>
- <gl-deprecated-button-stub
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
class="js-settings-toggle"
- size="md"
- variant="secondary"
+ icon=""
+ size="medium"
+ variant="default"
>
Expand
- </gl-deprecated-button-stub>
+ </gl-button-stub>
<p
class="js-section-sub-header"
@@ -56,6 +59,7 @@ exports[`self monitor component When the self monitor project has not been creat
<gl-modal-stub
cancel-title="Cancel"
+ category="primary"
modalclass=""
modalid="delete-self-monitor-modal"
ok-title="Delete project"
diff --git a/spec/frontend/self_monitor/components/self_monitor_form_spec.js b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
index ec5f7b0a394..618cc16cdf4 100644
--- a/spec/frontend/self_monitor/components/self_monitor_form_spec.js
+++ b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import SelfMonitor from '~/self_monitor/components/self_monitor_form.vue';
import { createStore } from '~/self_monitor/store';
@@ -42,7 +42,7 @@ describe('self monitor component', () => {
it('renders as an expand button by default', () => {
wrapper = shallowMount(SelfMonitor, { store });
- const button = wrapper.find(GlDeprecatedButton);
+ const button = wrapper.find(GlButton);
expect(button.text()).toBe('Expand');
});
diff --git a/spec/frontend/sentry/sentry_config_spec.js b/spec/frontend/sentry/sentry_config_spec.js
index bcc7f29b98d..ed30e4774d9 100644
--- a/spec/frontend/sentry/sentry_config_spec.js
+++ b/spec/frontend/sentry/sentry_config_spec.js
@@ -1,4 +1,4 @@
-import * as Sentry from '@sentry/browser';
+import * as Sentry from '~/sentry/wrapper';
import SentryConfig from '~/sentry/sentry_config';
describe('SentryConfig', () => {
diff --git a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
index 22689080063..6b3d65ff037 100644
--- a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
+++ b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
@@ -11,7 +11,7 @@ exports[`EmptyStateComponent should render content 1`] = `
<p>In order to start using functions as a service, you must first install Knative on your Kubernetes cluster. <gl-link-stub href=\\"/help\\">More information</gl-link-stub>
</p>
<div>
- <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" href=\\"/clusters\\">Install Knative</gl-button-stub>
+ <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" href=\\"/clusters\\">Install Knative</gl-button-stub>
<!---->
</div>
</div>
diff --git a/spec/frontend/serverless/components/missing_prometheus_spec.js b/spec/frontend/serverless/components/missing_prometheus_spec.js
index 9ca4a45dd5f..0bd2e96a068 100644
--- a/spec/frontend/serverless/components/missing_prometheus_spec.js
+++ b/spec/frontend/serverless/components/missing_prometheus_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { createStore } from '~/serverless/store';
import missingPrometheusComponent from '~/serverless/components/missing_prometheus.vue';
@@ -24,7 +24,7 @@ describe('missingPrometheusComponent', () => {
'Function invocation metrics require Prometheus to be installed first.',
);
- expect(wrapper.find(GlDeprecatedButton).attributes('variant')).toBe('success');
+ expect(wrapper.find(GlButton).attributes('variant')).toBe('success');
});
it('should render no prometheus data message', () => {
diff --git a/spec/frontend/serverless/components/url_spec.js b/spec/frontend/serverless/components/url_spec.js
index 36dc9e73c74..92e4938c2cd 100644
--- a/spec/frontend/serverless/components/url_spec.js
+++ b/spec/frontend/serverless/components/url_spec.js
@@ -19,7 +19,7 @@ describe('urlComponent', () => {
expect(vm.$el.classList.contains('clipboard-group')).toBe(true);
expect(wrapper.find(ClipboardButton).attributes('text')).toEqual(uri);
- expect(vm.$el.querySelector('.url-text-field').innerHTML).toEqual(uri);
+ expect(vm.$el.querySelector('[data-testid="url-text-field"]').innerHTML).toContain(uri);
vm.$destroy();
});
diff --git a/spec/frontend/sidebar/assignee_title_spec.js b/spec/frontend/sidebar/assignee_title_spec.js
index 92fabaa664e..b5d1e5216f8 100644
--- a/spec/frontend/sidebar/assignee_title_spec.js
+++ b/spec/frontend/sidebar/assignee_title_spec.js
@@ -11,6 +11,7 @@ describe('AssigneeTitle component', () => {
propsData: {
numberOfAssignees: 0,
editable: false,
+ changing: false,
...props,
},
});
@@ -62,6 +63,22 @@ describe('AssigneeTitle component', () => {
});
});
+ describe('when changing is false', () => {
+ it('renders "Edit"', () => {
+ wrapper = createComponent({ editable: true });
+
+ expect(wrapper.find('[data-test-id="edit-link"]').text()).toEqual('Edit');
+ });
+ });
+
+ describe('when changing is true', () => {
+ it('renders "Edit"', () => {
+ wrapper = createComponent({ editable: true, changing: true });
+
+ expect(wrapper.find('[data-test-id="edit-link"]').text()).toEqual('Apply');
+ });
+ });
+
it('does not render spinner by default', () => {
wrapper = createComponent({
numberOfAssignees: 0,
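
A minimal sketch of the edit-link label logic the new `changing` cases assert, assuming the text is derived from the `editable` and `changing` props; inferred from the expectations, not from assignee_title.vue itself.

const assigneeEditLabelSketch = ({ editable, changing }) => {
  if (!editable) return null; // no edit link at all when the sidebar is not editable
  return changing ? 'Apply' : 'Edit';
};

// assigneeEditLabelSketch({ editable: true, changing: true }) -> 'Apply'
// assigneeEditLabelSketch({ editable: true, changing: false }) -> 'Edit'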
diff --git a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
index 1f028f74423..5307be0bf58 100644
--- a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
@@ -155,8 +155,7 @@ describe('Issuable Time Tracker', () => {
it('should show the correct tooltip text', done => {
Vue.nextTick(() => {
expect(vm.showComparisonState).toBe(true);
- const $title = vm.$el.querySelector('.time-tracking-content .compare-meter').dataset
- .originalTitle;
+ const $title = vm.$el.querySelector('.time-tracking-content .compare-meter').title;
expect($title).toBe('Time remaining: 26h 23m');
done();
diff --git a/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js b/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
index 2f11c6a07c2..8c868205295 100644
--- a/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
@@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import EditFormButtons from '~/sidebar/components/confidential/edit_form_buttons.vue';
import eventHub from '~/sidebar/event_hub';
@@ -56,11 +55,11 @@ describe('Edit Form Buttons', () => {
});
it('disables the toggle button', () => {
- expect(findConfidentialToggle().attributes('disabled')).toBe('disabled');
+ expect(findConfidentialToggle().props('disabled')).toBe(true);
});
- it('finds the GlLoadingIcon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ it('sets loading on the toggle button', () => {
+ expect(findConfidentialToggle().props('loading')).toBe(true);
});
});
@@ -99,7 +98,7 @@ describe('Edit Form Buttons', () => {
describe('when succeeds', () => {
beforeEach(() => {
createComponent({ data: { isLoading: false }, props: { confidential: true } });
- findConfidentialToggle().trigger('click');
+ findConfidentialToggle().vm.$emit('click', new Event('click'));
});
it('dispatches the correct action', () => {
@@ -109,9 +108,9 @@ describe('Edit Form Buttons', () => {
});
});
- it('resets loading', () => {
+ it('resets loading on the toggle button', () => {
return waitForPromises().then(() => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(findConfidentialToggle().props('loading')).toBe(false);
});
});
@@ -135,7 +134,7 @@ describe('Edit Form Buttons', () => {
props: { confidential: true },
resolved: false,
});
- findConfidentialToggle().trigger('click');
+ findConfidentialToggle().vm.$emit('click', new Event('click'));
});
it('calls flash with the correct message', () => {
diff --git a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
index de1da3456f8..913646c8f8d 100644
--- a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
@@ -1,5 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
import EditFormButtons from '~/sidebar/components/lock/edit_form_buttons.vue';
import eventHub from '~/sidebar/event_hub';
import { deprecatedCreateFlash as flash } from '~/flash';
@@ -22,7 +21,6 @@ describe('EditFormButtons', () => {
};
const findLockToggle = () => wrapper.find('[data-testid="lock-toggle"]');
- const findGlLoadingIcon = () => wrapper.find(GlLoadingIcon);
const createComponent = ({ props = {}, data = {}, resolved = true }) => {
store = issuableType === ISSUABLE_TYPE_ISSUE ? createStore() : createMrStore();
@@ -33,7 +31,7 @@ describe('EditFormButtons', () => {
jest.spyOn(store, 'dispatch').mockRejectedValue();
}
- wrapper = shallowMount(EditFormButtons, {
+ wrapper = mount(EditFormButtons, {
store,
provide: {
fullPath: '',
@@ -78,8 +76,8 @@ describe('EditFormButtons', () => {
expect(findLockToggle().attributes('disabled')).toBe('disabled');
});
- it('displays the GlLoadingIcon', () => {
- expect(findGlLoadingIcon().exists()).toBe(true);
+ it('sets loading on the toggle button', () => {
+ expect(findLockToggle().props('loading')).toBe(true);
});
});
@@ -121,7 +119,7 @@ describe('EditFormButtons', () => {
it('resets loading', async () => {
await wrapper.vm.$nextTick().then(() => {
- expect(findGlLoadingIcon().exists()).toBe(false);
+ expect(findLockToggle().props('loading')).toBe(false);
});
});
@@ -156,7 +154,7 @@ describe('EditFormButtons', () => {
it('resets loading', async () => {
await wrapper.vm.$nextTick().then(() => {
- expect(findGlLoadingIcon().exists()).toBe(false);
+ expect(findLockToggle().props('loading')).toBe(false);
});
});
diff --git a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
index ab1423a9bbb..e8091dcb51d 100644
--- a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
+++ b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import IssuableLockForm from '~/sidebar/components/lock/issuable_lock_form.vue';
import EditForm from '~/sidebar/components/lock/edit_form.vue';
import createStore from '~/notes/stores';
@@ -19,6 +20,8 @@ describe('IssuableLockForm', () => {
const findLockStatus = () => wrapper.find('[data-testid="lock-status"]');
const findEditLink = () => wrapper.find('[data-testid="edit-link"]');
const findEditForm = () => wrapper.find(EditForm);
+ const findSidebarLockStatusTooltip = () =>
+ getBinding(findSidebarCollapseIcon().element, 'gl-tooltip');
const initStore = isLocked => {
if (issuableType === ISSUABLE_TYPE_ISSUE) {
@@ -37,6 +40,9 @@ describe('IssuableLockForm', () => {
isEditable: true,
...props,
},
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
});
};
@@ -125,6 +131,13 @@ describe('IssuableLockForm', () => {
expect(findEditForm().exists()).toBe(true);
});
});
+
+ it('renders a tooltip with the lock status text', () => {
+ const tooltip = findSidebarLockStatusTooltip();
+
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value.title).toBe(isLocked ? 'Locked' : 'Unlocked');
+ });
});
});
});
diff --git a/spec/frontend/sidebar/reviewer_title_spec.js b/spec/frontend/sidebar/reviewer_title_spec.js
new file mode 100644
index 00000000000..eae266688d5
--- /dev/null
+++ b/spec/frontend/sidebar/reviewer_title_spec.js
@@ -0,0 +1,116 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
+import Component from '~/sidebar/components/reviewers/reviewer_title.vue';
+
+describe('ReviewerTitle component', () => {
+ let wrapper;
+
+ const createComponent = props => {
+ return shallowMount(Component, {
+ propsData: {
+ numberOfReviewers: 0,
+ editable: false,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('reviewer title', () => {
+ it('renders reviewer', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 1,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.innerText.trim()).toEqual('Reviewer');
+ });
+
+ it('renders 2 reviewers', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 2,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.innerText.trim()).toEqual('2 Reviewers');
+ });
+ });
+
+ describe('gutter toggle', () => {
+ it('does not show toggle by default', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 2,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.gutter-toggle')).toBeNull();
+ });
+
+ it('shows toggle when showToggle is true', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 2,
+ editable: false,
+ showToggle: true,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.gutter-toggle')).toEqual(expect.any(Object));
+ });
+ });
+
+ it('does not render spinner by default', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 0,
+ editable: false,
+ });
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBeFalsy();
+ });
+
+ it('renders spinner when loading', () => {
+ wrapper = createComponent({
+ loading: true,
+ numberOfReviewers: 0,
+ editable: false,
+ });
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBeTruthy();
+ });
+
+ it('does not render edit link when not editable', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 0,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.edit-link')).toBeNull();
+ });
+
+ it('renders edit link when editable', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 0,
+ editable: true,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.edit-link')).not.toBeNull();
+ });
+
+ it('tracks the event when edit is clicked', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 0,
+ editable: true,
+ });
+
+ const spy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ triggerEvent('.js-sidebar-dropdown-toggle');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_edit_button', {
+ label: 'right_sidebar',
+ property: 'reviewer',
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/reviewers_spec.js b/spec/frontend/sidebar/reviewers_spec.js
new file mode 100644
index 00000000000..effcac266f0
--- /dev/null
+++ b/spec/frontend/sidebar/reviewers_spec.js
@@ -0,0 +1,169 @@
+import { mount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
+import { GlIcon } from '@gitlab/ui';
+import Reviewer from '~/sidebar/components/reviewers/reviewers.vue';
+import UsersMock from './mock_data';
+import UsersMockHelper from '../helpers/user_mock_data_helper';
+
+describe('Reviewer component', () => {
+ const getDefaultProps = () => ({
+ rootPath: 'http://localhost:3000',
+ users: [],
+ editable: false,
+ });
+ let wrapper;
+
+ const createWrapper = (propsData = getDefaultProps()) => {
+ wrapper = mount(Reviewer, {
+ propsData,
+ });
+ };
+
+ const findCollapsedChildren = () => wrapper.findAll('.sidebar-collapsed-icon > *');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('No reviewers/users', () => {
+ it('displays no reviewer icon when collapsed', () => {
+ createWrapper();
+ const collapsedChildren = findCollapsedChildren();
+ const userIcon = collapsedChildren.at(0).find(GlIcon);
+
+ expect(collapsedChildren.length).toBe(1);
+ expect(collapsedChildren.at(0).attributes('aria-label')).toBe('None');
+ expect(userIcon.exists()).toBe(true);
+ expect(userIcon.props('name')).toBe('user');
+ });
+ });
+
+ describe('One reviewer/user', () => {
+ it('displays one reviewer icon when collapsed', () => {
+ createWrapper({
+ ...getDefaultProps(),
+ users: [UsersMock.user],
+ });
+
+ const collapsedChildren = findCollapsedChildren();
+ const reviewer = collapsedChildren.at(0);
+
+ expect(collapsedChildren.length).toBe(1);
+ expect(reviewer.find('.avatar').attributes('src')).toBe(UsersMock.user.avatar);
+ expect(reviewer.find('.avatar').attributes('alt')).toBe(`${UsersMock.user.name}'s avatar`);
+
+ expect(trimText(reviewer.find('.author').text())).toBe(UsersMock.user.name);
+ });
+ });
+
+ describe('Two or more reviewers/users', () => {
+ it('displays two reviewer icons when collapsed', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(2);
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ });
+
+ const collapsedChildren = findCollapsedChildren();
+
+ expect(collapsedChildren.length).toBe(2);
+
+ const first = collapsedChildren.at(0);
+
+ expect(first.find('.avatar').attributes('src')).toBe(users[0].avatar_url);
+ expect(first.find('.avatar').attributes('alt')).toBe(`${users[0].name}'s avatar`);
+
+ expect(trimText(first.find('.author').text())).toBe(users[0].name);
+
+ const second = collapsedChildren.at(1);
+
+ expect(second.find('.avatar').attributes('src')).toBe(users[1].avatar_url);
+ expect(second.find('.avatar').attributes('alt')).toBe(`${users[1].name}'s avatar`);
+
+ expect(trimText(second.find('.author').text())).toBe(users[1].name);
+ });
+
+ it('displays one reviewer icon and counter when collapsed', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(3);
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ });
+
+ const collapsedChildren = findCollapsedChildren();
+
+ expect(collapsedChildren.length).toBe(2);
+
+ const first = collapsedChildren.at(0);
+
+ expect(first.find('.avatar').attributes('src')).toBe(users[0].avatar_url);
+ expect(first.find('.avatar').attributes('alt')).toBe(`${users[0].name}'s avatar`);
+
+ expect(trimText(first.find('.author').text())).toBe(users[0].name);
+
+ const second = collapsedChildren.at(1);
+
+ expect(trimText(second.find('.avatar-counter').text())).toBe('+2');
+ });
+
+    it('shows two reviewers', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(2);
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ editable: true,
+ });
+
+ expect(wrapper.findAll('.user-item').length).toBe(users.length);
+ expect(wrapper.find('.user-list-more').exists()).toBe(false);
+ });
+
+    it('shows sorted reviewers where "can merge" users are sorted first', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(3);
+ users[0].can_merge = false;
+ users[1].can_merge = false;
+ users[2].can_merge = true;
+
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ editable: true,
+ });
+
+ expect(wrapper.vm.sortedReviewers[0].can_merge).toBe(true);
+ });
+
+ it('passes the sorted reviewers to the uncollapsed-reviewer-list', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(3);
+ users[0].can_merge = false;
+ users[1].can_merge = false;
+ users[2].can_merge = true;
+
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ });
+
+ const userItems = wrapper.findAll('.user-list .user-item a');
+
+ expect(userItems.length).toBe(3);
+ expect(userItems.at(0).attributes('title')).toBe(users[2].name);
+ });
+
+ it('passes the sorted reviewers to the collapsed-reviewer-list', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(3);
+ users[0].can_merge = false;
+ users[1].can_merge = false;
+ users[2].can_merge = true;
+
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ });
+
+ const collapsedButton = wrapper.find('.sidebar-collapsed-user button');
+
+ expect(trimText(collapsedButton.text())).toBe(users[2].name);
+ });
+ });
+});
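
A minimal sketch of the `sortedReviewers` ordering the specs above rely on, assuming users who can merge are listed before those who cannot; inferred from the expectations, not taken from reviewers.vue.

const sortReviewersSketch = (users) =>
  [...users].sort((a, b) => Number(b.can_merge) - Number(a.can_merge));

const exampleUsers = [
  { name: 'no-merge-1', can_merge: false },
  { name: 'no-merge-2', can_merge: false },
  { name: 'maintainer', can_merge: true },
];
// sortReviewersSketch(exampleUsers)[0].name -> 'maintainer'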
diff --git a/spec/frontend/sidebar/sidebar_assignees_spec.js b/spec/frontend/sidebar/sidebar_assignees_spec.js
index 88e2d2c9514..dc4560d2ae8 100644
--- a/spec/frontend/sidebar/sidebar_assignees_spec.js
+++ b/spec/frontend/sidebar/sidebar_assignees_spec.js
@@ -20,6 +20,7 @@ describe('sidebar assignees', () => {
mediator,
field: '',
projectPath: 'projectPath',
+ changing: false,
...props,
},
provide: {
diff --git a/spec/frontend/sidebar/sidebar_labels_spec.js b/spec/frontend/sidebar/sidebar_labels_spec.js
index 29333a344e1..7a687ffa761 100644
--- a/spec/frontend/sidebar/sidebar_labels_spec.js
+++ b/spec/frontend/sidebar/sidebar_labels_spec.js
@@ -1,6 +1,5 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
-import Vuex from 'vuex';
import {
mockLabels,
mockRegularLabel,
@@ -9,17 +8,11 @@ import axios from '~/lib/utils/axios_utils';
import SidebarLabels from '~/sidebar/components/labels/sidebar_labels.vue';
import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_vue/constants';
import LabelsSelect from '~/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue';
-import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
describe('sidebar labels', () => {
let axiosMock;
let wrapper;
- const store = new Vuex.Store(labelsSelectModule());
-
const defaultProps = {
allowLabelCreate: true,
allowLabelEdit: true,
@@ -39,11 +32,9 @@ describe('sidebar labels', () => {
const mountComponent = () => {
wrapper = shallowMount(SidebarLabels, {
- localVue,
provide: {
...defaultProps,
},
- store,
});
};
@@ -81,7 +72,7 @@ describe('sidebar labels', () => {
});
});
- describe('when labels are changed', () => {
+ describe('when labels are updated', () => {
beforeEach(() => {
mountComponent();
});
@@ -114,7 +105,27 @@ describe('sidebar labels', () => {
const expected = {
[defaultProps.issuableType]: {
- label_ids: [27, 28, 40],
+ label_ids: [27, 28, 29, 40],
+ },
+ };
+
+ expect(axiosMock.history.put[0].data).toEqual(JSON.stringify(expected));
+ });
+ });
+
+ describe('when label `x` is clicked', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('makes an API call to update labels', async () => {
+ findLabelsSelect().vm.$emit('onLabelRemove', 27);
+
+ await axios.waitForAll();
+
+ const expected = {
+ [defaultProps.issuableType]: {
+ label_ids: [26, 28, 29],
},
};
diff --git a/spec/frontend/sidebar/sidebar_store_spec.js b/spec/frontend/sidebar/sidebar_store_spec.js
index 6d063a7cfcf..7c18222f300 100644
--- a/spec/frontend/sidebar/sidebar_store_spec.js
+++ b/spec/frontend/sidebar/sidebar_store_spec.js
@@ -57,16 +57,40 @@ describe('Sidebar store', () => {
expect(testContext.store.isFetching.assignees).toBe(true);
});
- it('adds a new assignee', () => {
- testContext.store.addAssignee(ASSIGNEE);
+ it('resets changing when resetChanging is called', () => {
+ testContext.store.changing = true;
+
+ testContext.store.resetChanging();
- expect(testContext.store.assignees.length).toEqual(1);
+ expect(testContext.store.changing).toBe(false);
});
- it('removes an assignee', () => {
- testContext.store.removeAssignee(ASSIGNEE);
+ describe('when it adds a new assignee', () => {
+ beforeEach(() => {
+ testContext.store.addAssignee(ASSIGNEE);
+ });
- expect(testContext.store.assignees.length).toEqual(0);
+ it('adds a new assignee', () => {
+ expect(testContext.store.assignees).toHaveLength(1);
+ });
+
+ it('sets changing to true', () => {
+ expect(testContext.store.changing).toBe(true);
+ });
+ });
+
+ describe('when it removes an assignee', () => {
+ beforeEach(() => {
+ testContext.store.removeAssignee(ASSIGNEE);
+ });
+
+ it('removes an assignee', () => {
+ expect(testContext.store.assignees).toHaveLength(0);
+ });
+
+ it('sets changing to true', () => {
+ expect(testContext.store.changing).toBe(true);
+ });
});
it('finds an existent assignee', () => {
@@ -86,6 +110,7 @@ describe('Sidebar store', () => {
testContext.store.removeAllAssignees();
expect(testContext.store.assignees.length).toEqual(0);
+ expect(testContext.store.changing).toBe(true);
});
it('sets participants data', () => {
diff --git a/spec/frontend/snippet/snippet_bundle_spec.js b/spec/frontend/snippet/snippet_bundle_spec.js
deleted file mode 100644
index 208d2fea804..00000000000
--- a/spec/frontend/snippet/snippet_bundle_spec.js
+++ /dev/null
@@ -1,87 +0,0 @@
-import { setHTMLFixture } from 'helpers/fixtures';
-import Editor from '~/editor/editor_lite';
-import initEditor from '~/snippet/snippet_bundle';
-
-jest.mock('~/editor/editor_lite', () => jest.fn());
-
-describe('Snippet editor', () => {
- let editorEl;
- let contentEl;
- let fileNameEl;
- let form;
-
- const mockName = 'foo.bar';
- const mockContent = 'Foo Bar';
- const updatedMockContent = 'New Foo Bar';
-
- const mockEditor = {
- updateModelLanguage: jest.fn(),
- getValue: jest.fn().mockReturnValueOnce(updatedMockContent),
- };
- const createInstance = jest.fn().mockImplementation(() => ({ ...mockEditor }));
- Editor.mockImplementation(() => ({
- createInstance,
- }));
-
- function setUpFixture(name, content) {
- setHTMLFixture(`
- <div class="snippet-form-holder">
- <form>
- <input class="js-snippet-file-name" type="text" value="${name}">
- <input class="snippet-file-content" type="hidden" value="${content}">
- <pre id="editor"></pre>
- </form>
- </div>
- `);
- }
-
- function bootstrap(name = '', content = '') {
- setUpFixture(name, content);
- editorEl = document.getElementById('editor');
- contentEl = document.querySelector('.snippet-file-content');
- fileNameEl = document.querySelector('.js-snippet-file-name');
- form = document.querySelector('.snippet-form-holder form');
-
- initEditor();
- }
-
- function createEvent(name) {
- return new Event(name, {
- view: window,
- bubbles: true,
- cancelable: true,
- });
- }
-
- beforeEach(() => {
- bootstrap(mockName, mockContent);
- });
-
- it('correctly initializes Editor', () => {
- expect(createInstance).toHaveBeenCalledWith({
- el: editorEl,
- blobPath: mockName,
- blobContent: mockContent,
- });
- });
-
- it('listens to file name changes and updates syntax highlighting of code', () => {
- expect(mockEditor.updateModelLanguage).not.toHaveBeenCalled();
-
- const event = createEvent('change');
-
- fileNameEl.value = updatedMockContent;
- fileNameEl.dispatchEvent(event);
-
- expect(mockEditor.updateModelLanguage).toHaveBeenCalledWith(updatedMockContent);
- });
-
- it('listens to form submit event and populates the hidden field with most recent version of the content', () => {
- expect(contentEl.value).toBe(mockContent);
-
- const event = createEvent('submit');
-
- form.dispatchEvent(event);
- expect(contentEl.value).toBe(updatedMockContent);
- });
-});
diff --git a/spec/frontend/snippet/snippet_edit_spec.js b/spec/frontend/snippet/snippet_edit_spec.js
deleted file mode 100644
index 7c12c0cac03..00000000000
--- a/spec/frontend/snippet/snippet_edit_spec.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import '~/snippet/snippet_edit';
-import { triggerDOMEvent } from 'jest/helpers/dom_events_helper';
-import { SnippetEditInit } from '~/snippets';
-import initSnippet from '~/snippet/snippet_bundle';
-
-jest.mock('~/snippet/snippet_bundle');
-jest.mock('~/snippets');
-
-describe('Snippet edit form initialization', () => {
- const setFF = flag => {
- gon.features = { snippetsEditVue: flag };
- };
- let features;
-
- beforeEach(() => {
- features = gon.features;
- setFixtures('<div class="snippet-form"></div>');
- });
-
- afterEach(() => {
- gon.features = features;
- });
-
- it.each`
- name | flag | isVue
- ${'Regular'} | ${false} | ${false}
- ${'Vue'} | ${true} | ${true}
- `('correctly initializes $name Snippet Edit form', ({ flag, isVue }) => {
- initSnippet.mockClear();
- SnippetEditInit.mockClear();
-
- setFF(flag);
-
- triggerDOMEvent('DOMContentLoaded');
-
- if (isVue) {
- expect(initSnippet).not.toHaveBeenCalled();
- expect(SnippetEditInit).toHaveBeenCalled();
- } else {
- expect(initSnippet).toHaveBeenCalled();
- expect(SnippetEditInit).not.toHaveBeenCalled();
- }
- });
-});
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
index 1cf1ee74ddf..b0c253bca65 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -3,15 +3,18 @@
exports[`Snippet Blob Edit component with loaded blob matches snapshot 1`] = `
<div
class="file-holder snippet"
+ data-qa-selector="file_holder_container"
>
<blob-header-edit-stub
candelete="true"
data-qa-selector="file_name_field"
id="blob_local_7_file_path"
+ showdelete="true"
value="foo/bar/test.md"
/>
- <blob-content-edit-stub
+ <editor-lite-stub
+ editoroptions="[object Object]"
fileglobalid="blob_local_7"
filename="foo/bar/test.md"
value="Lorem ipsum dolar sit amet,
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index 3b101e9e815..93684ed48ee 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -24,7 +24,7 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
</div>
<div
- class="js-vue-markdown-field md-area position-relative js-expanded gfm-form"
+ class="js-vue-markdown-field md-area position-relative gfm-form js-expanded"
>
<markdown-header-stub
linecontent=""
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index b6abb9f389a..c1fad8cebe6 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -148,17 +148,17 @@ describe('Snippet Edit app', () => {
// Ideally we wouldn't call this method directly, but we don't have a way to trigger
// apollo responses yet.
- const loadSnippet = (...edges) => {
- if (edges.length) {
+ const loadSnippet = (...nodes) => {
+ if (nodes.length) {
wrapper.setData({
- snippet: edges[0],
+ snippet: nodes[0],
});
}
wrapper.vm.onSnippetFetch({
data: {
snippets: {
- edges,
+ nodes,
},
},
});
diff --git a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
index 8b2051008d7..055168a1711 100644
--- a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
@@ -19,17 +19,12 @@ const TEST_BLOBS_UNLOADED = TEST_BLOBS.map(blob => ({ ...blob, content: '', isLo
describe('snippets/components/snippet_blob_actions_edit', () => {
let wrapper;
- const createComponent = (props = {}, snippetMultipleFiles = true) => {
+ const createComponent = (props = {}) => {
wrapper = shallowMount(SnippetBlobActionsEdit, {
propsData: {
initBlobs: TEST_BLOBS,
...props,
},
- provide: {
- glFeatures: {
- snippetMultipleFiles,
- },
- },
});
};
@@ -69,28 +64,24 @@ describe('snippets/components/snippet_blob_actions_edit', () => {
wrapper = null;
});
- describe.each`
- featureFlag | label | showDelete | showAdd
- ${true} | ${'Files'} | ${true} | ${true}
- ${false} | ${'File'} | ${false} | ${false}
- `('with feature flag = $featureFlag', ({ featureFlag, label, showDelete, showAdd }) => {
+ describe('multi-file snippets rendering', () => {
beforeEach(() => {
- createComponent({}, featureFlag);
+ createComponent();
});
it('renders label', () => {
- expect(findLabel().text()).toBe(label);
+ expect(findLabel().text()).toBe('Files');
});
- it(`renders delete button (show=${showDelete})`, () => {
+ it(`renders delete button (show=true)`, () => {
expect(findFirstBlobEdit().props()).toMatchObject({
- showDelete,
+ showDelete: true,
canDelete: true,
});
});
- it(`renders add button (show=${showAdd})`, () => {
- expect(findAddButton().exists()).toBe(showAdd);
+ it(`renders add button (show=true)`, () => {
+ expect(findAddButton().exists()).toBe(true);
});
});
diff --git a/spec/frontend/snippets/components/snippet_blob_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
index fc4da46d722..9d0311fd682 100644
--- a/spec/frontend/snippets/components/snippet_blob_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
@@ -5,7 +5,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'helpers/test_constants';
import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
-import BlobContentEdit from '~/blob/components/blob_edit_content.vue';
+import EditorLite from '~/vue_shared/components/editor_lite.vue';
import axios from '~/lib/utils/axios_utils';
import { joinPaths } from '~/lib/utils/url_utility';
import { deprecatedCreateFlash as createFlash } from '~/flash';
@@ -48,7 +48,7 @@ describe('Snippet Blob Edit component', () => {
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findHeader = () => wrapper.find(BlobHeaderEdit);
- const findContent = () => wrapper.find(BlobContentEdit);
+ const findContent = () => wrapper.find(EditorLite);
const getLastUpdatedArgs = () => {
const event = wrapper.emitted()['blob-updated'];
@@ -156,7 +156,7 @@ describe('Snippet Blob Edit component', () => {
});
it('shows blob header', () => {
- const { canDelete = true, showDelete = false } = props;
+ const { canDelete = true, showDelete = true } = props;
expect(findHeader().props()).toMatchObject({
canDelete,
@@ -172,11 +172,13 @@ describe('Snippet Blob Edit component', () => {
expect(findContent().exists()).toBe(showContent);
if (showContent) {
- expect(findContent().props()).toEqual({
- value: TEST_BLOB_LOADED.content,
- fileGlobalId: TEST_BLOB_LOADED.id,
- fileName: TEST_BLOB_LOADED.path,
- });
+ expect(findContent().props()).toEqual(
+ expect.objectContaining({
+ value: TEST_BLOB_LOADED.content,
+ fileGlobalId: TEST_BLOB_LOADED.id,
+ fileName: TEST_BLOB_LOADED.path,
+ }),
+ );
}
});
});
diff --git a/spec/frontend/snippets/components/snippet_blob_view_spec.js b/spec/frontend/snippets/components/snippet_blob_view_spec.js
index 9c4b2734a3f..1ccecd7b5ba 100644
--- a/spec/frontend/snippets/components/snippet_blob_view_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_view_spec.js
@@ -140,10 +140,10 @@ describe('Blob Embeddable', () => {
async ({ snippetBlobs, currentBlob, expectedContent }) => {
const apolloData = {
snippets: {
- edges: [
+ nodes: [
{
- node: {
- blobs: snippetBlobs,
+ blobs: {
+ nodes: snippetBlobs,
},
},
],
diff --git a/spec/frontend/snippets_spec.js b/spec/frontend/snippets_spec.js
deleted file mode 100644
index 6c39ff0da27..00000000000
--- a/spec/frontend/snippets_spec.js
+++ /dev/null
@@ -1,70 +0,0 @@
-import snippetEmbed from '~/snippet/snippet_embed';
-import { loadHTMLFixture } from './helpers/fixtures';
-
-describe('Snippets', () => {
- let embedBtn;
- let snippetUrlArea;
- let shareBtn;
- let scriptTag;
-
- const snippetUrl = 'http://test.host/-/snippets/1';
-
- beforeEach(() => {
- loadHTMLFixture('snippets/show.html');
-
- embedBtn = document.querySelector('.js-embed-btn');
- snippetUrlArea = document.querySelector('.js-snippet-url-area');
- shareBtn = document.querySelector('.js-share-btn');
- });
-
- it('selects the fields content when it is clicked', () => {
- jest.spyOn(snippetUrlArea, 'select');
- snippetEmbed();
-
- expect(snippetUrlArea.select).not.toHaveBeenCalled();
- snippetUrlArea.dispatchEvent(new Event('click'));
- expect(snippetUrlArea.select).toHaveBeenCalled();
- });
-
- describe('when the snippet url does not include params', () => {
- beforeEach(() => {
- snippetEmbed();
-
- scriptTag = `<script src="${snippetUrl}.js"></script>`;
- });
-
- it('shows the script tag as default', () => {
- expect(snippetUrlArea.value).toEqual(scriptTag);
- });
-
- it('sets the proper url depending on the button clicked', () => {
- shareBtn.dispatchEvent(new Event('click'));
- expect(snippetUrlArea.value).toEqual(snippetUrl);
-
- embedBtn.dispatchEvent(new Event('click'));
- expect(snippetUrlArea.value).toEqual(scriptTag);
- });
- });
-
- describe('when the snippet url includes params', () => {
- beforeEach(() => {
- scriptTag = `<script src="${snippetUrl}.js?foo=bar"></script>`;
- snippetUrlArea.value = scriptTag;
- snippetUrlArea.dataset.url = `${snippetUrl}?foo=bar`;
-
- snippetEmbed();
- });
-
- it('shows the script tag as default', () => {
- expect(snippetUrlArea.value).toEqual(scriptTag);
- });
-
- it('sets the proper url depending on the button clicked', () => {
- shareBtn.dispatchEvent(new Event('click'));
- expect(snippetUrlArea.value).toEqual(`${snippetUrl}?foo=bar`);
-
- embedBtn.dispatchEvent(new Event('click'));
- expect(snippetUrlArea.value).toEqual(scriptTag);
- });
- });
-});
diff --git a/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js b/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js
new file mode 100644
index 00000000000..191f91be076
--- /dev/null
+++ b/spec/frontend/static_site_editor/components/edit_meta_controls_spec.js
@@ -0,0 +1,99 @@
+import { shallowMount } from '@vue/test-utils';
+
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import { GlFormInput, GlFormTextarea } from '@gitlab/ui';
+
+import EditMetaControls from '~/static_site_editor/components/edit_meta_controls.vue';
+
+import { mergeRequestMeta } from '../mock_data';
+
+describe('~/static_site_editor/components/edit_meta_controls.vue', () => {
+ useLocalStorageSpy();
+
+ let wrapper;
+ let mockSelect;
+ let mockGlFormInputTitleInstance;
+ const { title, description } = mergeRequestMeta;
+ const newTitle = 'New title';
+ const newDescription = 'New description';
+
+ const buildWrapper = (propsData = {}) => {
+ wrapper = shallowMount(EditMetaControls, {
+ propsData: {
+ title,
+ description,
+ ...propsData,
+ },
+ });
+ };
+
+ const buildMocks = () => {
+ mockSelect = jest.fn();
+ mockGlFormInputTitleInstance = { $el: { select: mockSelect } };
+ wrapper.vm.$refs.title = mockGlFormInputTitleInstance;
+ };
+
+ const findGlFormInputTitle = () => wrapper.find(GlFormInput);
+ const findGlFormTextAreaDescription = () => wrapper.find(GlFormTextarea);
+
+ beforeEach(() => {
+ buildWrapper();
+ buildMocks();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders the title input', () => {
+ expect(findGlFormInputTitle().exists()).toBe(true);
+ });
+
+ it('renders the description input', () => {
+ expect(findGlFormTextAreaDescription().exists()).toBe(true);
+ });
+
+ it('forwards the title prop to the title input', () => {
+ expect(findGlFormInputTitle().attributes().value).toBe(title);
+ });
+
+ it('forwards the description prop to the description input', () => {
+ expect(findGlFormTextAreaDescription().attributes().value).toBe(description);
+ });
+
+ it('calls select on the title input when mounted', () => {
+ expect(mockGlFormInputTitleInstance.$el.select).toHaveBeenCalled();
+ });
+
+ describe('when inputs change', () => {
+ const storageKey = 'sse-merge-request-meta-local-storage-editable';
+
+ afterEach(() => {
+ localStorage.removeItem(storageKey);
+ });
+
+ it.each`
+ findFn | key | value
+ ${findGlFormInputTitle} | ${'title'} | ${newTitle}
+ ${findGlFormTextAreaDescription} | ${'description'} | ${newDescription}
+ `('emits updated settings when $findFn input updates', ({ key, value, findFn }) => {
+ findFn().vm.$emit('input', value);
+
+ const newSettings = { ...mergeRequestMeta, [key]: value };
+
+ expect(wrapper.emitted('updateSettings')[0][0]).toMatchObject(newSettings);
+ });
+
+ it('should remember the input changes', () => {
+ findGlFormInputTitle().vm.$emit('input', newTitle);
+ findGlFormTextAreaDescription().vm.$emit('input', newDescription);
+
+ const newSettings = { title: newTitle, description: newDescription };
+
+ expect(localStorage.setItem).toHaveBeenCalledWith(storageKey, JSON.stringify(newSettings));
+ });
+ });
+});
diff --git a/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js b/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js
new file mode 100644
index 00000000000..7a5685033f3
--- /dev/null
+++ b/spec/frontend/static_site_editor/components/edit_meta_modal_spec.js
@@ -0,0 +1,80 @@
+import { shallowMount } from '@vue/test-utils';
+
+import { GlModal } from '@gitlab/ui';
+
+import EditMetaModal from '~/static_site_editor/components/edit_meta_modal.vue';
+import EditMetaControls from '~/static_site_editor/components/edit_meta_controls.vue';
+
+import { sourcePath, mergeRequestMeta } from '../mock_data';
+
+describe('~/static_site_editor/components/edit_meta_modal.vue', () => {
+ let wrapper;
+ let resetCachedEditable;
+ let mockEditMetaControlsInstance;
+ const { title, description } = mergeRequestMeta;
+
+ const buildWrapper = (propsData = {}) => {
+ wrapper = shallowMount(EditMetaModal, {
+ propsData: {
+ sourcePath,
+ ...propsData,
+ },
+ });
+ };
+
+ const buildMocks = () => {
+ resetCachedEditable = jest.fn();
+ mockEditMetaControlsInstance = { resetCachedEditable };
+ wrapper.vm.$refs.editMetaControls = mockEditMetaControlsInstance;
+ };
+
+ const findGlModal = () => wrapper.find(GlModal);
+ const findEditMetaControls = () => wrapper.find(EditMetaControls);
+
+ beforeEach(() => {
+ buildWrapper();
+ buildMocks();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders the modal', () => {
+ expect(findGlModal().exists()).toBe(true);
+ });
+
+ it('renders the edit meta controls', () => {
+ expect(findEditMetaControls().exists()).toBe(true);
+ });
+
+ it('contains the sourcePath in the title', () => {
+ expect(findEditMetaControls().props('title')).toContain(sourcePath);
+ });
+
+ it('forwards the title prop', () => {
+ expect(findEditMetaControls().props('title')).toBe(title);
+ });
+
+ it('forwards the description prop', () => {
+ expect(findEditMetaControls().props('description')).toBe(description);
+ });
+
+ it('emits the primary event with mergeRequestMeta', () => {
+ findGlModal().vm.$emit('primary', mergeRequestMeta);
+ expect(wrapper.emitted('primary')).toEqual([[mergeRequestMeta]]);
+ });
+
+ it('calls resetCachedEditable on EditMetaControls when primary emits', () => {
+ findGlModal().vm.$emit('primary', mergeRequestMeta);
+ expect(mockEditMetaControlsInstance.resetCachedEditable).toHaveBeenCalled();
+ });
+
+ it('emits the hide event', () => {
+ findGlModal().vm.$emit('hide');
+ expect(wrapper.emitted('hide')).toEqual([[]]);
+ });
+});
diff --git a/spec/frontend/static_site_editor/components/front_matter_controls_spec.js b/spec/frontend/static_site_editor/components/front_matter_controls_spec.js
index 82e8fad643e..8001f2fbd29 100644
--- a/spec/frontend/static_site_editor/components/front_matter_controls_spec.js
+++ b/spec/frontend/static_site_editor/components/front_matter_controls_spec.js
@@ -5,18 +5,11 @@ import { humanize } from '~/lib/utils/text_utility';
import FrontMatterControls from '~/static_site_editor/components/front_matter_controls.vue';
+import { sourceContentHeaderObjYAML as settings } from '../mock_data';
+
describe('~/static_site_editor/components/front_matter_controls.vue', () => {
let wrapper;
- // TODO Refactor and update `sourceContentHeaderObjYAML` in mock_data when !41230 lands
- const settings = {
- layout: 'handbook-page-toc',
- title: 'Handbook',
- twitter_image: '/images/tweets/handbook-gitlab.png',
- suppress_header: true,
- extra_css: ['sales-and-free-trial-common.css', 'form-to-resource.css'],
- };
-
const buildWrapper = (propsData = {}) => {
wrapper = shallowMount(FrontMatterControls, {
propsData: {
diff --git a/spec/frontend/static_site_editor/graphql/resolvers/has_submitted_changes_spec.js b/spec/frontend/static_site_editor/graphql/resolvers/has_submitted_changes_spec.js
new file mode 100644
index 00000000000..0670b240a3f
--- /dev/null
+++ b/spec/frontend/static_site_editor/graphql/resolvers/has_submitted_changes_spec.js
@@ -0,0 +1,27 @@
+import appDataQuery from '~/static_site_editor/graphql/queries/app_data.query.graphql';
+import hasSubmittedChanges from '~/static_site_editor/graphql/resolvers/has_submitted_changes';
+
+describe('static_site_editor/graphql/resolvers/has_submitted_changes', () => {
+ it('updates the cache with the data passed in input', () => {
+ const cachedData = { appData: { original: 'foo' } };
+ const newValue = { input: { hasSubmittedChanges: true } };
+
+ const cache = {
+ readQuery: jest.fn().mockReturnValue(cachedData),
+ writeQuery: jest.fn(),
+ };
+ hasSubmittedChanges(null, newValue, { cache });
+
+ expect(cache.readQuery).toHaveBeenCalledWith({ query: appDataQuery });
+ expect(cache.writeQuery).toHaveBeenCalledWith({
+ query: appDataQuery,
+ data: {
+ appData: {
+ __typename: 'AppData',
+ original: 'foo',
+ hasSubmittedChanges: true,
+ },
+ },
+ });
+ });
+});
diff --git a/spec/frontend/static_site_editor/mock_data.js b/spec/frontend/static_site_editor/mock_data.js
index d861f6c9cd7..0b08e290227 100644
--- a/spec/frontend/static_site_editor/mock_data.js
+++ b/spec/frontend/static_site_editor/mock_data.js
@@ -2,11 +2,17 @@ export const sourceContentHeaderYAML = `---
layout: handbook-page-toc
title: Handbook
twitter_image: /images/tweets/handbook-gitlab.png
+suppress_header: true
+extra_css:
+ - sales-and-free-trial-common.css
+ - form-to-resource.css
---`;
export const sourceContentHeaderObjYAML = {
layout: 'handbook-page-toc',
title: 'Handbook',
twitter_image: '/images/tweets/handbook-gitlab.png',
+ suppress_header: true,
+ extra_css: ['sales-and-free-trial-common.css', 'form-to-resource.css'],
};
export const sourceContentSpacing = `\n`;
export const sourceContentBody = `## On this page
@@ -23,7 +29,10 @@ export const username = 'gitlabuser';
export const projectId = '123456';
export const returnUrl = 'https://www.gitlab.com';
export const sourcePath = 'foobar.md.html';
-
+export const mergeRequestMeta = {
+ title: `Update ${sourcePath} file`,
+ description: 'Copy update',
+};
export const savedContentMeta = {
branch: {
label: 'foobar',
diff --git a/spec/frontend/static_site_editor/pages/home_spec.js b/spec/frontend/static_site_editor/pages/home_spec.js
index 41f8a1075c0..2c69e884005 100644
--- a/spec/frontend/static_site_editor/pages/home_spec.js
+++ b/spec/frontend/static_site_editor/pages/home_spec.js
@@ -1,12 +1,13 @@
-import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Home from '~/static_site_editor/pages/home.vue';
import SkeletonLoader from '~/static_site_editor/components/skeleton_loader.vue';
import EditArea from '~/static_site_editor/components/edit_area.vue';
+import EditMetaModal from '~/static_site_editor/components/edit_meta_modal.vue';
import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
import SubmitChangesError from '~/static_site_editor/components/submit_changes_error.vue';
import submitContentChangesMutation from '~/static_site_editor/graphql/mutations/submit_content_changes.mutation.graphql';
+import hasSubmittedChangesMutation from '~/static_site_editor/graphql/mutations/has_submitted_changes.mutation.graphql';
import { SUCCESS_ROUTE } from '~/static_site_editor/router/constants';
import { TRACKING_ACTION_INITIALIZE_EDITOR } from '~/static_site_editor/constants';
@@ -17,15 +18,15 @@ import {
sourceContentTitle as title,
sourcePath,
username,
+ mergeRequestMeta,
savedContentMeta,
submitChangesError,
trackingCategory,
+ images,
} from '../mock_data';
const localVue = createLocalVue();
-localVue.use(Vuex);
-
describe('static_site_editor/pages/home', () => {
let wrapper;
let store;
@@ -33,6 +34,19 @@ describe('static_site_editor/pages/home', () => {
let $router;
let mutateMock;
let trackingSpy;
+ const defaultAppData = {
+ isSupportedContent: true,
+ hasSubmittedChanges: false,
+ returnUrl,
+ project,
+ username,
+ sourcePath,
+ };
+ const hasSubmittedChangesMutationPayload = {
+ data: {
+ appData: { ...defaultAppData, hasSubmittedChanges: true },
+ },
+ };
const buildApollo = (queries = {}) => {
mutateMock = jest.fn();
@@ -64,7 +78,7 @@ describe('static_site_editor/pages/home', () => {
},
data() {
return {
- appData: { isSupportedContent: true, returnUrl, project, username, sourcePath },
+ appData: { ...defaultAppData },
sourceContent: { title, content },
...data,
};
@@ -73,6 +87,7 @@ describe('static_site_editor/pages/home', () => {
};
const findEditArea = () => wrapper.find(EditArea);
+ const findEditMetaModal = () => wrapper.find(EditMetaModal);
const findInvalidContentMessage = () => wrapper.find(InvalidContentMessage);
const findSkeletonLoader = () => wrapper.find(SkeletonLoader);
const findSubmitChangesError = () => wrapper.find(SubmitChangesError);
@@ -140,24 +155,51 @@ describe('static_site_editor/pages/home', () => {
});
it('displays invalid content message when content is not supported', () => {
- buildWrapper({ appData: { isSupportedContent: false } });
+ buildWrapper({ appData: { ...defaultAppData, isSupportedContent: false } });
expect(findInvalidContentMessage().exists()).toBe(true);
});
it('does not display invalid content message when content is supported', () => {
- buildWrapper({ appData: { isSupportedContent: true } });
+ buildWrapper();
expect(findInvalidContentMessage().exists()).toBe(false);
});
- describe('when submitting changes fails', () => {
- beforeEach(() => {
- mutateMock.mockRejectedValue(new Error(submitChangesError));
+ it('renders an EditMetaModal component', () => {
+ buildWrapper();
+
+ expect(findEditMetaModal().exists()).toBe(true);
+ });
+ describe('when preparing submission', () => {
+ it('calls the show method when the edit-area submit event is emitted', () => {
buildWrapper();
+
+ const mockInstance = { show: jest.fn() };
+ wrapper.vm.$refs.editMetaModal = mockInstance;
+
findEditArea().vm.$emit('submit', { content });
+ return wrapper.vm.$nextTick().then(() => {
+ expect(mockInstance.show).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('when submitting changes fails', () => {
+ const setupMutateMock = () => {
+ mutateMock
+ .mockResolvedValueOnce(hasSubmittedChangesMutationPayload)
+ .mockRejectedValueOnce(new Error(submitChangesError));
+ };
+
+ beforeEach(() => {
+ setupMutateMock();
+
+ buildWrapper({ content });
+ findEditMetaModal().vm.$emit('primary', mergeRequestMeta);
+
return wrapper.vm.$nextTick();
});
@@ -166,6 +208,8 @@ describe('static_site_editor/pages/home', () => {
});
it('retries submitting changes when retry button is clicked', () => {
+ setupMutateMock();
+
findSubmitChangesError().vm.$emit('retry');
expect(mutateMock).toHaveBeenCalled();
@@ -180,26 +224,35 @@ describe('static_site_editor/pages/home', () => {
});
});
- it('does not display submit changes error when an error does not exist', () => {
- buildWrapper();
-
- expect(findSubmitChangesError().exists()).toBe(false);
- });
-
describe('when submitting changes succeeds', () => {
const newContent = `new ${content}`;
beforeEach(() => {
- mutateMock.mockResolvedValueOnce({ data: { submitContentChanges: savedContentMeta } });
+ mutateMock.mockResolvedValueOnce(hasSubmittedChangesMutationPayload).mockResolvedValueOnce({
+ data: {
+ submitContentChanges: savedContentMeta,
+ },
+ });
- buildWrapper();
- findEditArea().vm.$emit('submit', { content: newContent });
+ buildWrapper({ content: newContent, images });
+ findEditMetaModal().vm.$emit('primary', mergeRequestMeta);
return wrapper.vm.$nextTick();
});
+ it('dispatches hasSubmittedChanges mutation', () => {
+ expect(mutateMock).toHaveBeenNthCalledWith(1, {
+ mutation: hasSubmittedChangesMutation,
+ variables: {
+ input: {
+ hasSubmittedChanges: true,
+ },
+ },
+ });
+ });
+
it('dispatches submitContentChanges mutation', () => {
- expect(mutateMock).toHaveBeenCalledWith({
+ expect(mutateMock).toHaveBeenNthCalledWith(2, {
mutation: submitContentChangesMutation,
variables: {
input: {
@@ -207,6 +260,8 @@ describe('static_site_editor/pages/home', () => {
project,
sourcePath,
username,
+ images,
+ mergeRequestMeta,
},
},
});
@@ -217,6 +272,12 @@ describe('static_site_editor/pages/home', () => {
});
});
+ it('does not display submit changes error when an error does not exist', () => {
+ buildWrapper();
+
+ expect(findSubmitChangesError().exists()).toBe(false);
+ });
+
it('tracks when editor is initialized on the mounted lifecycle hook', () => {
buildWrapper();
expect(trackingSpy).toHaveBeenCalledWith(
diff --git a/spec/frontend/static_site_editor/pages/success_spec.js b/spec/frontend/static_site_editor/pages/success_spec.js
index 3e19e2413e7..3fc69dc4586 100644
--- a/spec/frontend/static_site_editor/pages/success_spec.js
+++ b/spec/frontend/static_site_editor/pages/success_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
-import { GlEmptyState, GlButton } from '@gitlab/ui';
+import { GlButton, GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
import Success from '~/static_site_editor/pages/success.vue';
import { savedContentMeta, returnUrl, sourcePath } from '../mock_data';
import { HOME_ROUTE } from '~/static_site_editor/router/constants';
-describe('static_site_editor/pages/success', () => {
+describe('~/static_site_editor/pages/success.vue', () => {
const mergeRequestsIllustrationPath = 'illustrations/merge_requests.svg';
let wrapper;
let router;
@@ -15,14 +15,15 @@ describe('static_site_editor/pages/success', () => {
};
};
- const buildWrapper = (data = {}) => {
+ const buildWrapper = (data = {}, appData = {}) => {
wrapper = shallowMount(Success, {
mocks: {
$router: router,
},
stubs: {
- GlEmptyState,
GlButton,
+ GlEmptyState,
+ GlLoadingIcon,
},
propsData: {
mergeRequestsIllustrationPath,
@@ -33,6 +34,8 @@ describe('static_site_editor/pages/success', () => {
appData: {
returnUrl,
sourcePath,
+ hasSubmittedChanges: true,
+ ...appData,
},
...data,
};
@@ -40,8 +43,9 @@ describe('static_site_editor/pages/success', () => {
});
};
- const findEmptyState = () => wrapper.find(GlEmptyState);
const findReturnUrlButton = () => wrapper.find(GlButton);
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
beforeEach(() => {
buildRouter();
@@ -52,50 +56,76 @@ describe('static_site_editor/pages/success', () => {
wrapper = null;
});
- it('renders empty state with a link to the created merge request', () => {
- buildWrapper();
+ describe('when savedContentMeta is valid', () => {
+ it('renders empty state with a link to the created merge request', () => {
+ buildWrapper();
+
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findEmptyState().props()).toMatchObject({
+ primaryButtonText: 'View merge request',
+ primaryButtonLink: savedContentMeta.mergeRequest.url,
+ title: 'Your merge request has been created',
+ svgPath: mergeRequestsIllustrationPath,
+ svgHeight: 146,
+ });
+ });
- expect(findEmptyState().exists()).toBe(true);
- expect(findEmptyState().props()).toMatchObject({
- primaryButtonText: 'View merge request',
- primaryButtonLink: savedContentMeta.mergeRequest.url,
- title: 'Your merge request has been created',
- svgPath: mergeRequestsIllustrationPath,
+ it('displays merge request instructions in the empty state', () => {
+ buildWrapper();
+
+ expect(findEmptyState().text()).toContain(
+ 'To see your changes live you will need to do the following things:',
+ );
+ expect(findEmptyState().text()).toContain('1. Add a clear title to describe the change.');
+ expect(findEmptyState().text()).toContain(
+ '2. Add a description to explain why the change is being made.',
+ );
+ expect(findEmptyState().text()).toContain(
+ '3. Assign a person to review and accept the merge request.',
+ );
});
- });
- it('displays merge request instructions in the empty state', () => {
- buildWrapper();
-
- expect(findEmptyState().text()).toContain(
- 'To see your changes live you will need to do the following things:',
- );
- expect(findEmptyState().text()).toContain('1. Add a clear title to describe the change.');
- expect(findEmptyState().text()).toContain(
- '2. Add a description to explain why the change is being made.',
- );
- expect(findEmptyState().text()).toContain(
- '3. Assign a person to review and accept the merge request.',
- );
- });
+ it('displays return to site button', () => {
+ buildWrapper();
+
+ expect(findReturnUrlButton().text()).toBe('Return to site');
+ expect(findReturnUrlButton().attributes().href).toBe(returnUrl);
+ });
- it('displays return to site button', () => {
- buildWrapper();
+ it('displays source path', () => {
+ buildWrapper();
- expect(findReturnUrlButton().text()).toBe('Return to site');
- expect(findReturnUrlButton().attributes().href).toBe(returnUrl);
+ expect(wrapper.text()).toContain(`Update ${sourcePath} file`);
+ });
});
- it('displays source path', () => {
- buildWrapper();
+ describe('when savedContentMeta is invalid', () => {
+ it('renders empty state with a loader', () => {
+ buildWrapper({ savedContentMeta: null });
- expect(wrapper.text()).toContain(`Update ${sourcePath} file`);
- });
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findEmptyState().props()).toMatchObject({
+ title: 'Creating your merge request',
+ svgPath: mergeRequestsIllustrationPath,
+ });
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
- it('redirects to the HOME route when content has not been submitted', () => {
- buildWrapper({ savedContentMeta: null });
+ it('displays helper info in the empty state', () => {
+ buildWrapper({ savedContentMeta: null });
- expect(router.push).toHaveBeenCalledWith(HOME_ROUTE);
- expect(wrapper.html()).toBe('');
+ expect(findEmptyState().text()).toContain(
+ 'You can set an assignee to get your changes reviewed and deployed once your merge request is created',
+ );
+ expect(findEmptyState().text()).toContain(
+ 'A link to view the merge request will appear once ready',
+ );
+ });
+
+ it('redirects to the HOME route when content has not been submitted', () => {
+ buildWrapper({ savedContentMeta: null }, { hasSubmittedChanges: false });
+
+ expect(router.push).toHaveBeenCalledWith(HOME_ROUTE);
+ });
});
});
diff --git a/spec/frontend/static_site_editor/services/front_matterify_spec.js b/spec/frontend/static_site_editor/services/front_matterify_spec.js
new file mode 100644
index 00000000000..dbaedc30849
--- /dev/null
+++ b/spec/frontend/static_site_editor/services/front_matterify_spec.js
@@ -0,0 +1,47 @@
+import {
+ sourceContentYAML as content,
+ sourceContentHeaderObjYAML as yamlFrontMatterObj,
+ sourceContentSpacing as spacing,
+ sourceContentBody as body,
+} from '../mock_data';
+
+import { frontMatterify, stringify } from '~/static_site_editor/services/front_matterify';
+
+describe('static_site_editor/services/front_matterify', () => {
+ const frontMatterifiedContent = {
+ source: content,
+ matter: yamlFrontMatterObj,
+ spacing,
+ content: body,
+ delimiter: '---',
+ type: 'yaml',
+ };
+ const frontMatterifiedBody = {
+ source: body,
+ matter: null,
+ spacing: null,
+ content: body,
+ delimiter: null,
+ type: null,
+ };
+
+ describe('frontMatterify', () => {
+ it.each`
+ frontMatterified | target
+ ${frontMatterify(content)} | ${frontMatterifiedContent}
+ ${frontMatterify(body)} | ${frontMatterifiedBody}
+ `('returns $target from $frontMatterified', ({ frontMatterified, target }) => {
+ expect(frontMatterified).toEqual(target);
+ });
+ });
+
+ describe('stringify', () => {
+ it.each`
+ stringified | target
+ ${stringify(frontMatterifiedContent)} | ${content}
+ ${stringify(frontMatterifiedBody)} | ${body}
+ `('returns $target from $stringified', ({ stringified, target }) => {
+ expect(stringified).toBe(target);
+ });
+ });
+});
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
index d464e6b1895..5018da7300b 100644
--- a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
+++ b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
@@ -19,6 +19,7 @@ import {
commitBranchResponse,
commitMultipleResponse,
createMergeRequestResponse,
+ mergeRequestMeta,
sourcePath,
sourceContentYAML as content,
trackingCategory,
@@ -28,11 +29,20 @@ import {
jest.mock('~/static_site_editor/services/generate_branch_name');
describe('submitContentChanges', () => {
- const mergeRequestTitle = `Update ${sourcePath} file`;
const branch = 'branch-name';
let trackingSpy;
let origPage;
+ const buildPayload = (overrides = {}) => ({
+ username,
+ projectId,
+ sourcePath,
+ content,
+ images,
+ mergeRequestMeta,
+ ...overrides,
+ });
+
beforeEach(() => {
jest.spyOn(Api, 'createBranch').mockResolvedValue({ data: commitBranchResponse });
jest.spyOn(Api, 'commitMultiple').mockResolvedValue({ data: commitMultipleResponse });
@@ -53,7 +63,7 @@ describe('submitContentChanges', () => {
});
it('creates a branch named after the username and target branch', () => {
- return submitContentChanges({ username, projectId }).then(() => {
+ return submitContentChanges(buildPayload()).then(() => {
expect(Api.createBranch).toHaveBeenCalledWith(projectId, {
ref: DEFAULT_TARGET_BRANCH,
branch,
@@ -64,16 +74,16 @@ describe('submitContentChanges', () => {
it('notifies error when branch could not be created', () => {
Api.createBranch.mockRejectedValueOnce();
- return expect(submitContentChanges({ username, projectId })).rejects.toThrow(
+ return expect(submitContentChanges(buildPayload())).rejects.toThrow(
SUBMIT_CHANGES_BRANCH_ERROR,
);
});
it('commits the content changes to the branch when creating branch succeeds', () => {
- return submitContentChanges({ username, projectId, sourcePath, content, images }).then(() => {
+ return submitContentChanges(buildPayload()).then(() => {
expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
branch,
- commit_message: mergeRequestTitle,
+ commit_message: mergeRequestMeta.title,
actions: [
{
action: 'update',
@@ -93,16 +103,11 @@ describe('submitContentChanges', () => {
it('does not commit an image if it has been removed from the content', () => {
const contentWithoutImages = '## Content without images';
- return submitContentChanges({
- username,
- projectId,
- sourcePath,
- content: contentWithoutImages,
- images,
- }).then(() => {
+ const payload = buildPayload({ content: contentWithoutImages });
+ return submitContentChanges(payload).then(() => {
expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
branch,
- commit_message: mergeRequestTitle,
+ commit_message: mergeRequestMeta.title,
actions: [
{
action: 'update',
@@ -117,17 +122,19 @@ describe('submitContentChanges', () => {
it('notifies error when content could not be committed', () => {
Api.commitMultiple.mockRejectedValueOnce();
- return expect(submitContentChanges({ username, projectId, images })).rejects.toThrow(
+ return expect(submitContentChanges(buildPayload())).rejects.toThrow(
SUBMIT_CHANGES_COMMIT_ERROR,
);
});
- it('creates a merge request when commiting changes succeeds', () => {
- return submitContentChanges({ username, projectId, sourcePath, content, images }).then(() => {
+ it('creates a merge request when committing changes succeeds', () => {
+ return submitContentChanges(buildPayload()).then(() => {
+ const { title, description } = mergeRequestMeta;
expect(Api.createProjectMergeRequest).toHaveBeenCalledWith(
projectId,
convertObjectPropsToSnakeCase({
- title: mergeRequestTitle,
+ title,
+ description,
targetBranch: DEFAULT_TARGET_BRANCH,
sourceBranch: branch,
}),
@@ -138,7 +145,7 @@ describe('submitContentChanges', () => {
it('notifies error when merge request could not be created', () => {
Api.createProjectMergeRequest.mockRejectedValueOnce();
- return expect(submitContentChanges({ username, projectId, images })).rejects.toThrow(
+ return expect(submitContentChanges(buildPayload())).rejects.toThrow(
SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
);
});
@@ -147,11 +154,9 @@ describe('submitContentChanges', () => {
let result;
beforeEach(() => {
- return submitContentChanges({ username, projectId, sourcePath, content, images }).then(
- _result => {
- result = _result;
- },
- );
+ return submitContentChanges(buildPayload()).then(_result => {
+ result = _result;
+ });
});
it('returns the branch name', () => {
@@ -179,7 +184,7 @@ describe('submitContentChanges', () => {
describe('sends the correct tracking event', () => {
beforeEach(() => {
- return submitContentChanges({ username, projectId, sourcePath, content, images });
+ return submitContentChanges(buildPayload());
});
it('for committing changes', () => {
diff --git a/spec/frontend/static_site_editor/services/templater_spec.js b/spec/frontend/static_site_editor/services/templater_spec.js
index 1e7ae872b7e..cb3a0a0c106 100644
--- a/spec/frontend/static_site_editor/services/templater_spec.js
+++ b/spec/frontend/static_site_editor/services/templater_spec.js
@@ -39,6 +39,10 @@ Below this line is a codeblock of the same HTML that should be ignored and prese
<p>Some paragraph...</p>
</div>
\`\`\`
+
+Below this line is an iframe that should be ignored and preserved
+
+<iframe></iframe>
`;
const sourceTemplated = `Below this line is a simple ERB (single-line erb block) example.
@@ -87,6 +91,10 @@ Below this line is a codeblock of the same HTML that should be ignored and prese
<p>Some paragraph...</p>
</div>
\`\`\`
+
+Below this line is an iframe that should be ignored and preserved
+
+<iframe></iframe>
`;
it.each`
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index 544c19da57b..eebec7de9d4 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -1,4 +1,6 @@
import Vue from 'vue';
+import 'jquery';
+
import * as jqueryMatchers from 'custom-jquery-matchers';
import { config as testUtilsConfig } from '@vue/test-utils';
import Translate from '~/vue_shared/translate';
@@ -9,7 +11,6 @@ import customMatchers from './matchers';
import './helpers/dom_shims';
import './helpers/jquery';
-import '~/commons/jquery';
import '~/commons/bootstrap';
process.on('unhandledRejection', global.promiseRejectionHandler);
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
index e2d39ffeaf0..8c2bef60e74 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking_spec.js
@@ -28,7 +28,7 @@ describe('Tracking', () => {
respectDoNotTrack: true,
forceSecureTracker: true,
eventMethod: 'post',
- contexts: { webPage: true },
+ contexts: { webPage: true, performanceTiming: true },
formTracking: false,
linkClickTracking: false,
});
diff --git a/spec/frontend/user_lists/components/add_user_modal_spec.js b/spec/frontend/user_lists/components/add_user_modal_spec.js
new file mode 100644
index 00000000000..82ce195d7cd
--- /dev/null
+++ b/spec/frontend/user_lists/components/add_user_modal_spec.js
@@ -0,0 +1,50 @@
+import { mount } from '@vue/test-utils';
+import AddUserModal from '~/user_lists/components/add_user_modal.vue';
+
+describe('Add User Modal', () => {
+ let wrapper;
+
+ const click = testId => wrapper.find(`[data-testid="${testId}"]`).trigger('click');
+
+ beforeEach(() => {
+ wrapper = mount(AddUserModal, {
+ propsData: { visible: true },
+ });
+ });
+
+ it('should explain the format of user IDs to enter', () => {
+ expect(wrapper.find('[data-testid="add-userids-description"]').text()).toContain(
+ 'Enter a comma separated list of user IDs',
+ );
+ });
+
+ describe('events', () => {
+ beforeEach(() => {
+ wrapper.find('#add-user-ids').setValue('1, 2, 3, 4');
+ });
+
+ it('should emit the users entered when Add Users is clicked', () => {
+ click('confirm-add-user-ids');
+ expect(wrapper.emitted('addUsers')).toContainEqual(['1, 2, 3, 4']);
+ });
+
+ it('should clear the input after emitting', async () => {
+ click('confirm-add-user-ids');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('#add-user-ids').element.value).toBe('');
+ });
+
+ it('should not emit the users entered if cancel is clicked', () => {
+ click('cancel-add-user-ids');
+ expect(wrapper.emitted('addUsers')).toBeUndefined();
+ });
+
+ it('should clear the input after cancelling', async () => {
+ click('cancel-add-user-ids');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('#add-user-ids').element.value).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/components/edit_user_list_spec.js b/spec/frontend/user_lists/components/edit_user_list_spec.js
new file mode 100644
index 00000000000..51a38e12916
--- /dev/null
+++ b/spec/frontend/user_lists/components/edit_user_list_spec.js
@@ -0,0 +1,150 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { createLocalVue, mount } from '@vue/test-utils';
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+import Api from '~/api';
+import createStore from '~/user_lists/store/edit';
+import EditUserList from '~/user_lists/components/edit_user_list.vue';
+import UserListForm from '~/user_lists/components/user_list_form.vue';
+import { userList } from '../../feature_flags/mock_data';
+import { redirectTo } from '~/lib/utils/url_utility';
+
+jest.mock('~/api');
+jest.mock('~/lib/utils/url_utility');
+
+const localVue = createLocalVue(Vue);
+localVue.use(Vuex);
+
+describe('user_lists/components/edit_user_list', () => {
+ let wrapper;
+
+ const setInputValue = value => wrapper.find('[data-testid="user-list-name"]').setValue(value);
+
+ const click = button => wrapper.find(`[data-testid="${button}"]`).trigger('click');
+ const clickSave = () => click('save-user-list');
+
+ const destroy = () => wrapper?.destroy();
+
+ const factory = () => {
+ destroy();
+
+ wrapper = mount(EditUserList, {
+ localVue,
+ store: createStore({ projectId: '1', userListIid: '2' }),
+ provide: {
+ userListsDocsPath: '/docs/user_lists',
+ },
+ });
+ };
+
+ afterEach(() => {
+ destroy();
+ });
+
+ describe('loading', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockReturnValue(new Promise(() => {}));
+ factory();
+ });
+
+ it('should show a loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('loading error', () => {
+ const message = 'error creating list';
+ let alert;
+
+ beforeEach(async () => {
+ Api.fetchFeatureFlagUserList.mockRejectedValue({ message });
+ factory();
+ await waitForPromises();
+
+ alert = wrapper.find(GlAlert);
+ });
+
+ it('should show a flash with the error response', () => {
+ expect(alert.text()).toContain(message);
+ });
+
+ it('should not be dismissible', async () => {
+ expect(alert.props('dismissible')).toBe(false);
+ });
+
+ it('should not show a user list form', () => {
+ expect(wrapper.find(UserListForm).exists()).toBe(false);
+ });
+ });
+
+ describe('update', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockResolvedValue({ data: userList });
+ factory();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should link to the documentation', () => {
+ const link = wrapper.find('[data-testid="user-list-docs-link"]');
+ expect(link.attributes('href')).toBe('/docs/user_lists');
+ });
+
+ it('should link the cancel button to the user list details path', () => {
+ const link = wrapper.find('[data-testid="user-list-cancel"]');
+ expect(link.attributes('href')).toBe(userList.path);
+ });
+
+ it('should show the user list name in the title', () => {
+ expect(wrapper.find('[data-testid="user-list-title"]').text()).toBe(`Edit ${userList.name}`);
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ Api.updateFeatureFlagUserList.mockResolvedValue({ data: userList });
+ setInputValue('test');
+ clickSave();
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should create a user list with the entered name', () => {
+ expect(Api.updateFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ name: 'test',
+ iid: userList.iid,
+ });
+ });
+
+ it('should redirect to the feature flag details page', () => {
+ expect(redirectTo).toHaveBeenCalledWith(userList.path);
+ });
+ });
+
+ describe('error', () => {
+ let alert;
+ let message;
+
+ beforeEach(async () => {
+ message = 'error creating list';
+ Api.updateFeatureFlagUserList.mockRejectedValue({ message });
+ setInputValue('test');
+ clickSave();
+ await waitForPromises();
+
+ alert = wrapper.find(GlAlert);
+ });
+
+ it('should show a flash with the error response', () => {
+ expect(alert.text()).toContain(message);
+ });
+
+ it('should dismiss the error if dismiss is clicked', async () => {
+ alert.find('button').trigger('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(alert.exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/components/new_user_list_spec.js b/spec/frontend/user_lists/components/new_user_list_spec.js
new file mode 100644
index 00000000000..62fb0ca0859
--- /dev/null
+++ b/spec/frontend/user_lists/components/new_user_list_spec.js
@@ -0,0 +1,93 @@
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { GlAlert } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+import Api from '~/api';
+import createStore from '~/user_lists/store/new';
+import NewUserList from '~/user_lists/components/new_user_list.vue';
+import { redirectTo } from '~/lib/utils/url_utility';
+import { userList } from '../../feature_flags/mock_data';
+
+jest.mock('~/api');
+jest.mock('~/lib/utils/url_utility');
+
+const localVue = createLocalVue(Vue);
+localVue.use(Vuex);
+
+describe('user_lists/components/new_user_list', () => {
+ let wrapper;
+
+ const setInputValue = value => wrapper.find('[data-testid="user-list-name"]').setValue(value);
+
+ const click = button => wrapper.find(`[data-testid="${button}"]`).trigger('click');
+
+ beforeEach(() => {
+ wrapper = mount(NewUserList, {
+ localVue,
+ store: createStore({ projectId: '1' }),
+ provide: {
+ featureFlagsPath: '/feature_flags',
+ userListsDocsPath: '/docs/user_lists',
+ },
+ });
+ });
+
+ it('should link to the documentation', () => {
+ const link = wrapper.find('[data-testid="user-list-docs-link"]');
+ expect(link.attributes('href')).toBe('/docs/user_lists');
+ });
+
+ it('should link the cancel button back to feature flags', () => {
+ const cancel = wrapper.find('[data-testid="user-list-cancel"]');
+ expect(cancel.attributes('href')).toBe('/feature_flags');
+ });
+
+ describe('create', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ Api.createFeatureFlagUserList.mockResolvedValue({ data: userList });
+ setInputValue('test');
+ click('save-user-list');
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should create a user list with the entered name', () => {
+ expect(Api.createFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ name: 'test',
+ user_xids: '',
+ });
+ });
+
+ it('should redirect to the feature flag details page', () => {
+ expect(redirectTo).toHaveBeenCalledWith(userList.path);
+ });
+ });
+
+ describe('error', () => {
+ let alert;
+
+ beforeEach(async () => {
+ Api.createFeatureFlagUserList.mockRejectedValue({ message: 'error creating list' });
+ setInputValue('test');
+ click('save-user-list');
+
+ await waitForPromises();
+
+ alert = wrapper.find(GlAlert);
+ });
+
+ it('should show a flash with the error response', () => {
+ expect(alert.text()).toContain('error creating list');
+ });
+
+ it('should dismiss the error when the dismiss button is clicked', async () => {
+ alert.find('button').trigger('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(alert.exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/components/user_list_form_spec.js b/spec/frontend/user_lists/components/user_list_form_spec.js
new file mode 100644
index 00000000000..42f7659600e
--- /dev/null
+++ b/spec/frontend/user_lists/components/user_list_form_spec.js
@@ -0,0 +1,40 @@
+import { mount } from '@vue/test-utils';
+import Form from '~/user_lists/components/user_list_form.vue';
+import { userList } from '../../feature_flags/mock_data';
+
+describe('user_lists/components/user_list_form', () => {
+ let wrapper;
+ let input;
+
+ beforeEach(() => {
+ wrapper = mount(Form, {
+ propsData: {
+ cancelPath: '/cancel',
+ saveButtonLabel: 'Save',
+ userListsDocsPath: '/docs',
+ userList,
+ },
+ });
+
+ input = wrapper.find('[data-testid="user-list-name"]');
+ });
+
+ it('should set the name to the name of the given user list', () => {
+ expect(input.element.value).toBe(userList.name);
+ });
+
+ it('should link to the user lists docs', () => {
+ expect(wrapper.find('[data-testid="user-list-docs-link"]').attributes('href')).toBe('/docs');
+ });
+
+ it('should emit an updated user list when save is clicked', () => {
+ input.setValue('test');
+ wrapper.find('[data-testid="save-user-list"]').trigger('click');
+
+ expect(wrapper.emitted('submit')).toEqual([[{ ...userList, name: 'test' }]]);
+ });
+
+ it('should set the cancel button to the passed url', () => {
+ expect(wrapper.find('[data-testid="user-list-cancel"]').attributes('href')).toBe('/cancel');
+ });
+});
diff --git a/spec/frontend/user_lists/components/user_list_spec.js b/spec/frontend/user_lists/components/user_list_spec.js
new file mode 100644
index 00000000000..5f9b7967846
--- /dev/null
+++ b/spec/frontend/user_lists/components/user_list_spec.js
@@ -0,0 +1,196 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount } from '@vue/test-utils';
+import { uniq } from 'lodash';
+import { GlAlert, GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
+import Api from '~/api';
+import { parseUserIds, stringifyUserIds } from '~/user_lists/store/utils';
+import createStore from '~/user_lists/store/show';
+import UserList from '~/user_lists/components/user_list.vue';
+import { userList } from '../../feature_flags/mock_data';
+
+jest.mock('~/api');
+
+Vue.use(Vuex);
+
+describe('User List', () => {
+ let wrapper;
+
+ const click = testId => wrapper.find(`[data-testid="${testId}"]`).trigger('click');
+
+ const findUserIds = () => wrapper.findAll('[data-testid="user-id"]');
+
+ const destroy = () => wrapper?.destroy();
+
+ const factory = () => {
+ destroy();
+
+ wrapper = mount(UserList, {
+ store: createStore({ projectId: '1', userListIid: '2' }),
+ propsData: {
+ emptyStatePath: '/empty_state.svg',
+ },
+ });
+ };
+
+ describe('loading', () => {
+ let resolveFn;
+
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockReturnValue(
+ new Promise(resolve => {
+ resolveFn = resolve;
+ }),
+ );
+ factory();
+ });
+
+ afterEach(() => {
+ resolveFn();
+ });
+
+ it('shows a loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('success', () => {
+ let userIds;
+
+ beforeEach(() => {
+ userIds = parseUserIds(userList.user_xids);
+ Api.fetchFeatureFlagUserList.mockResolvedValueOnce({ data: userList });
+ factory();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('requests the user list on mount', () => {
+ expect(Api.fetchFeatureFlagUserList).toHaveBeenCalledWith('1', '2');
+ });
+
+ it('shows the list name', () => {
+ expect(wrapper.find('h3').text()).toBe(userList.name);
+ });
+
+ it('shows an add users button', () => {
+ expect(wrapper.find('[data-testid="add-users"]').text()).toBe('Add Users');
+ });
+
+ it('shows an edit list button', () => {
+ expect(wrapper.find('[data-testid="edit-user-list"]').text()).toBe('Edit');
+ });
+
+ it('shows a row for every id', () => {
+ expect(wrapper.findAll('[data-testid="user-id-row"]')).toHaveLength(userIds.length);
+ });
+
+ it('shows one id on each row', () => {
+ findUserIds().wrappers.forEach((w, i) => expect(w.text()).toBe(userIds[i]));
+ });
+
+ it('shows a delete button for every row', () => {
+ expect(wrapper.findAll('[data-testid="delete-user-id"]')).toHaveLength(userIds.length);
+ });
+
+ describe('adding users', () => {
+ const newIds = ['user3', 'user4', 'user5', 'test', 'example', 'foo'];
+ let receivedUserIds;
+ let parsedReceivedUserIds;
+
+ beforeEach(async () => {
+ Api.updateFeatureFlagUserList.mockResolvedValue(userList);
+ click('add-users');
+ await wrapper.vm.$nextTick();
+ wrapper.find('#add-user-ids').setValue(`${stringifyUserIds(newIds)},`);
+ click('confirm-add-user-ids');
+ await wrapper.vm.$nextTick();
+ [[, { user_xids: receivedUserIds }]] = Api.updateFeatureFlagUserList.mock.calls;
+ parsedReceivedUserIds = parseUserIds(receivedUserIds);
+ });
+
+ it('should add user IDs to the user list', () => {
+ newIds.forEach(id => expect(receivedUserIds).toContain(id));
+ });
+
+ it('should not remove existing user ids', () => {
+ userIds.forEach(id => expect(receivedUserIds).toContain(id));
+ });
+
+ it('should not submit empty IDs', () => {
+ parsedReceivedUserIds.forEach(id => expect(id).not.toBe(''));
+ });
+
+ it('should not create duplicate entries', () => {
+ expect(uniq(parsedReceivedUserIds)).toEqual(parsedReceivedUserIds);
+ });
+
+ it('should display the new IDs', () => {
+ const userIdWrappers = findUserIds();
+ newIds.forEach(id => {
+ const userIdWrapper = userIdWrappers.wrappers.find(w => w.text() === id);
+ expect(userIdWrapper.exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('deleting users', () => {
+ let receivedUserIds;
+
+ beforeEach(async () => {
+ Api.updateFeatureFlagUserList.mockResolvedValue(userList);
+ click('delete-user-id');
+ await wrapper.vm.$nextTick();
+ [[, { user_xids: receivedUserIds }]] = Api.updateFeatureFlagUserList.mock.calls;
+ });
+
+ it('should remove the ID clicked', () => {
+ expect(receivedUserIds).not.toContain(userIds[0]);
+ });
+
+ it('should not display the deleted user', () => {
+ const userIdWrappers = findUserIds();
+ const userIdWrapper = userIdWrappers.wrappers.find(w => w.text() === userIds[0]);
+ expect(userIdWrapper).toBeUndefined();
+ });
+ });
+ });
+
+ describe('error', () => {
+ const findAlert = () => wrapper.find(GlAlert);
+
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockRejectedValue();
+ factory();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays the alert message', () => {
+ const alert = findAlert();
+ expect(alert.text()).toBe('Something went wrong on our end. Please try again!');
+ });
+
+ it('can dismiss the alert', async () => {
+ const alert = findAlert();
+ alert.find('button').trigger('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(alert.exists()).toBe(false);
+ });
+ });
+
+ describe('empty list', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockResolvedValueOnce({ data: { ...userList, user_xids: '' } });
+ factory();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays an empty state', () => {
+ expect(wrapper.find(GlEmptyState).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/edit/actions_spec.js b/spec/frontend/user_lists/store/edit/actions_spec.js
new file mode 100644
index 00000000000..7f0fb8e5401
--- /dev/null
+++ b/spec/frontend/user_lists/store/edit/actions_spec.js
@@ -0,0 +1,121 @@
+import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
+import createState from '~/user_lists/store/edit/state';
+import * as types from '~/user_lists/store/edit/mutation_types';
+import * as actions from '~/user_lists/store/edit/actions';
+import { redirectTo } from '~/lib/utils/url_utility';
+import { userList } from '../../../feature_flags/mock_data';
+
+jest.mock('~/api');
+jest.mock('~/lib/utils/url_utility');
+
+describe('User Lists Edit Actions', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ projectId: '1', userListIid: '2' });
+ });
+
+ describe('fetchUserList', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockResolvedValue({ data: userList });
+ });
+
+ it('should commit RECEIVE_USER_LIST_SUCCESS', () => {
+ return testAction(
+ actions.fetchUserList,
+ undefined,
+ state,
+ [
+ { type: types.REQUEST_USER_LIST },
+ { type: types.RECEIVE_USER_LIST_SUCCESS, payload: userList },
+ ],
+ [],
+ () => expect(Api.fetchFeatureFlagUserList).toHaveBeenCalledWith('1', '2'),
+ );
+ });
+ });
+
+ describe('error', () => {
+ let error;
+ beforeEach(() => {
+ error = { response: { data: { message: ['error'] } } };
+ Api.fetchFeatureFlagUserList.mockRejectedValue(error);
+ });
+
+ it('should commit RECEIVE_USER_LIST_ERROR', () => {
+ return testAction(
+ actions.fetchUserList,
+ undefined,
+ state,
+ [
+ { type: types.REQUEST_USER_LIST },
+ { type: types.RECEIVE_USER_LIST_ERROR, payload: ['error'] },
+ ],
+ [],
+ () => expect(Api.fetchFeatureFlagUserList).toHaveBeenCalledWith('1', '2'),
+ );
+ });
+ });
+ });
+
+ describe('dismissErrorAlert', () => {
+ it('should commit DISMISS_ERROR_ALERT', () => {
+ return testAction(actions.dismissErrorAlert, undefined, state, [
+ { type: types.DISMISS_ERROR_ALERT },
+ ]);
+ });
+ });
+
+ describe('updateUserList', () => {
+ let updatedList;
+
+ beforeEach(() => {
+ updatedList = {
+ ...userList,
+ name: 'new',
+ };
+ });
+ describe('success', () => {
+ beforeEach(() => {
+ Api.updateFeatureFlagUserList.mockResolvedValue({ data: userList });
+ state.userList = userList;
+ });
+
+ it('should update the user list and redirect to the user list page', () => {
+ return testAction(actions.updateUserList, updatedList, state, [], [], () => {
+ expect(Api.updateFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ name: updatedList.name,
+ iid: updatedList.iid,
+ });
+ expect(redirectTo).toHaveBeenCalledWith(userList.path);
+ });
+ });
+ });
+
+ describe('error', () => {
+ let error;
+
+ beforeEach(() => {
+ error = { message: 'error' };
+ Api.updateFeatureFlagUserList.mockRejectedValue(error);
+ });
+
+ it('should commit RECEIVE_USER_LIST_ERROR', () => {
+ return testAction(
+ actions.updateUserList,
+ updatedList,
+ state,
+ [{ type: types.RECEIVE_USER_LIST_ERROR, payload: ['error'] }],
+ [],
+ () =>
+ expect(Api.updateFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ name: updatedList.name,
+ iid: updatedList.iid,
+ }),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/edit/mutations_spec.js b/spec/frontend/user_lists/store/edit/mutations_spec.js
new file mode 100644
index 00000000000..3d4d2a59717
--- /dev/null
+++ b/spec/frontend/user_lists/store/edit/mutations_spec.js
@@ -0,0 +1,61 @@
+import statuses from '~/user_lists/constants/edit';
+import createState from '~/user_lists/store/edit/state';
+import * as types from '~/user_lists/store/edit/mutation_types';
+import mutations from '~/user_lists/store/edit/mutations';
+import { userList } from '../../../feature_flags/mock_data';
+
+describe('User List Edit Mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ projectId: '1', userListIid: '2' });
+ });
+
+ describe(types.REQUEST_USER_LIST, () => {
+ beforeEach(() => {
+ mutations[types.REQUEST_USER_LIST](state);
+ });
+
+ it('sets the state to loading', () => {
+ expect(state.status).toBe(statuses.LOADING);
+ });
+ });
+
+ describe(types.RECEIVE_USER_LIST_SUCCESS, () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](state, userList);
+ });
+
+ it('sets the state to success', () => {
+ expect(state.status).toBe(statuses.SUCCESS);
+ });
+
+ it('sets the user list to the one received', () => {
+ expect(state.userList).toEqual(userList);
+ });
+ });
+
+ describe(types.RECEIVE_USER_LIST_ERROR, () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_ERROR](state, ['network error']);
+ });
+
+ it('sets the state to error', () => {
+ expect(state.status).toBe(statuses.ERROR);
+ });
+
+ it('sets the error message to the received one', () => {
+ expect(state.errorMessage).toEqual(['network error']);
+ });
+ });
+
+ describe(types.DISMISS_ERROR_ALERT, () => {
+ beforeEach(() => {
+ mutations[types.DISMISS_ERROR_ALERT](state);
+ });
+
+ it('sets the state to error dismissed', () => {
+ expect(state.status).toBe(statuses.UNSYNCED);
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/new/actions_spec.js b/spec/frontend/user_lists/store/new/actions_spec.js
new file mode 100644
index 00000000000..9cc6212a125
--- /dev/null
+++ b/spec/frontend/user_lists/store/new/actions_spec.js
@@ -0,0 +1,69 @@
+import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
+import createState from '~/user_lists/store/new/state';
+import * as types from '~/user_lists/store/new/mutation_types';
+import * as actions from '~/user_lists/store/new/actions';
+import { redirectTo } from '~/lib/utils/url_utility';
+import { userList } from '../../../feature_flags/mock_data';
+
+jest.mock('~/api');
+jest.mock('~/lib/utils/url_utility');
+
+describe('User Lists New Actions', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ projectId: '1' });
+ });
+
+ describe('dismissErrorAlert', () => {
+ it('should commit DISMISS_ERROR_ALERT', () => {
+ return testAction(actions.dismissErrorAlert, undefined, state, [
+ { type: types.DISMISS_ERROR_ALERT },
+ ]);
+ });
+ });
+
+ describe('createUserList', () => {
+ let createdList;
+
+ beforeEach(() => {
+ createdList = {
+ ...userList,
+ name: 'new',
+ };
+ });
+ describe('success', () => {
+ beforeEach(() => {
+ Api.createFeatureFlagUserList.mockResolvedValue({ data: userList });
+ });
+
+ it('should redirect to the user list page', () => {
+ return testAction(actions.createUserList, createdList, state, [], [], () => {
+ expect(Api.createFeatureFlagUserList).toHaveBeenCalledWith('1', createdList);
+ expect(redirectTo).toHaveBeenCalledWith(userList.path);
+ });
+ });
+ });
+
+ describe('error', () => {
+ let error;
+
+ beforeEach(() => {
+ error = { message: 'error' };
+ Api.createFeatureFlagUserList.mockRejectedValue(error);
+ });
+
+ it('should commit RECEIVE_USER_LIST_ERROR', () => {
+ return testAction(
+ actions.createUserList,
+ createdList,
+ state,
+ [{ type: types.RECEIVE_CREATE_USER_LIST_ERROR, payload: ['error'] }],
+ [],
+ () => expect(Api.createFeatureFlagUserList).toHaveBeenCalledWith('1', createdList),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/new/mutations_spec.js b/spec/frontend/user_lists/store/new/mutations_spec.js
new file mode 100644
index 00000000000..89e8a83eb25
--- /dev/null
+++ b/spec/frontend/user_lists/store/new/mutations_spec.js
@@ -0,0 +1,38 @@
+import createState from '~/user_lists/store/new/state';
+import * as types from '~/user_lists/store/new/mutation_types';
+import mutations from '~/user_lists/store/new/mutations';
+
+describe('User List New Mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ projectId: '1' });
+ });
+
+ describe(types.RECEIVE_CREATE_USER_LIST_ERROR, () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_CREATE_USER_LIST_ERROR](state, ['network error']);
+ });
+
+ it('sets the error message to the received one', () => {
+ expect(state.errorMessage).toEqual(['network error']);
+ });
+
+ it('sets the error message to the received API message if present', () => {
+ const message = ['name is blank', 'name is too short'];
+
+ mutations[types.RECEIVE_CREATE_USER_LIST_ERROR](state, message);
+ expect(state.errorMessage).toEqual(message);
+ });
+ });
+
+ describe(types.DISMISS_ERROR_ALERT, () => {
+ beforeEach(() => {
+ mutations[types.DISMISS_ERROR_ALERT](state);
+ });
+
+ it('clears the error message', () => {
+ expect(state.errorMessage).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/show/actions_spec.js b/spec/frontend/user_lists/store/show/actions_spec.js
new file mode 100644
index 00000000000..25a6b9ec0e4
--- /dev/null
+++ b/spec/frontend/user_lists/store/show/actions_spec.js
@@ -0,0 +1,117 @@
+import testAction from 'helpers/vuex_action_helper';
+import { userList } from 'jest/feature_flags/mock_data';
+import Api from '~/api';
+import { stringifyUserIds } from '~/user_lists/store/utils';
+import createState from '~/user_lists/store/show/state';
+import * as types from '~/user_lists/store/show/mutation_types';
+import * as actions from '~/user_lists/store/show/actions';
+
+jest.mock('~/api');
+
+describe('User Lists Show Actions', () => {
+ let mockState;
+
+ beforeEach(() => {
+ mockState = createState({ projectId: '1', userListIid: '2' });
+ });
+
+ describe('fetchUserList', () => {
+ it('commits REQUEST_USER_LIST and RECEIVE_USER_LIST_SUCCESS on success', () => {
+ Api.fetchFeatureFlagUserList.mockResolvedValue({ data: userList });
+ return testAction(
+ actions.fetchUserList,
+ undefined,
+ mockState,
+ [
+ { type: types.REQUEST_USER_LIST },
+ { type: types.RECEIVE_USER_LIST_SUCCESS, payload: userList },
+ ],
+ [],
+ () => expect(Api.fetchFeatureFlagUserList).toHaveBeenCalledWith('1', '2'),
+ );
+ });
+
+ it('commits REQUEST_USER_LIST and RECEIVE_USER_LIST_ERROR on error', () => {
+ Api.fetchFeatureFlagUserList.mockRejectedValue({ message: 'fail' });
+ return testAction(
+ actions.fetchUserList,
+ undefined,
+ mockState,
+ [{ type: types.REQUEST_USER_LIST }, { type: types.RECEIVE_USER_LIST_ERROR }],
+ [],
+ );
+ });
+ });
+
+ describe('dismissErrorAlert', () => {
+ it('commits DISMISS_ERROR_ALERT', () => {
+ return testAction(
+ actions.dismissErrorAlert,
+ undefined,
+ mockState,
+ [{ type: types.DISMISS_ERROR_ALERT }],
+ [],
+ );
+ });
+ });
+
+ describe('addUserIds', () => {
+ it('adds the given IDs and tries to update the user list', () => {
+ return testAction(
+ actions.addUserIds,
+ '1,2,3',
+ mockState,
+ [{ type: types.ADD_USER_IDS, payload: '1,2,3' }],
+ [{ type: 'updateUserList' }],
+ );
+ });
+ });
+
+ describe('removeUserId', () => {
+ it('removes the given ID and tries to update the user list', () => {
+ return testAction(
+ actions.removeUserId,
+ 'user3',
+ mockState,
+ [{ type: types.REMOVE_USER_ID, payload: 'user3' }],
+ [{ type: 'updateUserList' }],
+ );
+ });
+ });
+
+ describe('updateUserList', () => {
+ beforeEach(() => {
+ mockState.userList = userList;
+ mockState.userIds = ['user1', 'user2', 'user3'];
+ });
+
+ it('commits REQUEST_USER_LIST and RECEIVE_USER_LIST_SUCCESS on success', () => {
+ Api.updateFeatureFlagUserList.mockResolvedValue({ data: userList });
+ return testAction(
+ actions.updateUserList,
+ undefined,
+ mockState,
+ [
+ { type: types.REQUEST_USER_LIST },
+ { type: types.RECEIVE_USER_LIST_SUCCESS, payload: userList },
+ ],
+ [],
+ () =>
+ expect(Api.updateFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ ...userList,
+ user_xids: stringifyUserIds(mockState.userIds),
+ }),
+ );
+ });
+ it('commits REQUEST_USER_LIST and RECEIVE_USER_LIST_ERROR on error', () => {
+ Api.updateFeatureFlagUserList.mockRejectedValue({ message: 'fail' });
+ return testAction(
+ actions.updateUserList,
+ undefined,
+ mockState,
+ [{ type: types.REQUEST_USER_LIST }, { type: types.RECEIVE_USER_LIST_ERROR }],
+ [],
+ );
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/show/mutations_spec.js b/spec/frontend/user_lists/store/show/mutations_spec.js
new file mode 100644
index 00000000000..364cc6a0225
--- /dev/null
+++ b/spec/frontend/user_lists/store/show/mutations_spec.js
@@ -0,0 +1,86 @@
+import { uniq } from 'lodash';
+import { userList } from 'jest/feature_flags/mock_data';
+import createState from '~/user_lists/store/show/state';
+import mutations from '~/user_lists/store/show/mutations';
+import { states } from '~/user_lists/constants/show';
+import * as types from '~/user_lists/store/show/mutation_types';
+
+describe('User Lists Show Mutations', () => {
+ let mockState;
+
+ beforeEach(() => {
+ mockState = createState({ projectId: '1', userListIid: '2' });
+ });
+
+ describe(types.REQUEST_USER_LIST, () => {
+ it('puts us in the loading state', () => {
+ mutations[types.REQUEST_USER_LIST](mockState);
+
+ expect(mockState.state).toBe(states.LOADING);
+ });
+ });
+
+ describe(types.RECEIVE_USER_LIST_SUCCESS, () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](mockState, userList);
+ });
+
+ it('sets the state to SUCCESS', () => {
+ expect(mockState.state).toBe(states.SUCCESS);
+ });
+
+ it('sets the active user list', () => {
+ expect(mockState.userList).toEqual(userList);
+ });
+
+ it('splits the user IDs into an Array', () => {
+ expect(mockState.userIds).toEqual(userList.user_xids.split(','));
+ });
+
+ it('sets user IDs to an empty Array if an empty string is received', () => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](mockState, { ...userList, user_xids: '' });
+ expect(mockState.userIds).toEqual([]);
+ });
+ });
+ describe(types.RECEIVE_USER_LIST_ERROR, () => {
+ it('sets the state to error', () => {
+ mutations[types.RECEIVE_USER_LIST_ERROR](mockState);
+ expect(mockState.state).toBe(states.ERROR);
+ });
+ });
+ describe(types.ADD_USER_IDS, () => {
+ const newIds = ['user3', 'test1', '1', '3', ''];
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](mockState, userList);
+ mutations[types.ADD_USER_IDS](mockState, newIds.join(', '));
+ });
+
+ it('adds the new IDs to the state unless empty', () => {
+ newIds.filter(id => id).forEach(id => expect(mockState.userIds).toContain(id));
+ });
+
+ it('does not add duplicate IDs to the state', () => {
+ expect(mockState.userIds).toEqual(uniq(mockState.userIds));
+ });
+ });
+ describe(types.REMOVE_USER_ID, () => {
+ let userIds;
+ let removedId;
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](mockState, userList);
+ userIds = mockState.userIds;
+ removedId = 'user3';
+ mutations[types.REMOVE_USER_ID](mockState, removedId);
+ });
+
+ it('should remove the given id', () => {
+ expect(mockState.userIds).not.toContain(removedId);
+ });
+
+ it('should leave the rest of the IDs alone', () => {
+ userIds.filter(id => id !== removedId).forEach(id => expect(mockState.userIds).toContain(id));
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/utils_spec.js b/spec/frontend/user_lists/store/utils_spec.js
new file mode 100644
index 00000000000..9547b463eec
--- /dev/null
+++ b/spec/frontend/user_lists/store/utils_spec.js
@@ -0,0 +1,23 @@
+import { parseUserIds, stringifyUserIds } from '~/user_lists/store/utils';
+
+describe('User List Store Utils', () => {
+ describe('parseUserIds', () => {
+ it('should split comma-separated user IDs into an array', () => {
+ expect(parseUserIds('1,2,3')).toEqual(['1', '2', '3']);
+ });
+
+ it('should filter whitespace before the comma', () => {
+ expect(parseUserIds('1\t,2 ,3')).toEqual(['1', '2', '3']);
+ });
+
+ it('should filter whitespace after the comma', () => {
+ expect(parseUserIds('1,\t2, 3')).toEqual(['1', '2', '3']);
+ });
+ });
+
+ describe('stringifyUserIds', () => {
+ it('should convert a list of user IDs into a comma-separated string', () => {
+ expect(stringifyUserIds(['1', '2', '3'])).toBe('1,2,3');
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js
index 58ed92298bf..78efcb6e695 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js
@@ -35,9 +35,7 @@ describe('MrWidgetAuthorTime', () => {
});
it('renders provided time', () => {
- expect(vm.$el.querySelector('time').getAttribute('data-original-title')).toEqual(
- '2017-03-23T23:02:00.807Z',
- );
+ expect(vm.$el.querySelector('time').getAttribute('title')).toEqual('2017-03-23T23:02:00.807Z');
expect(vm.$el.querySelector('time').textContent.trim()).toEqual('12 hours ago');
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
index caea9a757ae..015f8bbac51 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
@@ -130,7 +130,7 @@ describe('MRWidgetHeader', () => {
});
it('renders clipboard button', () => {
- expect(vm.$el.querySelector('.btn-clipboard')).not.toEqual(null);
+ expect(vm.$el.querySelector('[data-testid="mr-widget-copy-clipboard"]')).not.toEqual(null);
});
it('renders target branch', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
index 6ec30493f8b..9923434a7dd 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
@@ -6,6 +6,10 @@ import component from '~/vue_merge_request_widget/components/states/mr_widget_re
describe('Merge request widget rebase component', () => {
let Component;
let vm;
+
+ const findRebaseMessageEl = () => vm.$el.querySelector('[data-testid="rebase-message"]');
+ const findRebaseMessageElText = () => findRebaseMessageEl().textContent.trim();
+
beforeEach(() => {
Component = Vue.extend(component);
});
@@ -21,9 +25,7 @@ describe('Merge request widget rebase component', () => {
service: {},
});
- expect(
- vm.$el.querySelector('.rebase-state-find-class-convention span').textContent.trim(),
- ).toContain('Rebase in progress');
+ expect(findRebaseMessageElText()).toContain('Rebase in progress');
});
});
@@ -39,9 +41,7 @@ describe('Merge request widget rebase component', () => {
});
it('it should render rebase button and warning message', () => {
- const text = vm.$el
- .querySelector('.rebase-state-find-class-convention span')
- .textContent.trim();
+ const text = findRebaseMessageElText();
expect(text).toContain('Fast-forward merge is not possible.');
expect(text.replace(/\s\s+/g, ' ')).toContain(
@@ -53,9 +53,7 @@ describe('Merge request widget rebase component', () => {
vm.rebasingError = 'Something went wrong!';
Vue.nextTick(() => {
- expect(
- vm.$el.querySelector('.rebase-state-find-class-convention span').textContent.trim(),
- ).toContain('Something went wrong!');
+ expect(findRebaseMessageElText()).toContain('Something went wrong!');
done();
});
});
@@ -72,9 +70,7 @@ describe('Merge request widget rebase component', () => {
service: {},
});
- const text = vm.$el
- .querySelector('.rebase-state-find-class-convention span')
- .textContent.trim();
+ const text = findRebaseMessageElText();
expect(text).toContain('Fast-forward merge is not possible.');
expect(text).toContain('Rebase the source branch onto');
@@ -93,7 +89,7 @@ describe('Merge request widget rebase component', () => {
service: {},
});
- const elem = vm.$el.querySelector('.rebase-state-find-class-convention span');
+ const elem = findRebaseMessageEl();
expect(elem.innerHTML).toContain(
`Fast-forward merge is not possible. Rebase the source branch onto <span class="label-branch">${targetBranch}</span> to allow this merge request to be merged.`,
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
index 98af44b0975..aae9b8660e2 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
@@ -1,12 +1,12 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon, GlButton } from '@gitlab/ui';
import AutoMergeFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
describe('MRWidgetAutoMergeFailed', () => {
let wrapper;
const mergeError = 'This is the merge error';
- const findButton = () => wrapper.find('button');
+ const findButton = () => wrapper.find(GlButton);
const createComponent = (props = {}) => {
wrapper = shallowMount(AutoMergeFailedComponent, {
@@ -38,17 +38,13 @@ describe('MRWidgetAutoMergeFailed', () => {
it('emits event and shows loading icon when button is clicked', () => {
jest.spyOn(eventHub, '$emit');
- findButton().trigger('click');
+ findButton().vm.$emit('click');
expect(eventHub.$emit.mock.calls[0][0]).toBe('MRWidgetUpdateRequested');
return wrapper.vm.$nextTick(() => {
- expect(findButton().attributes('disabled')).toEqual('disabled');
- expect(
- findButton()
- .find(GlLoadingIcon)
- .exists(),
- ).toBe(true);
+ expect(findButton().attributes('disabled')).toBe('true');
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
index 1921599ae95..9b51e8583ba 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
@@ -212,8 +212,6 @@ describe('MRWidgetMerged', () => {
});
it('should use mergedEvent mergedAt as tooltip title', () => {
- expect(vm.$el.querySelector('time').getAttribute('data-original-title')).toBe(
- 'Jan 24, 2018 1:02pm GMT+0000',
- );
+ expect(vm.$el.querySelector('time').getAttribute('title')).toBe('Jan 24, 2018 1:02pm GMT+0000');
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 5eb24315ca6..9057ffaea45 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -101,8 +101,6 @@ describe('ReadyToMerge', () => {
expect(vm.isMakingRequest).toBeFalsy();
expect(vm.isMergingImmediately).toBeFalsy();
expect(vm.commitMessage).toBe(vm.mr.commitMessage);
- expect(vm.successSvg).toBeDefined();
- expect(vm.warningSvg).toBeDefined();
});
});
@@ -494,19 +492,6 @@ describe('ReadyToMerge', () => {
});
});
- it('hides close button', done => {
- jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
- jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
-
- vm.handleMergePolling(() => {}, () => {});
-
- setImmediate(() => {
- expect(document.querySelector('.btn-close').classList.contains('hidden')).toBeTruthy();
-
- done();
- });
- });
-
it('updates merge request count badge', done => {
jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
index 4c213899dbd..5326d63cb8a 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
@@ -1,5 +1,6 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import SquashBeforeMerge from '~/vue_merge_request_widget/components/states/squash_before_merge.vue';
+import { SQUASH_BEFORE_MERGE } from '~/vue_merge_request_widget/i18n';
const localVue = createLocalVue();
@@ -85,7 +86,7 @@ describe('Squash before merge component', () => {
});
describe('tooltip', () => {
- const tooltipTitle = () => findLabel().element.dataset.title;
+ const tooltipTitle = () => findLabel().attributes('title');
it('does not render when isDisabled is false', () => {
createComponent({
@@ -101,7 +102,7 @@ describe('Squash before merge component', () => {
isDisabled: true,
});
- expect(tooltipTitle()).toBe('Required in this project.');
+ expect(tooltipTitle()).toBe(SQUASH_BEFORE_MERGE.tooltipTitle);
});
});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
index 1711efb5512..13c0665f929 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
@@ -31,10 +31,7 @@ describe('DeploymentAction component', () => {
wrapper.destroy();
}
- wrapper = mount(DeploymentActions, {
- ...options,
- provide: { glFeatures: { deployFromFooter: true } },
- });
+ wrapper = mount(DeploymentActions, options);
};
const findStopButton = () => wrapper.find('.js-stop-env');
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
index ce395de3b5d..17d7fcc4bff 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
@@ -19,10 +19,7 @@ describe('Deployment component', () => {
if (wrapper && wrapper.destroy) {
wrapper.destroy();
}
- wrapper = mount(DeploymentComponent, {
- ...options,
- provide: { glFeatures: { deployFromFooter: true } },
- });
+ wrapper = mount(DeploymentComponent, options);
};
beforeEach(() => {
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js
index 4688af30269..144283dc507 100644
--- a/spec/frontend/vue_mr_widget/mock_data.js
+++ b/spec/frontend/vue_mr_widget/mock_data.js
@@ -262,6 +262,7 @@ export default {
merge_trains_enabled: true,
merge_trains_count: 3,
merge_train_index: 1,
+ security_reports_docs_path: 'security-reports-docs-path',
};
export const mockStore = {
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index a2ade44b7c4..25c967996e3 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -1,6 +1,8 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import mountComponent from 'helpers/vue_mount_component_helper';
+import { withGonExperiment } from 'helpers/experimentation_helper';
+import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
@@ -50,13 +52,13 @@ describe('mrWidgetOptions', () => {
gon.features = {};
});
- const createComponent = () => {
+ const createComponent = (mrData = mockData) => {
if (vm) {
vm.$destroy();
}
vm = mountComponent(MrWidgetOptions, {
- mrData: { ...mockData },
+ mrData: { ...mrData },
});
return axios.waitForAll();
@@ -64,6 +66,7 @@ describe('mrWidgetOptions', () => {
const findSuggestPipeline = () => vm.$el.querySelector('[data-testid="mr-suggest-pipeline"]');
const findSuggestPipelineButton = () => findSuggestPipeline().querySelector('button');
+ const findSecurityMrWidget = () => vm.$el.querySelector('[data-testid="security-mr-widget"]');
describe('default', () => {
beforeEach(() => {
@@ -533,7 +536,7 @@ describe('mrWidgetOptions', () => {
const tooltip = vm.$el.querySelector('[data-testid="question-o-icon"]');
expect(vm.$el.textContent).toContain('Deletes source branch');
- expect(tooltip.getAttribute('data-original-title')).toBe(
+ expect(tooltip.getAttribute('title')).toBe(
'A user with write access to the source branch selected this option',
);
@@ -812,43 +815,96 @@ describe('mrWidgetOptions', () => {
});
});
- describe('given suggestPipeline feature flag is enabled', () => {
+ describe('security widget', () => {
+ describe.each`
+ context | hasPipeline | reportType | isFlagEnabled | shouldRender
+ ${'security report and flag enabled'} | ${true} | ${'sast'} | ${true} | ${true}
+ ${'security report and flag disabled'} | ${true} | ${'sast'} | ${false} | ${false}
+ ${'no security report and flag enabled'} | ${true} | ${'foo'} | ${true} | ${false}
+ ${'no pipeline and flag enabled'} | ${false} | ${'sast'} | ${true} | ${false}
+ `('given $context', ({ hasPipeline, reportType, isFlagEnabled, shouldRender }) => {
+ beforeEach(() => {
+ gon.features.coreSecurityMrWidget = isFlagEnabled;
+
+ if (hasPipeline) {
+ jest.spyOn(Api, 'pipelineJobs').mockResolvedValue({
+ data: [{ artifacts: [{ file_type: reportType }] }],
+ });
+ }
+
+ return createComponent({
+ ...mockData,
+ ...(hasPipeline ? {} : { pipeline: undefined }),
+ });
+ });
+
+ if (shouldRender) {
+ it('renders', () => {
+ expect(findSecurityMrWidget()).toEqual(expect.any(HTMLElement));
+ });
+ } else {
+ it('does not render', () => {
+ expect(findSecurityMrWidget()).toBeNull();
+ });
+ }
+ });
+ });
+
+ describe('suggestPipeline Experiment', () => {
beforeEach(() => {
mock.onAny().reply(200);
// This is needed because some grandchildren Bootstrap components throw warnings
// https://gitlab.com/gitlab-org/gitlab/issues/208458
jest.spyOn(console, 'warn').mockImplementation();
+ });
- gon.features = { suggestPipeline: true };
+ describe('given experiment is enabled', () => {
+ withGonExperiment('suggestPipeline');
- createComponent();
+ beforeEach(() => {
+ createComponent();
- vm.mr.hasCI = false;
- });
+ vm.mr.hasCI = false;
+ });
- it('should suggest pipelines when none exist', () => {
- expect(findSuggestPipeline()).toEqual(expect.any(Element));
- });
+ it('should suggest pipelines when none exist', () => {
+ expect(findSuggestPipeline()).toEqual(expect.any(Element));
+ });
- it.each([
- { isDismissedSuggestPipeline: true },
- { mergeRequestAddCiConfigPath: null },
- { hasCI: true },
- ])('with %s, should not suggest pipeline', async obj => {
- Object.assign(vm.mr, obj);
+ it.each([
+ { isDismissedSuggestPipeline: true },
+ { mergeRequestAddCiConfigPath: null },
+ { hasCI: true },
+ ])('with %s, should not suggest pipeline', async obj => {
+ Object.assign(vm.mr, obj);
- await vm.$nextTick();
+ await vm.$nextTick();
- expect(findSuggestPipeline()).toBeNull();
+ expect(findSuggestPipeline()).toBeNull();
+ });
+
+ it('should allow dismiss of the suggest pipeline message', async () => {
+ findSuggestPipelineButton().click();
+
+ await vm.$nextTick();
+
+ expect(findSuggestPipeline()).toBeNull();
+ });
});
- it('should allow dismiss of the suggest pipeline message', async () => {
- findSuggestPipelineButton().click();
+ describe('given suggestPipeline experiment is not enabled', () => {
+ withGonExperiment('suggestPipeline', false);
- await vm.$nextTick();
+ beforeEach(() => {
+ createComponent();
- expect(findSuggestPipeline()).toBeNull();
+ vm.mr.hasCI = false;
+ });
+
+ it('should not suggest pipelines when none exist', () => {
+ expect(findSuggestPipeline()).toBeNull();
+ });
});
});
});
diff --git a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
index b691a366a0f..f73f78d6f6e 100644
--- a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
+++ b/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
@@ -118,27 +118,33 @@ describe('MergeRequestStore', () => {
describe('setPaths', () => {
it('should set the add ci config path', () => {
- store.setData({ ...mockData });
+ store.setPaths({ ...mockData });
expect(store.mergeRequestAddCiConfigPath).toBe('/group2/project2/new/pipeline');
});
it('should set humanAccess=Maintainer when user has that role', () => {
- store.setData({ ...mockData });
+ store.setPaths({ ...mockData });
expect(store.humanAccess).toBe('Maintainer');
});
it('should set pipelinesEmptySvgPath', () => {
- store.setData({ ...mockData });
+ store.setPaths({ ...mockData });
expect(store.pipelinesEmptySvgPath).toBe('/path/to/svg');
});
it('should set newPipelinePath', () => {
- store.setData({ ...mockData });
+ store.setPaths({ ...mockData });
expect(store.newPipelinePath).toBe('/group2/project2/pipelines/new');
});
+
+ it('should set securityReportsDocsPath', () => {
+ store.setPaths({ ...mockData });
+
+ expect(store.securityReportsDocsPath).toBe('security-reports-docs-path');
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
index 19671d425a9..82503e5a025 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/awards_list_spec.js.snap
@@ -228,9 +228,11 @@ exports[`vue_shared/components/awards_list default matches snapshot 1`] = `
/>
</span>
- <i
- aria-hidden="true"
- class="fa fa-spinner fa-spin award-control-icon award-control-icon-loading"
+ <gl-loading-icon-stub
+ class="award-control-icon-loading"
+ color="dark"
+ label="Loading"
+ size="md"
/>
</button>
</div>
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index dfd114a2d1c..ec4a81054db 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -39,6 +39,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
tag="div"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="d-inline-flex"
data-clipboard-text="ssh://foo.bar"
@@ -80,6 +81,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
tag="div"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="d-inline-flex"
data-clipboard-text="http://foo.bar"
diff --git a/spec/frontend/vue_shared/components/__snapshots__/editor_lite_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/editor_lite_spec.js.snap
new file mode 100644
index 00000000000..26785855369
--- /dev/null
+++ b/spec/frontend/vue_shared/components/__snapshots__/editor_lite_spec.js.snap
@@ -0,0 +1,14 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Editor Lite component rendering matches the snapshot 1`] = `
+<div
+ data-editor-loading=""
+ id="editor-lite-snippet_777"
+>
+ <pre
+ class="editor-loading-content"
+ >
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ </pre>
+</div>
+`;
diff --git a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
index c2b97f1e7f9..19a649089e0 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
@@ -11,7 +11,7 @@ exports[`Expand button on click when short text is provided renders button after
<!---->
<svg
- class="gl-icon s16"
+ class="gl-button-icon gl-icon s16"
data-testid="ellipsis_h-icon"
>
<use
@@ -39,7 +39,7 @@ exports[`Expand button on click when short text is provided renders button after
<!---->
<svg
- class="gl-icon s16"
+ class="gl-button-icon gl-icon s16"
data-testid="ellipsis_h-icon"
>
<use
@@ -62,7 +62,7 @@ exports[`Expand button when short text is provided renders button before text 1`
<!---->
<svg
- class="gl-icon s16"
+ class="gl-button-icon gl-icon s16"
data-testid="ellipsis_h-icon"
>
<use
@@ -90,7 +90,7 @@ exports[`Expand button when short text is provided renders button before text 1`
<!---->
<svg
- class="gl-icon s16"
+ class="gl-button-icon gl-icon s16"
data-testid="ellipsis_h-icon"
>
<use
diff --git a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
index fcb9c4b8b02..8eb0e8f9550 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
@@ -1,15 +1,23 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`SplitButton renders actionItems 1`] = `
-<gl-deprecated-dropdown-stub
- menu-class="dropdown-menu-selectable "
+<gl-dropdown-stub
+ category="tertiary"
+ headertext=""
+ menu-class=""
+ size="medium"
split="true"
text="professor"
- variant="secondary"
+ variant="default"
>
- <gl-deprecated-dropdown-item-stub
- active="true"
- active-class="is-active"
+ <gl-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischecked="true"
+ ischeckitem="true"
+ secondarytext=""
>
<strong>
professor
@@ -18,11 +26,16 @@ exports[`SplitButton renders actionItems 1`] = `
<div>
very symphonic
</div>
- </gl-deprecated-dropdown-item-stub>
+ </gl-dropdown-item-stub>
- <gl-deprecated-dropdown-divider-stub />
- <gl-deprecated-dropdown-item-stub
- active-class="is-active"
+ <gl-dropdown-divider-stub />
+ <gl-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischeckitem="true"
+ secondarytext=""
>
<strong>
captain
@@ -31,8 +44,8 @@ exports[`SplitButton renders actionItems 1`] = `
<div>
warp drive
</div>
- </gl-deprecated-dropdown-item-stub>
+ </gl-dropdown-item-stub>
<!---->
-</gl-deprecated-dropdown-stub>
+</gl-dropdown-stub>
`;
diff --git a/spec/frontend/vue_shared/components/actions_button_spec.js b/spec/frontend/vue_shared/components/actions_button_spec.js
index 4dde9d726d1..6e7ed9d612b 100644
--- a/spec/frontend/vue_shared/components/actions_button_spec.js
+++ b/spec/frontend/vue_shared/components/actions_button_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDropdown, GlLink } from '@gitlab/ui';
+import { GlDropdown, GlButton } from '@gitlab/ui';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ActionsButton from '~/vue_shared/components/actions_button.vue';
@@ -9,7 +9,12 @@ const TEST_ACTION = {
secondaryText: 'Lorem ipsum.',
tooltip: '',
href: '/sample',
- attrs: { 'data-test': '123' },
+ attrs: {
+ 'data-test': '123',
+ category: 'secondary',
+ href: '/sample',
+ variant: 'default',
+ },
};
const TEST_ACTION_2 = {
key: 'action2',
@@ -40,8 +45,8 @@ describe('Actions button component', () => {
return directiveBinding.value;
};
- const findLink = () => wrapper.find(GlLink);
- const findLinkTooltip = () => getTooltip(findLink());
+ const findButton = () => wrapper.find(GlButton);
+ const findButtonTooltip = () => getTooltip(findButton());
const findDropdown = () => wrapper.find(GlDropdown);
const findDropdownTooltip = () => getTooltip(findDropdown());
const parseDropdownItems = () =>
@@ -63,7 +68,7 @@ describe('Actions button component', () => {
};
});
const clickOn = (child, evt = new Event('click')) => child.vm.$emit('click', evt);
- const clickLink = (...args) => clickOn(findLink(), ...args);
+ const clickLink = (...args) => clickOn(findButton(), ...args);
const clickDropdown = (...args) => clickOn(findDropdown(), ...args);
describe('with 1 action', () => {
@@ -76,22 +81,19 @@ describe('Actions button component', () => {
});
it('should render single button', () => {
- const link = findLink();
-
- expect(link.attributes()).toEqual({
- class: expect.any(String),
+ expect(findButton().attributes()).toMatchObject({
href: TEST_ACTION.href,
...TEST_ACTION.attrs,
});
- expect(link.text()).toBe(TEST_ACTION.text);
+ expect(findButton().text()).toBe(TEST_ACTION.text);
});
it('should have tooltip', () => {
- expect(findLinkTooltip()).toBe(TEST_ACTION.tooltip);
+ expect(findButtonTooltip()).toBe(TEST_ACTION.tooltip);
});
it('should have attrs', () => {
- expect(findLink().attributes()).toMatchObject(TEST_ACTION.attrs);
+ expect(findButton().attributes()).toMatchObject(TEST_ACTION.attrs);
});
it('can click', () => {
@@ -103,7 +105,7 @@ describe('Actions button component', () => {
it('should have tooltip', () => {
createComponent({ actions: [{ ...TEST_ACTION, tooltip: TEST_TOOLTIP }] });
- expect(findLinkTooltip()).toBe(TEST_TOOLTIP);
+ expect(findButtonTooltip()).toBe(TEST_TOOLTIP);
});
});
diff --git a/spec/frontend/vue_shared/components/alert_detail_table_spec.js b/spec/frontend/vue_shared/components/alert_detail_table_spec.js
deleted file mode 100644
index 9c38ccad8a7..00000000000
--- a/spec/frontend/vue_shared/components/alert_detail_table_spec.js
+++ /dev/null
@@ -1,74 +0,0 @@
-import { mount } from '@vue/test-utils';
-import { GlTable, GlLoadingIcon } from '@gitlab/ui';
-import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
-
-const mockAlert = {
- iid: '1527542',
- title: 'SyntaxError: Invalid or unexpected token',
- severity: 'CRITICAL',
- eventCount: 7,
- createdAt: '2020-04-17T23:18:14.996Z',
- startedAt: '2020-04-17T23:18:14.996Z',
- endedAt: '2020-04-17T23:18:14.996Z',
- status: 'TRIGGERED',
- assignees: { nodes: [] },
- notes: { nodes: [] },
- todos: { nodes: [] },
-};
-
-describe('AlertDetails', () => {
- let wrapper;
-
- function mountComponent(propsData = {}) {
- wrapper = mount(AlertDetailsTable, {
- propsData: {
- alert: mockAlert,
- loading: false,
- ...propsData,
- },
- });
- }
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findTableComponent = () => wrapper.find(GlTable);
-
- describe('Alert details', () => {
- describe('empty state', () => {
- beforeEach(() => {
- mountComponent({ alert: null });
- });
-
- it('shows an empty state when no alert is provided', () => {
- expect(wrapper.text()).toContain('No alert data to display.');
- });
- });
-
- describe('loading state', () => {
- beforeEach(() => {
- mountComponent({ loading: true });
- });
-
- it('displays a loading state when loading', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- });
- });
-
- describe('with table data', () => {
- beforeEach(() => {
- mountComponent();
- });
-
- it('renders a table', () => {
- expect(findTableComponent().exists()).toBe(true);
- });
-
- it('renders a cell based on alert data', () => {
- expect(findTableComponent().text()).toContain('SyntaxError: Invalid or unexpected token');
- });
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/alert_details_table_spec.js b/spec/frontend/vue_shared/components/alert_details_table_spec.js
new file mode 100644
index 00000000000..dff307e92c2
--- /dev/null
+++ b/spec/frontend/vue_shared/components/alert_details_table_spec.js
@@ -0,0 +1,139 @@
+import { GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
+
+const mockAlert = {
+ iid: '1527542',
+ title: 'SyntaxError: Invalid or unexpected token',
+ severity: 'CRITICAL',
+ eventCount: 7,
+ createdAt: '2020-04-17T23:18:14.996Z',
+ startedAt: '2020-04-17T23:18:14.996Z',
+ endedAt: '2020-04-17T23:18:14.996Z',
+ status: 'TRIGGERED',
+ assignees: { nodes: [] },
+ notes: { nodes: [] },
+ todos: { nodes: [] },
+ hosts: ['host1', 'host2'],
+ __typename: 'AlertManagementAlert',
+};
+
+const environmentName = 'Production';
+const environmentPath = '/fake/path';
+
+describe('AlertDetails', () => {
+ let environmentData = { name: environmentName, path: environmentPath };
+ let glFeatures = { exposeEnvironmentPathInAlertDetails: false };
+ let wrapper;
+
+ function mountComponent(propsData = {}) {
+ wrapper = mount(AlertDetailsTable, {
+ provide: {
+ glFeatures,
+ },
+ propsData: {
+ alert: {
+ ...mockAlert,
+ environment: environmentData,
+ },
+ loading: false,
+ ...propsData,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findTableComponent = () => wrapper.find(GlTable);
+ const findTableKeys = () => findTableComponent().findAll('tbody td:first-child');
+ const findTableFieldValueByKey = fieldKey =>
+ findTableComponent()
+ .findAll('tbody tr')
+ .filter(row => row.text().includes(fieldKey))
+ .at(0)
+ .find('td:nth-child(2)');
+ const findTableField = (fields, fieldName) => fields.filter(row => row.text() === fieldName);
+
+ describe('Alert details', () => {
+ describe('empty state', () => {
+ beforeEach(() => {
+ mountComponent({ alert: null });
+ });
+
+ it('shows an empty state when no alert is provided', () => {
+ expect(wrapper.text()).toContain('No alert data to display.');
+ });
+ });
+
+ describe('loading state', () => {
+ beforeEach(() => {
+ mountComponent({ loading: true });
+ });
+
+ it('displays a loading state when loading', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('with table data', () => {
+ beforeEach(mountComponent);
+
+ it('renders a table', () => {
+ expect(findTableComponent().exists()).toBe(true);
+ });
+
+ it('renders a cell based on alert data', () => {
+ expect(findTableComponent().text()).toContain('SyntaxError: Invalid or unexpected token');
+ });
+
+ it('should show allowed alert fields', () => {
+ const fields = findTableKeys();
+
+ expect(findTableField(fields, 'Iid').exists()).toBe(true);
+ expect(findTableField(fields, 'Title').exists()).toBe(true);
+ expect(findTableField(fields, 'Severity').exists()).toBe(true);
+ expect(findTableField(fields, 'Status').exists()).toBe(true);
+ expect(findTableField(fields, 'Hosts').exists()).toBe(true);
+ expect(findTableField(fields, 'Environment').exists()).toBe(false);
+ });
+
+ it('should not show disallowed and flaggedAllowed alert fields', () => {
+ const fields = findTableKeys();
+
+ expect(findTableField(fields, 'Typename').exists()).toBe(false);
+ expect(findTableField(fields, 'Todos').exists()).toBe(false);
+ expect(findTableField(fields, 'Notes').exists()).toBe(false);
+ expect(findTableField(fields, 'Assignees').exists()).toBe(false);
+ expect(findTableField(fields, 'Environment').exists()).toBe(false);
+ });
+ });
+
+ describe('when exposeEnvironmentPathInAlertDetails is enabled', () => {
+ beforeEach(() => {
+ glFeatures = { exposeEnvironmentPathInAlertDetails: true };
+ mountComponent();
+ });
+
+ it('should show flaggedAllowed alert fields', () => {
+ const fields = findTableKeys();
+
+ expect(findTableField(fields, 'Environment').exists()).toBe(true);
+ });
+
+ it('should display only the name for the environment', () => {
+ expect(findTableFieldValueByKey('Iid').text()).toBe('1527542');
+ expect(findTableFieldValueByKey('Environment').text()).toBe(environmentName);
+ });
+
+ it('should not display the environment row if there is no data', () => {
+ environmentData = { name: null, path: null };
+ mountComponent();
+
+ expect(findTableFieldValueByKey('Environment').text()).toBeFalsy();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js
index 7f0b7ba8cf8..51a2653befc 100644
--- a/spec/frontend/vue_shared/components/clipboard_button_spec.js
+++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton, GlIcon } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
describe('clipboard button', () => {
@@ -26,9 +26,8 @@ describe('clipboard button', () => {
});
it('renders a button for clipboard', () => {
- expect(wrapper.find(GlDeprecatedButton).exists()).toBe(true);
+ expect(wrapper.find(GlButton).exists()).toBe(true);
expect(wrapper.attributes('data-clipboard-text')).toBe('copy me');
- expect(wrapper.find(GlIcon).props('name')).toBe('copy-to-clipboard');
});
it('should have a tooltip with default values', () => {
diff --git a/spec/frontend/vue_shared/components/confirm_modal_spec.js b/spec/frontend/vue_shared/components/confirm_modal_spec.js
index 5d92af64de0..8456ca9d125 100644
--- a/spec/frontend/vue_shared/components/confirm_modal_spec.js
+++ b/spec/frontend/vue_shared/components/confirm_modal_spec.js
@@ -86,6 +86,22 @@ describe('vue_shared/components/confirm_modal', () => {
expect(findForm().element.submit).not.toHaveBeenCalled();
});
+ describe('with handleSubmit prop', () => {
+ const handleSubmit = jest.fn();
+ beforeEach(() => {
+ createComponent({ handleSubmit });
+ findModal().vm.$emit('primary');
+ });
+
+ it('will call handleSubmit', () => {
+ expect(handleSubmit).toHaveBeenCalled();
+ });
+
+ it('does not submit the form', () => {
+ expect(findForm().element.submit).not.toHaveBeenCalled();
+ });
+ });
+
describe('when modal submitted', () => {
beforeEach(() => {
findModal().vm.$emit('primary');
diff --git a/spec/frontend/vue_shared/components/deprecated_modal_2_spec.js b/spec/frontend/vue_shared/components/deprecated_modal_2_spec.js
index b201a9acdd4..c37a44df6f8 100644
--- a/spec/frontend/vue_shared/components/deprecated_modal_2_spec.js
+++ b/spec/frontend/vue_shared/components/deprecated_modal_2_spec.js
@@ -78,7 +78,7 @@ describe('DeprecatedModal2', () => {
});
it('sets the primary button text', () => {
- const primaryButton = vm.$el.querySelector('.modal-footer button:last-of-type');
+ const primaryButton = vm.$el.querySelector('.js-modal-primary-action .gl-button-text');
expect(primaryButton.innerHTML.trim()).toBe(props.footerPrimaryButtonText);
});
diff --git a/spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js b/spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js
index efa30bf6605..ec553c52236 100644
--- a/spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js
+++ b/spec/frontend/vue_shared/components/dropdown/dropdown_search_input_spec.js
@@ -29,7 +29,7 @@ describe('DropdownSearchInputComponent', () => {
});
it('renders search icon element', () => {
- expect(wrapper.find('.fa-search.dropdown-input-search').exists()).toBe(true);
+ expect(wrapper.find('.dropdown-input-search[data-testid="search-icon"]').exists()).toBe(true);
});
it('displays custom placeholder text', () => {
diff --git a/spec/frontend/vue_shared/components/editor_lite_spec.js b/spec/frontend/vue_shared/components/editor_lite_spec.js
new file mode 100644
index 00000000000..52502fcf64f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/editor_lite_spec.js
@@ -0,0 +1,144 @@
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import EditorLite from '~/vue_shared/components/editor_lite.vue';
+import Editor from '~/editor/editor_lite';
+
+jest.mock('~/editor/editor_lite');
+
+describe('Editor Lite component', () => {
+ let wrapper;
+ const onDidChangeModelContent = jest.fn();
+ const updateModelLanguage = jest.fn();
+ const getValue = jest.fn();
+ const setValue = jest.fn();
+ const value = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
+ const fileName = 'lorem.txt';
+ const fileGlobalId = 'snippet_777';
+ const createInstanceMock = jest.fn().mockImplementation(() => ({
+ onDidChangeModelContent,
+ updateModelLanguage,
+ getValue,
+ setValue,
+ dispose: jest.fn(),
+ }));
+ Editor.mockImplementation(() => {
+ return {
+ createInstance: createInstanceMock,
+ };
+ });
+ function createComponent(props = {}) {
+ wrapper = shallowMount(EditorLite, {
+ propsData: {
+ value,
+ fileName,
+ fileGlobalId,
+ ...props,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const triggerChangeContent = val => {
+ getValue.mockReturnValue(val);
+ const [cb] = onDidChangeModelContent.mock.calls[0];
+
+ cb();
+
+ jest.runOnlyPendingTimers();
+ };
+
+ describe('rendering', () => {
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('renders content', () => {
+ expect(wrapper.text()).toContain(value);
+ });
+ });
+
+ describe('functionality', () => {
+ it('does not fail without content', () => {
+ const spy = jest.spyOn(global.console, 'error');
+ createComponent({ value: undefined });
+
+ expect(spy).not.toHaveBeenCalled();
+ expect(wrapper.find('[id^="editor-lite-"]').exists()).toBe(true);
+ });
+
+ it('initialises Editor Lite instance', () => {
+ const el = wrapper.find({ ref: 'editor' }).element;
+ expect(createInstanceMock).toHaveBeenCalledWith({
+ el,
+ blobPath: fileName,
+ blobGlobalId: fileGlobalId,
+ blobContent: value,
+ extensions: null,
+ });
+ });
+
+ it('reacts to the changes in fileName', () => {
+ const newFileName = 'ipsum.txt';
+
+ wrapper.setProps({
+ fileName: newFileName,
+ });
+
+ return nextTick().then(() => {
+ expect(updateModelLanguage).toHaveBeenCalledWith(newFileName);
+ });
+ });
+
+ it('registers callback with editor onChangeContent', () => {
+ expect(onDidChangeModelContent).toHaveBeenCalledWith(expect.any(Function));
+ });
+
+ it('emits input event when the blob content is changed', () => {
+ expect(wrapper.emitted().input).toBeUndefined();
+
+ triggerChangeContent(value);
+
+ expect(wrapper.emitted().input).toEqual([[value]]);
+ });
+
+ it('emits editor-ready event when the Editor Lite is ready', async () => {
+ const el = wrapper.find({ ref: 'editor' }).element;
+ expect(wrapper.emitted()['editor-ready']).toBeUndefined();
+
+ await el.dispatchEvent(new Event('editor-ready'));
+
+ expect(wrapper.emitted()['editor-ready']).toBeDefined();
+ });
+
+ describe('reaction to the value update', () => {
+ it('reacts to the changes in the passed value', async () => {
+ const newValue = 'New Value';
+
+ wrapper.setProps({
+ value: newValue,
+ });
+
+ await nextTick();
+ expect(setValue).toHaveBeenCalledWith(newValue);
+ });
+
+ it("does not update value if the passed one is exactly the same as the editor's content", async () => {
+ const newValue = `${value}`; // to make sure we're creating a new String with the same content and not just a reference
+
+ wrapper.setProps({
+ value: newValue,
+ });
+
+ await nextTick();
+ expect(setValue).not.toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js
new file mode 100644
index 00000000000..1dd5f08e76a
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js
@@ -0,0 +1,448 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { mockBranches } from 'jest/vue_shared/components/filtered_search_bar/mock_data';
+import * as actions from '~/vue_shared/components/filtered_search_bar/store/modules/filters/actions';
+import * as types from '~/vue_shared/components/filtered_search_bar/store/modules/filters/mutation_types';
+import initialState from '~/vue_shared/components/filtered_search_bar/store/modules/filters/state';
+import httpStatusCodes from '~/lib/utils/http_status';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import Api from '~/api';
+import { filterMilestones, filterUsers, filterLabels } from './mock_data';
+
+const milestonesEndpoint = 'fake_milestones_endpoint';
+const labelsEndpoint = 'fake_labels_endpoint';
+const groupEndpoint = 'fake_group_endpoint';
+const projectEndpoint = 'fake_project_endpoint';
+
+jest.mock('~/flash');
+
+describe('Filters actions', () => {
+ let state;
+ let mock;
+ let mockDispatch;
+ let mockCommit;
+
+ beforeEach(() => {
+ state = initialState();
+ mock = new MockAdapter(axios);
+
+ mockDispatch = jest.fn().mockResolvedValue();
+ mockCommit = jest.fn();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('initialize', () => {
+ const initialData = {
+ milestonesEndpoint,
+ labelsEndpoint,
+ groupEndpoint,
+ projectEndpoint,
+ selectedAuthor: 'Mr cool',
+ selectedMilestone: 'NEXT',
+ };
+
+ it('does not dispatch', () => {
+ const result = actions.initialize(
+ {
+ state,
+ dispatch: mockDispatch,
+ commit: mockCommit,
+ },
+ initialData,
+ );
+ expect(result).toBeUndefined();
+ expect(mockDispatch).not.toHaveBeenCalled();
+ });
+
+ it(`commits the ${types.SET_SELECTED_FILTERS}`, () => {
+ actions.initialize(
+ {
+ state,
+ dispatch: mockDispatch,
+ commit: mockCommit,
+ },
+ initialData,
+ );
+ expect(mockCommit).toHaveBeenCalledWith(types.SET_SELECTED_FILTERS, initialData);
+ });
+ });
+
+ describe('setFilters', () => {
+ const nextFilters = {
+ selectedAuthor: 'Mr cool',
+ selectedMilestone: 'NEXT',
+ };
+
+ it('dispatches the root/setFilters action', () => {
+ return testAction(
+ actions.setFilters,
+ nextFilters,
+ state,
+ [
+ {
+ payload: nextFilters,
+ type: types.SET_SELECTED_FILTERS,
+ },
+ ],
+ [
+ {
+ type: 'setFilters',
+ payload: nextFilters,
+ },
+ ],
+ );
+ });
+ });
+
+ describe('setEndpoints', () => {
+ it('sets the api paths', () => {
+ return testAction(
+ actions.setEndpoints,
+ { milestonesEndpoint, labelsEndpoint, groupEndpoint, projectEndpoint },
+ state,
+ [
+ { payload: 'fake_milestones_endpoint', type: types.SET_MILESTONES_ENDPOINT },
+ { payload: 'fake_labels_endpoint', type: types.SET_LABELS_ENDPOINT },
+ { payload: 'fake_group_endpoint', type: types.SET_GROUP_ENDPOINT },
+ { payload: 'fake_project_endpoint', type: types.SET_PROJECT_ENDPOINT },
+ ],
+ [],
+ );
+ });
+ });
+
+ describe('fetchBranches', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ const url = Api.buildUrl(Api.createBranchPath).replace(
+ ':id',
+ encodeURIComponent(projectEndpoint),
+ );
+ mock.onGet(url).replyOnce(httpStatusCodes.OK, mockBranches);
+ });
+
+ it('dispatches RECEIVE_BRANCHES_SUCCESS with received data', () => {
+ return testAction(
+ actions.fetchBranches,
+ null,
+ { ...state, projectEndpoint },
+ [
+ { type: types.REQUEST_BRANCHES },
+ { type: types.RECEIVE_BRANCHES_SUCCESS, payload: mockBranches },
+ ],
+ [],
+ ).then(({ data }) => {
+ expect(data).toBe(mockBranches);
+ });
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ });
+
+ it('dispatches RECEIVE_BRANCHES_ERROR', () => {
+ return testAction(
+ actions.fetchBranches,
+ null,
+ state,
+ [
+ { type: types.REQUEST_BRANCHES },
+ {
+ type: types.RECEIVE_BRANCHES_ERROR,
+ payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ },
+ ],
+ [],
+ ).then(() => expect(createFlash).toHaveBeenCalled());
+ });
+ });
+ });
+
+ describe('fetchAuthors', () => {
+ let restoreVersion;
+ beforeEach(() => {
+ restoreVersion = gon.api_version;
+ gon.api_version = 'v1';
+ });
+
+ afterEach(() => {
+ gon.api_version = restoreVersion;
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onAny().replyOnce(httpStatusCodes.OK, filterUsers);
+ });
+
+ it('dispatches RECEIVE_AUTHORS_SUCCESS with received data and groupEndpoint set', () => {
+ return testAction(
+ actions.fetchAuthors,
+ null,
+ { ...state, groupEndpoint },
+ [
+ { type: types.REQUEST_AUTHORS },
+ { type: types.RECEIVE_AUTHORS_SUCCESS, payload: filterUsers },
+ ],
+ [],
+ ).then(({ data }) => {
+ expect(mock.history.get[0].url).toBe('/api/v1/groups/fake_group_endpoint/members');
+ expect(data).toBe(filterUsers);
+ });
+ });
+
+ it('dispatches RECEIVE_AUTHORS_SUCCESS with received data and projectEndpoint set', () => {
+ return testAction(
+ actions.fetchAuthors,
+ null,
+ { ...state, projectEndpoint },
+ [
+ { type: types.REQUEST_AUTHORS },
+ { type: types.RECEIVE_AUTHORS_SUCCESS, payload: filterUsers },
+ ],
+ [],
+ ).then(({ data }) => {
+ expect(mock.history.get[0].url).toBe('/api/v1/projects/fake_project_endpoint/users');
+ expect(data).toBe(filterUsers);
+ });
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ });
+
+ it('dispatches RECEIVE_AUTHORS_ERROR and groupEndpoint set', () => {
+ return testAction(
+ actions.fetchAuthors,
+ null,
+ { ...state, groupEndpoint },
+ [
+ { type: types.REQUEST_AUTHORS },
+ {
+ type: types.RECEIVE_AUTHORS_ERROR,
+ payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ },
+ ],
+ [],
+ ).then(() => {
+ expect(mock.history.get[0].url).toBe('/api/v1/groups/fake_group_endpoint/members');
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+
+ it('dispatches RECEIVE_AUTHORS_ERROR and projectEndpoint set', () => {
+ return testAction(
+ actions.fetchAuthors,
+ null,
+ { ...state, projectEndpoint },
+ [
+ { type: types.REQUEST_AUTHORS },
+ {
+ type: types.RECEIVE_AUTHORS_ERROR,
+ payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ },
+ ],
+ [],
+ ).then(() => {
+ expect(mock.history.get[0].url).toBe('/api/v1/projects/fake_project_endpoint/users');
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+ });
+ });
+
+ describe('fetchMilestones', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onGet(milestonesEndpoint).replyOnce(httpStatusCodes.OK, filterMilestones);
+ });
+
+ it('dispatches RECEIVE_MILESTONES_SUCCESS with received data', () => {
+ return testAction(
+ actions.fetchMilestones,
+ null,
+ { ...state, milestonesEndpoint },
+ [
+ { type: types.REQUEST_MILESTONES },
+ { type: types.RECEIVE_MILESTONES_SUCCESS, payload: filterMilestones },
+ ],
+ [],
+ ).then(({ data }) => {
+ expect(data).toBe(filterMilestones);
+ });
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ });
+
+ it('dispatches RECEIVE_MILESTONES_ERROR', () => {
+ return testAction(
+ actions.fetchMilestones,
+ null,
+ state,
+ [
+ { type: types.REQUEST_MILESTONES },
+ {
+ type: types.RECEIVE_MILESTONES_ERROR,
+ payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ },
+ ],
+ [],
+ ).then(() => expect(createFlash).toHaveBeenCalled());
+ });
+ });
+ });
+
+ describe('fetchAssignees', () => {
+ describe('success', () => {
+ let restoreVersion;
+ beforeEach(() => {
+ mock.onAny().replyOnce(httpStatusCodes.OK, filterUsers);
+ restoreVersion = gon.api_version;
+ gon.api_version = 'v1';
+ });
+
+ afterEach(() => {
+ gon.api_version = restoreVersion;
+ });
+
+ it('dispatches RECEIVE_ASSIGNEES_SUCCESS with received data and groupEndpoint set', () => {
+ return testAction(
+ actions.fetchAssignees,
+ null,
+ { ...state, milestonesEndpoint, groupEndpoint },
+ [
+ { type: types.REQUEST_ASSIGNEES },
+ { type: types.RECEIVE_ASSIGNEES_SUCCESS, payload: filterUsers },
+ ],
+ [],
+ ).then(({ data }) => {
+ expect(mock.history.get[0].url).toBe('/api/v1/groups/fake_group_endpoint/members');
+ expect(data).toBe(filterUsers);
+ });
+ });
+
+ it('dispatches RECEIVE_ASSIGNEES_SUCCESS with received data and projectEndpoint set', () => {
+ return testAction(
+ actions.fetchAssignees,
+ null,
+ { ...state, milestonesEndpoint, projectEndpoint },
+ [
+ { type: types.REQUEST_ASSIGNEES },
+ { type: types.RECEIVE_ASSIGNEES_SUCCESS, payload: filterUsers },
+ ],
+ [],
+ ).then(({ data }) => {
+ expect(mock.history.get[0].url).toBe('/api/v1/projects/fake_project_endpoint/users');
+ expect(data).toBe(filterUsers);
+ });
+ });
+ });
+
+ describe('error', () => {
+ let restoreVersion;
+ beforeEach(() => {
+ mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ restoreVersion = gon.api_version;
+ gon.api_version = 'v1';
+ });
+
+ afterEach(() => {
+ gon.api_version = restoreVersion;
+ });
+
+ it('dispatches RECEIVE_ASSIGNEES_ERROR and groupEndpoint set', () => {
+ return testAction(
+ actions.fetchAssignees,
+ null,
+ { ...state, groupEndpoint },
+ [
+ { type: types.REQUEST_ASSIGNEES },
+ {
+ type: types.RECEIVE_ASSIGNEES_ERROR,
+ payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ },
+ ],
+ [],
+ ).then(() => {
+ expect(mock.history.get[0].url).toBe('/api/v1/groups/fake_group_endpoint/members');
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+
+ it('dispatches RECEIVE_ASSIGNEES_ERROR and projectEndpoint set', () => {
+ return testAction(
+ actions.fetchAssignees,
+ null,
+ { ...state, projectEndpoint },
+ [
+ { type: types.REQUEST_ASSIGNEES },
+ {
+ type: types.RECEIVE_ASSIGNEES_ERROR,
+ payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ },
+ ],
+ [],
+ ).then(() => {
+ expect(mock.history.get[0].url).toBe('/api/v1/projects/fake_project_endpoint/users');
+ expect(createFlash).toHaveBeenCalled();
+ });
+ });
+ });
+ });
+
+ describe('fetchLabels', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onGet(labelsEndpoint).replyOnce(httpStatusCodes.OK, filterLabels);
+ });
+
+ it('dispatches RECEIVE_LABELS_SUCCESS with received data', () => {
+ return testAction(
+ actions.fetchLabels,
+ null,
+ { ...state, labelsEndpoint },
+ [
+ { type: types.REQUEST_LABELS },
+ { type: types.RECEIVE_LABELS_SUCCESS, payload: filterLabels },
+ ],
+ [],
+ ).then(({ data }) => {
+ expect(data).toBe(filterLabels);
+ });
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ });
+
+ it('dispatches RECEIVE_LABELS_ERROR', () => {
+ return testAction(
+ actions.fetchLabels,
+ null,
+ state,
+ [
+ { type: types.REQUEST_LABELS },
+ {
+ type: types.RECEIVE_LABELS_ERROR,
+ payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ },
+ ],
+ [],
+ ).then(() => expect(createFlash).toHaveBeenCalled());
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/mock_data.js
new file mode 100644
index 00000000000..6afac9f752a
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/mock_data.js
@@ -0,0 +1,50 @@
+export const filterMilestones = [
+ { id: 1, title: 'None', name: 'Any' },
+ { id: 101, title: 'Any', name: 'None' },
+ { id: 1001, title: 'v1.0', name: 'v1.0' },
+ { id: 10101, title: 'v0.0', name: 'v0.0' },
+];
+
+export const filterUsers = [
+ {
+ id: 31,
+ name: 'VSM User2',
+ username: 'vsm-user-2-1589776313',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/762398957a8c6e04eed16da88098899d?s=80\u0026d=identicon',
+ web_url: 'http://127.0.0.1:3001/vsm-user-2-1589776313',
+ access_level: 30,
+ expires_at: null,
+ },
+ {
+ id: 32,
+ name: 'VSM User3',
+ username: 'vsm-user-3-1589776313',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/f78932237e8a5c5376b65a709824802f?s=80\u0026d=identicon',
+ web_url: 'http://127.0.0.1:3001/vsm-user-3-1589776313',
+ access_level: 30,
+ expires_at: null,
+ },
+ {
+ id: 33,
+ name: 'VSM User4',
+ username: 'vsm-user-4-1589776313',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/ab506dc600d1a941e4d77d5ceeeba73f?s=80\u0026d=identicon',
+ web_url: 'http://127.0.0.1:3001/vsm-user-4-1589776313',
+ access_level: 30,
+ expires_at: null,
+ },
+];
+
+export const filterLabels = [
+ { id: 194, title: 'Afterfunc-Phureforge-781', color: '#990000', text_color: '#FFFFFF' },
+ { id: 10, title: 'Afternix', color: '#16ecf2', text_color: '#FFFFFF' },
+ { id: 176, title: 'Panasync-Pens-266', color: '#990000', text_color: '#FFFFFF' },
+ { id: 79, title: 'Passat', color: '#f1a3d4', text_color: '#333333' },
+ { id: 197, title: 'Phast-Onesync-395', color: '#990000', text_color: '#FFFFFF' },
+];
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/mutations_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/mutations_spec.js
new file mode 100644
index 00000000000..263a4ee178f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/mutations_spec.js
@@ -0,0 +1,116 @@
+import { get } from 'lodash';
+import { mockBranches } from 'jest/vue_shared/components/filtered_search_bar/mock_data';
+import initialState from '~/vue_shared/components/filtered_search_bar/store/modules/filters/state';
+import mutations from '~/vue_shared/components/filtered_search_bar/store/modules/filters/mutations';
+import * as types from '~/vue_shared/components/filtered_search_bar/store/modules/filters/mutation_types';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { filterMilestones, filterUsers, filterLabels } from './mock_data';
+
+let state = null;
+
+const branches = mockBranches.map(convertObjectPropsToCamelCase);
+const milestones = filterMilestones.map(convertObjectPropsToCamelCase);
+const users = filterUsers.map(convertObjectPropsToCamelCase);
+const labels = filterLabels.map(convertObjectPropsToCamelCase);
+
+const filterValue = { value: 'foo' };
+
+describe('Filters mutations', () => {
+ const errorCode = 500;
+ beforeEach(() => {
+ state = initialState();
+ });
+
+ afterEach(() => {
+ state = null;
+ });
+
+ it.each`
+ mutation | stateKey | value
+ ${types.SET_MILESTONES_ENDPOINT} | ${'milestonesEndpoint'} | ${'new-milestone-endpoint'}
+ ${types.SET_LABELS_ENDPOINT} | ${'labelsEndpoint'} | ${'new-label-endpoint'}
+ ${types.SET_GROUP_ENDPOINT} | ${'groupEndpoint'} | ${'new-group-endpoint'}
+ `('$mutation will set $stateKey=$value', ({ mutation, stateKey, value }) => {
+ mutations[mutation](state, value);
+
+ expect(state[stateKey]).toEqual(value);
+ });
+
+ it.each`
+ mutation | stateKey | filterName | value
+ ${types.SET_SELECTED_FILTERS} | ${'branches.source.selected'} | ${'selectedSourceBranch'} | ${null}
+ ${types.SET_SELECTED_FILTERS} | ${'branches.source.selected'} | ${'selectedSourceBranch'} | ${filterValue}
+ ${types.SET_SELECTED_FILTERS} | ${'branches.source.selectedList'} | ${'selectedSourceBranchList'} | ${[]}
+ ${types.SET_SELECTED_FILTERS} | ${'branches.source.selectedList'} | ${'selectedSourceBranchList'} | ${[filterValue]}
+ ${types.SET_SELECTED_FILTERS} | ${'branches.target.selected'} | ${'selectedTargetBranch'} | ${null}
+ ${types.SET_SELECTED_FILTERS} | ${'branches.target.selected'} | ${'selectedTargetBranch'} | ${filterValue}
+ ${types.SET_SELECTED_FILTERS} | ${'branches.target.selectedList'} | ${'selectedTargetBranchList'} | ${[]}
+ ${types.SET_SELECTED_FILTERS} | ${'branches.target.selectedList'} | ${'selectedTargetBranchList'} | ${[filterValue]}
+ ${types.SET_SELECTED_FILTERS} | ${'authors.selected'} | ${'selectedAuthor'} | ${null}
+ ${types.SET_SELECTED_FILTERS} | ${'authors.selected'} | ${'selectedAuthor'} | ${filterValue}
+ ${types.SET_SELECTED_FILTERS} | ${'authors.selectedList'} | ${'selectedAuthorList'} | ${[]}
+ ${types.SET_SELECTED_FILTERS} | ${'authors.selectedList'} | ${'selectedAuthorList'} | ${[filterValue]}
+ ${types.SET_SELECTED_FILTERS} | ${'milestones.selected'} | ${'selectedMilestone'} | ${null}
+ ${types.SET_SELECTED_FILTERS} | ${'milestones.selected'} | ${'selectedMilestone'} | ${filterValue}
+ ${types.SET_SELECTED_FILTERS} | ${'milestones.selectedList'} | ${'selectedMilestoneList'} | ${[]}
+ ${types.SET_SELECTED_FILTERS} | ${'milestones.selectedList'} | ${'selectedMilestoneList'} | ${[filterValue]}
+ ${types.SET_SELECTED_FILTERS} | ${'assignees.selected'} | ${'selectedAssignee'} | ${null}
+ ${types.SET_SELECTED_FILTERS} | ${'assignees.selected'} | ${'selectedAssignee'} | ${filterValue}
+ ${types.SET_SELECTED_FILTERS} | ${'assignees.selectedList'} | ${'selectedAssigneeList'} | ${[]}
+ ${types.SET_SELECTED_FILTERS} | ${'assignees.selectedList'} | ${'selectedAssigneeList'} | ${[filterValue]}
+ ${types.SET_SELECTED_FILTERS} | ${'labels.selected'} | ${'selectedLabel'} | ${null}
+ ${types.SET_SELECTED_FILTERS} | ${'labels.selected'} | ${'selectedLabel'} | ${filterValue}
+ ${types.SET_SELECTED_FILTERS} | ${'labels.selectedList'} | ${'selectedLabelList'} | ${[]}
+ ${types.SET_SELECTED_FILTERS} | ${'labels.selectedList'} | ${'selectedLabelList'} | ${[filterValue]}
+ `(
+ '$mutation will set $stateKey with a given value',
+ ({ mutation, stateKey, filterName, value }) => {
+ mutations[mutation](state, { [filterName]: value });
+
+ expect(get(state, stateKey)).toEqual(value);
+ },
+ );
+
+ it.each`
+ mutation | rootKey | stateKey | value
+ ${types.REQUEST_BRANCHES} | ${'branches'} | ${'isLoading'} | ${true}
+ ${types.RECEIVE_BRANCHES_SUCCESS} | ${'branches'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_BRANCHES_SUCCESS} | ${'branches'} | ${'data'} | ${branches}
+ ${types.RECEIVE_BRANCHES_SUCCESS} | ${'branches'} | ${'errorCode'} | ${null}
+ ${types.RECEIVE_BRANCHES_ERROR} | ${'branches'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_BRANCHES_ERROR} | ${'branches'} | ${'data'} | ${[]}
+ ${types.RECEIVE_BRANCHES_ERROR} | ${'branches'} | ${'errorCode'} | ${errorCode}
+ ${types.REQUEST_MILESTONES} | ${'milestones'} | ${'isLoading'} | ${true}
+ ${types.RECEIVE_MILESTONES_SUCCESS} | ${'milestones'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_MILESTONES_SUCCESS} | ${'milestones'} | ${'data'} | ${milestones}
+ ${types.RECEIVE_MILESTONES_SUCCESS} | ${'milestones'} | ${'errorCode'} | ${null}
+ ${types.RECEIVE_MILESTONES_ERROR} | ${'milestones'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_MILESTONES_ERROR} | ${'milestones'} | ${'data'} | ${[]}
+ ${types.RECEIVE_MILESTONES_ERROR} | ${'milestones'} | ${'errorCode'} | ${errorCode}
+ ${types.REQUEST_AUTHORS} | ${'authors'} | ${'isLoading'} | ${true}
+ ${types.RECEIVE_AUTHORS_SUCCESS} | ${'authors'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_AUTHORS_SUCCESS} | ${'authors'} | ${'data'} | ${users}
+ ${types.RECEIVE_AUTHORS_SUCCESS} | ${'authors'} | ${'errorCode'} | ${null}
+ ${types.RECEIVE_AUTHORS_ERROR} | ${'authors'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_AUTHORS_ERROR} | ${'authors'} | ${'data'} | ${[]}
+ ${types.RECEIVE_AUTHORS_ERROR} | ${'authors'} | ${'errorCode'} | ${errorCode}
+ ${types.REQUEST_LABELS} | ${'labels'} | ${'isLoading'} | ${true}
+ ${types.RECEIVE_LABELS_SUCCESS} | ${'labels'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_LABELS_SUCCESS} | ${'labels'} | ${'data'} | ${labels}
+ ${types.RECEIVE_LABELS_SUCCESS} | ${'labels'} | ${'errorCode'} | ${null}
+ ${types.RECEIVE_LABELS_ERROR} | ${'labels'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_LABELS_ERROR} | ${'labels'} | ${'data'} | ${[]}
+ ${types.RECEIVE_LABELS_ERROR} | ${'labels'} | ${'errorCode'} | ${errorCode}
+ ${types.REQUEST_ASSIGNEES} | ${'assignees'} | ${'isLoading'} | ${true}
+ ${types.RECEIVE_ASSIGNEES_SUCCESS} | ${'assignees'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_ASSIGNEES_SUCCESS} | ${'assignees'} | ${'data'} | ${users}
+ ${types.RECEIVE_ASSIGNEES_SUCCESS} | ${'assignees'} | ${'errorCode'} | ${null}
+ ${types.RECEIVE_ASSIGNEES_ERROR} | ${'assignees'} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_ASSIGNEES_ERROR} | ${'assignees'} | ${'data'} | ${[]}
+ ${types.RECEIVE_ASSIGNEES_ERROR} | ${'assignees'} | ${'errorCode'} | ${errorCode}
+ `('$mutation will set $stateKey with a given value', ({ mutation, rootKey, stateKey, value }) => {
+ mutations[mutation](state, value);
+
+ expect(state[rootKey][stateKey]).toEqual(value);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js
new file mode 100644
index 00000000000..1b7c80a5252
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/test_helper.js
@@ -0,0 +1,11 @@
+export function getFilterParams(tokens, options = {}) {
+ const { key = 'value', operator = '=', prop = 'title' } = options;
+ return tokens.map(token => {
+ return { [key]: token[prop], operator };
+ });
+}
+
+export function getFilterValues(tokens, options = {}) {
+ const { prop = 'title' } = options;
+ return tokens.map(token => token[prop]);
+}
diff --git a/spec/frontend/vue_shared/components/local_storage_sync_spec.js b/spec/frontend/vue_shared/components/local_storage_sync_spec.js
index 5470171a21e..efa9b5796fb 100644
--- a/spec/frontend/vue_shared/components/local_storage_sync_spec.js
+++ b/spec/frontend/vue_shared/components/local_storage_sync_spec.js
@@ -12,7 +12,9 @@ describe('Local Storage Sync', () => {
};
afterEach(() => {
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ }
wrapper = null;
localStorage.clear();
});
@@ -45,23 +47,23 @@ describe('Local Storage Sync', () => {
expect(wrapper.emitted('input')).toBeFalsy();
});
- it('saves updated value to localStorage', () => {
- createComponent({
- props: {
- storageKey,
- value: 'ascending',
- },
- });
-
- const newValue = 'descending';
- wrapper.setProps({
- value: newValue,
- });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(localStorage.getItem(storageKey)).toBe(newValue);
- });
- });
+  it.each(['foo', 3, true, ['foo', 'bar'], { foo: 'bar' }])(
+ 'saves updated value to localStorage',
+ newValue => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'initial',
+ },
+ });
+
+ wrapper.setProps({ value: newValue });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(localStorage.getItem(storageKey)).toBe(String(newValue));
+ });
+ },
+ );
it('does not save default value', () => {
const value = 'ascending';
@@ -124,5 +126,117 @@ describe('Local Storage Sync', () => {
expect(localStorage.getItem(storageKey)).toBe(newValue);
});
});
+
+ it('persists the value by default', async () => {
+ const persistedValue = 'persisted';
+
+ createComponent({
+ props: {
+ storageKey,
+ },
+ });
+
+ wrapper.setProps({ value: persistedValue });
+ await wrapper.vm.$nextTick();
+ expect(localStorage.getItem(storageKey)).toBe(persistedValue);
+ });
+
+ it('does not save a value if persist is set to false', async () => {
+ const notPersistedValue = 'notPersisted';
+
+ createComponent({
+ props: {
+ storageKey,
+ },
+ });
+
+ wrapper.setProps({ persist: false, value: notPersistedValue });
+ await wrapper.vm.$nextTick();
+ expect(localStorage.getItem(storageKey)).not.toBe(notPersistedValue);
+ });
+ });
+
+ describe('with "asJson" prop set to "true"', () => {
+ const storageKey = 'testStorageKey';
+
+ describe.each`
+ value | serializedValue
+ ${null} | ${'null'}
+ ${''} | ${'""'}
+ ${true} | ${'true'}
+ ${false} | ${'false'}
+ ${42} | ${'42'}
+ ${'42'} | ${'"42"'}
+ ${'{ foo: '} | ${'"{ foo: "'}
+ ${['test']} | ${'["test"]'}
+ ${{ foo: 'bar' }} | ${'{"foo":"bar"}'}
+ `('given $value', ({ value, serializedValue }) => {
+ describe('is a new value', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'initial',
+ asJson: true,
+ },
+ });
+
+ wrapper.setProps({ value });
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('serializes the value correctly to localStorage', () => {
+ expect(localStorage.getItem(storageKey)).toBe(serializedValue);
+ });
+ });
+
+ describe('is already stored', () => {
+ beforeEach(() => {
+ localStorage.setItem(storageKey, serializedValue);
+
+ createComponent({
+ props: {
+ storageKey,
+ value: 'initial',
+ asJson: true,
+ },
+ });
+ });
+
+ it('emits an input event with the deserialized value', () => {
+ expect(wrapper.emitted('input')).toEqual([[value]]);
+ });
+ });
+ });
+
+ describe('with bad JSON in storage', () => {
+ const badJSON = '{ badJSON';
+
+ beforeEach(() => {
+ jest.spyOn(console, 'warn').mockImplementation();
+ localStorage.setItem(storageKey, badJSON);
+
+ createComponent({
+ props: {
+ storageKey,
+ value: 'initial',
+ asJson: true,
+ },
+ });
+ });
+
+ it('should console warn', () => {
+ // eslint-disable-next-line no-console
+ expect(console.warn).toHaveBeenCalledWith(
+ `[gitlab] Failed to deserialize value from localStorage (key=${storageKey})`,
+ badJSON,
+ );
+ });
+
+ it('should not emit an input event', () => {
+ expect(wrapper.emitted('input')).toBeUndefined();
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
index cdd7a3ccaf0..b8a9143bc79 100644
--- a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
+++ b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
@@ -10,6 +10,7 @@ exports[`Suggestion Diff component matches snapshot 1`] = `
helppagepath="path_to_docs"
isapplyingbatch="true"
isbatched="true"
+ suggestionscount="0"
/>
<table
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index 3da0a35f05a..a2ce6f40193 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -2,11 +2,13 @@ import { mount } from '@vue/test-utils';
import { TEST_HOST, FIXTURES_PATH } from 'spec/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
-import fieldComponent from '~/vue_shared/components/markdown/field.vue';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
import axios from '~/lib/utils/axios_utils';
const markdownPreviewPath = `${TEST_HOST}/preview`;
const markdownDocsPath = `${TEST_HOST}/docs`;
+const textareaValue = 'testing\n123';
+const uploadsPath = 'test/uploads';
function assertMarkdownTabs(isWrite, writeLink, previewLink, wrapper) {
expect(writeLink.element.parentNode.classList.contains('active')).toBe(isWrite);
@@ -14,66 +16,81 @@ function assertMarkdownTabs(isWrite, writeLink, previewLink, wrapper) {
expect(wrapper.find('.md-preview-holder').element.style.display).toBe(isWrite ? 'none' : '');
}
-function createComponent() {
- const wrapper = mount(fieldComponent, {
- propsData: {
- markdownDocsPath,
- markdownPreviewPath,
- isSubmitting: false,
- },
- slots: {
- textarea: '<textarea>testing\n123</textarea>',
- },
- template: `
- <field-component
- markdown-preview-path="${markdownPreviewPath}"
- markdown-docs-path="${markdownDocsPath}"
- :isSubmitting="false"
- >
- <textarea
- slot="textarea"
- v-model="text">
- <slot>this is a test</slot>
- </textarea>
- </field-component>
- `,
- });
- return wrapper;
-}
-
-const getPreviewLink = wrapper => wrapper.find('.nav-links .js-preview-link');
-const getWriteLink = wrapper => wrapper.find('.nav-links .js-write-link');
-const getMarkdownButton = wrapper => wrapper.find('.js-md');
-const getAllMarkdownButtons = wrapper => wrapper.findAll('.js-md');
-const getVideo = wrapper => wrapper.find('video');
-
describe('Markdown field component', () => {
let axiosMock;
+ let subject;
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
+ // window.uploads_path is needed for dropzone to initialize
+ window.uploads_path = uploadsPath;
});
afterEach(() => {
+ subject.destroy();
+ subject = null;
axiosMock.restore();
});
+ function createSubject() {
+ // We actually mount a wrapper component so that we can force Vue to rerender classes in order to test a regression
+ // caused by mixing Vanilla JS and Vue.
+ subject = mount(
+ {
+ components: {
+ MarkdownField,
+ },
+ props: {
+ wrapperClasses: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ },
+ template: `
+<markdown-field :class="wrapperClasses" v-bind="$attrs">
+ <template #textarea>
+ <textarea class="js-gfm-input" :value="$attrs.textareaValue"></textarea>
+ </template>
+</markdown-field>`,
+ },
+ {
+ propsData: {
+ markdownDocsPath,
+ markdownPreviewPath,
+ isSubmitting: false,
+ textareaValue,
+ },
+ },
+ );
+ }
+
+ const getPreviewLink = () => subject.find('.nav-links .js-preview-link');
+ const getWriteLink = () => subject.find('.nav-links .js-write-link');
+ const getMarkdownButton = () => subject.find('.js-md');
+ const getAllMarkdownButtons = () => subject.findAll('.js-md');
+ const getVideo = () => subject.find('video');
+ const getAttachButton = () => subject.find('.button-attach-file');
+ const clickAttachButton = () => getAttachButton().trigger('click');
+ const findDropzone = () => subject.find('.div-dropzone');
+
describe('mounted', () => {
- let wrapper;
const previewHTML = `
<p>markdown preview</p>
<video src="${FIXTURES_PATH}/static/mock-video.mp4" muted="muted"></video>
`;
let previewLink;
let writeLink;
+ let dropzoneSpy;
- afterEach(() => {
- wrapper.destroy();
+ beforeEach(() => {
+ dropzoneSpy = jest.fn();
+ createSubject();
+ findDropzone().element.addEventListener('click', dropzoneSpy);
});
it('renders textarea inside backdrop', () => {
- wrapper = createComponent();
- expect(wrapper.find('.zen-backdrop textarea').element).not.toBeNull();
+ expect(subject.find('.zen-backdrop textarea').element).not.toBeNull();
});
describe('markdown preview', () => {
@@ -82,44 +99,40 @@ describe('Markdown field component', () => {
});
it('sets preview link as active', () => {
- wrapper = createComponent();
- previewLink = getPreviewLink(wrapper);
+ previewLink = getPreviewLink();
previewLink.trigger('click');
- return wrapper.vm.$nextTick().then(() => {
+ return subject.vm.$nextTick().then(() => {
expect(previewLink.element.parentNode.classList.contains('active')).toBeTruthy();
});
});
it('shows preview loading text', () => {
- wrapper = createComponent();
- previewLink = getPreviewLink(wrapper);
+ previewLink = getPreviewLink();
previewLink.trigger('click');
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.find('.md-preview-holder').element.textContent.trim()).toContain(
+ return subject.vm.$nextTick(() => {
+ expect(subject.find('.md-preview-holder').element.textContent.trim()).toContain(
'Loading…',
);
});
});
it('renders markdown preview and GFM', () => {
- wrapper = createComponent();
const renderGFMSpy = jest.spyOn($.fn, 'renderGFM');
- previewLink = getPreviewLink(wrapper);
+ previewLink = getPreviewLink();
previewLink.trigger('click');
return axios.waitFor(markdownPreviewPath).then(() => {
- expect(wrapper.find('.md-preview-holder').element.innerHTML).toContain(previewHTML);
+ expect(subject.find('.md-preview-holder').element.innerHTML).toContain(previewHTML);
expect(renderGFMSpy).toHaveBeenCalled();
});
});
it('calls video.pause() on comment input when isSubmitting is changed to true', () => {
- wrapper = createComponent();
- previewLink = getPreviewLink(wrapper);
+ previewLink = getPreviewLink();
previewLink.trigger('click');
let callPause;
@@ -127,79 +140,107 @@ describe('Markdown field component', () => {
return axios
.waitFor(markdownPreviewPath)
.then(() => {
- const video = getVideo(wrapper);
+ const video = getVideo();
callPause = jest.spyOn(video.element, 'pause').mockImplementation(() => true);
- wrapper.setProps({
- isSubmitting: true,
- markdownPreviewPath,
- markdownDocsPath,
- });
+ subject.setProps({ isSubmitting: true });
- return wrapper.vm.$nextTick();
+ return subject.vm.$nextTick();
})
.then(() => {
expect(callPause).toHaveBeenCalled();
});
});
- it('clicking already active write or preview link does nothing', () => {
- wrapper = createComponent();
- writeLink = getWriteLink(wrapper);
- previewLink = getPreviewLink(wrapper);
+ it('clicking already active write or preview link does nothing', async () => {
+ writeLink = getWriteLink();
+ previewLink = getPreviewLink();
+
+ writeLink.trigger('click');
+ await subject.vm.$nextTick();
+ assertMarkdownTabs(true, writeLink, previewLink, subject);
writeLink.trigger('click');
- return wrapper.vm
- .$nextTick()
- .then(() => assertMarkdownTabs(true, writeLink, previewLink, wrapper))
- .then(() => writeLink.trigger('click'))
- .then(() => wrapper.vm.$nextTick())
- .then(() => assertMarkdownTabs(true, writeLink, previewLink, wrapper))
- .then(() => previewLink.trigger('click'))
- .then(() => wrapper.vm.$nextTick())
- .then(() => assertMarkdownTabs(false, writeLink, previewLink, wrapper))
- .then(() => previewLink.trigger('click'))
- .then(() => wrapper.vm.$nextTick())
- .then(() => assertMarkdownTabs(false, writeLink, previewLink, wrapper));
+ await subject.vm.$nextTick();
+
+ assertMarkdownTabs(true, writeLink, previewLink, subject);
+ previewLink.trigger('click');
+ await subject.vm.$nextTick();
+
+ assertMarkdownTabs(false, writeLink, previewLink, subject);
+ previewLink.trigger('click');
+ await subject.vm.$nextTick();
+
+ assertMarkdownTabs(false, writeLink, previewLink, subject);
});
});
describe('markdown buttons', () => {
it('converts single words', () => {
- wrapper = createComponent();
- const textarea = wrapper.find('textarea').element;
+ const textarea = subject.find('textarea').element;
textarea.setSelectionRange(0, 7);
- const markdownButton = getMarkdownButton(wrapper);
+ const markdownButton = getMarkdownButton();
markdownButton.trigger('click');
- return wrapper.vm.$nextTick(() => {
+ return subject.vm.$nextTick(() => {
expect(textarea.value).toContain('**testing**');
});
});
it('converts a line', () => {
- wrapper = createComponent();
- const textarea = wrapper.find('textarea').element;
+ const textarea = subject.find('textarea').element;
textarea.setSelectionRange(0, 0);
- const markdownButton = getAllMarkdownButtons(wrapper).wrappers[5];
+ const markdownButton = getAllMarkdownButtons().wrappers[5];
markdownButton.trigger('click');
- return wrapper.vm.$nextTick(() => {
+ return subject.vm.$nextTick(() => {
expect(textarea.value).toContain('- testing');
});
});
it('converts multiple lines', () => {
- wrapper = createComponent();
- const textarea = wrapper.find('textarea').element;
+ const textarea = subject.find('textarea').element;
textarea.setSelectionRange(0, 50);
- const markdownButton = getAllMarkdownButtons(wrapper).wrappers[5];
+ const markdownButton = getAllMarkdownButtons().wrappers[5];
markdownButton.trigger('click');
- return wrapper.vm.$nextTick(() => {
+ return subject.vm.$nextTick(() => {
expect(textarea.value).toContain('- testing\n- 123');
});
});
});
+
+  it('should render the "Attach a file" button', () => {
+ expect(getAttachButton().text()).toBe('Attach a file');
+ });
+
+ it('should trigger dropzone when attach button is clicked', () => {
+ expect(dropzoneSpy).not.toHaveBeenCalled();
+
+ clickAttachButton();
+
+ expect(dropzoneSpy).toHaveBeenCalled();
+ });
+
+ describe('when textarea has changed', () => {
+ beforeEach(async () => {
+ // Do something to trigger rerendering the class
+ subject.setProps({ wrapperClasses: 'foo' });
+
+ await subject.vm.$nextTick();
+ });
+
+ it('should have rerendered classes and kept gfm-form', () => {
+ expect(subject.classes()).toEqual(expect.arrayContaining(['gfm-form', 'foo']));
+ });
+
+ it('should trigger dropzone when attach button is clicked', () => {
+ expect(dropzoneSpy).not.toHaveBeenCalled();
+
+ clickAttachButton();
+
+ expect(dropzoneSpy).toHaveBeenCalled();
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index a521668b15c..b19e74b5b11 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -57,7 +57,9 @@ describe('Suggestion Diff component', () => {
});
it('renders apply suggestion and add to batch buttons', () => {
- createComponent();
+ createComponent({
+ suggestionsCount: 2,
+ });
const applyBtn = findApplyButton();
const addToBatchBtn = findAddToBatchButton();
@@ -104,7 +106,9 @@ describe('Suggestion Diff component', () => {
describe('when add to batch is clicked', () => {
it('emits addToBatch', () => {
- createComponent();
+ createComponent({
+ suggestionsCount: 2,
+ });
findAddToBatchButton().vm.$emit('click');
diff --git a/spec/frontend/vue_shared/components/members/action_buttons/access_request_action_buttons_spec.js b/spec/frontend/vue_shared/components/members/action_buttons/access_request_action_buttons_spec.js
new file mode 100644
index 00000000000..58cb8ef61d1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/action_buttons/access_request_action_buttons_spec.js
@@ -0,0 +1,108 @@
+import { shallowMount } from '@vue/test-utils';
+import AccessRequestActionButtons from '~/vue_shared/components/members/action_buttons/access_request_action_buttons.vue';
+import RemoveMemberButton from '~/vue_shared/components/members/action_buttons/remove_member_button.vue';
+import ApproveAccessRequestButton from '~/vue_shared/components/members/action_buttons/approve_access_request_button.vue';
+import { accessRequest as member } from '../mock_data';
+
+describe('AccessRequestActionButtons', () => {
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(AccessRequestActionButtons, {
+ propsData: {
+ member,
+ isCurrentUser: true,
+ ...propsData,
+ },
+ });
+ };
+
+ const findRemoveMemberButton = () => wrapper.find(RemoveMemberButton);
+ const findApproveButton = () => wrapper.find(ApproveAccessRequestButton);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when user has `canRemove` permissions', () => {
+ beforeEach(() => {
+ createComponent({
+ permissions: {
+ canRemove: true,
+ },
+ });
+ });
+
+ it('renders remove member button', () => {
+ expect(findRemoveMemberButton().exists()).toBe(true);
+ });
+
+ it('sets props correctly', () => {
+ expect(findRemoveMemberButton().props()).toMatchObject({
+ memberId: member.id,
+ title: 'Deny access',
+ isAccessRequest: true,
+ icon: 'close',
+ });
+ });
+
+ describe('when member is the current user', () => {
+ it('sets `message` prop correctly', () => {
+ expect(findRemoveMemberButton().props('message')).toBe(
+ `Are you sure you want to withdraw your access request for "${member.source.name}"`,
+ );
+ });
+ });
+
+ describe('when member is not the current user', () => {
+ it('sets `message` prop correctly', () => {
+ createComponent({
+ isCurrentUser: false,
+ permissions: {
+ canRemove: true,
+ },
+ });
+
+ expect(findRemoveMemberButton().props('message')).toBe(
+ `Are you sure you want to deny ${member.user.name}'s request to join "${member.source.name}"`,
+ );
+ });
+ });
+ });
+
+ describe('when user does not have `canRemove` permissions', () => {
+ it('does not render remove member button', () => {
+ createComponent({
+ permissions: {
+ canRemove: false,
+ },
+ });
+
+ expect(findRemoveMemberButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when user has `canUpdate` permissions', () => {
+ it('renders the approve button', () => {
+ createComponent({
+ permissions: {
+ canUpdate: true,
+ },
+ });
+
+ expect(findApproveButton().exists()).toBe(true);
+ });
+ });
+
+ describe('when user does not have `canUpdate` permissions', () => {
+ it('does not render the approve button', () => {
+ createComponent({
+ permissions: {
+ canUpdate: false,
+ },
+ });
+
+ expect(findApproveButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/action_buttons/approve_access_request_button_spec.js b/spec/frontend/vue_shared/components/members/action_buttons/approve_access_request_button_spec.js
new file mode 100644
index 00000000000..93edaaa400d
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/action_buttons/approve_access_request_button_spec.js
@@ -0,0 +1,74 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlButton, GlForm } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import ApproveAccessRequestButton from '~/vue_shared/components/members/action_buttons/approve_access_request_button.vue';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('ApproveAccessRequestButton', () => {
+ let wrapper;
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ });
+ };
+
+ const createComponent = (propsData = {}, state) => {
+ wrapper = shallowMount(ApproveAccessRequestButton, {
+ localVue,
+ store: createStore(state),
+ propsData: {
+ memberId: 1,
+ ...propsData,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const findForm = () => wrapper.find(GlForm);
+ const findButton = () => findForm().find(GlButton);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays a tooltip', () => {
+ const button = findButton();
+
+ expect(getBinding(button.element, 'gl-tooltip')).not.toBeUndefined();
+ expect(button.attributes('title')).toBe('Grant access');
+ });
+
+ it('sets `aria-label` attribute', () => {
+ expect(findButton().attributes('aria-label')).toBe('Grant access');
+ });
+
+ it('submits the form when button is clicked', () => {
+ expect(findButton().attributes('type')).toBe('submit');
+ });
+
+ it('displays form with correct action and inputs', () => {
+ const form = findForm();
+
+ expect(form.attributes('action')).toBe(
+ '/groups/foo-bar/-/group_members/1/approve_access_request',
+ );
+ expect(form.find('input[name="authenticity_token"]').attributes('value')).toBe(
+ 'mock-csrf-token',
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/action_buttons/invite_action_buttons_spec.js b/spec/frontend/vue_shared/components/members/action_buttons/invite_action_buttons_spec.js
new file mode 100644
index 00000000000..1374cdc6aef
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/action_buttons/invite_action_buttons_spec.js
@@ -0,0 +1,85 @@
+import { shallowMount } from '@vue/test-utils';
+import InviteActionButtons from '~/vue_shared/components/members/action_buttons/invite_action_buttons.vue';
+import RemoveMemberButton from '~/vue_shared/components/members/action_buttons/remove_member_button.vue';
+import ResendInviteButton from '~/vue_shared/components/members/action_buttons/resend_invite_button.vue';
+import { invite as member } from '../mock_data';
+
+describe('InviteActionButtons', () => {
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(InviteActionButtons, {
+ propsData: {
+ member,
+ ...propsData,
+ },
+ });
+ };
+
+ const findRemoveMemberButton = () => wrapper.find(RemoveMemberButton);
+ const findResendInviteButton = () => wrapper.find(ResendInviteButton);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when user has `canRemove` permissions', () => {
+ beforeEach(() => {
+ createComponent({
+ permissions: {
+ canRemove: true,
+ },
+ });
+ });
+
+ it('renders remove member button', () => {
+ expect(findRemoveMemberButton().exists()).toBe(true);
+ });
+
+ it('sets props correctly', () => {
+ expect(findRemoveMemberButton().props()).toEqual({
+ memberId: member.id,
+ message: `Are you sure you want to revoke the invitation for ${member.invite.email} to join "${member.source.name}"`,
+ title: 'Revoke invite',
+ isAccessRequest: false,
+ icon: 'remove',
+ });
+ });
+ });
+
+ describe('when user does not have `canRemove` permissions', () => {
+ it('does not render remove member button', () => {
+ createComponent({
+ permissions: {
+ canRemove: false,
+ },
+ });
+
+ expect(findRemoveMemberButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when user has `canResend` permissions', () => {
+ it('renders resend invite button', () => {
+ createComponent({
+ permissions: {
+ canResend: true,
+ },
+ });
+
+ expect(findResendInviteButton().exists()).toBe(true);
+ });
+ });
+
+ describe('when user does not have `canResend` permissions', () => {
+ it('does not render resend invite button', () => {
+ createComponent({
+ permissions: {
+ canResend: false,
+ },
+ });
+
+ expect(findResendInviteButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/action_buttons/leave_button_spec.js b/spec/frontend/vue_shared/components/members/action_buttons/leave_button_spec.js
new file mode 100644
index 00000000000..00896b23b95
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/action_buttons/leave_button_spec.js
@@ -0,0 +1,59 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import LeaveButton from '~/vue_shared/components/members/action_buttons/leave_button.vue';
+import LeaveModal from '~/vue_shared/components/members/modals/leave_modal.vue';
+import { LEAVE_MODAL_ID } from '~/vue_shared/components/members/constants';
+import { member } from '../mock_data';
+
+describe('LeaveButton', () => {
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(LeaveButton, {
+ propsData: {
+ member,
+ ...propsData,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ GlModal: createMockDirective(),
+ },
+ });
+ };
+
+ const findButton = () => wrapper.find(GlButton);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays a tooltip', () => {
+ const button = findButton();
+
+ expect(getBinding(button.element, 'gl-tooltip')).not.toBeUndefined();
+ expect(button.attributes('title')).toBe('Leave');
+ });
+
+ it('sets `aria-label` attribute', () => {
+ expect(findButton().attributes('aria-label')).toBe('Leave');
+ });
+
+ it('renders leave modal', () => {
+ const leaveModal = wrapper.find(LeaveModal);
+
+ expect(leaveModal.exists()).toBe(true);
+ expect(leaveModal.props('member')).toEqual(member);
+ });
+
+ it('triggers leave modal', () => {
+ const binding = getBinding(findButton().element, 'gl-modal');
+
+ expect(binding).not.toBeUndefined();
+ expect(binding.value).toBe(LEAVE_MODAL_ID);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/action_buttons/remove_group_link_button_spec.js b/spec/frontend/vue_shared/components/members/action_buttons/remove_group_link_button_spec.js
new file mode 100644
index 00000000000..84fe1c51773
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/action_buttons/remove_group_link_button_spec.js
@@ -0,0 +1,64 @@
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlButton } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import RemoveGroupLinkButton from '~/vue_shared/components/members/action_buttons/remove_group_link_button.vue';
+import { group } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('RemoveGroupLinkButton', () => {
+ let wrapper;
+
+ const actions = {
+ showRemoveGroupLinkModal: jest.fn(),
+ };
+
+ const createStore = () => {
+ return new Vuex.Store({
+ actions,
+ });
+ };
+
+ const createComponent = () => {
+ wrapper = mount(RemoveGroupLinkButton, {
+ localVue,
+ store: createStore(),
+ propsData: {
+ groupLink: group,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const findButton = () => wrapper.find(GlButton);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('displays a tooltip', () => {
+ const button = findButton();
+
+ expect(getBinding(button.element, 'gl-tooltip')).not.toBeUndefined();
+ expect(button.attributes('title')).toBe('Remove group');
+ });
+
+ it('sets `aria-label` attribute', () => {
+ expect(findButton().attributes('aria-label')).toBe('Remove group');
+ });
+
+ it('calls Vuex action to open remove group link modal when clicked', () => {
+ findButton().trigger('click');
+
+ expect(actions.showRemoveGroupLinkModal).toHaveBeenCalledWith(expect.any(Object), group);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/action_buttons/remove_member_button_spec.js b/spec/frontend/vue_shared/components/members/action_buttons/remove_member_button_spec.js
new file mode 100644
index 00000000000..7aa30494234
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/action_buttons/remove_member_button_spec.js
@@ -0,0 +1,66 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import RemoveMemberButton from '~/vue_shared/components/members/action_buttons/remove_member_button.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('RemoveMemberButton', () => {
+ let wrapper;
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ });
+ };
+
+ const createComponent = (propsData = {}, state) => {
+ wrapper = shallowMount(RemoveMemberButton, {
+ localVue,
+ store: createStore(state),
+ propsData: {
+ memberId: 1,
+ message: 'Are you sure you want to remove John Smith?',
+ title: 'Remove member',
+ isAccessRequest: true,
+ ...propsData,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('sets attributes on button', () => {
+ createComponent();
+
+ expect(wrapper.attributes()).toMatchObject({
+ 'data-member-path': '/groups/foo-bar/-/group_members/1',
+ 'data-message': 'Are you sure you want to remove John Smith?',
+ 'data-is-access-request': 'true',
+ 'aria-label': 'Remove member',
+ title: 'Remove member',
+ icon: 'remove',
+ });
+ });
+
+ it('displays `title` prop as a tooltip', () => {
+ createComponent();
+
+ expect(getBinding(wrapper.element, 'gl-tooltip')).not.toBeUndefined();
+ });
+
+ it('has CSS class used by `remove_member_modal.vue`', () => {
+ createComponent();
+
+ expect(wrapper.classes()).toContain('js-remove-member-button');
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/action_buttons/resend_invite_button_spec.js b/spec/frontend/vue_shared/components/members/action_buttons/resend_invite_button_spec.js
new file mode 100644
index 00000000000..859fdd01043
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/action_buttons/resend_invite_button_spec.js
@@ -0,0 +1,66 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlButton } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import ResendInviteButton from '~/vue_shared/components/members/action_buttons/resend_invite_button.vue';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('ResendInviteButton', () => {
+ let wrapper;
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ });
+ };
+
+ const createComponent = (propsData = {}, state) => {
+ wrapper = shallowMount(ResendInviteButton, {
+ localVue,
+ store: createStore(state),
+ propsData: {
+ memberId: 1,
+ ...propsData,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const findForm = () => wrapper.find('form');
+ const findButton = () => findForm().find(GlButton);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays a tooltip', () => {
+ expect(getBinding(findButton().element, 'gl-tooltip')).not.toBeUndefined();
+ expect(findButton().attributes('title')).toBe('Resend invite');
+ });
+
+ it('submits the form when button is clicked', () => {
+ expect(findButton().attributes('type')).toBe('submit');
+ });
+
+ it('displays form with correct action and inputs', () => {
+ expect(findForm().attributes('action')).toBe('/groups/foo-bar/-/group_members/1/resend_invite');
+ expect(
+ findForm()
+ .find('input[name="authenticity_token"]')
+ .attributes('value'),
+ ).toBe('mock-csrf-token');
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/action_buttons/user_action_buttons_spec.js b/spec/frontend/vue_shared/components/members/action_buttons/user_action_buttons_spec.js
new file mode 100644
index 00000000000..f766ad5b0d1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/action_buttons/user_action_buttons_spec.js
@@ -0,0 +1,89 @@
+import { shallowMount } from '@vue/test-utils';
+import UserActionButtons from '~/vue_shared/components/members/action_buttons/user_action_buttons.vue';
+import RemoveMemberButton from '~/vue_shared/components/members/action_buttons/remove_member_button.vue';
+import LeaveButton from '~/vue_shared/components/members/action_buttons/leave_button.vue';
+import { member, orphanedMember } from '../mock_data';
+
+describe('UserActionButtons', () => {
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(UserActionButtons, {
+ propsData: {
+ member,
+ isCurrentUser: false,
+ ...propsData,
+ },
+ });
+ };
+
+ const findRemoveMemberButton = () => wrapper.find(RemoveMemberButton);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when user has `canRemove` permissions', () => {
+ beforeEach(() => {
+ createComponent({
+ permissions: {
+ canRemove: true,
+ },
+ });
+ });
+
+ it('renders remove member button', () => {
+ expect(findRemoveMemberButton().exists()).toBe(true);
+ });
+
+ it('sets props correctly', () => {
+ expect(findRemoveMemberButton().props()).toEqual({
+ memberId: member.id,
+ message: `Are you sure you want to remove ${member.user.name} from "${member.source.name}"`,
+ title: 'Remove member',
+ isAccessRequest: false,
+ icon: 'remove',
+ });
+ });
+
+ describe('when member is orphaned', () => {
+ it('sets `message` prop correctly', () => {
+ createComponent({
+ member: orphanedMember,
+ permissions: {
+ canRemove: true,
+ },
+ });
+
+ expect(findRemoveMemberButton().props('message')).toBe(
+ `Are you sure you want to remove this orphaned member from "${orphanedMember.source.name}"`,
+ );
+ });
+ });
+
+ describe('when member is the current user', () => {
+ it('renders leave button', () => {
+ createComponent({
+ isCurrentUser: true,
+ permissions: {
+ canRemove: true,
+ },
+ });
+
+ expect(wrapper.find(LeaveButton).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('when user does not have `canRemove` permissions', () => {
+ it('does not render remove member button', () => {
+ createComponent({
+ permissions: {
+ canRemove: false,
+ },
+ });
+
+ expect(findRemoveMemberButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/avatars/group_avatar_spec.js b/spec/frontend/vue_shared/components/members/avatars/group_avatar_spec.js
new file mode 100644
index 00000000000..d6f5773295c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/avatars/group_avatar_spec.js
@@ -0,0 +1,46 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { getByText as getByTextHelper } from '@testing-library/dom';
+import { GlAvatarLink } from '@gitlab/ui';
+import { group as member } from '../mock_data';
+import GroupAvatar from '~/vue_shared/components/members/avatars/group_avatar.vue';
+
+describe('GroupAvatar', () => {
+ let wrapper;
+
+ const group = member.sharedWithGroup;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = mount(GroupAvatar, {
+ propsData: {
+ member,
+ ...propsData,
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(getByTextHelper(wrapper.element, text, options));
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders link to group', () => {
+ const link = wrapper.find(GlAvatarLink);
+
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe(group.webUrl);
+ });
+
+ it("renders group's full name", () => {
+ expect(getByText(group.fullName).exists()).toBe(true);
+ });
+
+ it("renders group's avatar", () => {
+ expect(wrapper.find('img').attributes('src')).toBe(group.avatarUrl);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/avatars/invite_avatar_spec.js b/spec/frontend/vue_shared/components/members/avatars/invite_avatar_spec.js
new file mode 100644
index 00000000000..7948da7eb40
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/avatars/invite_avatar_spec.js
@@ -0,0 +1,38 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { getByText as getByTextHelper } from '@testing-library/dom';
+import { invite as member } from '../mock_data';
+import InviteAvatar from '~/vue_shared/components/members/avatars/invite_avatar.vue';
+
+describe('InviteAvatar', () => {
+ let wrapper;
+
+ const { invite } = member;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = mount(InviteAvatar, {
+ propsData: {
+ member,
+ ...propsData,
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(getByTextHelper(wrapper.element, text, options));
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders email as name', () => {
+ expect(getByText(invite.email).exists()).toBe(true);
+ });
+
+ it('renders avatar', () => {
+ expect(wrapper.find('img').attributes('src')).toBe(invite.avatarUrl);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/avatars/user_avatar_spec.js b/spec/frontend/vue_shared/components/members/avatars/user_avatar_spec.js
new file mode 100644
index 00000000000..93d8e640968
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/avatars/user_avatar_spec.js
@@ -0,0 +1,115 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { within } from '@testing-library/dom';
+import { GlAvatarLink, GlBadge } from '@gitlab/ui';
+import { member as memberMock, orphanedMember } from '../mock_data';
+import UserAvatar from '~/vue_shared/components/members/avatars/user_avatar.vue';
+
+describe('UserAvatar', () => {
+ let wrapper;
+
+ const { user } = memberMock;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = mount(UserAvatar, {
+ propsData: {
+ member: memberMock,
+ isCurrentUser: false,
+ ...propsData,
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(within(wrapper.element).findByText(text, options));
+
+ const findStatusEmoji = emoji => wrapper.find(`gl-emoji[data-name="${emoji}"]`);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it("renders link to user's profile", () => {
+ createComponent();
+
+ const link = wrapper.find(GlAvatarLink);
+
+ expect(link.exists()).toBe(true);
+ expect(link.attributes()).toMatchObject({
+ href: user.webUrl,
+ 'data-user-id': `${user.id}`,
+ 'data-username': user.username,
+ });
+ });
+
+ it("renders user's name", () => {
+ createComponent();
+
+ expect(getByText(user.name).exists()).toBe(true);
+ });
+
+ it("renders user's username", () => {
+ createComponent();
+
+ expect(getByText(`@${user.username}`).exists()).toBe(true);
+ });
+
+ it("renders user's avatar", () => {
+ createComponent();
+
+ expect(wrapper.find('img').attributes('src')).toBe(user.avatarUrl);
+ });
+
+ describe('when user property does not exist', () => {
+ it('displays an orphaned user', () => {
+ createComponent({ member: orphanedMember });
+
+ expect(getByText('Orphaned member').exists()).toBe(true);
+ });
+ });
+
+ describe('badges', () => {
+ it.each`
+ member | badgeText
+ ${{ ...memberMock, user: { ...memberMock.user, blocked: true } }} | ${'Blocked'}
+ ${{ ...memberMock, user: { ...memberMock.user, twoFactorEnabled: true } }} | ${'2FA'}
+ `('renders the "$badgeText" badge', ({ member, badgeText }) => {
+ createComponent({ member });
+
+ expect(wrapper.find(GlBadge).text()).toBe(badgeText);
+ });
+
+ it('renders the "It\'s you" badge when member is current user', () => {
+ createComponent({ isCurrentUser: true });
+
+ expect(getByText("It's you").exists()).toBe(true);
+ });
+ });
+
+ describe('user status', () => {
+ const emoji = 'island';
+
+ describe('when set', () => {
+ it('displays the status emoji', () => {
+ createComponent({
+ member: {
+ ...memberMock,
+ user: {
+ ...memberMock.user,
+ status: { emoji, messageHtml: 'On vacation' },
+ },
+ },
+ });
+
+ expect(findStatusEmoji(emoji).exists()).toBe(true);
+ });
+ });
+
+ describe('when not set', () => {
+ it('does not display status emoji', () => {
+ createComponent();
+
+ expect(findStatusEmoji(emoji).exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/mock_data.js b/spec/frontend/vue_shared/components/members/mock_data.js
new file mode 100644
index 00000000000..d7bb8c0d142
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/mock_data.js
@@ -0,0 +1,70 @@
+export const member = {
+ requestedAt: null,
+ canUpdate: false,
+ canRemove: false,
+ canOverride: false,
+ accessLevel: { integerValue: 50, stringValue: 'Owner' },
+ source: {
+ id: 178,
+ name: 'Foo Bar',
+ webUrl: 'https://gitlab.com/groups/foo-bar',
+ },
+ user: {
+ id: 123,
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'https://gitlab.com/root',
+ avatarUrl: 'https://www.gravatar.com/avatar/4816142ef496f956a277bedf1a40607b?s=80&d=identicon',
+ blocked: false,
+ twoFactorEnabled: false,
+ },
+ id: 238,
+ createdAt: '2020-07-17T16:22:46.923Z',
+ expiresAt: null,
+ usingLicense: false,
+ groupSso: false,
+ groupManagedAccount: false,
+ validRoles: {
+ Guest: 10,
+ Reporter: 20,
+ Developer: 30,
+ Maintainer: 40,
+ Owner: 50,
+ 'Minimal Access': 5,
+ },
+};
+
+export const group = {
+ accessLevel: { integerValue: 10, stringValue: 'Guest' },
+ sharedWithGroup: {
+ id: 24,
+ name: 'Commit451',
+ avatarUrl: '/uploads/-/system/user/avatar/1/avatar.png?width=40',
+ fullPath: 'parent-group/commit451',
+ fullName: 'Parent group / Commit451',
+ webUrl: 'https://gitlab.com/groups/parent-group/commit451',
+ },
+ id: 3,
+ createdAt: '2020-08-06T15:31:07.662Z',
+ expiresAt: null,
+ validRoles: { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 },
+};
+
+const { user, ...memberNoUser } = member;
+export const invite = {
+ ...memberNoUser,
+ invite: {
+ email: 'jewel@hudsonwalter.biz',
+ avatarUrl: 'https://www.gravatar.com/avatar/cbab7510da7eec2f60f638261b05436d?s=80&d=identicon',
+ canResend: true,
+ },
+};
+
+export const orphanedMember = memberNoUser;
+
+export const accessRequest = {
+ ...member,
+ requestedAt: '2020-07-17T16:22:46.923Z',
+};
+
+export const members = [member];
diff --git a/spec/frontend/vue_shared/components/members/modals/leave_modal_spec.js b/spec/frontend/vue_shared/components/members/modals/leave_modal_spec.js
new file mode 100644
index 00000000000..63de355a3c8
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/modals/leave_modal_spec.js
@@ -0,0 +1,91 @@
+import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
+import { GlModal, GlForm } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { within } from '@testing-library/dom';
+import Vuex from 'vuex';
+import LeaveModal from '~/vue_shared/components/members/modals/leave_modal.vue';
+import { LEAVE_MODAL_ID } from '~/vue_shared/components/members/constants';
+import { member } from '../mock_data';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('LeaveModal', () => {
+ let wrapper;
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ });
+ };
+
+ const createComponent = (propsData = {}, state) => {
+ wrapper = mount(LeaveModal, {
+ localVue,
+ store: createStore(state),
+ propsData: {
+ member,
+ ...propsData,
+ },
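+ // `static` renders the modal in place instead of lazily in a portal, and
+ // `visible` opens it immediately, so its title, body, and form can be queried.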
+ attrs: {
+ static: true,
+ visible: true,
+ },
+ });
+ };
+
+ const findModal = () => wrapper.find(GlModal);
+
+ const findForm = () => findModal().find(GlForm);
+
+ const getByText = (text, options) =>
+ createWrapper(within(findModal().element).getByText(text, options));
+
+ beforeEach(async () => {
+ createComponent();
+ await nextTick();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('sets modal ID', () => {
+ expect(findModal().props('modalId')).toBe(LEAVE_MODAL_ID);
+ });
+
+ it('displays modal title', () => {
+ expect(getByText(`Leave "${member.source.name}"`).exists()).toBe(true);
+ });
+
+ it('displays modal body', () => {
+ expect(getByText(`Are you sure you want to leave "${member.source.name}"?`).exists()).toBe(
+ true,
+ );
+ });
+
+ it('displays form with correct action and inputs', () => {
+ const form = findForm();
+
+ expect(form.attributes('action')).toBe('/groups/foo-bar/-/group_members/leave');
+ expect(form.find('input[name="_method"]').attributes('value')).toBe('delete');
+ expect(form.find('input[name="authenticity_token"]').attributes('value')).toBe(
+ 'mock-csrf-token',
+ );
+ });
+
+ it('submits the form when "Leave" button is clicked', () => {
+ const submitSpy = jest.spyOn(findForm().element, 'submit');
+
+ getByText('Leave').trigger('click');
+
+ expect(submitSpy).toHaveBeenCalled();
+
+ submitSpy.mockRestore();
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/modals/remove_group_link_modal_spec.js b/spec/frontend/vue_shared/components/members/modals/remove_group_link_modal_spec.js
new file mode 100644
index 00000000000..84da051792d
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/modals/remove_group_link_modal_spec.js
@@ -0,0 +1,106 @@
+import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
+import { GlModal, GlForm } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { within } from '@testing-library/dom';
+import Vuex from 'vuex';
+import RemoveGroupLinkModal from '~/vue_shared/components/members/modals/remove_group_link_modal.vue';
+import { REMOVE_GROUP_LINK_MODAL_ID } from '~/vue_shared/components/members/constants';
+import { group } from '../mock_data';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('RemoveGroupLinkModal', () => {
+ let wrapper;
+
+ const actions = {
+ hideRemoveGroupLinkModal: jest.fn(),
+ };
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ memberPath: '/groups/foo-bar/-/group_links/:id',
+ groupLinkToRemove: group,
+ removeGroupLinkModalVisible: true,
+ ...state,
+ },
+ actions,
+ });
+ };
+
+ const createComponent = state => {
+ wrapper = mount(RemoveGroupLinkModal, {
+ localVue,
+ store: createStore(state),
+ attrs: {
+ static: true,
+ },
+ });
+ };
+
+ const findModal = () => wrapper.find(GlModal);
+ const findForm = () => findModal().find(GlForm);
+ const getByText = (text, options) =>
+ createWrapper(within(findModal().element).getByText(text, options));
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when modal is open', () => {
+ beforeEach(async () => {
+ createComponent();
+ await nextTick();
+ });
+
+ it('sets modal ID', () => {
+ expect(findModal().props('modalId')).toBe(REMOVE_GROUP_LINK_MODAL_ID);
+ });
+
+ it('displays modal title', () => {
+ expect(getByText(`Remove "${group.sharedWithGroup.fullName}"`).exists()).toBe(true);
+ });
+
+ it('displays modal body', () => {
+ expect(
+ getByText(`Are you sure you want to remove "${group.sharedWithGroup.fullName}"?`).exists(),
+ ).toBe(true);
+ });
+
+ it('displays form with correct action and inputs', () => {
+ const form = findForm();
+
+ expect(form.attributes('action')).toBe(`/groups/foo-bar/-/group_links/${group.id}`);
+ expect(form.find('input[name="_method"]').attributes('value')).toBe('delete');
+ expect(form.find('input[name="authenticity_token"]').attributes('value')).toBe(
+ 'mock-csrf-token',
+ );
+ });
+
+ it('submits the form when "Remove group" button is clicked', () => {
+ const submitSpy = jest.spyOn(findForm().element, 'submit');
+
+ getByText('Remove group').trigger('click');
+
+ expect(submitSpy).toHaveBeenCalled();
+
+ submitSpy.mockRestore();
+ });
+
+ it('calls `hideRemoveGroupLinkModal` action when modal is closed', () => {
+ getByText('Cancel').trigger('click');
+
+ expect(actions.hideRemoveGroupLinkModal).toHaveBeenCalled();
+ });
+ });
+
+ it('modal does not show when `removeGroupLinkModalVisible` is `false`', () => {
+ createComponent({ removeGroupLinkModalVisible: false });
+
+ expect(findModal().vm.$attrs.visible).toBe(false);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/created_at_spec.js b/spec/frontend/vue_shared/components/members/table/created_at_spec.js
new file mode 100644
index 00000000000..cf3821baf44
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/created_at_spec.js
@@ -0,0 +1,61 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { within } from '@testing-library/dom';
+import { useFakeDate } from 'helpers/fake_date';
+import CreatedAt from '~/vue_shared/components/members/table/created_at.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+describe('CreatedAt', () => {
+ // March 15th, 2020
+ useFakeDate(2020, 2, 15);
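+ // (The month argument is zero-based, so `2` means March; freezing the date keeps
+ // the relative "2 weeks ago" assertion below deterministic.)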
+
+ const date = '2020-03-01T00:00:00.000';
+ const dateTimeAgo = '2 weeks ago';
+
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = mount(CreatedAt, {
+ propsData: {
+ date,
+ ...propsData,
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(within(wrapper.element).getByText(text, options));
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('created at text', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays created at text', () => {
+ expect(getByText(dateTimeAgo).exists()).toBe(true);
+ });
+
+ it('uses `TimeAgoTooltip` component to display tooltip', () => {
+ expect(wrapper.find(TimeAgoTooltip).exists()).toBe(true);
+ });
+ });
+
+ describe('when `createdBy` prop is provided', () => {
+ it('displays a link to the user who created the member', () => {
+ createComponent({
+ createdBy: {
+ name: 'Administrator',
+ webUrl: 'https://gitlab.com/root',
+ },
+ });
+
+ const link = getByText('Administrator');
+
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe('https://gitlab.com/root');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/expires_at_spec.js b/spec/frontend/vue_shared/components/members/table/expires_at_spec.js
new file mode 100644
index 00000000000..95ae251b0fd
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/expires_at_spec.js
@@ -0,0 +1,86 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { within } from '@testing-library/dom';
+import { useFakeDate } from 'helpers/fake_date';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import ExpiresAt from '~/vue_shared/components/members/table/expires_at.vue';
+
+describe('ExpiresAt', () => {
+ // March 15th, 2020
+ useFakeDate(2020, 2, 15);
+
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = mount(ExpiresAt, {
+ propsData,
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(within(wrapper.element).getByText(text, options));
+
+ const getTooltipDirective = elementWrapper => getBinding(elementWrapper.element, 'gl-tooltip');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when no expiration date is set', () => {
+ it('displays "No expiration set"', () => {
+ createComponent({ date: null });
+
+ expect(getByText('No expiration set').exists()).toBe(true);
+ });
+ });
+
+ describe('when expiration date is in the past', () => {
+ let expiredText;
+
+ beforeEach(() => {
+ createComponent({ date: '2019-03-15T00:00:00.000' });
+
+ expiredText = getByText('Expired');
+ });
+
+ it('displays "Expired"', () => {
+ expect(expiredText.exists()).toBe(true);
+ expect(expiredText.classes()).toContain('gl-text-red-500');
+ });
+
+ it('displays tooltip with formatted date', () => {
+ const tooltipDirective = getTooltipDirective(expiredText);
+
+ expect(tooltipDirective).not.toBeUndefined();
+ expect(expiredText.attributes('title')).toBe('Mar 15, 2019 12:00am GMT+0000');
+ });
+ });
+
+ describe('when expiration date is in the future', () => {
+ it.each`
+ date | expected | warningColor
+ ${'2020-03-23T00:00:00.000'} | ${'in 8 days'} | ${false}
+ ${'2020-03-20T00:00:00.000'} | ${'in 5 days'} | ${true}
+ ${'2020-03-16T00:00:00.000'} | ${'in 1 day'} | ${true}
+ ${'2020-03-15T05:00:00.000'} | ${'in about 5 hours'} | ${true}
+ ${'2020-03-15T01:00:00.000'} | ${'in about 1 hour'} | ${true}
+ ${'2020-03-15T00:30:00.000'} | ${'in 30 minutes'} | ${true}
+ ${'2020-03-15T00:01:15.000'} | ${'in 1 minute'} | ${true}
+ ${'2020-03-15T00:00:15.000'} | ${'in less than a minute'} | ${true}
+ `('displays "$expected"', ({ date, expected, warningColor }) => {
+ createComponent({ date });
+
+ const expiredText = getByText(expected);
+
+ expect(expiredText.exists()).toBe(true);
+
+ if (warningColor) {
+ expect(expiredText.classes()).toContain('gl-text-orange-500');
+ } else {
+ expect(expiredText.classes()).not.toContain('gl-text-orange-500');
+ }
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/member_action_buttons_spec.js b/spec/frontend/vue_shared/components/members/table/member_action_buttons_spec.js
new file mode 100644
index 00000000000..e55d9b6be2a
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/member_action_buttons_spec.js
@@ -0,0 +1,43 @@
+import { shallowMount } from '@vue/test-utils';
+import { MEMBER_TYPES } from '~/vue_shared/components/members/constants';
+import { member as memberMock, group, invite, accessRequest } from '../mock_data';
+import MemberActionButtons from '~/vue_shared/components/members/table/member_action_buttons.vue';
+import UserActionButtons from '~/vue_shared/components/members/action_buttons/user_action_buttons.vue';
+import GroupActionButtons from '~/vue_shared/components/members/action_buttons/group_action_buttons.vue';
+import InviteActionButtons from '~/vue_shared/components/members/action_buttons/invite_action_buttons.vue';
+import AccessRequestActionButtons from '~/vue_shared/components/members/action_buttons/access_request_action_buttons.vue';
+
+describe('MemberActionButtons', () => {
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(MemberActionButtons, {
+ propsData: {
+ isCurrentUser: false,
+ permissions: {
+ canRemove: true,
+ },
+ ...propsData,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ test.each`
+ memberType | member | expectedComponent | expectedComponentName
+ ${MEMBER_TYPES.user} | ${memberMock} | ${UserActionButtons} | ${'UserActionButtons'}
+ ${MEMBER_TYPES.group} | ${group} | ${GroupActionButtons} | ${'GroupActionButtons'}
+ ${MEMBER_TYPES.invite} | ${invite} | ${InviteActionButtons} | ${'InviteActionButtons'}
+ ${MEMBER_TYPES.accessRequest} | ${accessRequest} | ${AccessRequestActionButtons} | ${'AccessRequestActionButtons'}
+ `(
+ 'renders $expectedComponentName when `memberType` is $memberType',
+ ({ memberType, member, expectedComponent }) => {
+ createComponent({ memberType, member });
+
+ expect(wrapper.find(expectedComponent).exists()).toBe(true);
+ },
+ );
+});
diff --git a/spec/frontend/vue_shared/components/members/table/member_avatar_spec.js b/spec/frontend/vue_shared/components/members/table/member_avatar_spec.js
new file mode 100644
index 00000000000..a171dd830c1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/member_avatar_spec.js
@@ -0,0 +1,39 @@
+import { shallowMount } from '@vue/test-utils';
+import { MEMBER_TYPES } from '~/vue_shared/components/members/constants';
+import { member as memberMock, group, invite, accessRequest } from '../mock_data';
+import MemberAvatar from '~/vue_shared/components/members/table/member_avatar.vue';
+import UserAvatar from '~/vue_shared/components/members/avatars/user_avatar.vue';
+import GroupAvatar from '~/vue_shared/components/members/avatars/group_avatar.vue';
+import InviteAvatar from '~/vue_shared/components/members/avatars/invite_avatar.vue';
+
+describe('MemberAvatar', () => {
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = shallowMount(MemberAvatar, {
+ propsData: {
+ isCurrentUser: false,
+ ...propsData,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ test.each`
+ memberType | member | expectedComponent | expectedComponentName
+ ${MEMBER_TYPES.user} | ${memberMock} | ${UserAvatar} | ${'UserAvatar'}
+ ${MEMBER_TYPES.group} | ${group} | ${GroupAvatar} | ${'GroupAvatar'}
+ ${MEMBER_TYPES.invite} | ${invite} | ${InviteAvatar} | ${'InviteAvatar'}
+ ${MEMBER_TYPES.accessRequest} | ${accessRequest} | ${UserAvatar} | ${'UserAvatar'}
+ `(
+ 'renders $expectedComponentName when `memberType` is $memberType',
+ ({ memberType, member, expectedComponent }) => {
+ createComponent({ memberType, member });
+
+ expect(wrapper.find(expectedComponent).exists()).toBe(true);
+ },
+ );
+});
diff --git a/spec/frontend/vue_shared/components/members/table/member_source_spec.js b/spec/frontend/vue_shared/components/members/table/member_source_spec.js
new file mode 100644
index 00000000000..8b914d76674
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/member_source_spec.js
@@ -0,0 +1,71 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { getByText as getByTextHelper } from '@testing-library/dom';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import MemberSource from '~/vue_shared/components/members/table/member_source.vue';
+
+describe('MemberSource', () => {
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = mount(MemberSource, {
+ propsData: {
+ memberSource: {
+ id: 102,
+ name: 'Foo bar',
+ webUrl: 'https://gitlab.com/groups/foo-bar',
+ },
+ ...propsData,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(getByTextHelper(wrapper.element, text, options));
+
+ const getTooltipDirective = elementWrapper => getBinding(elementWrapper.element, 'gl-tooltip');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('direct member', () => {
+ it('displays "Direct member"', () => {
+ createComponent({
+ isDirectMember: true,
+ });
+
+ expect(getByText('Direct member').exists()).toBe(true);
+ });
+ });
+
+ describe('inherited member', () => {
+ let sourceGroupLink;
+
+ beforeEach(() => {
+ createComponent({
+ isDirectMember: false,
+ });
+
+ sourceGroupLink = getByText('Foo bar');
+ });
+
+ it('displays a link to source group', () => {
+ // The component is already created in `beforeEach`, which captured `sourceGroupLink`.
+ expect(sourceGroupLink.exists()).toBe(true);
+ expect(sourceGroupLink.attributes('href')).toBe('https://gitlab.com/groups/foo-bar');
+ });
+
+ it('displays tooltip with "Inherited"', () => {
+ const tooltipDirective = getTooltipDirective(sourceGroupLink);
+
+ expect(tooltipDirective).not.toBeUndefined();
+ expect(sourceGroupLink.attributes('title')).toBe('Inherited');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/member_table_cell_spec.js b/spec/frontend/vue_shared/components/members/table/member_table_cell_spec.js
new file mode 100644
index 00000000000..ba693975a88
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/member_table_cell_spec.js
@@ -0,0 +1,251 @@
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { MEMBER_TYPES } from '~/vue_shared/components/members/constants';
+import { member as memberMock, group, invite, accessRequest } from '../mock_data';
+import MembersTableCell from '~/vue_shared/components/members/table/members_table_cell.vue';
+
+describe('MembersTableCell', () => {
+ const WrappedComponent = {
+ props: {
+ memberType: {
+ type: String,
+ required: true,
+ },
+ isDirectMember: {
+ type: Boolean,
+ required: true,
+ },
+ isCurrentUser: {
+ type: Boolean,
+ required: true,
+ },
+ permissions: {
+ type: Object,
+ required: true,
+ },
+ },
+ render(createElement) {
+ return createElement('div', this.memberType);
+ },
+ };
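+ // `MembersTableCell` exposes its computed values through a scoped slot, so the
+ // tests forward them as props to this stub component and assert on its props.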
+
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+ localVue.component('wrapped-component', WrappedComponent);
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ sourceId: 1,
+ currentUserId: 1,
+ ...state,
+ },
+ });
+ };
+
+ let wrapper;
+
+ const createComponent = (propsData, state = {}) => {
+ wrapper = mount(MembersTableCell, {
+ localVue,
+ propsData,
+ store: createStore(state),
+ scopedSlots: {
+ default: `
+ <wrapped-component
+ :member-type="props.memberType"
+ :is-direct-member="props.isDirectMember"
+ :is-current-user="props.isCurrentUser"
+ :permissions="props.permissions"
+ />
+ `,
+ },
+ });
+ };
+
+ const findWrappedComponent = () => wrapper.find(WrappedComponent);
+
+ const memberCurrentUser = {
+ ...memberMock,
+ user: {
+ ...memberMock.user,
+ id: 1,
+ },
+ };
+
+ const createComponentWithDirectMember = (member = {}) => {
+ createComponent({
+ member: {
+ ...memberMock,
+ source: {
+ ...memberMock.source,
+ id: 1,
+ },
+ ...member,
+ },
+ });
+ };
+ const createComponentWithInheritedMember = (member = {}) => {
+ createComponent({
+ member: { ...memberMock, ...member },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ test.each`
+ member | expectedMemberType
+ ${memberMock} | ${MEMBER_TYPES.user}
+ ${group} | ${MEMBER_TYPES.group}
+ ${invite} | ${MEMBER_TYPES.invite}
+ ${accessRequest} | ${MEMBER_TYPES.accessRequest}
+ `(
+ 'sets scoped slot prop `memberType` to $expectedMemberType',
+ ({ member, expectedMemberType }) => {
+ createComponent({ member });
+
+ expect(findWrappedComponent().props('memberType')).toBe(expectedMemberType);
+ },
+ );
+
+ describe('isDirectMember', () => {
+ it('returns `true` when member source has same ID as `sourceId`', () => {
+ createComponentWithDirectMember();
+
+ expect(findWrappedComponent().props('isDirectMember')).toBe(true);
+ });
+
+ it('returns `false` when member is inherited', () => {
+ createComponentWithInheritedMember();
+
+ expect(findWrappedComponent().props('isDirectMember')).toBe(false);
+ });
+
+ it('returns `true` for linked groups', () => {
+ createComponent({
+ member: group,
+ });
+
+ expect(findWrappedComponent().props('isDirectMember')).toBe(true);
+ });
+ });
+
+ describe('isCurrentUser', () => {
+ it('returns `true` when `member.user` has the same ID as `currentUserId`', () => {
+ createComponent({
+ member: memberCurrentUser,
+ });
+
+ expect(findWrappedComponent().props('isCurrentUser')).toBe(true);
+ });
+
+ it('returns `false` when `member.user` does not have the same ID as `currentUserId`', () => {
+ createComponent({
+ member: memberMock,
+ });
+
+ expect(findWrappedComponent().props('isCurrentUser')).toBe(false);
+ });
+ });
+
+ describe('permissions', () => {
+ describe('canRemove', () => {
+ describe('for a direct member', () => {
+ it('returns `true` when `canRemove` is `true`', () => {
+ createComponentWithDirectMember({
+ canRemove: true,
+ });
+
+ expect(findWrappedComponent().props('permissions').canRemove).toBe(true);
+ });
+
+ it('returns `false` when `canRemove` is `false`', () => {
+ createComponentWithDirectMember({
+ canRemove: false,
+ });
+
+ expect(findWrappedComponent().props('permissions').canRemove).toBe(false);
+ });
+ });
+
+ describe('for an inherited member', () => {
+ it('returns `false`', () => {
+ createComponentWithInheritedMember();
+
+ expect(findWrappedComponent().props('permissions').canRemove).toBe(false);
+ });
+ });
+ });
+
+ describe('canResend', () => {
+ describe('when member type is `invite`', () => {
+ it('returns `true` when `canResend` is `true`', () => {
+ createComponent({
+ member: invite,
+ });
+
+ expect(findWrappedComponent().props('permissions').canResend).toBe(true);
+ });
+
+ it('returns `false` when `canResend` is `false`', () => {
+ createComponent({
+ member: {
+ ...invite,
+ invite: {
+ ...invite,
+ canResend: false,
+ },
+ },
+ });
+
+ expect(findWrappedComponent().props('permissions').canResend).toBe(false);
+ });
+ });
+
+ describe('when member type is not `invite`', () => {
+ it('returns `false`', () => {
+ createComponent({ member: memberMock });
+
+ expect(findWrappedComponent().props('permissions').canResend).toBe(false);
+ });
+ });
+ });
+
+ describe('canUpdate', () => {
+ describe('for a direct member', () => {
+ it('returns `true` when `canUpdate` is `true`', () => {
+ createComponentWithDirectMember({
+ canUpdate: true,
+ });
+
+ expect(findWrappedComponent().props('permissions').canUpdate).toBe(true);
+ });
+
+ it('returns `false` when `canUpdate` is `false`', () => {
+ createComponentWithDirectMember({
+ canUpdate: false,
+ });
+
+ expect(findWrappedComponent().props('permissions').canUpdate).toBe(false);
+ });
+
+ it('returns `false` for current user', () => {
+ createComponentWithDirectMember(memberCurrentUser);
+
+ expect(findWrappedComponent().props('permissions').canUpdate).toBe(false);
+ });
+ });
+
+ describe('for an inherited member', () => {
+ it('returns `false`', () => {
+ createComponentWithInheritedMember();
+
+ expect(findWrappedComponent().props('permissions').canUpdate).toBe(false);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/members_table_spec.js b/spec/frontend/vue_shared/components/members/table/members_table_spec.js
new file mode 100644
index 00000000000..20c1c26d2ee
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/members_table_spec.js
@@ -0,0 +1,141 @@
+import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
+import Vuex from 'vuex';
+import {
+ getByText as getByTextHelper,
+ getByTestId as getByTestIdHelper,
+} from '@testing-library/dom';
+import { GlBadge } from '@gitlab/ui';
+import MembersTable from '~/vue_shared/components/members/table/members_table.vue';
+import MemberAvatar from '~/vue_shared/components/members/table/member_avatar.vue';
+import MemberSource from '~/vue_shared/components/members/table/member_source.vue';
+import ExpiresAt from '~/vue_shared/components/members/table/expires_at.vue';
+import CreatedAt from '~/vue_shared/components/members/table/created_at.vue';
+import RoleDropdown from '~/vue_shared/components/members/table/role_dropdown.vue';
+import MemberActionButtons from '~/vue_shared/components/members/table/member_action_buttons.vue';
+import * as initUserPopovers from '~/user_popovers';
+import { member as memberMock, invite, accessRequest } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('MembersTable', () => {
+ let wrapper;
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ members: [],
+ tableFields: [],
+ sourceId: 1,
+ ...state,
+ },
+ });
+ };
+
+ const createComponent = state => {
+ wrapper = mount(MembersTable, {
+ localVue,
+ store: createStore(state),
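+ // Child cell components are stubbed so the table's fields and layout can be
+ // asserted without rendering their internals.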
+ stubs: [
+ 'member-avatar',
+ 'member-source',
+ 'expires-at',
+ 'created-at',
+ 'member-action-buttons',
+ 'role-dropdown',
+ 'remove-group-link-modal',
+ ],
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(getByTextHelper(wrapper.element, text, options));
+
+ const getByTestId = (id, options) =>
+ createWrapper(getByTestIdHelper(wrapper.element, id, options));
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('fields', () => {
+ const memberCanUpdate = {
+ ...memberMock,
+ canUpdate: true,
+ source: { ...memberMock.source, id: 1 },
+ };
+
+ it.each`
+ field | label | member | expectedComponent
+ ${'account'} | ${'Account'} | ${memberMock} | ${MemberAvatar}
+ ${'source'} | ${'Source'} | ${memberMock} | ${MemberSource}
+ ${'granted'} | ${'Access granted'} | ${memberMock} | ${CreatedAt}
+ ${'invited'} | ${'Invited'} | ${invite} | ${CreatedAt}
+ ${'requested'} | ${'Requested'} | ${accessRequest} | ${CreatedAt}
+ ${'expires'} | ${'Access expires'} | ${memberMock} | ${ExpiresAt}
+ ${'maxRole'} | ${'Max role'} | ${memberCanUpdate} | ${RoleDropdown}
+ ${'expiration'} | ${'Expiration'} | ${memberMock} | ${null}
+ `('renders the $label field', ({ field, label, member, expectedComponent }) => {
+ createComponent({
+ members: [member],
+ tableFields: [field],
+ });
+
+ expect(getByText(label, { selector: '[role="columnheader"]' }).exists()).toBe(true);
+
+ if (expectedComponent) {
+ expect(
+ wrapper
+ .find(`[data-label="${label}"][role="cell"]`)
+ .find(expectedComponent)
+ .exists(),
+ ).toBe(true);
+ }
+ });
+
+ it('renders "Actions" field for screen readers', () => {
+ createComponent({ members: [memberMock], tableFields: ['actions'] });
+
+ const actionField = getByTestId('col-actions');
+
+ expect(actionField.exists()).toBe(true);
+ expect(actionField.classes('gl-sr-only')).toBe(true);
+ expect(
+ wrapper
+ .find(`[data-label="Actions"][role="cell"]`)
+ .find(MemberActionButtons)
+ .exists(),
+ ).toBe(true);
+ });
+ });
+
+ describe('when `members` is an empty array', () => {
+ it('displays a "No members found" message', () => {
+ createComponent();
+
+ expect(getByText('No members found').exists()).toBe(true);
+ });
+ });
+
+ describe('when member cannot be updated', () => {
+ it('renders badge in "Max role" field', () => {
+ createComponent({ members: [memberMock], tableFields: ['maxRole'] });
+
+ expect(
+ wrapper
+ .find(`[data-label="Max role"][role="cell"]`)
+ .find(GlBadge)
+ .text(),
+ ).toBe(memberMock.accessLevel.stringValue);
+ });
+ });
+
+ it('initializes user popovers when mounted', () => {
+ const initUserPopoversMock = jest.spyOn(initUserPopovers, 'default');
+
+ createComponent();
+
+ expect(initUserPopoversMock).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/role_dropdown_spec.js b/spec/frontend/vue_shared/components/members/table/role_dropdown_spec.js
new file mode 100644
index 00000000000..1e47953a510
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/role_dropdown_spec.js
@@ -0,0 +1,150 @@
+import { mount, createWrapper, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { nextTick } from 'vue';
+import { within } from '@testing-library/dom';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import RoleDropdown from '~/vue_shared/components/members/table/role_dropdown.vue';
+import { member } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('RoleDropdown', () => {
+ let wrapper;
+ let actions;
+ const $toast = {
+ show: jest.fn(),
+ };
+
+ const createStore = () => {
+ actions = {
+ updateMemberRole: jest.fn(() => Promise.resolve()),
+ };
+
+ return new Vuex.Store({ actions });
+ };
+
+ const createComponent = (propsData = {}) => {
+ wrapper = mount(RoleDropdown, {
+ propsData: {
+ member,
+ ...propsData,
+ },
+ localVue,
+ store: createStore(),
+ mocks: {
+ $toast,
+ },
+ });
+ };
+
+ const getDropdownMenu = () => within(wrapper.element).getByRole('menu');
+ const getByTextInDropdownMenu = (text, options = {}) =>
+ createWrapper(within(getDropdownMenu()).getByText(text, options));
+ const getDropdownItemByText = text =>
+ createWrapper(
+ within(getDropdownMenu())
+ .getByText(text, { selector: '[role="menuitem"] p' })
+ .closest('[role="menuitem"]'),
+ );
+ const getCheckedDropdownItem = () =>
+ wrapper
+ .findAll(GlDropdownItem)
+ .wrappers.find(dropdownItemWrapper => dropdownItemWrapper.props('isChecked'));
+
+ const findDropdownToggle = () => wrapper.find('button[aria-haspopup="true"]');
+ const findDropdown = () => wrapper.find(GlDropdown);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when dropdown is open', () => {
+ beforeEach(done => {
+ createComponent();
+
+ findDropdownToggle().trigger('click');
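+ // GlDropdown is backed by BootstrapVue, which emits `bv::dropdown::shown` on the
+ // root once the menu has rendered, so `done` fires only when the items are queryable.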
+ wrapper.vm.$root.$on('bv::dropdown::shown', () => {
+ done();
+ });
+ });
+
+ it('renders all valid roles', () => {
+ Object.keys(member.validRoles).forEach(role => {
+ expect(getDropdownItemByText(role).exists()).toBe(true);
+ });
+ });
+
+ it('renders dropdown header', () => {
+ expect(getByTextInDropdownMenu('Change permissions').exists()).toBe(true);
+ });
+
+ it('sets dropdown toggle and checks selected role', () => {
+ expect(findDropdownToggle().text()).toBe('Owner');
+ expect(getCheckedDropdownItem().text()).toBe('Owner');
+ });
+
+ describe('when dropdown item is selected', () => {
+ it('does nothing if the selected item was already selected', () => {
+ getDropdownItemByText('Owner').trigger('click');
+
+ expect(actions.updateMemberRole).not.toHaveBeenCalled();
+ });
+
+ it('calls `updateMemberRole` Vuex action', () => {
+ getDropdownItemByText('Developer').trigger('click');
+
+ expect(actions.updateMemberRole).toHaveBeenCalledWith(expect.any(Object), {
+ memberId: member.id,
+ accessLevel: { integerValue: 30, stringValue: 'Developer' },
+ });
+ });
+
+ it('displays toast when successful', async () => {
+ getDropdownItemByText('Developer').trigger('click');
+
+ await waitForPromises();
+
+ expect($toast.show).toHaveBeenCalledWith('Role updated successfully.');
+ });
+
+ it('disables dropdown while waiting for `updateMemberRole` to resolve', async () => {
+ getDropdownItemByText('Developer').trigger('click');
+
+ await nextTick();
+
+ expect(findDropdown().attributes('disabled')).toBe('disabled');
+
+ await waitForPromises();
+
+ expect(findDropdown().attributes('disabled')).toBeUndefined();
+ });
+ });
+ });
+
+ it("sets initial dropdown toggle value to member's role", () => {
+ createComponent();
+
+ expect(findDropdownToggle().text()).toBe('Owner');
+ });
+
+ it('sets the dropdown alignment to right on mobile', async () => {
+ jest.spyOn(bp, 'isDesktop').mockReturnValue(false);
+ createComponent();
+
+ await nextTick();
+
+ expect(findDropdown().attributes('right')).toBe('true');
+ });
+
+ it('sets the dropdown alignment to left on desktop', async () => {
+ jest.spyOn(bp, 'isDesktop').mockReturnValue(true);
+ createComponent();
+
+ await nextTick();
+
+ expect(findDropdown().attributes('right')).toBeUndefined();
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/utils_spec.js b/spec/frontend/vue_shared/components/members/utils_spec.js
new file mode 100644
index 00000000000..f183abc08d6
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/utils_spec.js
@@ -0,0 +1,29 @@
+import { generateBadges } from '~/vue_shared/components/members/utils';
+import { member as memberMock } from './mock_data';
+
+describe('Members Utils', () => {
+ describe('generateBadges', () => {
+ it('has correct properties for each badge', () => {
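+ // The second argument is presumably the `isCurrentUser` flag; it is what makes
+ // the "It's you" badge show up for `memberMock` in the table-driven test below.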
+ const badges = generateBadges(memberMock, true);
+
+ badges.forEach(badge => {
+ expect(badge).toEqual(
+ expect.objectContaining({
+ show: expect.any(Boolean),
+ text: expect.any(String),
+ variant: expect.stringMatching(/muted|neutral|info|success|danger|warning/),
+ }),
+ );
+ });
+ });
+
+ it.each`
+ member | expected
+ ${memberMock} | ${{ show: true, text: "It's you", variant: 'success' }}
+ ${{ ...memberMock, user: { ...memberMock.user, blocked: true } }} | ${{ show: true, text: 'Blocked', variant: 'danger' }}
+ ${{ ...memberMock, user: { ...memberMock.user, twoFactorEnabled: true } }} | ${{ show: true, text: '2FA', variant: 'info' }}
+ `('returns expected output for "$expected.text" badge', ({ member, expected }) => {
+ expect(generateBadges(member, true)).toContainEqual(expect.objectContaining(expected));
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/mocks/items.json b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/mocks/items.json
new file mode 100644
index 00000000000..0d85b2bc68a
--- /dev/null
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/mocks/items.json
@@ -0,0 +1,15 @@
+[
+ {
+ "iid": "1527542",
+ "title": "SyntaxError: Invalid or unexpected token",
+ "createdAt": "2020-04-17T23:18:14.996Z",
+ "assignees": { "nodes": [] }
+ },
+ {
+ "iid": "1527543",
+ "title": "SyntaxError: Invalid or unexpected token by root",
+ "createdAt": "2020-04-17T23:19:14.996Z",
+ "assignees": { "nodes": [] }
+ }
+ ]
+
\ No newline at end of file
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/mocks/items_filters.json b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/mocks/items_filters.json
new file mode 100644
index 00000000000..b42ec42d8b8
--- /dev/null
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/mocks/items_filters.json
@@ -0,0 +1,14 @@
+[
+ {
+ "type": "assignee_username",
+ "value": { "data": "root2" }
+ },
+ {
+ "type": "author_username",
+ "value": { "data": "root" }
+ },
+ {
+ "type": "filtered-search-term",
+ "value": { "data": "bar" }
+ }
+ ]
\ No newline at end of file
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
new file mode 100644
index 00000000000..d943aaf3e5f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -0,0 +1,350 @@
+import { mount } from '@vue/test-utils';
+import { GlAlert, GlBadge, GlPagination, GlTabs, GlTab } from '@gitlab/ui';
+import PageWrapper from '~/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue';
+import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
+import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
+import Tracking from '~/tracking';
+import mockItems from './mocks/items.json';
+import mockFilters from './mocks/items_filters.json';
+
+const EmptyStateSlot = {
+ template: '<div class="empty-state">Empty State</div>',
+};
+
+const HeaderActionsSlot = {
+ template: '<div class="header-actions"><button>Action Button</button></div>',
+};
+
+const TitleSlot = {
+ template: '<div>Page Wrapper Title</div>',
+};
+
+const TableSlot = {
+ template: '<table class="gl-table"></table>',
+};
+
+const itemsCount = {
+ opened: 24,
+ closed: 10,
+ all: 34,
+};
+
+const ITEMS_STATUS_TABS = [
+ {
+ title: 'Opened items',
+ status: 'OPENED',
+ filters: ['opened'],
+ },
+ {
+ title: 'Closed items',
+ status: 'CLOSED',
+ filters: ['closed'],
+ },
+ {
+ title: 'All items',
+ status: 'ALL',
+ filters: ['all'],
+ },
+];
+
+describe('PaginatedTableWithSearchAndTabs', () => {
+ let wrapper;
+
+ function mountComponent({ props = {} } = {}) {
+ wrapper = mount(PageWrapper, {
+ provide: {
+ projectPath: '/link',
+ },
+ propsData: {
+ items: [],
+ itemsCount: {},
+ pageInfo: {},
+ statusTabs: [],
+ loading: false,
+ showItems: false,
+ showErrorMsg: false,
+ trackViewsOptions: {},
+ i18n: {},
+ serverErrorMessage: '',
+ filterSearchKey: '',
+ ...props,
+ },
+ slots: {
+ 'emtpy-state': EmptyStateSlot,
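+ // NOTE: 'emtpy-state' appears to mirror the slot name as spelled in the component
+ // itself; "fixing" the spelling here alone would stop the slot from rendering.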
+ 'header-actions': HeaderActionsSlot,
+ title: TitleSlot,
+ table: TableSlot,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ const EmptyState = () => wrapper.find('.empty-state');
+ const ItemsTable = () => wrapper.find('.gl-table');
+ const ErrorAlert = () => wrapper.find(GlAlert);
+ const Pagination = () => wrapper.find(GlPagination);
+ const Tabs = () => wrapper.find(GlTabs);
+ const ActionButton = () => wrapper.find('.header-actions > button');
+ const Filters = () => wrapper.find(FilteredSearchBar);
+ const findPagination = () => wrapper.find(GlPagination);
+ const findStatusFilterTabs = () => wrapper.findAll(GlTab);
+ const findStatusTabs = () => wrapper.find(GlTabs);
+ const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
+
+ describe('Snowplow tracking', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+ mountComponent({
+ props: { trackViewsOptions: { category: 'category', action: 'action' } },
+ });
+ });
+
+ it('should track the items list page views', () => {
+ const { category, action } = wrapper.vm.trackViewsOptions;
+ expect(Tracking.event).toHaveBeenCalledWith(category, action);
+ });
+ });
+
+ describe('Page wrapper with no items', () => {
+ it('renders the empty state if there are no items present', () => {
+ expect(EmptyState().exists()).toBe(true);
+ });
+ });
+
+ describe('Page wrapper with items', () => {
+ it('renders the tabs selection with valid tabs', () => {
+ mountComponent({
+ props: {
+ statusTabs: [{ status: 'opened', title: 'Open' }, { status: 'closed', title: 'Closed' }],
+ },
+ });
+
+ expect(Tabs().exists()).toBe(true);
+ });
+
+ it('renders the header action buttons if present', () => {
+ expect(ActionButton().exists()).toBe(true);
+ });
+
+ it('renders an error alert if there are errors', () => {
+ mountComponent({
+ props: { showErrorMsg: true },
+ });
+
+ expect(ErrorAlert().exists()).toBe(true);
+ });
+
+ it('renders a table of items if items are present', () => {
+ mountComponent({
+ props: { showItems: true, items: mockItems },
+ });
+
+ expect(ItemsTable().exists()).toBe(true);
+ });
+
+ it('renders pagination if the pagination info object has a next or previous page', () => {
+ mountComponent({
+ props: { pageInfo: { hasNextPage: true } },
+ });
+
+ expect(Pagination().exists()).toBe(true);
+ });
+
+ it('renders the filter set with the tokens defined by the filterSearchTokens prop', () => {
+ mountComponent({
+ props: { filterSearchTokens: ['assignee_username'] },
+ });
+
+ expect(Filters().exists()).toBe(true);
+ });
+ });
+
+ describe('Status Filter Tabs', () => {
+ beforeEach(() => {
+ mountComponent({
+ props: { items: mockItems, itemsCount, statusTabs: ITEMS_STATUS_TABS },
+ });
+ });
+
+ it('should display filter tabs', () => {
+ const tabs = findStatusFilterTabs().wrappers;
+
+ tabs.forEach((tab, i) => {
+ expect(tab.attributes('data-testid')).toContain(ITEMS_STATUS_TABS[i].status);
+ });
+ });
+
+ it('should display filter tabs with items count badge for each status', () => {
+ const tabs = findStatusFilterTabs().wrappers;
+ const badges = findStatusFilterBadge();
+
+ tabs.forEach((tab, i) => {
+ const status = ITEMS_STATUS_TABS[i].status.toLowerCase();
+ expect(tab.attributes('data-testid')).toContain(ITEMS_STATUS_TABS[i].status);
+ expect(badges.at(i).text()).toContain(itemsCount[status]);
+ });
+ });
+ });
+
+ describe('Pagination', () => {
+ beforeEach(() => {
+ mountComponent({
+ props: {
+ items: mockItems,
+ itemsCount,
+ statusTabs: ITEMS_STATUS_TABS,
+ pageInfo: { hasNextPage: true },
+ },
+ });
+ });
+
+ it('should render pagination', () => {
+ expect(wrapper.find(GlPagination).exists()).toBe(true);
+ });
+
+ describe('prevPage', () => {
+ it('renders the Prev button', async () => {
+ findPagination().vm.$emit('input', 3);
+
+ await wrapper.vm.$nextTick();
+ expect(
+ findPagination()
+ .findAll('.page-item')
+ .at(0)
+ .text(),
+ ).toBe('Prev');
+ });
+
+ it('returns prevPage number', async () => {
+ findPagination().vm.$emit('input', 3);
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.previousPage).toBe(2);
+ });
+
+ it('returns 0 when it is the first page', async () => {
+ findPagination().vm.$emit('input', 1);
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.previousPage).toBe(0);
+ });
+ });
+
+ describe('nextPage', () => {
+ it('renders the Next button', async () => {
+ findPagination().vm.$emit('input', 3);
+
+ await wrapper.vm.$nextTick();
+ expect(
+ findPagination()
+ .findAll('.page-item')
+ .at(1)
+ .text(),
+ ).toBe('Next');
+ });
+
+ it('returns nextPage number', async () => {
+ mountComponent({
+ props: {
+ items: mockItems,
+ itemsCount,
+ statusTabs: ITEMS_STATUS_TABS,
+ pageInfo: { hasNextPage: true },
+ },
+ });
+ findPagination().vm.$emit('input', 1);
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.nextPage).toBe(2);
+ });
+
+ it('returns `null` when currentPage is already the last page', async () => {
+ findStatusTabs().vm.$emit('input', 1);
+ findPagination().vm.$emit('input', 1);
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.nextPage).toBeNull();
+ });
+ });
+ });
+
+ describe('Filtered search component', () => {
+ beforeEach(() => {
+ mountComponent({
+ props: {
+ items: mockItems,
+ itemsCount,
+ statusTabs: ITEMS_STATUS_TABS,
+ filterSearchKey: 'items',
+ },
+ });
+ });
+
+ it('renders the search component with author and assignee tokens', () => {
+ expect(Filters().props('searchInputPlaceholder')).toBe('Search or filter results…');
+ expect(Filters().props('tokens')).toEqual([
+ {
+ type: 'author_username',
+ icon: 'user',
+ title: 'Author',
+ unique: true,
+ symbol: '@',
+ token: AuthorToken,
+ operators: [{ value: '=', description: 'is', default: 'true' }],
+ fetchPath: '/link',
+ fetchAuthors: expect.any(Function),
+ },
+ {
+ type: 'assignee_username',
+ icon: 'user',
+ title: 'Assignee',
+ unique: true,
+ symbol: '@',
+ token: AuthorToken,
+ operators: [{ value: '=', description: 'is', default: 'true' }],
+ fetchPath: '/link',
+ fetchAuthors: expect.any(Function),
+ },
+ ]);
+ expect(Filters().props('recentSearchesStorageKey')).toBe('items');
+ });
+
+ it('returns correctly applied filter search values', async () => {
+ const searchTerm = 'foo';
+ wrapper.setData({
+ searchTerm,
+ });
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.filteredSearchValue).toEqual([searchTerm]);
+ });
+
+ it('updates the filter props when filters are applied', () => {
+ wrapper.vm.handleFilterItems(mockFilters);
+
+ expect(wrapper.vm.authorUsername).toBe('root');
+ expect(wrapper.vm.assigneeUsername).toEqual('root2');
+ expect(wrapper.vm.searchTerm).toBe(mockFilters[2].value.data);
+ });
+
+ it('updates props `searchTerm` and `authorUsername` with empty values when the passed filters param is empty', () => {
+ wrapper.setData({
+ authorUsername: 'foo',
+ searchTerm: 'bar',
+ });
+
+ wrapper.vm.handleFilterItems([]);
+
+ expect(wrapper.vm.authorUsername).toBe('');
+ expect(wrapper.vm.searchTerm).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap b/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
index 16094a42668..ecea151fc8a 100644
--- a/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
+++ b/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
@@ -38,7 +38,8 @@ exports[`Package code instruction single line to match the default snapshot 1`]
data-testid="instruction-button"
>
<button
- class="btn input-group-text btn-secondary btn-md btn-default"
+ aria-label="Copy this value"
+ class="btn input-group-text btn-default btn-md gl-button btn-default-secondary btn-icon"
data-clipboard-text="npm i @my-package"
title="Copy npm install command"
type="button"
@@ -46,13 +47,15 @@ exports[`Package code instruction single line to match the default snapshot 1`]
<!---->
<svg
- class="gl-icon s16"
+ class="gl-button-icon gl-icon s16"
data-testid="copy-to-clipboard-icon"
>
<use
href="#copy-to-clipboard"
/>
</svg>
+
+ <!---->
</button>
</span>
</div>
diff --git a/spec/frontend/vue_shared/components/registry/list_item_spec.js b/spec/frontend/vue_shared/components/registry/list_item_spec.js
index e2cfdedb4bf..2a48bf4f2d6 100644
--- a/spec/frontend/vue_shared/components/registry/list_item_spec.js
+++ b/spec/frontend/vue_shared/components/registry/list_item_spec.js
@@ -58,9 +58,9 @@ describe('list item', () => {
describe.each`
slotNames
- ${['details_foo']}
- ${['details_foo', 'details_bar']}
- ${['details_foo', 'details_bar', 'details_baz']}
+ ${['details-foo']}
+ ${['details-foo', 'details-bar']}
+ ${['details-foo', 'details-bar', 'details-baz']}
`('$slotNames details slots', ({ slotNames }) => {
const slotMocks = slotNames.reduce((acc, current) => {
acc[current] = `<div data-testid="${current}" />`;
@@ -89,7 +89,7 @@ describe('list item', () => {
describe('details toggle button', () => {
it('is visible when at least one details slot exists', async () => {
- mountComponent({}, { details_foo: '<span></span>' });
+ mountComponent({}, { 'details-foo': '<span></span>' });
await wrapper.vm.$nextTick();
expect(findToggleDetailsButton().exists()).toBe(true);
});
diff --git a/spec/frontend/vue_shared/components/registry/title_area_spec.js b/spec/frontend/vue_shared/components/registry/title_area_spec.js
index 6740d6097a4..5cb606b58d9 100644
--- a/spec/frontend/vue_shared/components/registry/title_area_spec.js
+++ b/spec/frontend/vue_shared/components/registry/title_area_spec.js
@@ -1,4 +1,4 @@
-import { GlAvatar } from '@gitlab/ui';
+import { GlAvatar, GlSprintf, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import component from '~/vue_shared/components/registry/title_area.vue';
@@ -10,10 +10,12 @@ describe('title area', () => {
const findMetadataSlot = name => wrapper.find(`[data-testid="${name}"]`);
const findTitle = () => wrapper.find('[data-testid="title"]');
const findAvatar = () => wrapper.find(GlAvatar);
+ const findInfoMessages = () => wrapper.findAll('[data-testid="info-message"]');
const mountComponent = ({ propsData = { title: 'foo' }, slots } = {}) => {
wrapper = shallowMount(component, {
propsData,
+ stubs: { GlSprintf },
slots: {
'sub-header': '<div data-testid="sub-header" />',
'right-actions': '<div data-testid="right-actions" />',
@@ -77,9 +79,9 @@ describe('title area', () => {
describe.each`
slotNames
- ${['metadata_foo']}
- ${['metadata_foo', 'metadata_bar']}
- ${['metadata_foo', 'metadata_bar', 'metadata_baz']}
+ ${['metadata-foo']}
+ ${['metadata-foo', 'metadata-bar']}
+ ${['metadata-foo', 'metadata-bar', 'metadata-baz']}
`('$slotNames metadata slots', ({ slotNames }) => {
const slotMocks = slotNames.reduce((acc, current) => {
acc[current] = `<div data-testid="${current}" />`;
@@ -95,4 +97,33 @@ describe('title area', () => {
});
});
});
+
+ describe('info-messages', () => {
+ it('shows a message when the props contain one', () => {
+ mountComponent({ propsData: { infoMessages: [{ text: 'foo foo bar bar' }] } });
+
+ const messages = findInfoMessages();
+ expect(messages).toHaveLength(1);
+ expect(messages.at(0).text()).toBe('foo foo bar bar');
+ });
+
+ it('shows a link when the props contain one', () => {
+ mountComponent({
+ propsData: {
+ infoMessages: [{ text: 'foo %{docLinkStart}link%{docLinkEnd}', link: 'bar' }],
+ },
+ });
+
+ const message = findInfoMessages().at(0);
+
+ expect(message.find(GlLink).attributes('href')).toBe('bar');
+ expect(message.text()).toBe('foo link');
+ });
+
+ it('multiple messages generate multiple spans', () => {
+ mountComponent({ propsData: { infoMessages: [{ text: 'foo' }, { text: 'bar' }] } });
+
+ expect(findInfoMessages()).toHaveLength(2);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
index 16f60b5ff21..0f2f263a776 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
@@ -4,24 +4,37 @@ import {
removeCustomEventListener,
registerHTMLToMarkdownRenderer,
addImage,
+ insertVideo,
getMarkdown,
getEditorOptions,
} from '~/vue_shared/components/rich_content_editor/services/editor_service';
import buildHTMLToMarkdownRenderer from '~/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer';
import buildCustomRenderer from '~/vue_shared/components/rich_content_editor/services/build_custom_renderer';
+import sanitizeHTML from '~/vue_shared/components/rich_content_editor/services/sanitize_html';
jest.mock('~/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer');
jest.mock('~/vue_shared/components/rich_content_editor/services/build_custom_renderer');
+jest.mock('~/vue_shared/components/rich_content_editor/services/sanitize_html');
describe('Editor Service', () => {
let mockInstance;
let event;
let handler;
+ const parseHtml = str => {
+ const wrapper = document.createElement('div');
+ wrapper.innerHTML = str;
+ return wrapper.firstChild;
+ };
beforeEach(() => {
mockInstance = {
eventManager: { addEventType: jest.fn(), removeEventHandler: jest.fn(), listen: jest.fn() },
- editor: { exec: jest.fn() },
+ editor: {
+ exec: jest.fn(),
+ isWysiwygMode: jest.fn(),
+ getSquire: jest.fn(),
+ insertText: jest.fn(),
+ },
invoke: jest.fn(),
toMarkOptions: {
renderer: {
@@ -87,6 +100,38 @@ describe('Editor Service', () => {
});
});
+ describe('insertVideo', () => {
+ const mockUrl = 'some/url';
+ const htmlString = `<figure contenteditable="false" class="gl-relative gl-h-0 video_container"><iframe class="gl-absolute gl-top-0 gl-left-0 gl-w-full gl-h-full" width="560" height="315" frameborder="0" src="some/url"></iframe></figure>`;
+ const mockInsertElement = jest.fn();
+
+ beforeEach(() =>
+ mockInstance.editor.getSquire.mockReturnValue({ insertElement: mockInsertElement }),
+ );
+
+ describe('WYSIWYG mode', () => {
+ it('calls the insertElement method on the squire instance with an iFrame element', () => {
+ mockInstance.editor.isWysiwygMode.mockReturnValue(true);
+
+ insertVideo(mockInstance, mockUrl);
+
+ expect(mockInstance.editor.getSquire().insertElement).toHaveBeenCalledWith(
+ parseHtml(htmlString),
+ );
+ });
+ });
+
+ describe('Markdown mode', () => {
+ it('calls the insertText method on the editor instance with the iFrame element HTML', () => {
+ mockInstance.editor.isWysiwygMode.mockReturnValue(false);
+
+ insertVideo(mockInstance, mockUrl);
+
+ expect(mockInstance.editor.insertText).toHaveBeenCalledWith(htmlString);
+ });
+ });
+ });
+
describe('getMarkdown', () => {
it('calls the invoke method on the instance', () => {
getMarkdown(mockInstance);
@@ -143,5 +188,14 @@ describe('Editor Service', () => {
getEditorOptions(externalOptions);
expect(buildCustomRenderer).toHaveBeenCalledWith(externalOptions.customRenderers);
});
+
+ it('uses the internal sanitizeHTML service for HTML sanitization', () => {
+ const options = getEditorOptions();
+ const html = '<div></div>';
+
+ options.customHTMLSanitizer(html);
+
+ expect(sanitizeHTML).toHaveBeenCalledWith(html);
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js
new file mode 100644
index 00000000000..be3a4030b1d
--- /dev/null
+++ b/spec/frontend/vue_shared/components/rich_content_editor/modals/insert_video_modal_spec.js
@@ -0,0 +1,44 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import InsertVideoModal from '~/vue_shared/components/rich_content_editor/modals/insert_video_modal.vue';
+
+describe('Insert Video Modal', () => {
+ let wrapper;
+
+ const findModal = () => wrapper.find(GlModal);
+ const findUrlInput = () => wrapper.find({ ref: 'urlInput' });
+
+ const triggerInsertVideo = url => {
+ const preventDefault = jest.fn();
+ findUrlInput().vm.$emit('input', url);
+ findModal().vm.$emit('primary', { preventDefault });
+ };
+
+ beforeEach(() => {
+ wrapper = shallowMount(InsertVideoModal);
+ });
+
+ afterEach(() => wrapper.destroy());
+
+ describe('when content is loaded', () => {
+ it('renders a modal component', () => {
+ expect(findModal().exists()).toBe(true);
+ });
+
+ it('renders an input to add a URL', () => {
+ expect(findUrlInput().exists()).toBe(true);
+ });
+ });
+
+ describe('insert video', () => {
+ it.each`
+ url | emitted
+ ${'https://www.youtube.com/embed/someId'} | ${[['https://www.youtube.com/embed/someId']]}
+ ${'https://www.youtube.com/watch?v=1234'} | ${[['https://www.youtube.com/embed/1234']]}
+ ${'::youtube.com/invalid/url'} | ${undefined}
+ `('formats the url correctly', ({ url, emitted }) => {
+ triggerInsertVideo(url);
+ expect(wrapper.emitted('insertVideo')).toEqual(emitted);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
index 3d54db7fe5c..8c2c0413819 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import RichContentEditor from '~/vue_shared/components/rich_content_editor/rich_content_editor.vue';
import AddImageModal from '~/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal.vue';
+import InsertVideoModal from '~/vue_shared/components/rich_content_editor/modals/insert_video_modal.vue';
import {
EDITOR_TYPES,
EDITOR_HEIGHT,
@@ -12,6 +13,7 @@ import {
addCustomEventListener,
removeCustomEventListener,
addImage,
+ insertVideo,
registerHTMLToMarkdownRenderer,
getEditorOptions,
} from '~/vue_shared/components/rich_content_editor/services/editor_service';
@@ -21,6 +23,7 @@ jest.mock('~/vue_shared/components/rich_content_editor/services/editor_service',
addCustomEventListener: jest.fn(),
removeCustomEventListener: jest.fn(),
addImage: jest.fn(),
+ insertVideo: jest.fn(),
registerHTMLToMarkdownRenderer: jest.fn(),
getEditorOptions: jest.fn(),
}));
@@ -32,6 +35,7 @@ describe('Rich Content Editor', () => {
const imageRoot = 'path/to/root/';
const findEditor = () => wrapper.find({ ref: 'editor' });
const findAddImageModal = () => wrapper.find(AddImageModal);
+ const findInsertVideoModal = () => wrapper.find(InsertVideoModal);
const buildWrapper = () => {
wrapper = shallowMount(RichContentEditor, {
@@ -122,6 +126,14 @@ describe('Rich Content Editor', () => {
);
});
+ it('adds the CUSTOM_EVENTS.openInsertVideoModal custom event listener', () => {
+ expect(addCustomEventListener).toHaveBeenCalledWith(
+ wrapper.vm.editorApi,
+ CUSTOM_EVENTS.openInsertVideoModal,
+ wrapper.vm.onOpenInsertVideoModal,
+ );
+ });
+
it('registers HTML to markdown renderer', () => {
expect(registerHTMLToMarkdownRenderer).toHaveBeenCalledWith(wrapper.vm.editorApi);
});
@@ -141,6 +153,16 @@ describe('Rich Content Editor', () => {
wrapper.vm.onOpenAddImageModal,
);
});
+
+ it('removes the CUSTOM_EVENTS.openInsertVideoModal custom event listener', () => {
+ wrapper.vm.$destroy();
+
+ expect(removeCustomEventListener).toHaveBeenCalledWith(
+ wrapper.vm.editorApi,
+ CUSTOM_EVENTS.openInsertVideoModal,
+ wrapper.vm.onOpenInsertVideoModal,
+ );
+ });
});
describe('add image modal', () => {
@@ -161,4 +183,23 @@ describe('Rich Content Editor', () => {
expect(addImage).toHaveBeenCalledWith(mockInstance, mockImage);
});
});
+
+ describe('insert video modal', () => {
+ beforeEach(() => {
+ buildWrapper();
+ });
+
+ it('renders an insertVideoModal component', () => {
+ expect(findInsertVideoModal().exists()).toBe(true);
+ });
+
+ it('calls the onInsertVideo method when the insertVideo event is emitted', () => {
+ const mockUrl = 'https://www.youtube.com/embed/someId';
+ const mockInstance = { exec: jest.fn() };
+ wrapper.vm.$refs.editor = mockInstance;
+
+ findInsertVideoModal().vm.$emit('insertVideo', mockUrl);
+ expect(insertVideo).toHaveBeenCalledWith(mockInstance, mockUrl);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js
index a6c712eeb31..b31684a400e 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js
@@ -1,22 +1,21 @@
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_html_block';
import { buildUneditableHtmlAsTextTokens } from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';
-import { normalTextNode } from './mock_data';
+describe('rich_content_editor/services/renderers/render_html_block', () => {
+ const htmlBlockNode = {
+ literal: '<div><h1>Heading</h1><p>Paragraph.</p></div>',
+ type: 'htmlBlock',
+ };
-const htmlBlockNode = {
- firstChild: null,
- literal: '<div><h1>Heading</h1><p>Paragraph.</p></div>',
- type: 'htmlBlock',
-};
-
-describe('Render HTML renderer', () => {
describe('canRender', () => {
- it('should return true when the argument is an html block', () => {
- expect(renderer.canRender(htmlBlockNode)).toBe(true);
- });
-
- it('should return false when the argument is not an html block', () => {
- expect(renderer.canRender(normalTextNode)).toBe(false);
+ it.each`
+ input | result
+ ${htmlBlockNode} | ${true}
+ ${{ literal: '<iframe></iframe>', type: 'htmlBlock' }} | ${true}
+ ${{ literal: '<iframe src="https://www.youtube.com"></iframe>', type: 'htmlBlock' }} | ${false}
+ ${{ literal: '<iframe></iframe>', type: 'text' }} | ${false}
+ `('returns $result when input=$input', ({ input, result }) => {
+ expect(renderer.canRender(input)).toBe(result);
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/sanitize_html_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/sanitize_html_spec.js
new file mode 100644
index 00000000000..f2182ef60d7
--- /dev/null
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/sanitize_html_spec.js
@@ -0,0 +1,11 @@
+import sanitizeHTML from '~/vue_shared/components/rich_content_editor/services/sanitize_html';
+
+describe('rich_content_editor/services/sanitize_html', () => {
+ it.each`
+ input | result
+ ${'<iframe src="https://www.youtube.com"></iframe>'} | ${'<iframe src="https://www.youtube.com"></iframe>'}
+ ${'<iframe src="https://gitlab.com"></iframe>'} | ${''}
+ `('removes iframes only if the iframe source origin is not allowed', ({ input, result }) => {
+ expect(sanitizeHTML(input)).toBe(result);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js b/spec/frontend/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js
index 31316a93ecd..240d6cb5a34 100644
--- a/spec/frontend/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/collapsed_calendar_icon_spec.js
@@ -18,7 +18,7 @@ describe('collapsedCalendarIcon', () => {
});
it('should hide calendar icon if showIcon', () => {
- expect(vm.$el.querySelector('.fa-calendar')).toBeNull();
+ expect(vm.$el.querySelector('[data-testid="calendar-icon"]')).toBeNull();
});
it('should render text', () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js b/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js
index 65255968bc7..08fc822577e 100644
--- a/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/collapsed_grouped_date_picker_spec.js
@@ -80,7 +80,7 @@ describe('collapsedGroupedDatePicker', () => {
it('should have tooltip as `Start and due date`', () => {
const icons = vm.$el.querySelectorAll('.sidebar-collapsed-icon');
- expect(icons[0].dataset.originalTitle).toBe('Start and due date');
+ expect(icons[0].title).toBe('Start and due date');
});
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
index 589be0ad7a4..a9350bc059d 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -69,6 +69,16 @@ describe('DropdownContentsLabelsView', () => {
expect(wrapper.vm.visibleLabels[0].title).toBe('Bug');
});
+ it('returns matching labels with fuzzy filtering', () => {
+ wrapper.setData({
+ searchKey: 'bg',
+ });
+
+ expect(wrapper.vm.visibleLabels.length).toBe(2);
+ expect(wrapper.vm.visibleLabels[0].title).toBe('Bug');
+ expect(wrapper.vm.visibleLabels[1].title).toBe('Boog');
+ });
+
it('returns all labels when `searchKey` is empty', () => {
wrapper.setData({
searchKey: '',
@@ -133,6 +143,19 @@ describe('DropdownContentsLabelsView', () => {
expect(wrapper.vm.currentHighlightItem).toBe(2);
});
+ it('resets the search text when the Enter key is pressed', () => {
+ wrapper.setData({
+ currentHighlightItem: 1,
+ searchKey: 'bug',
+ });
+
+ wrapper.vm.handleKeyDown({
+ keyCode: ENTER_KEY_CODE,
+ });
+
+ expect(wrapper.vm.searchKey).toBe('');
+ });
+
it('calls action `updateSelectedLabels` with currently highlighted label when Enter key is pressed', () => {
jest.spyOn(wrapper.vm, 'updateSelectedLabels').mockImplementation();
wrapper.setData({
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js
index e1008d13fc2..9697d6c30f2 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js
@@ -24,6 +24,13 @@ export const mockLabels = [
color: '#FF0000',
textColor: '#FFFFFF',
},
+ {
+ id: 29,
+ title: 'Boog',
+ description: 'Label for bugs',
+ color: '#FF0000',
+ textColor: '#FFFFFF',
+ },
];
export const mockConfig = {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
index bfb8e263d81..c742220ba8a 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
@@ -259,21 +259,6 @@ describe('LabelsSelect Actions', () => {
});
});
- describe('replaceSelectedLabels', () => {
- it('replaces `state.selectedLabels`', done => {
- const selectedLabels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
-
- testAction(
- actions.replaceSelectedLabels,
- selectedLabels,
- state,
- [{ type: types.REPLACE_SELECTED_LABELS, payload: selectedLabels }],
- [],
- done,
- );
- });
- });
-
describe('updateSelectedLabels', () => {
it('updates `state.labels` based on provided `labels` param', done => {
const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
index 3414eec8a63..8081806e314 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/mutations_spec.js
@@ -152,19 +152,6 @@ describe('LabelsSelect Mutations', () => {
});
});
- describe(`${types.REPLACE_SELECTED_LABELS}`, () => {
- it('replaces `state.selectedLabels`', () => {
- const state = {
- selectedLabels: [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }],
- };
- const newSelectedLabels = [{ id: 2 }, { id: 5 }];
-
- mutations[types.REPLACE_SELECTED_LABELS](state, newSelectedLabels);
-
- expect(state.selectedLabels).toEqual(newSelectedLabels);
- });
- });
-
describe(`${types.UPDATE_SELECTED_LABELS}`, () => {
const labels = [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }];
diff --git a/spec/frontend/vue_shared/components/sidebar/toggle_sidebar_spec.js b/spec/frontend/vue_shared/components/sidebar/toggle_sidebar_spec.js
index 4342f5e2105..f1c3e8a1ddc 100644
--- a/spec/frontend/vue_shared/components/sidebar/toggle_sidebar_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/toggle_sidebar_spec.js
@@ -11,15 +11,14 @@ describe('toggleSidebar', () => {
});
});
- it('should render << when collapsed', () => {
- expect(vm.$el.querySelector('.fa').classList.contains('fa-angle-double-left')).toEqual(true);
+ it('should render the "chevron-double-lg-left" icon when collapsed', () => {
+ expect(vm.$el.querySelector('[data-testid="chevron-double-lg-left-icon"]')).not.toBeNull();
});
- it('should render >> when collapsed', () => {
+ it('should render the "chevron-double-lg-right" icon when expanded', async () => {
vm.collapsed = false;
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.fa').classList.contains('fa-angle-double-right')).toEqual(true);
- });
+ await Vue.nextTick();
+ expect(vm.$el.querySelector('[data-testid="chevron-double-lg-right-icon"]')).not.toBeNull();
});
it('should emit toggle event when button clicked', () => {
diff --git a/spec/frontend/vue_shared/components/split_button_spec.js b/spec/frontend/vue_shared/components/split_button_spec.js
index f3bd4c14717..e09bc073042 100644
--- a/spec/frontend/vue_shared/components/split_button_spec.js
+++ b/spec/frontend/vue_shared/components/split_button_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import SplitButton from '~/vue_shared/components/split_button.vue';
@@ -25,10 +25,10 @@ describe('SplitButton', () => {
});
};
- const findDropdown = () => wrapper.find(GlDeprecatedDropdown);
+ const findDropdown = () => wrapper.find(GlDropdown);
const findDropdownItem = (index = 0) =>
findDropdown()
- .findAll(GlDeprecatedDropdownItem)
+ .findAll(GlDropdownItem)
.at(index);
const selectItem = index => {
findDropdownItem(index).vm.$emit('click');
diff --git a/spec/frontend/vue_shared/components/todo_button_spec.js b/spec/frontend/vue_shared/components/todo_button_spec.js
index 482b5de11f6..1f8a214d632 100644
--- a/spec/frontend/vue_shared/components/todo_button_spec.js
+++ b/spec/frontend/vue_shared/components/todo_button_spec.js
@@ -33,7 +33,7 @@ describe('Todo Button', () => {
it.each`
label | isTodo
${'Mark as done'} | ${true}
- ${'Add a To-Do'} | ${false}
+ ${'Add a To Do'} | ${false}
`('sets correct label when isTodo is $isTodo', ({ label, isTodo }) => {
createComponent({ isTodo });
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index b43bb6b10e0..c208d7b0226 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -21,6 +21,9 @@ describe('User Popover Component', () => {
let wrapper;
beforeEach(() => {
+ window.gon.features = {
+ securityAutoFix: true,
+ };
loadFixtures(fixtureTemplate);
});
@@ -28,6 +31,7 @@ describe('User Popover Component', () => {
wrapper.destroy();
});
+ const findByTestId = testid => wrapper.find(`[data-testid="${testid}"]`);
const findUserStatus = () => wrapper.find('.js-user-status');
const findTarget = () => document.querySelector('.js-user-link');
@@ -196,4 +200,30 @@ describe('User Popover Component', () => {
expect(findUserStatus().exists()).toBe(false);
});
});
+
+ describe('security bot', () => {
+ const SECURITY_BOT_USER = {
+ ...DEFAULT_PROPS.user,
+ name: 'GitLab Security Bot',
+ username: 'GitLab-Security-Bot',
+ websiteUrl: '/security/bot/docs',
+ };
+ const findSecurityBotDocsLink = () => findByTestId('user-popover-bot-docs-link');
+
+ it("shows a link to the bot's documentation", () => {
+ createWrapper({ user: SECURITY_BOT_USER });
+ const securityBotDocsLink = findSecurityBotDocsLink();
+ expect(securityBotDocsLink.exists()).toBe(true);
+ expect(securityBotDocsLink.attributes('href')).toBe(SECURITY_BOT_USER.websiteUrl);
+ });
+
+ it('does not show the link if the feature flag is disabled', () => {
+ window.gon.features = {
+ securityAutoFix: false,
+ };
+ createWrapper({ user: SECURITY_BOT_USER });
+
+ expect(findSecurityBotDocsLink().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 57f511903d9..8ed072bed13 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -3,9 +3,27 @@ import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import WebIdeLink from '~/vue_shared/components/web_ide_link.vue';
import ActionsButton from '~/vue_shared/components/actions_button.vue';
+const TEST_EDIT_URL = '/gitlab-test/test/-/edit/master/';
const TEST_WEB_IDE_URL = '/-/ide/project/gitlab-test/test/edit/master/-/';
const TEST_GITPOD_URL = 'https://gitpod.test/';
+const ACTION_EDIT = {
+ href: TEST_EDIT_URL,
+ key: 'edit',
+ text: 'Edit',
+ secondaryText: 'Edit this file only.',
+ tooltip: '',
+ attrs: {
+ 'data-qa-selector': 'edit_button',
+ 'data-track-event': 'click_edit',
+ 'data-track-label': 'Edit',
+ },
+};
+const ACTION_EDIT_CONFIRM_FORK = {
+ ...ACTION_EDIT,
+ href: '#modal-confirm-fork-edit',
+ handle: expect.any(Function),
+};
const ACTION_WEB_IDE = {
href: TEST_WEB_IDE_URL,
key: 'webide',
@@ -14,13 +32,16 @@ const ACTION_WEB_IDE = {
text: 'Web IDE',
attrs: {
'data-qa-selector': 'web_ide_button',
+ 'data-track-event': 'click_edit_ide',
+ 'data-track-label': 'Web IDE',
},
};
-const ACTION_WEB_IDE_FORK = {
+const ACTION_WEB_IDE_CONFIRM_FORK = {
...ACTION_WEB_IDE,
- href: '#modal-confirm-fork',
+ href: '#modal-confirm-fork-webide',
handle: expect.any(Function),
};
+const ACTION_WEB_IDE_EDIT_FORK = { ...ACTION_WEB_IDE, text: 'Edit fork in Web IDE' };
const ACTION_GITPOD = {
href: TEST_GITPOD_URL,
key: 'gitpod',
@@ -43,6 +64,7 @@ describe('Web IDE link component', () => {
function createComponent(props) {
wrapper = shallowMount(WebIdeLink, {
propsData: {
+ editUrl: TEST_EDIT_URL,
webIdeUrl: TEST_WEB_IDE_URL,
gitpodUrl: TEST_GITPOD_URL,
...props,
@@ -57,14 +79,36 @@ describe('Web IDE link component', () => {
const findActionsButton = () => wrapper.find(ActionsButton);
const findLocalStorageSync = () => wrapper.find(LocalStorageSync);
- it.each`
- props | expectedActions
- ${{}} | ${[ACTION_WEB_IDE]}
- ${{ needsToFork: true }} | ${[ACTION_WEB_IDE_FORK]}
- ${{ showWebIdeButton: false, showGitpodButton: true, gitpodEnabled: true }} | ${[ACTION_GITPOD]}
- ${{ showWebIdeButton: false, showGitpodButton: true, gitpodEnabled: false }} | ${[ACTION_GITPOD_ENABLE]}
- ${{ showGitpodButton: true, gitpodEnabled: false }} | ${[ACTION_WEB_IDE, ACTION_GITPOD_ENABLE]}
- `('renders actions with props=$props', ({ props, expectedActions }) => {
+ it.each([
+ {
+ props: {},
+ expectedActions: [ACTION_WEB_IDE, ACTION_EDIT],
+ },
+ {
+ props: { isFork: true },
+ expectedActions: [ACTION_WEB_IDE_EDIT_FORK, ACTION_EDIT],
+ },
+ {
+ props: { needsToFork: true },
+ expectedActions: [ACTION_WEB_IDE_CONFIRM_FORK, ACTION_EDIT_CONFIRM_FORK],
+ },
+ {
+ props: { showWebIdeButton: false, showGitpodButton: true, gitpodEnabled: true },
+ expectedActions: [ACTION_EDIT, ACTION_GITPOD],
+ },
+ {
+ props: { showWebIdeButton: false, showGitpodButton: true, gitpodEnabled: false },
+ expectedActions: [ACTION_EDIT, ACTION_GITPOD_ENABLE],
+ },
+ {
+ props: { showGitpodButton: true, gitpodEnabled: false },
+ expectedActions: [ACTION_WEB_IDE, ACTION_EDIT, ACTION_GITPOD_ENABLE],
+ },
+ {
+ props: { showEditButton: false },
+ expectedActions: [ACTION_WEB_IDE],
+ },
+ ])('renders actions appropriately for given props', ({ props, expectedActions }) => {
createComponent(props);
expect(findActionsButton().props('actions')).toEqual(expectedActions);
@@ -72,7 +116,12 @@ describe('Web IDE link component', () => {
describe('with multiple actions', () => {
beforeEach(() => {
- createComponent({ showWebIdeButton: true, showGitpodButton: true, gitpodEnabled: true });
+ createComponent({
+ showEditButton: false,
+ showWebIdeButton: true,
+ showGitpodButton: true,
+ gitpodEnabled: true,
+ });
});
it('selected Web IDE by default', () => {
diff --git a/spec/frontend/vue_shared/directives/tooltip_spec.js b/spec/frontend/vue_shared/directives/tooltip_spec.js
index 9d3dd3c5f75..4217b8d3c02 100644
--- a/spec/frontend/vue_shared/directives/tooltip_spec.js
+++ b/spec/frontend/vue_shared/directives/tooltip_spec.js
@@ -1,42 +1,59 @@
import $ from 'jquery';
+import { escape } from 'lodash';
import { mount } from '@vue/test-utils';
import tooltip from '~/vue_shared/directives/tooltip';
+const DEFAULT_TOOLTIP_TEMPLATE = '<div v-tooltip :title="tooltip"></div>';
+const HTML_TOOLTIP_TEMPLATE = '<div v-tooltip data-html="true" :title="tooltip"></div>';
+
describe('Tooltip directive', () => {
- let vm;
+ let wrapper;
+
+ function createTooltipContainer({
+ template = DEFAULT_TOOLTIP_TEMPLATE,
+ text = 'some text',
+ } = {}) {
+ wrapper = mount(
+ {
+ directives: { tooltip },
+ data: () => ({ tooltip: text }),
+ template,
+ },
+ { attachToDocument: true },
+ );
+ }
+
+ async function showTooltip() {
+ $(wrapper.vm.$el).tooltip('show');
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ }
+
+ function findTooltipInnerHtml() {
+ return document.querySelector('.tooltip-inner').innerHTML;
+ }
+
+ function findTooltipHtml() {
+ return document.querySelector('.tooltip').innerHTML;
+ }
afterEach(() => {
- if (vm) {
- vm.$destroy();
- }
+ wrapper.destroy();
+ wrapper = null;
});
describe('with a single tooltip', () => {
- beforeEach(() => {
- const wrapper = mount(
- {
- directives: {
- tooltip,
- },
- data() {
- return {
- tooltip: 'some text',
- };
- },
- template: '<div v-tooltip :title="tooltip"></div>',
- },
- { attachToDocument: true },
- );
-
- vm = wrapper.vm;
- });
-
it('should have tooltip plugin applied', () => {
- expect($(vm.$el).data('bs.tooltip')).toBeDefined();
+ createTooltipContainer();
+
+ expect($(wrapper.vm.$el).data('bs.tooltip')).toBeDefined();
});
it('displays the title as tooltip', () => {
- $(vm.$el).tooltip('show');
+ createTooltipContainer();
+
+ $(wrapper.vm.$el).tooltip('show');
+
jest.runOnlyPendingTimers();
const tooltipElement = document.querySelector('.tooltip-inner');
@@ -44,52 +61,98 @@ describe('Tooltip directive', () => {
expect(tooltipElement.textContent).toContain('some text');
});
- it('updates a visible tooltip', () => {
- $(vm.$el).tooltip('show');
+ it.each`
+ condition | template | sanitize
+ ${'does not contain any html'} | ${DEFAULT_TOOLTIP_TEMPLATE} | ${false}
+ ${'contains html'} | ${HTML_TOOLTIP_TEMPLATE} | ${true}
+ `('passes sanitize=$sanitize if the tooltip $condition', ({ template, sanitize }) => {
+ createTooltipContainer({ template });
+
+ expect($(wrapper.vm.$el).data('bs.tooltip').config.sanitize).toEqual(sanitize);
+ });
+
+ it('updates a visible tooltip', async () => {
+ createTooltipContainer();
+
+ $(wrapper.vm.$el).tooltip('show');
jest.runOnlyPendingTimers();
const tooltipElement = document.querySelector('.tooltip-inner');
- vm.tooltip = 'other text';
+ wrapper.vm.tooltip = 'other text';
jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+
+ expect(tooltipElement.textContent).toContain('other text');
+ });
+
+ describe('tooltip sanitization', () => {
+ it('reads tooltip content as text if data-html is not passed', async () => {
+ createTooltipContainer({ text: 'sample text<script>alert("XSS!!")</script>' });
- return vm.$nextTick().then(() => {
- expect(tooltipElement.textContent).toContain('other text');
+ await showTooltip();
+
+ const result = findTooltipInnerHtml();
+ expect(result).toEqual('sample text&lt;script&gt;alert("XSS!!")&lt;/script&gt;');
+ });
+
+ it('sanitizes tooltip if data-html is passed', async () => {
+ createTooltipContainer({
+ template: HTML_TOOLTIP_TEMPLATE,
+ text: 'sample text<script>alert("XSS!!")</script>',
+ });
+
+ await showTooltip();
+
+ const result = findTooltipInnerHtml();
+ expect(result).toEqual('sample text');
+ expect(result).not.toContain('XSS!!');
+ });
+
+ it('sanitizes tooltip if data-template is passed', async () => {
+ const tooltipTemplate = escape(
+ '<div class="tooltip" role="tooltip"><div onclick="alert(\'XSS!\')" class="arrow"></div><div class="tooltip-inner"></div></div>',
+ );
+
+ createTooltipContainer({
+ template: `<div v-tooltip :title="tooltip" data-html="false" data-template="${tooltipTemplate}"></div>`,
+ });
+
+ await showTooltip();
+
+ const result = findTooltipHtml();
+ expect(result).toEqual(
+ // objectionable element is removed
+ '<div class="arrow"></div><div class="tooltip-inner">some text</div>',
+ );
+ expect(result).not.toContain('XSS!!');
});
});
});
describe('with multiple tooltips', () => {
beforeEach(() => {
- const wrapper = mount(
- {
- directives: {
- tooltip,
- },
- template: `
- <div>
- <div
- v-tooltip
- class="js-look-for-tooltip"
- title="foo">
- </div>
- <div
- v-tooltip
- title="bar">
- </div>
+ createTooltipContainer({
+ template: `
+ <div>
+ <div
+ v-tooltip
+ class="js-look-for-tooltip"
+ title="foo">
</div>
- `,
- },
- { attachToDocument: true },
- );
-
- vm = wrapper.vm;
+ <div
+ v-tooltip
+ title="bar">
+ </div>
+ </div>
+ `,
+ });
});
it('should have tooltip plugin applied to all instances', () => {
expect(
- $(vm.$el)
+ $(wrapper.vm.$el)
.find('.js-look-for-tooltip')
.data('bs.tooltip'),
).toBeDefined();
diff --git a/spec/frontend/vue_shared/droplab_dropdown_button_spec.js b/spec/frontend/vue_shared/droplab_dropdown_button_spec.js
deleted file mode 100644
index e57c730ecee..00000000000
--- a/spec/frontend/vue_shared/droplab_dropdown_button_spec.js
+++ /dev/null
@@ -1,132 +0,0 @@
-import { mount } from '@vue/test-utils';
-
-import DroplabDropdownButton from '~/vue_shared/components/droplab_dropdown_button.vue';
-
-const mockActions = [
- {
- title: 'Foo',
- description: 'Some foo action',
- },
- {
- title: 'Bar',
- description: 'Some bar action',
- },
-];
-
-const createComponent = ({
- size = '',
- dropdownClass = '',
- actions = mockActions,
- defaultAction = 0,
-}) =>
- mount(DroplabDropdownButton, {
- propsData: {
- size,
- dropdownClass,
- actions,
- defaultAction,
- },
- });
-
-describe('DroplabDropdownButton', () => {
- let wrapper;
-
- beforeEach(() => {
- wrapper = createComponent({});
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('data', () => {
- it('contains `selectedAction` representing value of `defaultAction` prop', () => {
- expect(wrapper.vm.selectedAction).toBe(0);
- });
- });
-
- describe('computed', () => {
- describe('selectedActionTitle', () => {
- it('returns string containing title of selected action', () => {
- wrapper.setData({ selectedAction: 0 });
-
- expect(wrapper.vm.selectedActionTitle).toBe(mockActions[0].title);
-
- wrapper.setData({ selectedAction: 1 });
-
- expect(wrapper.vm.selectedActionTitle).toBe(mockActions[1].title);
- });
- });
-
- describe('buttonSizeClass', () => {
- it('returns string containing button sizing class based on `size` prop', done => {
- const wrapperWithSize = createComponent({
- size: 'sm',
- });
-
- wrapperWithSize.vm.$nextTick(() => {
- expect(wrapperWithSize.vm.buttonSizeClass).toBe('btn-sm');
-
- done();
- wrapperWithSize.destroy();
- });
- });
- });
- });
-
- describe('methods', () => {
- describe('handlePrimaryActionClick', () => {
- it('emits `onActionClick` event on component with selectedAction object as param', () => {
- jest.spyOn(wrapper.vm, '$emit');
-
- wrapper.setData({ selectedAction: 0 });
- wrapper.vm.handlePrimaryActionClick();
-
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('onActionClick', mockActions[0]);
- });
- });
-
- describe('handleActionClick', () => {
- it('emits `onActionSelect` event on component with selectedAction index as param', () => {
- jest.spyOn(wrapper.vm, '$emit');
-
- wrapper.vm.handleActionClick(1);
-
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('onActionSelect', 1);
- });
- });
- });
-
- describe('template', () => {
- it('renders default action button', () => {
- const defaultButton = wrapper.findAll('.btn').at(0);
-
- expect(defaultButton.text()).toBe(mockActions[0].title);
- });
-
- it('renders dropdown button', () => {
- const dropdownButton = wrapper.findAll('.dropdown-toggle').at(0);
-
- expect(dropdownButton.isVisible()).toBe(true);
- });
-
- it('renders dropdown actions', () => {
- const dropdownActions = wrapper.findAll('.dropdown-menu li button');
-
- Array(dropdownActions.length)
- .fill()
- .forEach((_, index) => {
- const actionContent = dropdownActions.at(index).find('.description');
-
- expect(actionContent.find('strong').text()).toBe(mockActions[index].title);
- expect(actionContent.find('p').text()).toBe(mockActions[index].description);
- });
- });
-
- it('renders divider between dropdown actions', () => {
- const dropdownDivider = wrapper.find('.dropdown-menu .divider');
-
- expect(dropdownDivider.isVisible()).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
new file mode 100644
index 00000000000..31bdfc931ac
--- /dev/null
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -0,0 +1,118 @@
+import { mount } from '@vue/test-utils';
+import Api from '~/api';
+import Flash from '~/flash';
+import SecurityReportsApp from '~/vue_shared/security_reports/security_reports_app.vue';
+
+jest.mock('~/flash');
+
+describe('Grouped security reports app', () => {
+ let wrapper;
+ let mrTabsMock;
+
+ const props = {
+ pipelineId: 123,
+ projectId: 456,
+ securityReportsDocsPath: '/docs',
+ };
+
+ const createComponent = () => {
+ wrapper = mount(SecurityReportsApp, {
+ propsData: { ...props },
+ });
+ };
+
+ const findPipelinesTabAnchor = () => wrapper.find('[data-testid="show-pipelines"]');
+ const findHelpLink = () => wrapper.find('[data-testid="help"]');
+ const setupMrTabsMock = () => {
+ mrTabsMock = { tabShown: jest.fn() };
+ window.mrTabs = mrTabsMock;
+ };
+ const setupMockJobArtifact = reportType => {
+ jest
+ .spyOn(Api, 'pipelineJobs')
+ .mockResolvedValue({ data: [{ artifacts: [{ file_type: reportType }] }] });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ delete window.mrTabs;
+ });
+
+ describe.each(SecurityReportsApp.reportTypes)('given a report type %p', reportType => {
+ beforeEach(() => {
+ window.mrTabs = { tabShown: jest.fn() };
+ setupMockJobArtifact(reportType);
+ createComponent();
+ });
+
+ it('calls the pipelineJobs API correctly', () => {
+ expect(Api.pipelineJobs).toHaveBeenCalledWith(props.projectId, props.pipelineId);
+ });
+
+ it('renders the expected message', () => {
+ expect(wrapper.text()).toMatchInterpolatedText(SecurityReportsApp.i18n.scansHaveRun);
+ });
+
+ describe('clicking the anchor to the pipelines tab', () => {
+ beforeEach(() => {
+ setupMrTabsMock();
+ findPipelinesTabAnchor().trigger('click');
+ });
+
+ it('calls the mrTabs.tabShown global', () => {
+ expect(mrTabsMock.tabShown.mock.calls).toEqual([['pipelines']]);
+ });
+ });
+
+ it('renders a help link', () => {
+ expect(findHelpLink().attributes()).toMatchObject({
+ href: props.securityReportsDocsPath,
+ });
+ });
+ });
+
+ describe('given a report type "foo"', () => {
+ beforeEach(() => {
+ setupMockJobArtifact('foo');
+ createComponent();
+ });
+
+ it('calls the pipelineJobs API correctly', () => {
+ expect(Api.pipelineJobs).toHaveBeenCalledWith(props.projectId, props.pipelineId);
+ });
+
+ it('renders nothing', () => {
+ expect(wrapper.html()).toBe('');
+ });
+ });
+
+ describe('given an error from the API', () => {
+ let error;
+
+ beforeEach(() => {
+ error = new Error('an error');
+ jest.spyOn(Api, 'pipelineJobs').mockRejectedValue(error);
+ createComponent();
+ });
+
+ it('calls the pipelineJobs API correctly', () => {
+ expect(Api.pipelineJobs).toHaveBeenCalledWith(props.projectId, props.pipelineId);
+ });
+
+ it('renders nothing', () => {
+ expect(wrapper.html()).toBe('');
+ });
+
+ it('calls Flash correctly', () => {
+ expect(Flash.mock.calls).toEqual([
+ [
+ {
+ message: SecurityReportsApp.i18n.apiError,
+ captureError: true,
+ error,
+ },
+ ],
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/vuex_shared/modules/members/actions_spec.js b/spec/frontend/vuex_shared/modules/members/actions_spec.js
new file mode 100644
index 00000000000..833bd4cc175
--- /dev/null
+++ b/spec/frontend/vuex_shared/modules/members/actions_spec.js
@@ -0,0 +1,110 @@
+import { noop } from 'lodash';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import { members, group } from 'jest/vue_shared/components/members/mock_data';
+import testAction from 'helpers/vuex_action_helper';
+import httpStatusCodes from '~/lib/utils/http_status';
+import * as types from '~/vuex_shared/modules/members/mutation_types';
+import {
+ updateMemberRole,
+ showRemoveGroupLinkModal,
+ hideRemoveGroupLinkModal,
+} from '~/vuex_shared/modules/members/actions';
+
+describe('Vuex members actions', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('updateMemberRole', () => {
+ const memberId = members[0].id;
+ const accessLevel = { integerValue: 30, stringValue: 'Developer' };
+
+ const payload = {
+ memberId,
+ accessLevel,
+ };
+ const state = {
+ members,
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ requestFormatter: noop,
+ removeGroupLinkModalVisible: false,
+ groupLinkToRemove: null,
+ };
+
+ describe('successful request', () => {
+ it(`commits ${types.RECEIVE_MEMBER_ROLE_SUCCESS} mutation`, async () => {
+ let requestPath;
+ mock.onPut().replyOnce(config => {
+ requestPath = config.url;
+ return [httpStatusCodes.OK, {}];
+ });
+
+ await testAction(updateMemberRole, payload, state, [
+ {
+ type: types.RECEIVE_MEMBER_ROLE_SUCCESS,
+ payload,
+ },
+ ]);
+
+ expect(requestPath).toBe('/groups/foo-bar/-/group_members/238');
+ });
+ });
+
+ describe('unsuccessful request', () => {
+ beforeEach(() => {
+ mock.onPut().replyOnce(httpStatusCodes.BAD_REQUEST, { message: 'Bad request' });
+ });
+
+ it(`commits ${types.RECEIVE_MEMBER_ROLE_ERROR} mutation`, async () => {
+ try {
+ await testAction(updateMemberRole, payload, state, [
+ {
+ type: types.RECEIVE_MEMBER_ROLE_ERROR,
+ },
+ ]);
+ } catch {
+ // Do nothing
+ }
+ });
+
+ it('throws error', async () => {
+ await expect(testAction(updateMemberRole, payload, state)).rejects.toThrowError();
+ });
+ });
+ });
+
+ describe('Group Link Modal', () => {
+ const state = {
+ removeGroupLinkModalVisible: false,
+ groupLinkToRemove: null,
+ };
+
+ describe('showRemoveGroupLinkModal', () => {
+ it(`commits ${types.SHOW_REMOVE_GROUP_LINK_MODAL} mutation`, () => {
+ testAction(showRemoveGroupLinkModal, group, state, [
+ {
+ type: types.SHOW_REMOVE_GROUP_LINK_MODAL,
+ payload: group,
+ },
+ ]);
+ });
+ });
+
+ describe('hideRemoveGroupLinkModal', () => {
+ it(`commits ${types.HIDE_REMOVE_GROUP_LINK_MODAL} mutation`, () => {
+ testAction(hideRemoveGroupLinkModal, group, state, [
+ {
+ type: types.HIDE_REMOVE_GROUP_LINK_MODAL,
+ },
+ ]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vuex_shared/modules/members/mutations_spec.js b/spec/frontend/vuex_shared/modules/members/mutations_spec.js
new file mode 100644
index 00000000000..7338b19cfc9
--- /dev/null
+++ b/spec/frontend/vuex_shared/modules/members/mutations_spec.js
@@ -0,0 +1,90 @@
+import { members, group } from 'jest/vue_shared/components/members/mock_data';
+import mutations from '~/vuex_shared/modules/members/mutations';
+import * as types from '~/vuex_shared/modules/members/mutation_types';
+
+describe('Vuex members mutations', () => {
+ describe(types.RECEIVE_MEMBER_ROLE_SUCCESS, () => {
+ it('updates member', () => {
+ const state = {
+ members,
+ };
+
+ const accessLevel = { integerValue: 30, stringValue: 'Developer' };
+
+ mutations[types.RECEIVE_MEMBER_ROLE_SUCCESS](state, {
+ memberId: members[0].id,
+ accessLevel,
+ });
+
+ expect(state.members[0].accessLevel).toEqual(accessLevel);
+ });
+ });
+
+ describe(types.RECEIVE_MEMBER_ROLE_ERROR, () => {
+ it('shows error message', () => {
+ const state = {
+ showError: false,
+ errorMessage: '',
+ };
+
+ mutations[types.RECEIVE_MEMBER_ROLE_ERROR](state);
+
+ expect(state.showError).toBe(true);
+ expect(state.errorMessage).toBe(
+ "An error occurred while updating the member's role, please try again.",
+ );
+ });
+ });
+
+ describe(types.HIDE_ERROR, () => {
+ it('sets `showError` to `false`', () => {
+ const state = {
+ showError: true,
+ errorMessage: 'foo bar',
+ };
+
+ mutations[types.HIDE_ERROR](state);
+
+ expect(state.showError).toBe(false);
+ });
+
+ it('sets `errorMessage` to an empty string', () => {
+ const state = {
+ showError: true,
+ errorMessage: 'foo bar',
+ };
+
+ mutations[types.HIDE_ERROR](state);
+
+ expect(state.errorMessage).toBe('');
+ });
+ });
+
+ describe(types.SHOW_REMOVE_GROUP_LINK_MODAL, () => {
+ it('sets `removeGroupLinkModalVisible` and `groupLinkToRemove`', () => {
+ const state = {
+ removeGroupLinkModalVisible: false,
+ groupLinkToRemove: null,
+ };
+
+ mutations[types.SHOW_REMOVE_GROUP_LINK_MODAL](state, group);
+
+ expect(state).toEqual({
+ removeGroupLinkModalVisible: true,
+ groupLinkToRemove: group,
+ });
+ });
+ });
+
+ describe(types.HIDE_REMOVE_GROUP_LINK_MODAL, () => {
+ it('sets `removeGroupLinkModalVisible` to `false`', () => {
+ const state = {
+ removeGroupLinkModalVisible: false,
+ };
+
+ mutations[types.HIDE_REMOVE_GROUP_LINK_MODAL](state);
+
+ expect(state.removeGroupLinkModalVisible).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vuex_shared/modules/members/utils_spec.js b/spec/frontend/vuex_shared/modules/members/utils_spec.js
new file mode 100644
index 00000000000..4fc3445dac0
--- /dev/null
+++ b/spec/frontend/vuex_shared/modules/members/utils_spec.js
@@ -0,0 +1,14 @@
+import { members } from 'jest/vue_shared/components/members/mock_data';
+import { findMember } from '~/vuex_shared/modules/members/utils';
+
+describe('Members Vuex utils', () => {
+ describe('findMember', () => {
+ it('finds member by ID', () => {
+ const state = {
+ members,
+ };
+
+ expect(findMember(state, members[0].id)).toEqual(members[0]);
+ });
+ });
+});
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index 59d05f68fdd..77c2ae19d1f 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -1,26 +1,30 @@
import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlDrawer } from '@gitlab/ui';
+import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
import App from '~/whats_new/components/app.vue';
const localVue = createLocalVue();
localVue.use(Vuex);
describe('App', () => {
+ const propsData = { storageKey: 'storage-key' };
let wrapper;
let store;
let actions;
let state;
- let propsData = { features: '[ {"title":"Whats New Drawer"} ]' };
+ let trackingSpy;
const buildWrapper = () => {
actions = {
openDrawer: jest.fn(),
closeDrawer: jest.fn(),
+ fetchItems: jest.fn(),
};
state = {
open: true,
+ features: null,
};
store = new Vuex.Store({
@@ -35,12 +39,20 @@ describe('App', () => {
});
};
- beforeEach(() => {
+ beforeEach(async () => {
+ document.body.dataset.page = 'test-page';
+ document.body.dataset.namespaceId = 'namespace-840';
+
+ trackingSpy = mockTracking('_category_', null, jest.spyOn);
buildWrapper();
+
+ wrapper.vm.$store.state.features = [{ title: 'Whats New Drawer', url: 'www.url.com' }];
+ await wrapper.vm.$nextTick();
});
afterEach(() => {
wrapper.destroy();
+ unmockTracking();
});
const getDrawer = () => wrapper.find(GlDrawer);
@@ -50,7 +62,11 @@ describe('App', () => {
});
it('dispatches openDrawer when mounted', () => {
- expect(actions.openDrawer).toHaveBeenCalled();
+ expect(actions.openDrawer).toHaveBeenCalledWith(expect.any(Object), 'storage-key');
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_whats_new_drawer', {
+ label: 'namespace_id',
+ value: 'namespace-840',
+ });
});
it('dispatches closeDrawer when clicking close', () => {
@@ -66,14 +82,24 @@ describe('App', () => {
expect(getDrawer().props('open')).toBe(openState);
});
- it('renders features when provided as props', () => {
+ it('renders features when provided via ajax', () => {
+ expect(actions.fetchItems).toHaveBeenCalled();
expect(wrapper.find('h5').text()).toBe('Whats New Drawer');
});
- it('handles bad json argument gracefully', () => {
- propsData = { features: 'this is not json' };
- buildWrapper();
+ it('sends an event when a feature item is clicked', () => {
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
- expect(getDrawer().exists()).toBe(true);
+ const link = wrapper.find('[data-testid="whats-new-title-link"]');
+ triggerEvent(link.element);
+
+ expect(trackingSpy.mock.calls[1]).toMatchObject([
+ '_category_',
+ 'click_whats_new_item',
+ {
+ label: 'Whats New Drawer',
+ property: 'www.url.com',
+ },
+ ]);
});
});
diff --git a/spec/frontend/whats_new/store/actions_spec.js b/spec/frontend/whats_new/store/actions_spec.js
index d95453c9175..95ab667d611 100644
--- a/spec/frontend/whats_new/store/actions_spec.js
+++ b/spec/frontend/whats_new/store/actions_spec.js
@@ -1,11 +1,19 @@
import testAction from 'helpers/vuex_action_helper';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
import actions from '~/whats_new/store/actions';
import * as types from '~/whats_new/store/mutation_types';
+import axios from '~/lib/utils/axios_utils';
describe('whats new actions', () => {
describe('openDrawer', () => {
+ useLocalStorageSpy();
+
it('should commit openDrawer', () => {
- testAction(actions.openDrawer, {}, {}, [{ type: types.OPEN_DRAWER }]);
+ testAction(actions.openDrawer, 'storage-key', {}, [{ type: types.OPEN_DRAWER }]);
+
+ expect(window.localStorage.setItem).toHaveBeenCalledWith('storage-key', 'false');
});
});
@@ -14,4 +22,27 @@ describe('whats new actions', () => {
testAction(actions.closeDrawer, {}, {}, [{ type: types.CLOSE_DRAWER }]);
});
});
+
+ describe('fetchItems', () => {
+ let axiosMock;
+
+ beforeEach(async () => {
+ axiosMock = new MockAdapter(axios);
+ axiosMock
+ .onGet('/-/whats_new')
+ .replyOnce(200, [{ title: 'Whats New Drawer', url: 'www.url.com' }]);
+
+ await waitForPromises();
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ });
+
+ it('should commit setFeatures', () => {
+ testAction(actions.fetchItems, {}, {}, [
+ { type: types.SET_FEATURES, payload: [{ title: 'Whats New Drawer', url: 'www.url.com' }] },
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/whats_new/store/mutations_spec.js b/spec/frontend/whats_new/store/mutations_spec.js
index 3c33364fed3..feaa1dd2a3b 100644
--- a/spec/frontend/whats_new/store/mutations_spec.js
+++ b/spec/frontend/whats_new/store/mutations_spec.js
@@ -22,4 +22,11 @@ describe('whats new mutations', () => {
expect(state.open).toBe(false);
});
});
+
+ describe('setFeatures', () => {
+ it('sets features to data', () => {
+ mutations[types.SET_FEATURES](state, 'bells and whistles');
+ expect(state.features).toBe('bells and whistles');
+ });
+ });
});
diff --git a/spec/frontend/wikis_spec.js b/spec/frontend/wikis_spec.js
index 3469be4da1c..cf1ea972697 100644
--- a/spec/frontend/wikis_spec.js
+++ b/spec/frontend/wikis_spec.js
@@ -146,7 +146,7 @@ describe('Wikis', () => {
expect(Tracking.event).toHaveBeenCalledWith(trackingPage, 'view_wiki_page', {
label: 'view_wiki_page',
context: {
- schema: 'iglu:com.gitlab/wiki_page_context/jsonschema/1-0-0',
+ schema: 'iglu:com.gitlab/wiki_page_context/jsonschema/1-0-1',
data: trackingContext,
},
});
diff --git a/spec/frontend_integration/.eslintrc.yml b/spec/frontend_integration/.eslintrc.yml
index 26b6f935ffb..2460e218f59 100644
--- a/spec/frontend_integration/.eslintrc.yml
+++ b/spec/frontend_integration/.eslintrc.yml
@@ -4,3 +4,5 @@ settings:
import/resolver:
jest:
jestConfigFile: 'jest.config.integration.js'
+globals:
+ mockServer: false
diff --git a/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap b/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
index 6beb5eab6db..6c120898f01 100644
--- a/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
+++ b/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
@@ -20,6 +20,7 @@ exports[`WebIDE runs 1`] = `
>
<div
class="multi-file-commit-panel-inner"
+ data-testid="ide-side-bar-inner"
>
<div
class="multi-file-loading-container"
diff --git a/spec/frontend_integration/ide/ide_helper.js b/spec/frontend_integration/ide/ide_helper.js
new file mode 100644
index 00000000000..a43695fea8f
--- /dev/null
+++ b/spec/frontend_integration/ide/ide_helper.js
@@ -0,0 +1,102 @@
+import { findAllByText, fireEvent, getByLabelText, screen } from '@testing-library/dom';
+
+const isFileRowOpen = row => row.matches('.is-open');
+
+const getLeftSidebar = () => screen.getByTestId('left-sidebar');
+
+const clickOnLeftSidebarTab = name => {
+ const sidebar = getLeftSidebar();
+
+ const button = getByLabelText(sidebar, name);
+
+ button.click();
+};
+
+const findMonacoEditor = () =>
+ screen.findByLabelText(/Editor content;/).then(x => x.closest('.monaco-editor'));
+
+const findAndSetEditorValue = async value => {
+ const editor = await findMonacoEditor();
+ const uri = editor.getAttribute('data-uri');
+
+ window.monaco.editor.getModel(uri).setValue(value);
+};
+
+const findTreeBody = () => screen.findByTestId('ide-tree-body', {}, { timeout: 5000 });
+
+const findFileRowContainer = (row = null) =>
+ row ? Promise.resolve(row.parentElement) : findTreeBody();
+
+const findFileChild = async (row, name, index = 0) => {
+ const container = await findFileRowContainer(row);
+ const children = await findAllByText(container, name, { selector: '.file-row-name' });
+
+ return children.map(x => x.closest('.file-row')).find(x => x.dataset.level === index.toString());
+};
+
+const openFileRow = row => {
+ if (!row || isFileRowOpen(row)) {
+ return;
+ }
+
+ row.click();
+};
+
+const findAndTraverseToPath = async (path, index = 0, row = null) => {
+ if (!path) {
+ return row;
+ }
+
+ const [name, ...restOfPath] = path.split('/');
+
+ openFileRow(row);
+
+ const child = await findFileChild(row, name, index);
+
+ return findAndTraverseToPath(restOfPath.join('/'), index + 1, child);
+};
+
+const clickFileRowAction = (row, name) => {
+ fireEvent.mouseOver(row);
+
+ const dropdownButton = getByLabelText(row, 'Create new file or directory');
+ dropdownButton.click();
+
+ const dropdownAction = getByLabelText(dropdownButton.parentNode, name);
+ dropdownAction.click();
+};
+
+const findAndSetFileName = async value => {
+ const nameField = await screen.findByTestId('file-name-field');
+ fireEvent.input(nameField, { target: { value } });
+
+ const createButton = screen.getByText('Create file');
+ createButton.click();
+};
+
+export const createFile = async (path, content) => {
+ const parentPath = path
+ .split('/')
+ .slice(0, -1)
+ .join('/');
+
+ const parentRow = await findAndTraverseToPath(parentPath);
+ clickFileRowAction(parentRow, 'New file');
+
+ await findAndSetFileName(path);
+ await findAndSetEditorValue(content);
+};
+
+export const deleteFile = async path => {
+ const row = await findAndTraverseToPath(path);
+ clickFileRowAction(row, 'Delete');
+};
+
+export const commit = async () => {
+ clickOnLeftSidebarTab('Commit');
+ screen.getByTestId('begin-commit-button').click();
+
+ await screen.findByLabelText(/Commit to .+ branch/).then(x => x.click());
+
+ screen.getByText('Commit').click();
+};
diff --git a/spec/frontend_integration/ide/ide_integration_spec.js b/spec/frontend_integration/ide/ide_integration_spec.js
index 91d89c26ec1..c4d0c4df8de 100644
--- a/spec/frontend_integration/ide/ide_integration_spec.js
+++ b/spec/frontend_integration/ide/ide_integration_spec.js
@@ -1,17 +1,10 @@
-/**
- * WARNING: WIP
- *
- * Please do not copy from this spec or use it as an example for anything.
- *
- * This is in place to iteratively set up the frontend integration testing environment
- * and will be improved upon in a later iteration.
- *
- * See https://gitlab.com/gitlab-org/gitlab/-/issues/208800 for more information.
- */
import { TEST_HOST } from 'helpers/test_constants';
+import { waitForText } from 'helpers/wait_for_text';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
+import { createCommitId } from 'test_helpers/factories/commit_id';
import { initIde } from '~/ide';
import extendStore from '~/ide/stores/extend';
+import * as ideHelper from './ide_helper';
const TEST_DATASET = {
emptyStateSvgPath: '/test/empty_state.svg',
@@ -59,4 +52,38 @@ describe('WebIDE', () => {
expect(root).toMatchSnapshot();
});
+
+ it('user commits changes', async () => {
+ createComponent();
+
+ await ideHelper.createFile('foo/bar/test.txt', 'Lorem ipsum dolar sit');
+ await ideHelper.deleteFile('foo/bar/.gitkeep');
+ await ideHelper.commit();
+
+ const commitId = createCommitId(1);
+ const commitShortId = commitId.slice(0, 8);
+
+ await waitForText('All changes are committed');
+ await waitForText(commitShortId);
+
+ expect(mockServer.db.branches.findBy({ name: 'master' }).commit).toMatchObject({
+ short_id: commitShortId,
+ id: commitId,
+ message: 'Update foo/bar/test.txt\nDeleted foo/bar/.gitkeep',
+ __actions: [
+ {
+ action: 'create',
+ content: 'Lorem ipsum dolar sit\n',
+ encoding: 'text',
+ file_path: 'foo/bar/test.txt',
+ last_commit_id: '',
+ },
+ {
+ action: 'delete',
+ encoding: 'text',
+ file_path: 'foo/bar/.gitkeep',
+ },
+ ],
+ });
+ });
});
diff --git a/spec/frontend_integration/test_helpers/setup/setup_mock_server.js b/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
index 343aeebf88e..43a21deed25 100644
--- a/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
+++ b/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
@@ -1,13 +1,12 @@
import { createMockServer } from '../mock_server';
beforeEach(() => {
+ if (global.mockServer) {
+ global.mockServer.shutdown();
+ }
+
const server = createMockServer();
server.logging = false;
global.mockServer = server;
});
-
-afterEach(() => {
- global.mockServer.shutdown();
- global.mockServer = null;
-});
diff --git a/spec/graphql/features/feature_flag_spec.rb b/spec/graphql/features/feature_flag_spec.rb
index b484663d675..9ebc6e595a6 100644
--- a/spec/graphql/features/feature_flag_spec.rb
+++ b/spec/graphql/features/feature_flag_spec.rb
@@ -12,6 +12,10 @@ RSpec.describe 'Graphql Field feature flags' do
let(:query_string) { '{ item { name } }' }
let(:result) { execute_query(query_type)['data'] }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
subject { result }
describe 'Feature flagged field' do
diff --git a/spec/graphql/mutations/design_management/move_spec.rb b/spec/graphql/mutations/design_management/move_spec.rb
index 7519347d07c..d17483e69b3 100644
--- a/spec/graphql/mutations/design_management/move_spec.rb
+++ b/spec/graphql/mutations/design_management/move_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Mutations::DesignManagement::Move do
next_design: next_design&.to_global_id
}.compact
- mutation.resolve(args)
+ mutation.resolve(**args)
end
shared_examples "resource not available" do
diff --git a/spec/graphql/mutations/discussions/toggle_resolve_spec.rb b/spec/graphql/mutations/discussions/toggle_resolve_spec.rb
index d779a2227c1..2e5d41a8f1e 100644
--- a/spec/graphql/mutations/discussions/toggle_resolve_spec.rb
+++ b/spec/graphql/mutations/discussions/toggle_resolve_spec.rb
@@ -50,8 +50,8 @@ RSpec.describe Mutations::Discussions::ToggleResolve do
it 'raises an error' do
expect { subject }.to raise_error(
- Gitlab::Graphql::Errors::ArgumentError,
- "#{discussion.to_global_id} is not a valid ID for Discussion."
+ GraphQL::CoercionError,
+ "\"#{discussion.to_global_id}\" does not represent an instance of Discussion"
)
end
end
diff --git a/spec/graphql/mutations/issues/create_spec.rb b/spec/graphql/mutations/issues/create_spec.rb
new file mode 100644
index 00000000000..57658f6b358
--- /dev/null
+++ b/spec/graphql/mutations/issues/create_spec.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Issues::Create do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:assignee1) { create(:user) }
+ let_it_be(:assignee2) { create(:user) }
+ let_it_be(:project_label1) { create(:label, project: project) }
+ let_it_be(:project_label2) { create(:label, project: project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:new_label1) { FFaker::Lorem.word }
+ let_it_be(:new_label2) { new_label1 + 'Extra' }
+
+ let(:expected_attributes) do
+ {
+ title: 'new title',
+ description: 'new description',
+ confidential: true,
+ due_date: Date.tomorrow,
+ discussion_locked: true
+ }
+ end
+
+ let(:mutation_params) do
+ {
+ project_path: project.full_path,
+ milestone_id: milestone.to_global_id,
+ labels: [project_label1.title, project_label2.title, new_label1, new_label2],
+ assignee_ids: [assignee1.to_global_id, assignee2.to_global_id]
+ }.merge(expected_attributes)
+ end
+
+ let(:special_params) do
+ {
+ iid: non_existing_record_id,
+ created_at: 2.days.ago
+ }
+ end
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+ let(:mutated_issue) { subject[:issue] }
+
+ specify { expect(described_class).to require_graphql_authorizations(:create_issue) }
+
+ describe '#resolve' do
+ before do
+ stub_licensed_features(multiple_issue_assignees: false, issue_weights: false)
+ project.add_guest(assignee1)
+ project.add_guest(assignee2)
+ end
+
+ subject { mutation.resolve(mutation_params) }
+
+ context 'when the user does not have permission to create an issue' do
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when the user can create an issue' do
+ context 'when creating an issue as a developer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'creates issue with correct values' do
+ expect(mutated_issue).to have_attributes(expected_attributes)
+ expect(mutated_issue.milestone_id).to eq(milestone.id)
+ expect(mutated_issue.labels.pluck(:title)).to eq([project_label1.title, project_label2.title, new_label1, new_label2])
+ expect(mutated_issue.assignees.pluck(:id)).to eq([assignee1.id])
+ end
+
+ context 'when passing in label_ids' do
+ before do
+ mutation_params.delete(:labels)
+ mutation_params.merge!(label_ids: [project_label1.to_global_id, project_label2.to_global_id])
+ end
+
+ it 'creates issue with correct values' do
+ expect(mutated_issue.labels.pluck(:title)).to eq([project_label1.title, project_label2.title])
+ end
+ end
+
+ context 'when trying to create issue with restricted params' do
+ before do
+ mutation_params.merge!(special_params)
+ end
+
+ it 'ignores the special params' do
+ expect(mutated_issue.created_at).not_to be_like_time(special_params[:created_at])
+ expect(mutated_issue.iid).not_to eq(special_params[:iid])
+ end
+ end
+ end
+
+ context 'when creating an issue as owner' do
+ let_it_be(:user) { project.owner }
+
+ before do
+ mutation_params.merge!(special_params)
+ end
+
+ it 'sets the special params' do
+ expect(mutated_issue.created_at).to be_like_time(special_params[:created_at])
+ expect(mutated_issue.iid).to eq(special_params[:iid])
+ end
+ end
+ end
+ end
+
+ describe "#ready?" do
+ context 'when passing in both labels and label_ids' do
+ before do
+ mutation_params.merge!(label_ids: [project_label1.to_global_id, project_label2.to_global_id])
+ end
+
+ it 'raises exception when mutually exclusive params are given' do
+ expect { mutation.ready?(mutation_params) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, /one and only one of/)
+ end
+ end
+
+ context 'when passing only `discussion_to_resolve` param' do
+ before do
+ mutation_params.merge!(discussion_to_resolve: 'abc')
+ end
+
+ it 'raises exception when mutually exclusive params are given' do
+ expect { mutation.ready?(mutation_params) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, /to resolve a discussion please also provide `merge_request_to_resolve_discussions_of` parameter/)
+ end
+ end
+
+ context 'when passing only `merge_request_to_resolve_discussions_of` param' do
+ before do
+ mutation_params.merge!(merge_request_to_resolve_discussions_of: 'abc')
+ end
+
+ it 'does not raise an exception' do
+ expect { mutation.ready?(mutation_params) }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/issues/move_spec.rb b/spec/graphql/mutations/issues/move_spec.rb
new file mode 100644
index 00000000000..c8e9c556a3f
--- /dev/null
+++ b/spec/graphql/mutations/issues/move_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Issues::Move do
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:target_project) { create(:project) }
+
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ describe '#resolve' do
+ subject(:resolve) { mutation.resolve(project_path: issue.project.full_path, iid: issue.iid, target_project_path: target_project.full_path) }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { resolve }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when user does not have permissions' do
+ before do
+ issue.project.add_developer(user)
+ end
+
+ it 'returns error message' do
+ expect(resolve[:issue]).to eq(nil)
+ expect(resolve[:errors].first).to eq('Cannot move issue due to insufficient permissions!')
+ end
+ end
+
+ context 'when user has sufficient permissions' do
+ before do
+ issue.project.add_developer(user)
+ target_project.add_developer(user)
+ end
+
+ it 'moves issue' do
+ expect(resolve[:issue].project).to eq(target_project)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
index 15c15afd9b7..f9f4bdeb6fa 100644
--- a/spec/graphql/mutations/issues/update_spec.rb
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -70,6 +70,23 @@ RSpec.describe Mutations::Issues::Update do
end
end
+ context 'when changing state' do
+ let_it_be_with_refind(:issue) { create(:issue, project: project, state: :opened) }
+
+ it 'closes issue' do
+ mutation_params[:state_event] = 'close'
+
+ expect { subject }.to change { issue.reload.state }.from('opened').to('closed')
+ end
+
+ it 'reopens issue' do
+ issue.close
+ mutation_params[:state_event] = 'reopen'
+
+ expect { subject }.to change { issue.reload.state }.from('closed').to('opened')
+ end
+ end
+
context 'when changing labels' do
let_it_be(:label_1) { create(:label, project: project) }
let_it_be(:label_2) { create(:label, project: project) }
diff --git a/spec/graphql/mutations/todos/mark_done_spec.rb b/spec/graphql/mutations/todos/mark_done_spec.rb
index 51ad3e1a5d7..b5f2ff5d044 100644
--- a/spec/graphql/mutations/todos/mark_done_spec.rb
+++ b/spec/graphql/mutations/todos/mark_done_spec.rb
@@ -52,7 +52,8 @@ RSpec.describe Mutations::Todos::MarkDone do
end
it 'ignores invalid GIDs' do
- expect { mutation.resolve(id: 'invalid_gid') }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ expect { mutation.resolve(id: author.to_global_id.to_s) }
+ .to raise_error(::GraphQL::CoercionError)
expect(todo1.reload.state).to eq('pending')
expect(todo2.reload.state).to eq('done')
diff --git a/spec/graphql/mutations/todos/restore_many_spec.rb b/spec/graphql/mutations/todos/restore_many_spec.rb
index b3b3e057745..59995e33f2d 100644
--- a/spec/graphql/mutations/todos/restore_many_spec.rb
+++ b/spec/graphql/mutations/todos/restore_many_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Mutations::Todos::RestoreMany do
+ include GraphqlHelpers
+
let_it_be(:current_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:other_user) { create(:user) }
@@ -44,8 +46,9 @@ RSpec.describe Mutations::Todos::RestoreMany do
expect_states_were_not_changed
end
- it 'ignores invalid GIDs' do
- expect { mutation.resolve(ids: ['invalid_gid']) }.to raise_error(URI::BadURIError)
+ it 'raises an error with invalid or non-Todo GIDs' do
+ expect { mutation.resolve(ids: [author.to_global_id.to_s]) }
+ .to raise_error(GraphQL::CoercionError)
expect_states_were_not_changed
end
@@ -78,38 +81,12 @@ RSpec.describe Mutations::Todos::RestoreMany do
it 'fails if too many todos are requested for update' do
expect { restore_mutation([todo1] * 51) }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
end
-
- it 'does not update todos from another app' do
- todo4 = create(:todo)
- todo4_gid = ::URI::GID.parse("gid://otherapp/Todo/#{todo4.id}")
-
- result = mutation.resolve(ids: [todo4_gid.to_s])
-
- expect(result[:updated_ids]).to be_empty
-
- expect_states_were_not_changed
- end
-
- it 'does not update todos from another model' do
- todo4 = create(:todo)
- todo4_gid = ::URI::GID.parse("gid://#{GlobalID.app}/Project/#{todo4.id}")
-
- result = mutation.resolve(ids: [todo4_gid.to_s])
-
- expect(result[:updated_ids]).to be_empty
-
- expect_states_were_not_changed
- end
end
def restore_mutation(todos)
mutation.resolve(ids: todos.map { |todo| global_id_of(todo) } )
end
- def global_id_of(todo)
- todo.to_global_id.to_s
- end
-
def expect_states_were_not_changed
expect(todo1.reload.state).to eq('done')
expect(todo2.reload.state).to eq('pending')
diff --git a/spec/graphql/mutations/todos/restore_spec.rb b/spec/graphql/mutations/todos/restore_spec.rb
index 9043d7a44a8..22fb1bba7a8 100644
--- a/spec/graphql/mutations/todos/restore_spec.rb
+++ b/spec/graphql/mutations/todos/restore_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Mutations::Todos::Restore do
+ include GraphqlHelpers
+
let_it_be(:current_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:other_user) { create(:user) }
@@ -49,8 +51,9 @@ RSpec.describe Mutations::Todos::Restore do
expect(other_user_todo.reload.state).to eq('done')
end
- it 'ignores invalid GIDs' do
- expect { mutation.resolve(id: 'invalid_gid') }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'raises error for invalid GID' do
+ expect { mutation.resolve(id: author.to_global_id.to_s) }
+ .to raise_error(::GraphQL::CoercionError)
expect(todo1.reload.state).to eq('done')
expect(todo2.reload.state).to eq('pending')
@@ -61,8 +64,4 @@ RSpec.describe Mutations::Todos::Restore do
def restore_mutation(todo)
mutation.resolve(id: global_id_of(todo))
end
-
- def global_id_of(todo)
- todo.to_global_id.to_s
- end
end
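
The local global_id_of helpers removed in the two todo specs above are covered by the shared GraphqlHelpers module both specs now include. Judging purely from the deleted definitions, the shared helper amounts to the sketch below (a minimal reconstruction, not the verbatim module):

    # Minimal sketch, assuming the shared GraphqlHelpers helper mirrors the
    # removed local definitions above; `object` is any GlobalID-enabled model.
    def global_id_of(object)
      object.to_global_id.to_s
    end

With that in place, restore_mutation can keep calling global_id_of(todo) unchanged.
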
diff --git a/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb b/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb
index 76854be2daa..c5637d43382 100644
--- a/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb
+++ b/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb
@@ -5,9 +5,11 @@ require 'spec_helper'
RSpec.describe Resolvers::Admin::Analytics::InstanceStatistics::MeasurementsResolver do
include GraphqlHelpers
+ let_it_be(:admin_user) { create(:user, :admin) }
+ let(:current_user) { admin_user }
+
describe '#resolve' do
let_it_be(:user) { create(:user) }
- let_it_be(:admin_user) { create(:user, :admin) }
let_it_be(:project_measurement_new) { create(:instance_statistics_measurement, :project_count, recorded_at: 2.days.ago) }
let_it_be(:project_measurement_old) { create(:instance_statistics_measurement, :project_count, recorded_at: 10.days.ago) }
@@ -39,6 +41,37 @@ RSpec.describe Resolvers::Admin::Analytics::InstanceStatistics::MeasurementsReso
end
end
end
+
+ context 'when requesting pipeline counts by pipeline status' do
+ let_it_be(:pipelines_succeeded_measurement) { create(:instance_statistics_measurement, :pipelines_succeeded_count, recorded_at: 2.days.ago) }
+ let_it_be(:pipelines_skipped_measurement) { create(:instance_statistics_measurement, :pipelines_skipped_count, recorded_at: 2.days.ago) }
+
+ subject { resolve_measurements({ identifier: identifier }, { current_user: current_user }) }
+
+ context 'filter for pipelines_succeeded' do
+ let(:identifier) { 'pipelines_succeeded' }
+
+ it { is_expected.to eq([pipelines_succeeded_measurement]) }
+ end
+
+ context 'filter for pipelines_skipped' do
+ let(:identifier) { 'pipelines_skipped' }
+
+ it { is_expected.to eq([pipelines_skipped_measurement]) }
+ end
+
+ context 'filter for pipelines_failed' do
+ let(:identifier) { 'pipelines_failed' }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'filter for pipelines_canceled' do
+ let(:identifier) { 'pipelines_canceled' }
+
+ it { is_expected.to be_empty }
+ end
+ end
end
def resolve_measurements(args = {}, context = {})
diff --git a/spec/graphql/resolvers/board_lists_resolver_spec.rb b/spec/graphql/resolvers/board_lists_resolver_spec.rb
index fb6a5ccb781..c1d8041a1e0 100644
--- a/spec/graphql/resolvers/board_lists_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_lists_resolver_spec.rb
@@ -101,6 +101,12 @@ RSpec.describe Resolvers::BoardListsResolver do
end
def resolve_board_lists(args: {}, current_user: user)
- resolve(described_class, obj: board, args: args, ctx: { current_user: current_user })
+ context = GraphQL::Query::Context.new(
+ query: OpenStruct.new(schema: nil),
+ values: { current_user: current_user },
+ object: nil
+ )
+
+ resolve(described_class, obj: board, args: args, ctx: context)
end
end
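
The hand-built query context used here reappears in the RangeInputType spec later in this patch. As a standalone sketch (assuming graphql-ruby's GraphQL::Query::Context constructor takes the query:, values:, and object: keywords exactly as shown above; the helper name is purely illustrative), the pattern boils down to:

    require 'ostruct'

    # Hypothetical helper: builds the minimal context a resolver spec needs
    # once a plain hash is no longer accepted as ctx.
    def graphql_context(current_user: nil)
      GraphQL::Query::Context.new(
        query: OpenStruct.new(schema: nil), # stand-in query object, no schema needed
        values: { current_user: current_user },
        object: nil
      )
    end

A spec can then pass ctx: graphql_context(current_user: user) instead of a bare hash.
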
diff --git a/spec/graphql/resolvers/board_resolver_spec.rb b/spec/graphql/resolvers/board_resolver_spec.rb
new file mode 100644
index 00000000000..c70c696005f
--- /dev/null
+++ b/spec/graphql/resolvers/board_resolver_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::BoardResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ let(:dummy_gid) { 'gid://gitlab/Board/1' }
+
+ shared_examples_for 'group and project boards resolver' do
+ it 'does not create a default board' do
+ expect(resolve_board(id: dummy_gid)).to eq nil
+ end
+
+ it 'calls Boards::ListService' do
+ expect_next_instance_of(Boards::ListService) do |service|
+ expect(service).to receive(:execute).and_return([])
+ end
+
+ resolve_board(id: dummy_gid)
+ end
+
+ it 'requires an ID' do
+ expect do
+ resolve(described_class, obj: board_parent, args: {}, ctx: { current_user: user })
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+
+ context 'when querying for a single board' do
+ let(:board1) { create(:board, name: 'One', resource_parent: board_parent) }
+
+ it 'returns specified board' do
+ expect(resolve_board(id: global_id_of(board1))).to eq board1
+ end
+
+ it 'returns nil if board not found' do
+ outside_parent = create(board_parent.class.underscore.to_sym) # rubocop:disable Rails/SaveBang
+ outside_board = create(:board, name: 'outside board', resource_parent: outside_parent)
+
+ expect(resolve_board(id: global_id_of(outside_board))).to eq nil
+ end
+ end
+ end
+
+ describe '#resolve' do
+ context 'when there is no parent' do
+ let(:board_parent) { nil }
+
+ it 'returns nil if parent is nil' do
+ expect(resolve_board(id: dummy_gid)).to eq(nil)
+ end
+ end
+
+ context 'when project boards' do
+ let(:board_parent) { create(:project, :public, creator_id: user.id, namespace: user.namespace) }
+
+ it_behaves_like 'group and project boards resolver'
+ end
+
+ context 'when group boards' do
+ let(:board_parent) { create(:group) }
+
+ it_behaves_like 'group and project boards resolver'
+ end
+ end
+
+ def resolve_board(id:)
+ resolve(described_class, obj: board_parent, args: { id: id }, ctx: { current_user: user })
+ end
+end
diff --git a/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb
new file mode 100644
index 00000000000..1eb6f363d5b
--- /dev/null
+++ b/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::RunnerPlatformsResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ subject(:resolve_subject) { resolve(described_class) }
+
+ it 'returns all possible runner platforms' do
+ expect(resolve_subject).to include(
+ hash_including(name: :linux), hash_including(name: :osx),
+ hash_including(name: :windows), hash_including(name: :docker),
+ hash_including(name: :kubernetes)
+ )
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/concerns/looks_ahead_spec.rb b/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
index f13823085b8..ebea9e5522b 100644
--- a/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
+++ b/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
@@ -117,20 +117,6 @@ RSpec.describe LooksAhead do
query.result
end
- context 'the feature flag is off' do
- before do
- stub_feature_flags(described_class::FEATURE_FLAG => false)
- end
-
- it_behaves_like 'a working query on the test schema'
-
- it 'does not preload labels on issues' do
- expect(the_user.issues).not_to receive(:preload).with(:labels)
-
- query.result
- end
- end
-
it 'issues fewer queries than the naive approach' do
the_user.reload # ensure no attributes are loaded before we begin
naive = <<-GQL
diff --git a/spec/graphql/resolvers/group_milestones_resolver_spec.rb b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
index 05d0ec38192..d8ff8e9c1f2 100644
--- a/spec/graphql/resolvers/group_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
@@ -15,6 +15,12 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
let_it_be(:now) { Time.now }
let_it_be(:group) { create(:group, :private) }
+ def args(**arguments)
+ satisfy("contain only #{arguments.inspect}") do |passed|
+ expect(passed.compact).to match(arguments)
+ end
+ end
+
before_all do
group.add_developer(current_user)
end
@@ -30,7 +36,7 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
context 'without parameters' do
it 'calls MilestonesFinder to retrieve all milestones' do
expect(MilestonesFinder).to receive(:new)
- .with(ids: nil, group_ids: group.id, state: 'all', start_date: nil, end_date: nil)
+ .with(args(group_ids: group.id, state: 'all'))
.and_call_original
resolve_group_milestones
@@ -43,11 +49,22 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
end_date = start_date + 1.hour
expect(MilestonesFinder).to receive(:new)
- .with(ids: nil, group_ids: group.id, state: 'closed', start_date: start_date, end_date: end_date)
+ .with(args(group_ids: group.id, state: 'closed', start_date: start_date, end_date: end_date))
.and_call_original
resolve_group_milestones(start_date: start_date, end_date: end_date, state: 'closed')
end
+
+ it 'understands the timeframe argument' do
+ start_date = now
+ end_date = start_date + 1.hour
+
+ expect(MilestonesFinder).to receive(:new)
+ .with(args(group_ids: group.id, state: 'closed', start_date: start_date, end_date: end_date))
+ .and_call_original
+
+ resolve_group_milestones(timeframe: { start: start_date, end: end_date }, state: 'closed')
+ end
end
context 'by ids' do
@@ -55,7 +72,7 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
milestone = create(:milestone, group: group)
expect(MilestonesFinder).to receive(:new)
- .with(ids: [milestone.id.to_s], group_ids: group.id, state: 'all', start_date: nil, end_date: nil)
+ .with(args(ids: [milestone.id.to_s], group_ids: group.id, state: 'all'))
.and_call_original
resolve_group_milestones(ids: [milestone.to_global_id])
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index db5d009f0e7..3a6507f906c 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -46,6 +46,13 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(assignee_username: assignee.username)).to contain_exactly(issue2)
end
+ it 'filters by two assignees' do
+ assignee2 = create(:user)
+ issue2.update!(assignees: [assignee, assignee2])
+
+ expect(resolve_issues(assignee_id: [assignee.id, assignee2.id])).to contain_exactly(issue2)
+ end
+
it 'filters by assignee_id' do
expect(resolve_issues(assignee_id: assignee.id)).to contain_exactly(issue2)
end
@@ -58,6 +65,10 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(assignee_id: IssuableFinder::Params::FILTER_NONE)).to contain_exactly(issue1)
end
+ it 'filters by author' do
+ expect(resolve_issues(author_username: issue1.author.username)).to contain_exactly(issue1, issue2)
+ end
+
it 'filters by labels' do
expect(resolve_issues(label_name: [label1.title])).to contain_exactly(issue1, issue2)
expect(resolve_issues(label_name: [label1.title, label2.title])).to contain_exactly(issue2)
@@ -219,6 +230,21 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(sort: :milestone_due_desc).items).to eq([milestone_issue3, milestone_issue2, milestone_issue1])
end
end
+
+ context 'when sorting by severity' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue_high_severity) { create_issue_with_severity(project, severity: :high) }
+ let_it_be(:issue_low_severity) { create_issue_with_severity(project, severity: :low) }
+ let_it_be(:issue_no_severity) { create(:incident, project: project) }
+
+ it 'sorts issues ascending' do
+ expect(resolve_issues(sort: :severity_asc)).to eq([issue_no_severity, issue_low_severity, issue_high_severity])
+ end
+
+ it 'sorts issues descending' do
+ expect(resolve_issues(sort: :severity_desc)).to eq([issue_high_severity, issue_low_severity, issue_no_severity])
+ end
+ end
end
it 'returns issues user can see' do
@@ -304,6 +330,13 @@ RSpec.describe Resolvers::IssuesResolver do
expect(field.to_graphql.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
end
+ def create_issue_with_severity(project, severity:)
+ issue = create(:incident, project: project)
+ create(:issuable_severity, issue: issue, severity: severity)
+
+ issue
+ end
+
def resolve_issues(args = {}, context = { current_user: current_user })
resolve(described_class, obj: project, args: args, ctx: context)
end
diff --git a/spec/graphql/resolvers/project_milestones_resolver_spec.rb b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
index e0b250cfe7c..b641a54393e 100644
--- a/spec/graphql/resolvers/project_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
@@ -13,13 +13,19 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
project.add_developer(current_user)
end
+ def args(**arguments)
+ satisfy("contain only #{arguments.inspect}") do |passed|
+ expect(passed.compact).to match(arguments)
+ end
+ end
+
def resolve_project_milestones(args = {}, context = { current_user: current_user })
resolve(described_class, obj: project, args: args, ctx: context)
end
it 'calls MilestonesFinder to retrieve all milestones' do
expect(MilestonesFinder).to receive(:new)
- .with(ids: nil, project_ids: project.id, state: 'all', start_date: nil, end_date: nil)
+ .with(args(project_ids: project.id, state: 'all'))
.and_call_original
resolve_project_milestones
@@ -36,7 +42,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
it 'calls MilestonesFinder with correct parameters' do
expect(MilestonesFinder).to receive(:new)
- .with(ids: nil, project_ids: project.id, group_ids: contain_exactly(group, parent_group), state: 'all', start_date: nil, end_date: nil)
+ .with(args(project_ids: project.id, group_ids: contain_exactly(group, parent_group), state: 'all'))
.and_call_original
resolve_project_milestones(include_ancestors: true)
@@ -48,7 +54,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
milestone = create(:milestone, project: project)
expect(MilestonesFinder).to receive(:new)
- .with(ids: [milestone.id.to_s], project_ids: project.id, state: 'all', start_date: nil, end_date: nil)
+ .with(args(ids: [milestone.id.to_s], project_ids: project.id, state: 'all'))
.and_call_original
resolve_project_milestones(ids: [milestone.to_global_id])
@@ -58,7 +64,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
context 'by state' do
it 'calls MilestonesFinder with correct parameters' do
expect(MilestonesFinder).to receive(:new)
- .with(ids: nil, project_ids: project.id, state: 'closed', start_date: nil, end_date: nil)
+ .with(args(project_ids: project.id, state: 'closed'))
.and_call_original
resolve_project_milestones(state: 'closed')
@@ -72,7 +78,7 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
end_date = Time.now + 5.days
expect(MilestonesFinder).to receive(:new)
- .with(ids: nil, project_ids: project.id, state: 'all', start_date: start_date, end_date: end_date)
+ .with(args(project_ids: project.id, state: 'all', start_date: start_date, end_date: end_date))
.and_call_original
resolve_project_milestones(start_date: start_date, end_date: end_date)
@@ -102,6 +108,51 @@ RSpec.describe Resolvers::ProjectMilestonesResolver do
end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
end
end
+
+ context 'when passing a timeframe' do
+ it 'calls MilestonesFinder with correct parameters' do
+ start_date = Time.now
+ end_date = Time.now + 5.days
+
+ expect(MilestonesFinder).to receive(:new)
+ .with(args(project_ids: project.id, state: 'all', start_date: start_date, end_date: end_date))
+ .and_call_original
+
+ resolve_project_milestones(timeframe: { start: start_date, end: end_date })
+ end
+ end
+ end
+
+ context 'when title is present' do
+ it 'calls MilestonesFinder with correct parameters' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(args(title: '13.5', state: 'all', project_ids: project.id))
+ .and_call_original
+
+ resolve_project_milestones(title: '13.5')
+ end
+ end
+
+ context 'when search_title is present' do
+ it 'calls MilestonesFinder with correct parameters' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(args(search_title: '13', state: 'all', project_ids: project.id))
+ .and_call_original
+
+ resolve_project_milestones(search_title: '13')
+ end
+ end
+
+ context 'when containing date is present' do
+ it 'calls MilestonesFinder with correct parameters' do
+ t = Time.now
+
+ expect(MilestonesFinder).to receive(:new)
+ .with(args(containing_date: t, state: 'all', project_ids: project.id))
+ .and_call_original
+
+ resolve_project_milestones(containing_date: t)
+ end
end
context 'when user cannot read milestones' do
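
The args matcher defined in both milestone resolver specs leans on Hash#compact: nil-valued defaults such as start_date: nil are dropped before matching, so each expectation only has to list the arguments the test actually cares about. A tiny standalone illustration (plain Ruby, example values hypothetical):

    # Hash#compact removes nil-valued entries, which is what lets the
    # expectations above omit ids: nil, start_date: nil, end_date: nil.
    params = { ids: nil, group_ids: 42, state: 'all', start_date: nil, end_date: nil }
    params.compact # => { group_ids: 42, state: 'all' }
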
diff --git a/spec/graphql/resolvers/projects_resolver_spec.rb b/spec/graphql/resolvers/projects_resolver_spec.rb
index d22ffeed740..83a26062957 100644
--- a/spec/graphql/resolvers/projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects_resolver_spec.rb
@@ -8,10 +8,14 @@ RSpec.describe Resolvers::ProjectsResolver do
describe '#resolve' do
subject { resolve(described_class, obj: nil, args: filters, ctx: { current_user: current_user }) }
+ let_it_be(:group) { create(:group, name: 'public-group') }
+ let_it_be(:private_group) { create(:group, name: 'private-group') }
let_it_be(:project) { create(:project, :public) }
let_it_be(:other_project) { create(:project, :public) }
+ let_it_be(:group_project) { create(:project, :public, group: group) }
let_it_be(:private_project) { create(:project, :private) }
let_it_be(:other_private_project) { create(:project, :private) }
+ let_it_be(:private_group_project) { create(:project, :private, group: private_group) }
let_it_be(:user) { create(:user) }
@@ -20,6 +24,11 @@ RSpec.describe Resolvers::ProjectsResolver do
before_all do
project.add_developer(user)
private_project.add_developer(user)
+ private_group.add_developer(user)
+ end
+
+ before do
+ stub_feature_flags(project_finder_similarity_sort: false)
end
context 'when user is not logged in' do
@@ -27,7 +36,7 @@ RSpec.describe Resolvers::ProjectsResolver do
context 'when no filters are applied' do
it 'returns all public projects' do
- is_expected.to contain_exactly(project, other_project)
+ is_expected.to contain_exactly(project, other_project, group_project)
end
context 'when search filter is provided' do
@@ -45,6 +54,22 @@ RSpec.describe Resolvers::ProjectsResolver do
is_expected.to be_empty
end
end
+
+ context 'when searchNamespaces filter is provided' do
+ let(:filters) { { search: 'group', search_namespaces: true } }
+
+ it 'returns projects in a matching namespace' do
+ is_expected.to contain_exactly(group_project)
+ end
+ end
+
+ context 'when searchNamespaces filter is false' do
+ let(:filters) { { search: 'group', search_namespaces: false } }
+
+ it 'ignores namespace matches' do
+ is_expected.to be_empty
+ end
+ end
end
end
@@ -53,7 +78,7 @@ RSpec.describe Resolvers::ProjectsResolver do
context 'when no filters are applied' do
it 'returns all visible projects for the user' do
- is_expected.to contain_exactly(project, other_project, private_project)
+ is_expected.to contain_exactly(project, other_project, group_project, private_project, private_group_project)
end
context 'when search filter is provided' do
@@ -68,7 +93,23 @@ RSpec.describe Resolvers::ProjectsResolver do
let(:filters) { { membership: true } }
it 'returns projects that user is member of' do
- is_expected.to contain_exactly(project, private_project)
+ is_expected.to contain_exactly(project, private_project, private_group_project)
+ end
+ end
+
+ context 'when searchNamespaces filter is provided' do
+ let(:filters) { { search: 'group', search_namespaces: true } }
+
+ it 'returns projects from matching group' do
+ is_expected.to contain_exactly(group_project, private_group_project)
+ end
+ end
+
+ context 'when searchNamespaces filter is false' do
+ let(:filters) { { search: 'group', search_namespaces: false } }
+
+ it 'ignores namespace matches' do
+ is_expected.to be_empty
end
end
@@ -79,6 +120,24 @@ RSpec.describe Resolvers::ProjectsResolver do
is_expected.to contain_exactly(project)
end
end
+
+ context 'when sort is similarity' do
+ let_it_be(:named_project1) { create(:project, :public, name: 'projAB', path: 'projAB') }
+ let_it_be(:named_project2) { create(:project, :public, name: 'projABC', path: 'projABC') }
+ let_it_be(:named_project3) { create(:project, :public, name: 'projA', path: 'projA') }
+
+ let(:filters) { { search: 'projA', sort: 'similarity' } }
+
+ it 'returns projects in order of similarity to search' do
+ stub_feature_flags(project_finder_similarity_sort: true)
+
+ is_expected.to eq([named_project3, named_project1, named_project2])
+ end
+
+ it 'does not return projects in order of similarity to search when the flag is off' do
+ is_expected.not_to eq([named_project3, named_project1, named_project2])
+ end
+ end
end
end
end
diff --git a/spec/graphql/resolvers/snippets/blobs_resolver_spec.rb b/spec/graphql/resolvers/snippets/blobs_resolver_spec.rb
new file mode 100644
index 00000000000..fdbd87c32be
--- /dev/null
+++ b/spec/graphql/resolvers/snippets/blobs_resolver_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Snippets::BlobsResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:snippet) { create(:personal_snippet, :private, :repository, author: current_user) }
+
+ context 'when user is not authorized' do
+ let(:other_user) { create(:user) }
+
+ it 'raises an error' do
+ expect do
+ resolve_blobs(snippet, user: other_user)
+ end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when using no filter' do
+ it 'returns all snippet blobs' do
+ expect(resolve_blobs(snippet).map(&:path)).to contain_exactly(*snippet.list_files)
+ end
+ end
+
+ context 'when using filters' do
+ context 'when paths is a single string' do
+ it 'returns an array of files' do
+ path = 'CHANGELOG'
+
+ expect(resolve_blobs(snippet, args: { paths: path }).first.path).to eq(path)
+ end
+ end
+
+ context 'when paths is an array of strings' do
+ it 'returns an array of files' do
+ paths = ['CHANGELOG', 'README.md']
+
+ expect(resolve_blobs(snippet, args: { paths: paths }).map(&:path)).to contain_exactly(*paths)
+ end
+ end
+ end
+ end
+
+ def resolve_blobs(snippet, user: current_user, args: {})
+ resolve(described_class, args: args, ctx: { current_user: user }, obj: snippet)
+ end
+end
diff --git a/spec/graphql/resolvers/terraform/states_resolver_spec.rb b/spec/graphql/resolvers/terraform/states_resolver_spec.rb
new file mode 100644
index 00000000000..64b515528cd
--- /dev/null
+++ b/spec/graphql/resolvers/terraform/states_resolver_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Terraform::StatesResolver do
+ include GraphqlHelpers
+
+ it { expect(described_class.type).to eq(Types::Terraform::StateType) }
+ it { expect(described_class.null).to be_truthy }
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:production_state) { create(:terraform_state, project: project) }
+ let_it_be(:staging_state) { create(:terraform_state, project: project) }
+ let_it_be(:other_state) { create(:terraform_state) }
+
+ let(:ctx) { Hash(current_user: user) }
+ let(:user) { create(:user, developer_projects: [project]) }
+
+ subject { resolve(described_class, obj: project, ctx: ctx) }
+
+ it 'returns states associated with the project' do
+ expect(subject).to contain_exactly(production_state, staging_state)
+ end
+
+ context 'when user does not have permission' do
+ let(:user) { create(:user) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/graphql/types/alert_management/alert_type_spec.rb b/spec/graphql/types/alert_management/alert_type_spec.rb
index e14c189d4b6..82b48a20708 100644
--- a/spec/graphql/types/alert_management/alert_type_spec.rb
+++ b/spec/graphql/types/alert_management/alert_type_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe GitlabSchema.types['AlertManagementAlert'] do
todos
details_url
prometheus_alert
+ environment
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/alert_management/status_enum_spec.rb b/spec/graphql/types/alert_management/status_enum_spec.rb
index ac7a8eb53f6..1252efabe4c 100644
--- a/spec/graphql/types/alert_management/status_enum_spec.rb
+++ b/spec/graphql/types/alert_management/status_enum_spec.rb
@@ -9,10 +9,10 @@ RSpec.describe GitlabSchema.types['AlertManagementStatus'] do
using RSpec::Parameterized::TableSyntax
where(:status_name, :status_value) do
- 'TRIGGERED' | 0
- 'ACKNOWLEDGED' | 1
- 'RESOLVED' | 2
- 'IGNORED' | 3
+ 'TRIGGERED' | :triggered
+ 'ACKNOWLEDGED' | :acknowledged
+ 'RESOLVED' | :resolved
+ 'IGNORED' | :ignored
end
with_them do
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index bcfbd7f2480..d61ea6aa6e9 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -126,6 +126,10 @@ RSpec.describe Types::BaseField do
let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE, feature_flag: flag, null: false) }
let(:context) { {} }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
it 'returns false if the feature is not enabled' do
stub_feature_flags(flag => false)
diff --git a/spec/graphql/types/ci/detailed_status_type_spec.rb b/spec/graphql/types/ci/detailed_status_type_spec.rb
index 67199848df0..ddb3a1450df 100644
--- a/spec/graphql/types/ci/detailed_status_type_spec.rb
+++ b/spec/graphql/types/ci/detailed_status_type_spec.rb
@@ -8,6 +8,6 @@ RSpec.describe Types::Ci::DetailedStatusType do
it "has all fields" do
expect(described_class).to have_graphql_fields(:group, :icon, :favicon,
:details_path, :has_details,
- :label, :text, :tooltip)
+ :label, :text, :tooltip, :action)
end
end
diff --git a/spec/graphql/types/ci/group_type_spec.rb b/spec/graphql/types/ci/group_type_spec.rb
index 8d547b19af3..d7ce5602612 100644
--- a/spec/graphql/types/ci/group_type_spec.rb
+++ b/spec/graphql/types/ci/group_type_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Types::Ci::GroupType do
name
size
jobs
+ detailedStatus
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
index faf3a95cf25..3a54ed2efed 100644
--- a/spec/graphql/types/ci/job_type_spec.rb
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe Types::Ci::JobType do
expected_fields = %i[
name
needs
+ detailedStatus
+ scheduledAt
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/runner_architecture_type_spec.rb b/spec/graphql/types/ci/runner_architecture_type_spec.rb
new file mode 100644
index 00000000000..527adef8cf9
--- /dev/null
+++ b/spec/graphql/types/ci/runner_architecture_type_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::RunnerArchitectureType do
+ specify { expect(described_class.graphql_name).to eq('RunnerArchitecture') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ name
+ download_location
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/runner_platform_type_spec.rb b/spec/graphql/types/ci/runner_platform_type_spec.rb
new file mode 100644
index 00000000000..66b83f607d0
--- /dev/null
+++ b/spec/graphql/types/ci/runner_platform_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::RunnerPlatformType do
+ specify { expect(described_class.graphql_name).to eq('RunnerPlatform') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ name
+ human_readable_name
+ architectures
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/stage_type_spec.rb b/spec/graphql/types/ci/stage_type_spec.rb
index 0c352ed27aa..9a8d4fa96a3 100644
--- a/spec/graphql/types/ci/stage_type_spec.rb
+++ b/spec/graphql/types/ci/stage_type_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Types::Ci::StageType do
expected_fields = %i[
name
groups
+ detailedStatus
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/status_action_type_spec.rb b/spec/graphql/types/ci/status_action_type_spec.rb
new file mode 100644
index 00000000000..8a99068e44f
--- /dev/null
+++ b/spec/graphql/types/ci/status_action_type_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::StatusActionType do
+ specify { expect(described_class.graphql_name).to eq('StatusAction') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ buttonTitle
+ icon
+ path
+ method
+ title
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb b/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb
new file mode 100644
index 00000000000..f536d91aeda
--- /dev/null
+++ b/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['DesignCollectionCopyState'] do
+ it { expect(described_class.graphql_name).to eq('DesignCollectionCopyState') }
+
+ it 'exposes the correct event states' do
+ expect(described_class.values.keys).to match_array(%w(READY IN_PROGRESS ERROR))
+ end
+end
diff --git a/spec/graphql/types/design_management/design_collection_type_spec.rb b/spec/graphql/types/design_management/design_collection_type_spec.rb
index 6b1d3a87c2d..83208705249 100644
--- a/spec/graphql/types/design_management/design_collection_type_spec.rb
+++ b/spec/graphql/types/design_management/design_collection_type_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['DesignCollection'] do
it { expect(described_class).to require_graphql_authorizations(:read_design) }
it 'has the expected fields' do
- expected_fields = %i[project issue designs versions version designAtVersion design]
+ expected_fields = %i[project issue designs versions version designAtVersion design copyState]
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/environment_type_spec.rb b/spec/graphql/types/environment_type_spec.rb
index abeeeba543f..2220f847e4e 100644
--- a/spec/graphql/types/environment_type_spec.rb
+++ b/spec/graphql/types/environment_type_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe GitlabSchema.types['Environment'] do
it 'has the expected fields' do
expected_fields = %w[
- name id state metrics_dashboard latest_opened_most_severe_alert
+ name id state metrics_dashboard latest_opened_most_severe_alert path
]
expect(described_class).to have_graphql_fields(*expected_fields)
@@ -28,6 +28,7 @@ RSpec.describe GitlabSchema.types['Environment'] do
project(fullPath: "#{project.full_path}") {
environment(name: "#{environment.name}") {
name
+ path
state
}
}
@@ -43,6 +44,18 @@ RSpec.describe GitlabSchema.types['Environment'] do
expect(subject['data']['project']['environment']['name']).to eq(environment.name)
end
+ it 'returns the path when the feature is enabled' do
+ expect(subject['data']['project']['environment']['path']).to eq(
+ Gitlab::Routing.url_helpers.project_environment_path(project, environment)
+ )
+ end
+
+ it 'does not return the path when the feature is disabled' do
+ stub_feature_flags(expose_environment_path_in_alert_details: false)
+
+ expect(subject['data']['project']['environment']['path']).to be_nil
+ end
+
context 'when query alert data for the environment' do
let_it_be(:query) do
%(
diff --git a/spec/graphql/types/global_id_type_spec.rb b/spec/graphql/types/global_id_type_spec.rb
index 2a7b26f66b0..7589b0e285e 100644
--- a/spec/graphql/types/global_id_type_spec.rb
+++ b/spec/graphql/types/global_id_type_spec.rb
@@ -99,8 +99,6 @@ RSpec.describe Types::GlobalIDType do
end
describe 'compatibility' do
- # Simplified schema to test compatibility
-
def query(doc, vars)
GraphQL::Query.new(schema, document: doc, context: {}, variables: vars)
end
@@ -112,6 +110,7 @@ RSpec.describe Types::GlobalIDType do
all_types = [::GraphQL::ID_TYPE, ::Types::GlobalIDType, ::Types::GlobalIDType[::Project]]
shared_examples 'a working query' do
+ # Simplified schema to test compatibility
let!(:schema) do
# capture values so they can be closed over
arg_type = argument_type
@@ -135,10 +134,21 @@ RSpec.describe Types::GlobalIDType do
argument :id, arg_type, required: true
end
+ # This is needed so that all types are always registered as input types
+ field :echo, String, null: true do
+ argument :id, ::GraphQL::ID_TYPE, required: false
+ argument :gid, ::Types::GlobalIDType, required: false
+ argument :pid, ::Types::GlobalIDType[::Project], required: false
+ end
+
def project_by_id(id:)
gid = ::Types::GlobalIDType[::Project].coerce_isolated_input(id)
gid.model_class.find(gid.model_id)
end
+
+ def echo(id: nil, gid: nil, pid: nil)
+ "id: #{id}, gid: #{gid}, pid: #{pid}"
+ end
end)
end
end
@@ -152,7 +162,7 @@ RSpec.describe Types::GlobalIDType do
end
end
- context 'when the argument is declared as ID' do
+ context 'when the client declares the argument as ID, the actual argument can be any type' do
let(:document) do
<<-GRAPHQL
query($projectId: ID!){
@@ -163,16 +173,16 @@ RSpec.describe Types::GlobalIDType do
GRAPHQL
end
- let(:argument_type) { ::GraphQL::ID_TYPE }
-
- where(:result_type) { all_types }
+ where(:result_type, :argument_type) do
+ all_types.flat_map { |arg_type| all_types.zip([arg_type].cycle) }
+ end
with_them do
it_behaves_like 'a working query'
end
end
- context 'when the argument is declared as GlobalID' do
+ context 'when the client passes the argument as GlobalID' do
let(:document) do
<<-GRAPHQL
query($projectId: GlobalID!) {
@@ -192,7 +202,7 @@ RSpec.describe Types::GlobalIDType do
end
end
- context 'when the argument is declared as ProjectID' do
+ context 'when the client passes the argument as ProjectID' do
let(:document) do
<<-GRAPHQL
query($projectId: ProjectID!) {
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index bf7ddebaf5b..7d14ef87551 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe GitlabSchema.types['Group'] do
subgroup_creation_level require_two_factor_authentication
two_factor_grace_period auto_devops_enabled emails_disabled
mentions_disabled parent boards milestones group_members
+ merge_requests
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/issue_sort_enum_spec.rb b/spec/graphql/types/issue_sort_enum_spec.rb
index 9313d3aee84..4433709d193 100644
--- a/spec/graphql/types/issue_sort_enum_spec.rb
+++ b/spec/graphql/types/issue_sort_enum_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['IssueSort'] do
it 'exposes all the existing issue sort values' do
expect(described_class.values.keys).to include(
- *%w[DUE_DATE_ASC DUE_DATE_DESC RELATIVE_POSITION_ASC]
+ *%w[DUE_DATE_ASC DUE_DATE_DESC RELATIVE_POSITION_ASC SEVERITY_ASC SEVERITY_DESC]
)
end
end
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index 46aebbdabeb..9d901655b7b 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -27,16 +27,51 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do
upvotes downvotes head_pipeline pipelines task_completion_status
milestone assignees participants subscribed labels discussion_locked time_estimate
total_time_spent reference author merged_at commit_count current_user_todos
- conflicts auto_merge_enabled
+ conflicts auto_merge_enabled approved_by
]
if Gitlab.ee?
expected_fields << 'approved'
expected_fields << 'approvals_left'
expected_fields << 'approvals_required'
- expected_fields << 'approved_by'
end
expect(described_class).to have_graphql_fields(*expected_fields)
end
+
+ describe '#diff_stats_summary' do
+ subject { GitlabSchema.execute(query, context: { current_user: current_user }).as_json }
+
+ let(:current_user) { create :admin }
+ let(:query) do
+ %(
+ {
+ project(fullPath: "#{project.full_path}") {
+ mergeRequests {
+ nodes {
+ diffStatsSummary {
+ additions, deletions
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ let(:project) { create(:project, :public) }
+ let(:merge_request) { create(:merge_request, target_project: project, source_project: project) }
+
+ let(:response) { subject.dig('data', 'project', 'mergeRequests', 'nodes').first['diffStatsSummary'] }
+
+ context 'when MR metrics has additions and deletions' do
+ before do
+ merge_request.metrics.update!(added_lines: 5, removed_lines: 8)
+ end
+
+ it 'pulls out data from metrics object' do
+ expect(response).to match('additions' => 5, 'deletions' => 8)
+ end
+ end
+ end
end
diff --git a/spec/graphql/types/package_type_enum_spec.rb b/spec/graphql/types/package_type_enum_spec.rb
index 80a20a68bc2..407d5786f65 100644
--- a/spec/graphql/types/package_type_enum_spec.rb
+++ b/spec/graphql/types/package_type_enum_spec.rb
@@ -4,6 +4,6 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['PackageTypeEnum'] do
it 'exposes all package types' do
- expect(described_class.values.keys).to contain_exactly(*%w[MAVEN NPM CONAN NUGET PYPI COMPOSER GENERIC])
+ expect(described_class.values.keys).to contain_exactly(*%w[MAVEN NPM CONAN NUGET PYPI COMPOSER GENERIC GOLANG DEBIAN])
end
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 44a89bfa35e..8aa9e1138cc 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe GitlabSchema.types['Project'] do
environment boards jira_import_status jira_imports services releases release
alert_management_alerts alert_management_alert alert_management_alert_status_counts
container_expiration_policy service_desk_enabled service_desk_address
- issue_status_counts
+ issue_status_counts terraform_states
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -154,5 +154,12 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::ContainerExpirationPolicyType) }
end
+ describe 'terraform states field' do
+ subject { described_class.fields['terraformStates'] }
+
+ it { is_expected.to have_graphql_type(Types::Terraform::StateType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Terraform::StatesResolver) }
+ end
+
it_behaves_like 'a GraphQL type with labels'
end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 11f780a4f3f..1d9ca8323f8 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe GitlabSchema.types['Query'] do
users
issue
instance_statistics_measurements
+ runner_platforms
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
@@ -67,8 +68,16 @@ RSpec.describe GitlabSchema.types['Query'] do
describe 'instance_statistics_measurements field' do
subject { described_class.fields['instanceStatisticsMeasurements'] }
- it 'returns issue' do
+ it 'returns instance statistics measurements' do
is_expected.to have_graphql_type(Types::Admin::Analytics::InstanceStatistics::MeasurementType.connection_type)
end
end
+
+ describe 'runner_platforms field' do
+ subject { described_class.fields['runnerPlatforms'] }
+
+ it 'returns runner platforms' do
+ is_expected.to have_graphql_type(Types::Ci::RunnerPlatformType.connection_type)
+ end
+ end
end
diff --git a/spec/graphql/types/range_input_type_spec.rb b/spec/graphql/types/range_input_type_spec.rb
new file mode 100644
index 00000000000..aa6fd72cf13
--- /dev/null
+++ b/spec/graphql/types/range_input_type_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::RangeInputType do
+ let(:of_integer) { ::GraphQL::INT_TYPE }
+
+ context 'parameterized on Integer' do
+ let(:type) { described_class[of_integer] }
+
+ it 'accepts start and end' do
+ input = { start: 1, end: 10 }
+ output = { start: 1, end: 10 }
+
+ expect(type.coerce_isolated_input(input)).to eq(output)
+ end
+
+ it 'rejects inverted ranges' do
+ input = { start: 10, end: 1 }
+
+ expect { type.coerce_isolated_input(input) }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+
+ it 'follows expected subtyping relationships for instances' do
+ context = GraphQL::Query::Context.new(
+ query: OpenStruct.new(schema: nil),
+ values: {},
+ object: nil
+ )
+ instance = described_class[of_integer].new(context: context, defaults_used: [], ruby_kwargs: {})
+
+ expect(instance).to be_a_kind_of(described_class)
+ expect(instance).to be_a_kind_of(described_class[of_integer])
+ expect(instance).not_to be_a_kind_of(described_class[GraphQL::ID_TYPE])
+ end
+
+ it 'follows expected subtyping relationships for classes' do
+ expect(described_class[of_integer]).to be < described_class
+ expect(described_class[of_integer]).not_to be < described_class[GraphQL::ID_TYPE]
+ expect(described_class[of_integer]).not_to be < described_class[of_integer, false]
+ end
+end
diff --git a/spec/graphql/types/root_storage_statistics_type_spec.rb b/spec/graphql/types/root_storage_statistics_type_spec.rb
index f01c55cbccb..79d474f13ad 100644
--- a/spec/graphql/types/root_storage_statistics_type_spec.rb
+++ b/spec/graphql/types/root_storage_statistics_type_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe GitlabSchema.types['RootStorageStatistics'] do
it 'has all the required fields' do
expect(described_class).to have_graphql_fields(:storage_size, :repository_size, :lfs_objects_size,
- :build_artifacts_size, :packages_size, :wiki_size, :snippets_size)
+ :build_artifacts_size, :packages_size, :wiki_size, :snippets_size,
+ :pipeline_artifacts_size)
end
specify { expect(described_class).to require_graphql_authorizations(:read_statistics) }
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index 86af69f1294..e73665a1b1d 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -16,6 +16,15 @@ RSpec.describe GitlabSchema.types['Snippet'] do
expect(described_class).to have_graphql_fields(*expected_fields)
end
+ describe 'blobs field' do
+ subject { described_class.fields['blobs'] }
+
+ it 'returns blobs' do
+ is_expected.to have_graphql_type(Types::Snippets::BlobType.connection_type)
+ is_expected.to have_graphql_resolver(Resolvers::Snippets::BlobsResolver)
+ end
+ end
+
context 'when restricted visibility level is set to public' do
let_it_be(:snippet) { create(:personal_snippet, :repository, :public, author: user) }
@@ -115,7 +124,7 @@ RSpec.describe GitlabSchema.types['Snippet'] do
end
describe '#blob' do
- let(:query_blob) { subject.dig('data', 'snippets', 'edges')[0]['node']['blob'] }
+ let(:query_blob) { subject.dig('data', 'snippets', 'nodes')[0]['blob'] }
subject { GitlabSchema.execute(snippet_query_for(field: 'blob'), context: { current_user: user }).as_json }
@@ -142,9 +151,26 @@ RSpec.describe GitlabSchema.types['Snippet'] do
describe '#blobs' do
let_it_be(:snippet) { create(:personal_snippet, :public, author: user) }
- let(:query_blobs) { subject.dig('data', 'snippets', 'edges')[0]['node']['blobs'] }
+ let(:query_blobs) { subject.dig('data', 'snippets', 'nodes')[0].dig('blobs', 'nodes') }
+ let(:paths) { [] }
+ let(:query) do
+ %(
+ {
+ snippets {
+ nodes {
+ blobs(paths: #{paths}) {
+ nodes {
+ name
+ path
+ }
+ }
+ }
+ }
+ }
+ )
+ end
- subject { GitlabSchema.execute(snippet_query_for(field: 'blobs'), context: { current_user: user }).as_json }
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
shared_examples 'an array' do
it 'returns an array of snippet blobs' do
@@ -174,6 +200,18 @@ RSpec.describe GitlabSchema.types['Snippet'] do
expect(resulting_blobs_names).to match_array(blobs.map(&:name))
end
+
+ context 'when specific path is set' do
+ let(:paths) { ['CHANGELOG'] }
+
+ it_behaves_like 'an array'
+
+ it 'returns specific files' do
+ resulting_blobs_names = query_blobs.map { |b| b['name'] }
+
+ expect(resulting_blobs_names).to match(paths)
+ end
+ end
end
end
@@ -181,12 +219,10 @@ RSpec.describe GitlabSchema.types['Snippet'] do
%(
{
snippets {
- edges {
- node {
- #{field} {
- name
- path
- }
+ nodes {
+ #{field} {
+ name
+ path
}
}
}
diff --git a/spec/graphql/types/terraform/state_type_spec.rb b/spec/graphql/types/terraform/state_type_spec.rb
new file mode 100644
index 00000000000..51508208046
--- /dev/null
+++ b/spec/graphql/types/terraform/state_type_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['TerraformState'] do
+ it { expect(described_class.graphql_name).to eq('TerraformState') }
+ it { expect(described_class).to require_graphql_authorizations(:read_terraform_state) }
+
+ describe 'fields' do
+ let(:fields) { %i[id name locked_by_user locked_at created_at updated_at] }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+
+ it { expect(described_class.fields['id'].type).to be_non_null }
+ it { expect(described_class.fields['name'].type).to be_non_null }
+ it { expect(described_class.fields['lockedByUser'].type).not_to be_non_null }
+ it { expect(described_class.fields['lockedAt'].type).not_to be_non_null }
+ it { expect(described_class.fields['createdAt'].type).to be_non_null }
+ it { expect(described_class.fields['updatedAt'].type).to be_non_null }
+ end
+end
diff --git a/spec/graphql/types/timeframe_type_spec.rb b/spec/graphql/types/timeframe_type_spec.rb
new file mode 100644
index 00000000000..dfde3242897
--- /dev/null
+++ b/spec/graphql/types/timeframe_type_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Timeframe'] do
+ let(:input) { { start: "2018-06-04", end: "2020-10-06" } }
+ let(:output) { { start: Date.parse(input[:start]), end: Date.parse(input[:end]) } }
+
+ it 'coerces ISO-dates into Time objects' do
+ expect(described_class.coerce_isolated_input(input)).to eq(output)
+ end
+
+ it 'rejects invalid input' do
+ input[:start] = 'foo'
+
+ expect { described_class.coerce_isolated_input(input) }
+ .to raise_error(GraphQL::CoercionError)
+ end
+
+ it 'accepts times as input' do
+ with_time = input.merge(start: '2018-06-04T13:48:14Z')
+
+ expect(described_class.coerce_isolated_input(with_time)).to eq(output)
+ end
+
+ it 'requires both ends of the range' do
+ types = described_class.arguments.slice('start', 'end').values.map(&:type)
+
+ expect(types).to all(be_non_null)
+ end
+
+ it 'rejects invalid range' do
+ input.merge!(start: input[:end], end: input[:start])
+
+ expect { described_class.coerce_isolated_input(input) }
+ .to raise_error(::Gitlab::Graphql::Errors::ArgumentError, 'start must be before end')
+ end
+end
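For context, coerce_isolated_input is the graphql-ruby helper the new spec leans on: it runs an input type's argument coercion outside of a full query. A rough usage sketch, mirroring the expectations above:

# Sketch only: feeds a plain Ruby hash through the Timeframe input type.
input = { start: '2018-06-04', end: '2020-10-06' }
timeframe = GitlabSchema.types['Timeframe'].coerce_isolated_input(input)
timeframe[:start] # => Date.parse('2018-06-04')
timeframe[:end]   # => Date.parse('2020-10-06')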
diff --git a/spec/haml_lint/linter/documentation_links_spec.rb b/spec/haml_lint/linter/documentation_links_spec.rb
index 68de8317b82..5de455b6e8c 100644
--- a/spec/haml_lint/linter/documentation_links_spec.rb
+++ b/spec/haml_lint/linter/documentation_links_spec.rb
@@ -8,75 +8,85 @@ require Rails.root.join('haml_lint/linter/documentation_links')
RSpec.describe HamlLint::Linter::DocumentationLinks do
include_context 'linter'
- context 'when link_to points to the existing file path' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md')" }
+ shared_examples 'link validation rules' do |link_pattern|
+ context 'when link_to points to the existing file path' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md')" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when link_to points to the existing file with valid anchor' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md', anchor: 'overview'), target: '_blank'" }
+ context 'when link_to points to the existing file with valid anchor' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', anchor: 'overview'), target: '_blank'" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when link_to points to the existing file path without .md extension' do
- let(:haml) { "= link_to 'Description', help_page_path('README')" }
+ context 'when link_to points to the existing file path without .md extension' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README')" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when anchor is not correct' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md', anchor: 'wrong')" }
+ context 'when anchor is not correct' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', anchor: 'wrong')" }
- it { is_expected.to report_lint }
+ it { is_expected.to report_lint }
- context 'when help_page_path has multiple options' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md', key: :value, anchor: 'wrong')" }
+ context "when #{link_pattern} has multiple options" do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', key: :value, anchor: 'wrong')" }
+
+ it { is_expected.to report_lint }
+ end
+ end
+
+ context 'when file path is wrong' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('wrong.md'), target: '_blank'" }
it { is_expected.to report_lint }
end
- end
- context 'when file path is wrong' do
- let(:haml) { "= link_to 'Description', help_page_path('wrong.md'), target: '_blank'" }
+ context 'when link with wrong file path is assigned to a variable' do
+ let(:haml) { "- my_link = link_to 'Description', #{link_pattern}('wrong.md')" }
- it { is_expected.to report_lint }
- end
+ it { is_expected.to report_lint }
+ end
- context 'when link with wrong file path is assigned to a variable' do
- let(:haml) { "- my_link = link_to 'Description', help_page_path('wrong.md')" }
+ context 'when it is a broken code' do
+ let(:haml) { "= I am broken! ]]]]" }
- it { is_expected.to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when it is a broken code' do
- let(:haml) { "= I am broken! ]]]]" }
+ context 'when anchor belongs to a different element' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md'), target: (anchor: 'blank')" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when anchor belongs to a different element' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md'), target: (anchor: 'blank')" }
+ context "when a simple #{link_pattern}" do
+ let(:haml) { "- url = #{link_pattern}('wrong.md')" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.to report_lint }
+ end
- context 'when a simple help_page_path' do
- let(:haml) { "- url = help_page_path('wrong.md')" }
+ context 'when link is not a string' do
+ let(:haml) { "- url = #{link_pattern}(help_url)" }
- it { is_expected.to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when link is not a string' do
- let(:haml) { "- url = help_page_path(help_url)" }
+ context 'when link is a part of the tag' do
+ let(:haml) { ".data-form{ data: { url: #{link_pattern}('wrong.md') } }" }
- it { is_expected.not_to report_lint }
+ it { is_expected.to report_lint }
+ end
end
- context 'when link is a part of the tag' do
- let(:haml) { ".data-form{ data: { url: help_page_path('wrong.md') } }" }
+ context 'help_page_path' do
+ it_behaves_like 'link validation rules', 'help_page_path'
+ end
- it { is_expected.to report_lint }
+ context 'help_page_url' do
+ it_behaves_like 'link validation rules', 'help_page_url'
end
end
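The refactor above folds the duplicated help_page_path cases into a parameterised shared example group so the same rules also cover help_page_url. A self-contained sketch of that RSpec pattern (the example body is invented for illustration):

# frozen_string_literal: true
require 'rspec/autorun'

RSpec.shared_examples 'link validation rules' do |link_pattern|
  # The block parameter is available while the examples are defined,
  # so it can be interpolated into descriptions and example bodies.
  it "interpolates #{link_pattern} into the checked markup" do
    expect("= link_to 'Docs', #{link_pattern}('README.md')").to include(link_pattern)
  end
end

RSpec.describe 'documentation links' do
  it_behaves_like 'link validation rules', 'help_page_path'
  it_behaves_like 'link validation rules', 'help_page_url'
end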
diff --git a/spec/helpers/analytics/unique_visits_helper_spec.rb b/spec/helpers/analytics/unique_visits_helper_spec.rb
index ff9769078c4..ff363e81ac7 100644
--- a/spec/helpers/analytics/unique_visits_helper_spec.rb
+++ b/spec/helpers/analytics/unique_visits_helper_spec.rb
@@ -22,15 +22,6 @@ RSpec.describe Analytics::UniqueVisitsHelper do
helper.track_visit(target_id)
end
- it 'does not track visits if usage ping is disabled' do
- sign_in(current_user)
- expect(Gitlab::CurrentSettings).to receive(:usage_ping_enabled?).and_return(false)
-
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).not_to receive(:track_visit)
-
- helper.track_visit(target_id)
- end
-
it 'does not track visit if user is not logged in' do
expect_any_instance_of(Gitlab::Analytics::UniqueVisits).not_to receive(:track_visit)
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index ce4e73bdc55..a557e9e04da 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -222,6 +222,32 @@ RSpec.describe ApplicationHelper do
end
end
+ describe '#instance_review_permitted?' do
+ let_it_be(:non_admin_user) { create :user }
+ let_it_be(:admin_user) { create :user, :admin }
+
+ before do
+ allow(::Gitlab::CurrentSettings).to receive(:instance_review_permitted?).and_return(app_setting)
+ allow(helper).to receive(:current_user).and_return(current_user)
+ end
+
+ subject { helper.instance_review_permitted? }
+
+ where(app_setting: [true, false], is_admin: [true, false, nil])
+
+ with_them do
+ let(:current_user) do
+ if is_admin.nil?
+ nil
+ else
+ is_admin ? admin_user : non_admin_user
+ end
+ end
+
+ it { is_expected.to be(app_setting && is_admin) }
+ end
+ end
+
describe '#locale_path' do
it 'returns the locale path with an `_`' do
Gitlab::I18n.with_locale('pt-BR') do
diff --git a/spec/helpers/application_settings_helper_spec.rb b/spec/helpers/application_settings_helper_spec.rb
index c5fd88ada8f..7f25721801f 100644
--- a/spec/helpers/application_settings_helper_spec.rb
+++ b/spec/helpers/application_settings_helper_spec.rb
@@ -146,4 +146,24 @@ RSpec.describe ApplicationSettingsHelper do
])
end
end
+
+ describe '.show_documentation_base_url_field?' do
+ subject { helper.show_documentation_base_url_field? }
+
+ before do
+ stub_feature_flags(help_page_documentation_redirect: feature_flag)
+ end
+
+ context 'when feature flag is enabled' do
+ let(:feature_flag) { true }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag) { false }
+
+ it { is_expected.to eq(false) }
+ end
+ end
end
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index 06f86e7716a..baa97781efa 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -5,16 +5,6 @@ require 'spec_helper'
RSpec.describe BlobHelper do
include TreeHelper
- describe '#highlight' do
- it 'wraps highlighted content' do
- expect(helper.highlight('test.rb', '52')).to eq(%q[<pre class="code highlight"><code><span id="LC1" class="line" lang="ruby"><span class="mi">52</span></span></code></pre>])
- end
-
- it 'handles plain version' do
- expect(helper.highlight('test.rb', '52', plain: true)).to eq(%q[<pre class="code highlight"><code><span id="LC1" class="line" lang="">52</span></code></pre>])
- end
- end
-
describe "#sanitize_svg_data" do
let(:input_svg_path) { File.join(Rails.root, 'spec', 'fixtures', 'unsanitized.svg') }
let(:data) { File.read(input_svg_path) }
@@ -455,13 +445,14 @@ RSpec.describe BlobHelper do
end
describe '#ide_fork_and_edit_path' do
- let(:project) { create(:project) }
- let(:current_user) { create(:user) }
- let(:can_push_code) { true }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:current_user) { user }
before do
allow(helper).to receive(:current_user).and_return(current_user)
- allow(helper).to receive(:can?).and_return(can_push_code)
+ allow(helper).to receive(:can?).and_return(true)
end
it 'returns path to fork the repo with a redirect param to the full IDE path' do
@@ -482,6 +473,35 @@ RSpec.describe BlobHelper do
end
end
+ describe '#fork_and_edit_path' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:current_user) { user }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:can?).and_return(true)
+ end
+
+ it 'returns path to fork the repo with a redirect param to the full edit path' do
+ uri = URI(helper.fork_and_edit_path(project, "master", ""))
+ params = CGI.unescape(uri.query)
+
+ expect(uri.path).to eq("/#{project.namespace.path}/#{project.path}/-/forks")
+ expect(params).to include("continue[to]=/#{project.namespace.path}/#{project.path}/-/edit/master/")
+ expect(params).to include("namespace_key=#{current_user.namespace.id}")
+ end
+
+ context 'when user is not logged in' do
+ let(:current_user) { nil }
+
+ it 'returns nil' do
+ expect(helper.fork_and_edit_path(project, "master", "")).to be_nil
+ end
+ end
+ end
+
describe '#editing_ci_config?' do
let(:project) { build(:project) }
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
index a805b96a8cc..b85ebec5545 100644
--- a/spec/helpers/boards_helper_spec.rb
+++ b/spec/helpers/boards_helper_spec.rb
@@ -34,23 +34,58 @@ RSpec.describe BoardsHelper do
end
describe '#board_data' do
- let(:user) { create(:user) }
- let(:board) { create(:board, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:board) { create(:board, project: project) }
- before do
- assign(:board, board)
- assign(:project, project)
+ context 'project_board' do
+ before do
+ assign(:project, project)
+ assign(:board, board)
- allow(helper).to receive(:current_user) { user }
- allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, board).and_return(true)
- end
+ allow(helper).to receive(:current_user) { user }
+ allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, board).and_return(true)
+ allow(helper).to receive(:can?).with(user, :admin_issue, board).and_return(true)
+ end
+
+ it 'returns a board_lists_path as lists_endpoint' do
+ expect(helper.board_data[:lists_endpoint]).to eq(board_lists_path(board))
+ end
- it 'returns a board_lists_path as lists_endpoint' do
- expect(helper.board_data[:lists_endpoint]).to eq(board_lists_path(board))
+ it 'returns board type as parent' do
+ expect(helper.board_data[:parent]).to eq('project')
+ end
+
+ it 'returns can_update for user permissions on the board' do
+ expect(helper.board_data[:can_update]).to eq('true')
+ end
+
+ it 'returns required label endpoints' do
+ expect(helper.board_data[:labels_fetch_path]).to eq("/#{project.full_path}/-/labels.json?include_ancestor_groups=true")
+ expect(helper.board_data[:labels_manage_path]).to eq("/#{project.full_path}/-/labels")
+ end
end
- it 'returns board type as parent' do
- expect(helper.board_data[:parent]).to eq('project')
+ context 'group board' do
+ let_it_be(:group) { create(:group, path: 'base') }
+ let_it_be(:board) { create(:board, group: group) }
+
+ before do
+ assign(:group, group)
+ assign(:board, board)
+
+ allow(helper).to receive(:current_user) { user }
+ allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, board).and_return(true)
+ allow(helper).to receive(:can?).with(user, :admin_issue, board).and_return(true)
+ end
+
+ it 'returns correct path for base group' do
+ expect(helper.build_issue_link_base).to eq('/base/:project_path/issues')
+ end
+
+ it 'returns required label endpoints' do
+ expect(helper.board_data[:labels_fetch_path]).to eq("/groups/base/-/labels.json?include_ancestor_groups=true&only_group_labels=true")
+ expect(helper.board_data[:labels_manage_path]).to eq("/groups/base/-/labels")
+ end
end
end
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index a006933a2a5..38caae91ef2 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -53,4 +53,25 @@ RSpec.describe Ci::RunnersHelper do
end
end
end
+
+ describe '#group_shared_runners_settings_data' do
+ let(:group) { create(:group, parent: parent, shared_runners_enabled: false) }
+ let(:parent) { create(:group) }
+
+ it 'returns group data for top level group' do
+ data = group_shared_runners_settings_data(parent)
+
+ expect(data[:update_path]).to eq("/api/v4/groups/#{parent.id}")
+ expect(data[:shared_runners_availability]).to eq('enabled')
+ expect(data[:parent_shared_runners_availability]).to eq(nil)
+ end
+
+ it 'returns group data for child group' do
+ data = group_shared_runners_settings_data(group)
+
+ expect(data[:update_path]).to eq("/api/v4/groups/#{group.id}")
+ expect(data[:shared_runners_availability]).to eq('disabled_and_unoverridable')
+ expect(data[:parent_shared_runners_availability]).to eq('enabled')
+ end
+ end
end
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index 6164f3b5e8d..6b08b6515cf 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -59,6 +59,24 @@ RSpec.describe ClustersHelper do
end
end
+ describe '#js_cluster_agents_list_data' do
+ let_it_be(:project) { build(:project, :repository) }
+
+ subject { helper.js_cluster_agents_list_data(project) }
+
+ it 'displays project default branch' do
+ expect(subject[:default_branch_name]).to eq(project.default_branch)
+ end
+
+ it 'displays image path' do
+ expect(subject[:empty_state_image]).to match(%r(/illustrations/logos/clusters_empty|svg))
+ end
+
+ it 'displays project path' do
+ expect(subject[:project_path]).to eq(project.full_path)
+ end
+ end
+
describe '#js_clusters_list_data' do
subject { helper.js_clusters_list_data('/path') }
@@ -89,32 +107,6 @@ RSpec.describe ClustersHelper do
end
end
- describe '#provider_icon' do
- it 'will return GCP logo with gcp argument' do
- logo = helper.provider_icon('gcp')
-
- expect(logo).to match(%r(img alt="Google GKE" data-src="|/illustrations/logos/google_gke|svg))
- end
-
- it 'will return AWS logo with aws argument' do
- logo = helper.provider_icon('aws')
-
- expect(logo).to match(%r(img alt="Amazon EKS" data-src="|/illustrations/logos/amazon_eks|svg))
- end
-
- it 'will return default logo with unknown provider' do
- logo = helper.provider_icon('unknown')
-
- expect(logo).to match(%r(img alt="Kubernetes Cluster" data-src="|/illustrations/logos/kubernetes|svg))
- end
-
- it 'will return default logo when provider is empty' do
- logo = helper.provider_icon
-
- expect(logo).to match(%r(img alt="Kubernetes Cluster" data-src="|/illustrations/logos/kubernetes|svg))
- end
- end
-
describe '#cluster_type_label' do
subject { helper.cluster_type_label(cluster_type) }
diff --git a/spec/helpers/container_expiration_policies_helper_spec.rb b/spec/helpers/container_expiration_policies_helper_spec.rb
index b2a03f8d90f..7ad3804e3a9 100644
--- a/spec/helpers/container_expiration_policies_helper_spec.rb
+++ b/spec/helpers/container_expiration_policies_helper_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ContainerExpirationPoliciesHelper do
+ using RSpec::Parameterized::TableSyntax
+
describe '#keep_n_options' do
it 'returns keep_n options formatted for dropdown usage' do
expected_result = [
@@ -44,4 +46,27 @@ RSpec.describe ContainerExpirationPoliciesHelper do
expect(helper.older_than_options).to eq(expected_result)
end
end
+
+ describe '#container_expiration_policies_historic_entry_enabled?' do
+ let_it_be(:project) { build_stubbed(:project) }
+
+ subject { helper.container_expiration_policies_historic_entry_enabled?(project) }
+
+ where(:application_setting, :feature_flag, :expected_result) do
+ true | true | true
+ true | false | true
+ false | true | true
+ false | false | false
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(container_expiration_policies_historic_entry: false)
+ stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
+ stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
+ end
+
+ it { is_expected.to eq(expected_result) }
+ end
+ end
end
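The where/with_them block above uses rspec-parameterized's table syntax, enabled by the new 'using RSpec::Parameterized::TableSyntax' line. A standalone sketch of how each table row becomes one generated example (policy_enabled? is invented for illustration):

# frozen_string_literal: true
require 'rspec/autorun'
require 'rspec-parameterized'

def policy_enabled?(application_setting, feature_flag)
  application_setting || feature_flag
end

RSpec.describe 'historic entries toggle' do
  using RSpec::Parameterized::TableSyntax

  # Each pipe-separated row binds the named values and runs the example once.
  where(:application_setting, :feature_flag, :expected_result) do
    true  | true  | true
    true  | false | true
    false | true  | true
    false | false | false
  end

  with_them do
    it { expect(policy_enabled?(application_setting, feature_flag)).to eq(expected_result) }
  end
end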
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index 96ac4015c77..ef8b342a3f6 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -361,4 +361,116 @@ RSpec.describe EmailsHelper do
end
end
end
+
+ describe '#change_reviewer_notification_text' do
+ let(:mary) { build(:user, name: 'Mary') }
+ let(:john) { build(:user, name: 'John') }
+ let(:ted) { build(:user, name: 'Ted') }
+
+ context 'to new reviewers only' do
+ let(:previous_reviewers) { [] }
+ let(:new_reviewers) { [john] }
+
+ context 'with no html tag' do
+ let(:expected_output) do
+ 'Reviewer changed to John'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers)).to eq(expected_output)
+ end
+ end
+
+ context 'with <strong> tag' do
+ let(:expected_output) do
+ 'Reviewer changed to <strong>John</strong>'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers, :strong)).to eq(expected_output)
+ end
+ end
+ end
+
+ context 'from previous reviewers to new reviewers' do
+ let(:previous_reviewers) { [john, mary] }
+ let(:new_reviewers) { [ted] }
+
+ context 'with no html tag' do
+ let(:expected_output) do
+ 'Reviewer changed from John and Mary to Ted'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers)).to eq(expected_output)
+ end
+ end
+
+ context 'with <strong> tag' do
+ let(:expected_output) do
+ 'Reviewer changed from <strong>John and Mary</strong> to <strong>Ted</strong>'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers, :strong)).to eq(expected_output)
+ end
+ end
+ end
+
+ context 'from previous reviewers to no reviewers' do
+ let(:previous_reviewers) { [john, mary] }
+ let(:new_reviewers) { [] }
+
+ context 'with no html tag' do
+ let(:expected_output) do
+ 'Reviewer changed from John and Mary to Unassigned'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers)).to eq(expected_output)
+ end
+ end
+
+ context 'with <strong> tag' do
+ let(:expected_output) do
+ 'Reviewer changed from <strong>John and Mary</strong> to <strong>Unassigned</strong>'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers, :strong)).to eq(expected_output)
+ end
+ end
+ end
+
+ context "with a <script> tag in user's name" do
+ let(:previous_reviewers) { [] }
+ let(:new_reviewers) { [fishy_user] }
+ let(:fishy_user) { build(:user, name: "<script>alert('hi')</script>") }
+
+ let(:expected_output) do
+ 'Reviewer changed to <strong>&lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;</strong>'
+ end
+
+ it 'escapes the html tag' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers, :strong)).to eq(expected_output)
+ end
+ end
+
+ context "with url in user's name" do
+ subject(:email_helper) { Object.new.extend(described_class) }
+
+ let(:previous_reviewers) { [] }
+ let(:new_reviewers) { [fishy_user] }
+ let(:fishy_user) { build(:user, name: "example.com") }
+
+ let(:expected_output) do
+ 'Reviewer changed to example_com'
+ end
+
+ it "sanitizes user's name" do
+ expect(email_helper).to receive(:sanitize_name).and_call_original
+ expect(email_helper.change_reviewer_notification_text(new_reviewers, previous_reviewers)).to eq(expected_output)
+ end
+ end
+ end
end
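Taken together, the expectations above pin down a simple sentence format. A naive standalone sketch (not GitLab's implementation, and without the tag wrapping or sanitisation) that reproduces it, just to make the format easy to scan:

# frozen_string_literal: true

def change_text(new_names, old_names)
  to   = new_names.empty? ? 'Unassigned' : new_names.join(' and ')
  from = old_names.empty? ? nil : old_names.join(' and ')
  from ? "Reviewer changed from #{from} to #{to}" : "Reviewer changed to #{to}"
end

puts change_text(%w[John], [])            # Reviewer changed to John
puts change_text(%w[Ted], %w[John Mary])  # Reviewer changed from John and Mary to Ted
puts change_text([], %w[John Mary])       # Reviewer changed from John and Mary to Unassigned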
diff --git a/spec/helpers/external_link_helper_spec.rb b/spec/helpers/external_link_helper_spec.rb
index b1a1884d887..f5bb0568824 100644
--- a/spec/helpers/external_link_helper_spec.rb
+++ b/spec/helpers/external_link_helper_spec.rb
@@ -6,12 +6,15 @@ RSpec.describe ExternalLinkHelper do
include IconsHelper
it 'returns external link with icon' do
- expect(external_link('https://gitlab.com', 'https://gitlab.com').to_s)
- .to eq('<a target="_blank" rel="noopener noreferrer" href="https://gitlab.com">https://gitlab.com <i aria-hidden="true" data-hidden="true" class="fa fa-external-link"></i></a>')
+ link = external_link('https://gitlab.com', 'https://gitlab.com').to_s
+ expect(link).to start_with('<a target="_blank" rel="noopener noreferrer" href="https://gitlab.com">https://gitlab.com')
+ expect(link).to include('data-testid="external-link-icon"')
end
it 'allows options when creating external link with icon' do
- expect(external_link('https://gitlab.com', 'https://gitlab.com', { "data-foo": "bar", class: "externalLink" }).to_s)
- .to eq('<a target="_blank" rel="noopener noreferrer" data-foo="bar" class="externalLink" href="https://gitlab.com">https://gitlab.com <i aria-hidden="true" data-hidden="true" class="fa fa-external-link"></i></a>')
+ link = external_link('https://gitlab.com', 'https://gitlab.com', { "data-foo": "bar", class: "externalLink" }).to_s
+
+ expect(link).to start_with('<a target="_blank" rel="noopener noreferrer" data-foo="bar" class="externalLink" href="https://gitlab.com">https://gitlab.com')
+ expect(link).to include('data-testid="external-link-icon"')
end
end
diff --git a/spec/helpers/feature_flags_helper_spec.rb b/spec/helpers/feature_flags_helper_spec.rb
new file mode 100644
index 00000000000..9a080736595
--- /dev/null
+++ b/spec/helpers/feature_flags_helper_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagsHelper do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let_it_be(:user) { create(:user) }
+
+ describe '#unleash_api_url' do
+ subject { helper.unleash_api_url(project) }
+
+ it { is_expected.to end_with("/api/v4/feature_flags/unleash/#{project.id}") }
+ end
+
+ describe '#unleash_api_instance_id' do
+ subject { helper.unleash_api_instance_id(project) }
+
+ it { is_expected.not_to be_empty }
+ end
+end
diff --git a/spec/helpers/gitlab_routing_helper_spec.rb b/spec/helpers/gitlab_routing_helper_spec.rb
index 1ad7c7bb9ff..0088f739879 100644
--- a/spec/helpers/gitlab_routing_helper_spec.rb
+++ b/spec/helpers/gitlab_routing_helper_spec.rb
@@ -187,7 +187,7 @@ RSpec.describe GitlabRoutingHelper do
let(:ref) { 'test-ref' }
let(:args) { {} }
- subject { gitlab_raw_snippet_blob_path(snippet, blob.path, ref, args) }
+ subject { gitlab_raw_snippet_blob_path(snippet, blob.path, ref, **args) }
it_behaves_like 'snippet blob raw path'
@@ -222,7 +222,7 @@ RSpec.describe GitlabRoutingHelper do
let(:ref) { 'snippet-test-ref' }
let(:args) { {} }
- subject { gitlab_raw_snippet_blob_url(snippet, blob.path, ref, args) }
+ subject { gitlab_raw_snippet_blob_url(snippet, blob.path, ref, **args) }
it_behaves_like 'snippet blob raw url'
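The **args changes above are the Ruby 2.7+ keyword-argument separation fix: a trailing hash is no longer implicitly converted into keyword arguments, so the splat has to be explicit. A standalone sketch (raw_path is invented for illustration):

# frozen_string_literal: true

def raw_path(snippet, path, ref, inline: true)
  format('%s/%s@%s (inline=%s)', snippet, path, ref, inline)
end

args = { inline: false }

# The explicit double-splat turns the hash into keyword arguments.
puts raw_path('snippet', 'file.rb', 'main', **args)

# Passing the bare hash instead would warn on Ruby 2.7 and raise
# ArgumentError on Ruby 3, because it counts as an extra positional argument.
# raw_path('snippet', 'file.rb', 'main', args)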
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index a25bf1c4157..bb92445cb19 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -5,9 +5,15 @@ require "spec_helper"
RSpec.describe Groups::GroupMembersHelper do
include MembersPresentation
- describe '.group_member_select_options' do
- let(:group) { create(:group) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ before do
+ allow(helper).to receive(:can?).with(current_user, :owner_access, group).and_return(true)
+ allow(helper).to receive(:current_user).and_return(current_user)
+ end
+ describe '.group_member_select_options' do
before do
helper.instance_variable_set(:@group, group)
end
@@ -28,14 +34,6 @@ RSpec.describe Groups::GroupMembersHelper do
end
describe '#members_data_json' do
- let(:current_user) { create(:user) }
- let(:group) { create(:group) }
-
- before do
- allow(helper).to receive(:can?).with(current_user, :owner_access, group).and_return(true)
- allow(helper).to receive(:current_user).and_return(current_user)
- end
-
shared_examples 'group_members.json' do
it 'matches json schema' do
json = helper.members_data_json(group, present_members([group_member]))
@@ -48,6 +46,14 @@ RSpec.describe Groups::GroupMembersHelper do
let(:group_member) { create(:group_member, group: group, created_by: current_user) }
it_behaves_like 'group_members.json'
+
+ context 'with user status set' do
+ let(:user) { create(:user) }
+ let!(:status) { create(:user_status, user: user) }
+ let(:group_member) { create(:group_member, group: group, user: user, created_by: current_user) }
+
+ it_behaves_like 'group_members.json'
+ end
end
context 'for an invited group member' do
@@ -62,4 +68,36 @@ RSpec.describe Groups::GroupMembersHelper do
it_behaves_like 'group_members.json'
end
end
+
+ describe '#group_members_list_data_attributes' do
+ let(:group_member) { create(:group_member, group: group, created_by: current_user) }
+
+ before do
+ allow(helper).to receive(:group_group_member_path).with(group, ':id').and_return('/groups/foo-bar/-/group_members/:id')
+ end
+
+ it 'returns expected hash' do
+ expect(helper.group_members_list_data_attributes(group, present_members([group_member]))).to include({
+ members: helper.members_data_json(group, present_members([group_member])),
+ member_path: '/groups/foo-bar/-/group_members/:id',
+ group_id: group.id
+ })
+ end
+ end
+
+ describe '#linked_groups_list_data_attributes' do
+ include_context 'group_group_link'
+
+ before do
+ allow(helper).to receive(:group_group_link_path).with(shared_group, ':id').and_return('/groups/foo-bar/-/group_links/:id')
+ end
+
+ it 'returns expected hash' do
+ expect(helper.linked_groups_list_data_attributes(shared_group)).to include({
+ members: helper.linked_groups_data_json(shared_group.shared_with_group_links),
+ member_path: '/groups/foo-bar/-/group_links/:id',
+ group_id: shared_group.id
+ })
+ end
+ end
end
diff --git a/spec/helpers/icons_helper_spec.rb b/spec/helpers/icons_helper_spec.rb
index 872aa821560..94012de3877 100644
--- a/spec/helpers/icons_helper_spec.rb
+++ b/spec/helpers/icons_helper_spec.rb
@@ -114,128 +114,128 @@ RSpec.describe IconsHelper do
end
describe 'file_type_icon_class' do
- it 'returns folder class' do
- expect(file_type_icon_class('folder', 0, 'folder_name')).to eq 'folder'
+ it 'returns folder-o class' do
+ expect(file_type_icon_class('folder', 0, 'folder_name')).to eq 'folder-o'
end
it 'returns share class' do
expect(file_type_icon_class('file', '120000', 'link')).to eq 'share'
end
- it 'returns file-pdf-o class with .pdf' do
- expect(file_type_icon_class('file', 0, 'filename.pdf')).to eq 'file-pdf-o'
+ it 'returns document class with .pdf' do
+ expect(file_type_icon_class('file', 0, 'filename.pdf')).to eq 'document'
end
- it 'returns file-image-o class with .jpg' do
- expect(file_type_icon_class('file', 0, 'filename.jpg')).to eq 'file-image-o'
+ it 'returns doc-image class with .jpg' do
+ expect(file_type_icon_class('file', 0, 'filename.jpg')).to eq 'doc-image'
end
- it 'returns file-image-o class with .JPG' do
- expect(file_type_icon_class('file', 0, 'filename.JPG')).to eq 'file-image-o'
+ it 'returns doc-image class with .JPG' do
+ expect(file_type_icon_class('file', 0, 'filename.JPG')).to eq 'doc-image'
end
- it 'returns file-image-o class with .png' do
- expect(file_type_icon_class('file', 0, 'filename.png')).to eq 'file-image-o'
+ it 'returns doc-image class with .png' do
+ expect(file_type_icon_class('file', 0, 'filename.png')).to eq 'doc-image'
end
- it 'returns file-image-o class with .apng' do
- expect(file_type_icon_class('file', 0, 'filename.apng')).to eq 'file-image-o'
+ it 'returns doc-image class with .apng' do
+ expect(file_type_icon_class('file', 0, 'filename.apng')).to eq 'doc-image'
end
- it 'returns file-image-o class with .webp' do
- expect(file_type_icon_class('file', 0, 'filename.webp')).to eq 'file-image-o'
+ it 'returns doc-image class with .webp' do
+ expect(file_type_icon_class('file', 0, 'filename.webp')).to eq 'doc-image'
end
- it 'returns file-archive-o class with .tar' do
- expect(file_type_icon_class('file', 0, 'filename.tar')).to eq 'file-archive-o'
+ it 'returns doc-compressed class with .tar' do
+ expect(file_type_icon_class('file', 0, 'filename.tar')).to eq 'doc-compressed'
end
- it 'returns file-archive-o class with .TAR' do
- expect(file_type_icon_class('file', 0, 'filename.TAR')).to eq 'file-archive-o'
+ it 'returns doc-compressed class with .TAR' do
+ expect(file_type_icon_class('file', 0, 'filename.TAR')).to eq 'doc-compressed'
end
- it 'returns file-archive-o class with .tar.gz' do
- expect(file_type_icon_class('file', 0, 'filename.tar.gz')).to eq 'file-archive-o'
+ it 'returns doc-compressed class with .tar.gz' do
+ expect(file_type_icon_class('file', 0, 'filename.tar.gz')).to eq 'doc-compressed'
end
- it 'returns file-audio-o class with .mp3' do
- expect(file_type_icon_class('file', 0, 'filename.mp3')).to eq 'file-audio-o'
+ it 'returns volume-up class with .mp3' do
+ expect(file_type_icon_class('file', 0, 'filename.mp3')).to eq 'volume-up'
end
- it 'returns file-audio-o class with .MP3' do
- expect(file_type_icon_class('file', 0, 'filename.MP3')).to eq 'file-audio-o'
+ it 'returns volume-up class with .MP3' do
+ expect(file_type_icon_class('file', 0, 'filename.MP3')).to eq 'volume-up'
end
- it 'returns file-audio-o class with .m4a' do
- expect(file_type_icon_class('file', 0, 'filename.m4a')).to eq 'file-audio-o'
+ it 'returns volume-up class with .m4a' do
+ expect(file_type_icon_class('file', 0, 'filename.m4a')).to eq 'volume-up'
end
- it 'returns file-audio-o class with .wav' do
- expect(file_type_icon_class('file', 0, 'filename.wav')).to eq 'file-audio-o'
+ it 'returns volume-up class with .wav' do
+ expect(file_type_icon_class('file', 0, 'filename.wav')).to eq 'volume-up'
end
- it 'returns file-video-o class with .avi' do
- expect(file_type_icon_class('file', 0, 'filename.avi')).to eq 'file-video-o'
+ it 'returns live-preview class with .avi' do
+ expect(file_type_icon_class('file', 0, 'filename.avi')).to eq 'live-preview'
end
- it 'returns file-video-o class with .AVI' do
- expect(file_type_icon_class('file', 0, 'filename.AVI')).to eq 'file-video-o'
+ it 'returns live-preview class with .AVI' do
+ expect(file_type_icon_class('file', 0, 'filename.AVI')).to eq 'live-preview'
end
- it 'returns file-video-o class with .mp4' do
- expect(file_type_icon_class('file', 0, 'filename.mp4')).to eq 'file-video-o'
+ it 'returns live-preview class with .mp4' do
+ expect(file_type_icon_class('file', 0, 'filename.mp4')).to eq 'live-preview'
end
- it 'returns file-word-o class with .odt' do
- expect(file_type_icon_class('file', 0, 'filename.odt')).to eq 'file-word-o'
+ it 'returns doc-text class with .odt' do
+ expect(file_type_icon_class('file', 0, 'filename.odt')).to eq 'doc-text'
end
- it 'returns file-word-o class with .doc' do
- expect(file_type_icon_class('file', 0, 'filename.doc')).to eq 'file-word-o'
+ it 'returns doc-text class with .doc' do
+ expect(file_type_icon_class('file', 0, 'filename.doc')).to eq 'doc-text'
end
- it 'returns file-word-o class with .DOC' do
- expect(file_type_icon_class('file', 0, 'filename.DOC')).to eq 'file-word-o'
+ it 'returns doc-text class with .DOC' do
+ expect(file_type_icon_class('file', 0, 'filename.DOC')).to eq 'doc-text'
end
- it 'returns file-word-o class with .docx' do
- expect(file_type_icon_class('file', 0, 'filename.docx')).to eq 'file-word-o'
+ it 'returns doc-text class with .docx' do
+ expect(file_type_icon_class('file', 0, 'filename.docx')).to eq 'doc-text'
end
- it 'returns file-excel-o class with .xls' do
- expect(file_type_icon_class('file', 0, 'filename.xls')).to eq 'file-excel-o'
+ it 'returns document class with .xls' do
+ expect(file_type_icon_class('file', 0, 'filename.xls')).to eq 'document'
end
- it 'returns file-excel-o class with .XLS' do
- expect(file_type_icon_class('file', 0, 'filename.XLS')).to eq 'file-excel-o'
+ it 'returns document class with .XLS' do
+ expect(file_type_icon_class('file', 0, 'filename.XLS')).to eq 'document'
end
- it 'returns file-excel-o class with .xlsx' do
- expect(file_type_icon_class('file', 0, 'filename.xlsx')).to eq 'file-excel-o'
+ it 'returns document class with .xlsx' do
+ expect(file_type_icon_class('file', 0, 'filename.xlsx')).to eq 'document'
end
- it 'returns file-excel-o class with .odp' do
- expect(file_type_icon_class('file', 0, 'filename.odp')).to eq 'file-powerpoint-o'
+ it 'returns doc-chart class with .odp' do
+ expect(file_type_icon_class('file', 0, 'filename.odp')).to eq 'doc-chart'
end
- it 'returns file-excel-o class with .ppt' do
- expect(file_type_icon_class('file', 0, 'filename.ppt')).to eq 'file-powerpoint-o'
+ it 'returns doc-chart class with .ppt' do
+ expect(file_type_icon_class('file', 0, 'filename.ppt')).to eq 'doc-chart'
end
- it 'returns file-excel-o class with .PPT' do
- expect(file_type_icon_class('file', 0, 'filename.PPT')).to eq 'file-powerpoint-o'
+ it 'returns doc-chart class with .PPT' do
+ expect(file_type_icon_class('file', 0, 'filename.PPT')).to eq 'doc-chart'
end
- it 'returns file-excel-o class with .pptx' do
- expect(file_type_icon_class('file', 0, 'filename.pptx')).to eq 'file-powerpoint-o'
+ it 'returns doc-chart class with .pptx' do
+ expect(file_type_icon_class('file', 0, 'filename.pptx')).to eq 'doc-chart'
end
- it 'returns file-text-o class with .unknow' do
- expect(file_type_icon_class('file', 0, 'filename.unknow')).to eq 'file-text-o'
+ it 'returns doc-text class with .unknow' do
+ expect(file_type_icon_class('file', 0, 'filename.unknow')).to eq 'doc-text'
end
- it 'returns file-text-o class with no extension' do
- expect(file_type_icon_class('file', 0, 'CHANGELOG')).to eq 'file-text-o'
+ it 'returns doc-text class with no extension' do
+ expect(file_type_icon_class('file', 0, 'CHANGELOG')).to eq 'doc-text'
end
end
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
new file mode 100644
index 00000000000..b4e05d67553
--- /dev/null
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe InviteMembersHelper do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user, developer_projects: [project]) }
+ let(:owner) { project.owner }
+
+ before do
+ assign(:project, project)
+ end
+
+ describe "#directly_invite_members?" do
+ context 'when the user is an owner' do
+ before do
+ allow(helper).to receive(:current_user) { owner }
+ end
+
+ it 'returns false' do
+ allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_a) { false }
+
+ expect(helper.directly_invite_members?).to eq false
+ end
+
+ it 'returns true' do
+ allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_a) { true }
+
+ expect(helper.directly_invite_members?).to eq true
+ end
+ end
+
+ context 'when the user is a developer' do
+ before do
+ allow(helper).to receive(:current_user) { developer }
+ end
+
+ it 'returns false' do
+ allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_a) { true }
+
+ expect(helper.directly_invite_members?).to eq false
+ end
+ end
+ end
+
+ describe "#indirectly_invite_members?" do
+ context 'when a user is a developer' do
+ before do
+ allow(helper).to receive(:current_user) { developer }
+ end
+
+ it 'returns false' do
+ allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_b) { false }
+
+ expect(helper.indirectly_invite_members?).to eq false
+ end
+
+ it 'returns true' do
+ allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_b) { true }
+
+ expect(helper.indirectly_invite_members?).to eq true
+ end
+ end
+
+ context 'when a user is an owner' do
+ before do
+ allow(helper).to receive(:current_user) { owner }
+ end
+
+ it 'returns false' do
+ allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_b) { true }
+
+ expect(helper.indirectly_invite_members?).to eq false
+ end
+ end
+ end
+end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 89a2a92ea57..e8e5adaa274 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -44,23 +44,6 @@ RSpec.describe IssuablesHelper do
end
end
- describe '#issuable_labels_tooltip' do
- let(:label_entity) { LabelEntity.represent(label).as_json }
- let(:label2_entity) { LabelEntity.represent(label2).as_json }
-
- it 'returns label text with no labels' do
- expect(issuable_labels_tooltip([])).to eq(_('Labels'))
- end
-
- it 'returns label text with labels within max limit' do
- expect(issuable_labels_tooltip([label_entity])).to eq(label[:title])
- end
-
- it 'returns label text with labels exceeding max limit' do
- expect(issuable_labels_tooltip([label_entity, label2_entity], limit: 1)).to eq("#{label[:title]}, and 1 more")
- end
- end
-
describe '#issuables_state_counter_text' do
let(:user) { create(:user) }
@@ -306,6 +289,38 @@ RSpec.describe IssuablesHelper do
end
end
+ describe '#reviewer_sidebar_data' do
+ let(:user) { create(:user) }
+
+ subject { helper.reviewer_sidebar_data(user, merge_request: merge_request) }
+
+ context 'without merge_request' do
+ let(:merge_request) { nil }
+
+ it 'returns hash of reviewer data' do
+ is_expected.to eql({
+ avatar_url: user.avatar_url,
+ name: user.name,
+ username: user.username
+ })
+ end
+ end
+
+ context 'with merge_request' do
+ let(:merge_request) { build(:merge_request) }
+
+ where(can_merge: [true, false])
+
+ with_them do
+ before do
+ allow(merge_request).to receive(:can_be_merged_by?).and_return(can_merge)
+ end
+
+ it { is_expected.to include({ can_merge: can_merge }) }
+ end
+ end
+ end
+
describe '#issuable_squash_option?' do
using RSpec::Parameterized::TableSyntax
@@ -337,4 +352,35 @@ RSpec.describe IssuablesHelper do
expect(helper.sidebar_milestone_tooltip_label(milestone)).to eq('&lt;img onerror=alert(1)&gt;<br/>Milestone')
end
end
+
+ describe '#serialize_issuable' do
+ context 'when it is a merge request' do
+ let(:merge_request) { build(:merge_request) }
+ let(:user) { build(:user) }
+
+ before do
+ allow(helper).to receive(:current_user) { user }
+ end
+
+ it 'has suggest_pipeline experiment enabled' do
+ allow(helper).to receive(:experiment_enabled?).with(:suggest_pipeline) { true }
+
+ expect_next_instance_of(MergeRequestSerializer) do |serializer|
+ expect(serializer).to receive(:represent).with(merge_request, { serializer: 'widget', experiment_enabled: :suggest_pipeline })
+ end
+
+ helper.serialize_issuable(merge_request, serializer: 'widget')
+ end
+
+ it 'has suggest_pipeline experiment disabled' do
+ allow(helper).to receive(:experiment_enabled?).with(:suggest_pipeline) { false }
+
+ expect_next_instance_of(MergeRequestSerializer) do |serializer|
+ expect(serializer).to receive(:represent).with(merge_request, { serializer: 'widget' })
+ end
+
+ helper.serialize_issuable(merge_request, serializer: 'widget')
+ end
+ end
+ end
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 3f84eeb12c2..1ed61bd3144 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -233,4 +233,25 @@ RSpec.describe IssuesHelper do
expect(helper.show_moved_service_desk_issue_warning?(new_issue)).to be(false)
end
end
+
+ describe '#use_startup_call?' do
+ it "returns false when a query param is present" do
+ allow(controller.request).to receive(:query_parameters).and_return({ foo: 'bar' })
+
+ expect(helper.use_startup_call?).to eq(false)
+ end
+
+ it "returns false when user has stored sort preference" do
+ controller.instance_variable_set(:@sort, 'updated_asc')
+
+ expect(helper.use_startup_call?).to eq(false)
+ end
+
+ it 'returns true when request.query_parameters is empty with default sorting preference' do
+ controller.instance_variable_set(:@sort, 'created_date')
+ allow(controller.request).to receive(:query_parameters).and_return({})
+
+ expect(helper.use_startup_call?).to eq(true)
+ end
+ end
end
diff --git a/spec/helpers/labels_helper_spec.rb b/spec/helpers/labels_helper_spec.rb
index 77e1d10354c..b93dc03e434 100644
--- a/spec/helpers/labels_helper_spec.rb
+++ b/spec/helpers/labels_helper_spec.rb
@@ -244,26 +244,6 @@ RSpec.describe LabelsHelper do
end
end
- describe 'label_from_hash' do
- it 'builds a group label with whitelisted attributes' do
- label = label_from_hash({ title: 'foo', color: 'bar', id: 1, group_id: 1 })
-
- expect(label).to be_a(GroupLabel)
- expect(label.id).to be_nil
- expect(label.title).to eq('foo')
- expect(label.color).to eq('bar')
- end
-
- it 'builds a project label with whitelisted attributes' do
- label = label_from_hash({ title: 'foo', color: 'bar', id: 1, project_id: 1 })
-
- expect(label).to be_a(ProjectLabel)
- expect(label.id).to be_nil
- expect(label.title).to eq('foo')
- expect(label.color).to eq('bar')
- end
- end
-
describe '#label_status_tooltip' do
let(:status) { 'unsubscribed'.inquiry }
@@ -291,4 +271,34 @@ RSpec.describe LabelsHelper do
expect(tooltip).to eq('This is an image')
end
end
+
+ describe '#show_labels_full_path?' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ context 'within a project' do
+ it 'returns truthy' do
+ expect(show_labels_full_path?(project, nil)).to be_truthy
+ end
+ end
+
+ context 'within a subgroup' do
+ it 'returns truthy' do
+ expect(show_labels_full_path?(nil, subgroup)).to be_truthy
+ end
+ end
+
+ context 'within a group' do
+ it 'returns falsey' do
+ expect(show_labels_full_path?(nil, group)).to be_falsey
+ end
+ end
+
+ context 'within the admin area' do
+ it 'returns falsey' do
+ expect(show_labels_full_path?(nil, nil)).to be_falsey
+ end
+ end
+ end
end
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index 41511b65cc4..f9b3b535334 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -5,19 +5,19 @@ require "spec_helper"
RSpec.describe NotesHelper do
include RepoHelpers
- let(:owner) { create(:owner) }
- let(:group) { create(:group) }
- let(:project) { create(:project, namespace: group) }
- let(:maintainer) { create(:user) }
- let(:reporter) { create(:user) }
- let(:guest) { create(:user) }
-
- let(:owner_note) { create(:note, author: owner, project: project) }
- let(:maintainer_note) { create(:note, author: maintainer, project: project) }
- let(:reporter_note) { create(:note, author: reporter, project: project) }
+ let_it_be(:owner) { create(:owner) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+
+ let_it_be(:owner_note) { create(:note, author: owner, project: project) }
+ let_it_be(:maintainer_note) { create(:note, author: maintainer, project: project) }
+ let_it_be(:reporter_note) { create(:note, author: reporter, project: project) }
let!(:notes) { [owner_note, maintainer_note, reporter_note] }
- before do
+ before_all do
group.add_owner(owner)
project.add_maintainer(maintainer)
project.add_reporter(reporter)
@@ -72,14 +72,14 @@ RSpec.describe NotesHelper do
end
describe '#discussion_path' do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:anchor) { discussion.line_code }
context 'for a merge request discusion' do
- let(:merge_request) { create(:merge_request, source_project: project, target_project: project, importing: true) }
- let!(:merge_request_diff1) { merge_request.merge_request_diffs.create!(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
- let!(:merge_request_diff2) { merge_request.merge_request_diffs.create!(head_commit_sha: nil) }
- let!(:merge_request_diff3) { merge_request.merge_request_diffs.create!(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project, importing: true) }
+ let_it_be(:merge_request_diff1) { merge_request.merge_request_diffs.create!(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
+ let_it_be(:merge_request_diff2) { merge_request.merge_request_diffs.create!(head_commit_sha: nil) }
+ let_it_be(:merge_request_diff3) { merge_request.merge_request_diffs.create!(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
context 'for a diff discussion' do
context 'when the discussion is active' do
@@ -229,20 +229,18 @@ RSpec.describe NotesHelper do
end
it 'return project notes path for project snippet' do
- namespace = create(:namespace, path: 'nm')
- @project = create(:project, path: 'test', namespace: namespace)
+ @project = project
@snippet = create(:project_snippet, project: @project)
@noteable = @snippet
- expect(helper.notes_url).to eq("/nm/test/noteable/project_snippet/#{@noteable.id}/notes")
+ expect(helper.notes_url).to eq("/#{project.full_path}/noteable/project_snippet/#{@noteable.id}/notes")
end
it 'return project notes path for other noteables' do
- namespace = create(:namespace, path: 'nm')
- @project = create(:project, path: 'test', namespace: namespace)
+ @project = project
@noteable = create(:issue, project: @project)
- expect(helper.notes_url).to eq("/nm/test/noteable/issue/#{@noteable.id}/notes")
+ expect(helper.notes_url).to eq("/#{@project.full_path}/noteable/issue/#{@noteable.id}/notes")
end
end
@@ -254,19 +252,17 @@ RSpec.describe NotesHelper do
end
it 'return project notes path for project snippet' do
- namespace = create(:namespace, path: 'nm')
- @project = create(:project, path: 'test', namespace: namespace)
+ @project = project
note = create(:note_on_project_snippet, project: @project)
- expect(helper.note_url(note)).to eq("/nm/test/notes/#{note.id}")
+ expect(helper.note_url(note)).to eq("/#{project.full_path}/notes/#{note.id}")
end
it 'return project notes path for other noteables' do
- namespace = create(:namespace, path: 'nm')
- @project = create(:project, path: 'test', namespace: namespace)
+ @project = project
note = create(:note_on_issue, project: @project)
- expect(helper.note_url(note)).to eq("/nm/test/notes/#{note.id}")
+ expect(helper.note_url(note)).to eq("/#{project.full_path}/notes/#{note.id}")
end
end
@@ -279,8 +275,7 @@ RSpec.describe NotesHelper do
end
it 'returns namespace, project and note for project snippet' do
- namespace = create(:namespace, path: 'nm')
- @project = create(:project, path: 'test', namespace: namespace)
+ @project = project
@snippet = create(:project_snippet, project: @project)
@note = create(:note_on_personal_snippet)
@@ -288,8 +283,7 @@ RSpec.describe NotesHelper do
end
it 'returns namespace, project and note path for other noteables' do
- namespace = create(:namespace, path: 'nm')
- @project = create(:project, path: 'test', namespace: namespace)
+ @project = project
@note = create(:note_on_issue, project: @project)
expect(helper.form_resources).to eq([@project, @note])
@@ -297,7 +291,6 @@ RSpec.describe NotesHelper do
end
describe '#noteable_note_url' do
- let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
let(:note) { create(:note_on_issue, noteable: issue, project: project) }
diff --git a/spec/helpers/operations_helper_spec.rb b/spec/helpers/operations_helper_spec.rb
index 3dac2cf54dc..8d2fc643caa 100644
--- a/spec/helpers/operations_helper_spec.rb
+++ b/spec/helpers/operations_helper_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe OperationsHelper do
'url' => alerts_service.url,
'authorization_key' => nil,
'form_path' => project_service_path(project, alerts_service),
- 'alerts_setup_url' => help_page_path('user/project/integrations/generic_alerts.md', anchor: 'setting-up-generic-alerts'),
+ 'alerts_setup_url' => help_page_path('operations/incident_management/alert_integrations.md', anchor: 'generic-http-endpoint'),
'alerts_usage_url' => project_alert_management_index_path(project),
'prometheus_form_path' => project_service_path(project, prometheus_service),
'prometheus_reset_key_path' => reset_alerting_token_project_settings_operations_path(project),
@@ -145,7 +145,7 @@ RSpec.describe OperationsHelper do
subject { helper.operations_settings_data }
it 'returns the correct set of data' do
- is_expected.to eq(
+ is_expected.to include(
operations_settings_endpoint: project_settings_operations_path(project),
templates: '[]',
create_issue: 'false',
diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb
index 1917c851547..dacd386d01c 100644
--- a/spec/helpers/packages_helper_spec.rb
+++ b/spec/helpers/packages_helper_spec.rb
@@ -52,37 +52,14 @@ RSpec.describe PackagesHelper do
end
end
- describe 'packages_coming_soon_enabled?' do
- it 'returns false when the feature flag is disabled' do
- stub_feature_flags(packages_coming_soon: false)
+ describe 'composer_config_repository_name' do
+ let(:host) { Gitlab.config.gitlab.host }
+ let(:group_id) { 1 }
- expect(helper.packages_coming_soon_enabled?(project)).to eq(false)
- end
-
- it 'returns false when not on dev or gitlab.com' do
- expect(helper.packages_coming_soon_enabled?(project)).to eq(false)
- end
- end
-
- describe 'packages_coming_soon_data' do
- let_it_be(:group) { create(:group) }
-
- before do
- allow(Gitlab).to receive(:dev_env_or_com?) { true }
- end
-
- it 'returns the gitlab project on gitlab.com' do
- allow(Gitlab).to receive(:com?) { true }
-
- expect(helper.packages_coming_soon_data(project)).to include({ project_path: 'gitlab-org/gitlab' })
- end
-
- it 'returns the test project when not on gitlab.com' do
- expect(helper.packages_coming_soon_data(project)).to include({ project_path: 'gitlab-org/gitlab-test' })
- end
+ it 'returns a globally unique composer registry id' do
+ id = helper.composer_config_repository_name(group_id)
- it 'works correctly with a group' do
- expect(helper.packages_coming_soon_data(group)).to include({ project_path: 'gitlab-org/gitlab-test' })
+ expect(id).to eq("#{host}/#{group_id}")
end
end
end
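Per the expectation above, the new helper simply namespaces the group id under the instance host. A standalone sketch of that format (not GitLab's code; the host value is illustrative):

def composer_config_repository_name(host, group_id)
  "#{host}/#{group_id}"
end

puts composer_config_repository_name('gitlab.example.com', 1) # => gitlab.example.com/1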
diff --git a/spec/helpers/projects/alert_management_helper_spec.rb b/spec/helpers/projects/alert_management_helper_spec.rb
index 183f0438c35..83b89abde58 100644
--- a/spec/helpers/projects/alert_management_helper_spec.rb
+++ b/spec/helpers/projects/alert_management_helper_spec.rb
@@ -32,7 +32,9 @@ RSpec.describe Projects::AlertManagementHelper do
'populating-alerts-help-url' => 'http://test.host/help/operations/incident_management/index.md#enable-alert-management',
'empty-alert-svg-path' => match_asset_path('/assets/illustrations/alert-management-empty-state.svg'),
'user-can-enable-alert-management' => 'true',
- 'alert-management-enabled' => 'false'
+ 'alert-management-enabled' => 'false',
+ 'text-query': nil,
+ 'assignee-username-query': nil
)
end
end
diff --git a/spec/helpers/projects/incidents_helper_spec.rb b/spec/helpers/projects/incidents_helper_spec.rb
index 0affa67a902..7a8a6d5222f 100644
--- a/spec/helpers/projects/incidents_helper_spec.rb
+++ b/spec/helpers/projects/incidents_helper_spec.rb
@@ -9,18 +9,28 @@ RSpec.describe Projects::IncidentsHelper do
let(:project_path) { project.full_path }
let(:new_issue_path) { new_project_issue_path(project) }
let(:issue_path) { project_issues_path(project) }
+ let(:params) do
+ {
+ search: 'search text',
+ author_username: 'root',
+ assignee_username: 'max.power'
+ }
+ end
describe '#incidents_data' do
- subject(:data) { helper.incidents_data(project) }
+ subject(:data) { helper.incidents_data(project, params) }
it 'returns frontend configuration' do
- expect(data).to match(
+ expect(data).to include(
'project-path' => project_path,
'new-issue-path' => new_issue_path,
'incident-template-name' => 'incident',
'incident-type' => 'incident',
'issue-path' => issue_path,
- 'empty-list-svg-path' => match_asset_path('/assets/illustrations/incident-empty-state.svg')
+ 'empty-list-svg-path' => match_asset_path('/assets/illustrations/incident-empty-state.svg'),
+ 'text-query': 'search text',
+ 'author-username-query': 'root',
+ 'assignee-username-query': 'max.power'
)
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 2b345ff3ae6..f081cf225b1 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -5,16 +5,15 @@ require 'spec_helper'
RSpec.describe ProjectsHelper do
include ProjectForksHelper
- let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be_with_refind(:project_with_repo) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- describe '#project_incident_management_setting' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
+ before do
+ helper.instance_variable_set(:@project, project)
+ end
+ describe '#project_incident_management_setting' do
context 'when incident_management_setting exists' do
let(:project_incident_management_setting) do
create(:project_incident_management_setting, project: project)
@@ -40,20 +39,14 @@ RSpec.describe ProjectsHelper do
end
describe '#error_tracking_setting_project_json' do
- let(:project) { create(:project) }
-
context 'error tracking setting does not exist' do
- before do
- helper.instance_variable_set(:@project, project)
- end
-
it 'returns nil' do
expect(helper.error_tracking_setting_project_json).to be_nil
end
end
context 'error tracking setting exists' do
- let!(:error_tracking_setting) { create(:project_error_tracking_setting, project: project) }
+ let_it_be(:error_tracking_setting) { create(:project_error_tracking_setting, project: project) }
context 'api_url present' do
let(:json) do
@@ -65,24 +58,16 @@ RSpec.describe ProjectsHelper do
}.to_json
end
- before do
- helper.instance_variable_set(:@project, project)
- end
-
it 'returns error tracking json' do
expect(helper.error_tracking_setting_project_json).to eq(json)
end
end
context 'api_url not present' do
- before do
+ it 'returns nil' do
project.error_tracking_setting.api_url = nil
project.error_tracking_setting.enabled = false
- helper.instance_variable_set(:@project, project)
- end
-
- it 'returns nil' do
expect(helper.error_tracking_setting_project_json).to be_nil
end
end
@@ -98,8 +83,7 @@ RSpec.describe ProjectsHelper do
end
describe "can_change_visibility_level?" do
- let(:project) { create(:project) }
- let(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
+ let_it_be(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
let(:forked_project) { fork_project(project, user) }
it "returns false if there are no appropriate permissions" do
@@ -142,8 +126,7 @@ RSpec.describe ProjectsHelper do
end
describe '#can_disable_emails?' do
- let(:project) { create(:project) }
- let(:user) { create(:project_member, :maintainer, user: create(:user), project: project).user }
+ let_it_be(:user) { create(:project_member, :maintainer, user: create(:user), project: project).user }
it 'returns true for the project owner' do
allow(helper).to receive(:can?).with(project.owner, :set_emails_disabled, project) { true }
@@ -166,11 +149,7 @@ RSpec.describe ProjectsHelper do
end
describe "readme_cache_key" do
- let(:project) { create(:project, :repository) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
+ let(:project) { project_with_repo }
it "returns a valid cach key" do
expect(helper.send(:readme_cache_key)).to eq("#{project.full_path}-#{project.commit.id}-readme")
@@ -184,8 +163,7 @@ RSpec.describe ProjectsHelper do
end
describe "#project_list_cache_key", :clean_gitlab_redis_shared_state do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ let(:project) { project_with_repo }
before do
allow(helper).to receive(:current_user).and_return(user)
@@ -249,8 +227,6 @@ RSpec.describe ProjectsHelper do
describe '#load_pipeline_status' do
it 'loads the pipeline status in batch' do
- project = build(:project)
-
helper.load_pipeline_status([project])
# Skip lazy loading of the `pipeline_status` attribute
pipeline_status = project.instance_variable_get('@pipeline_status')
@@ -260,8 +236,6 @@ RSpec.describe ProjectsHelper do
end
describe '#show_no_ssh_key_message?' do
- let(:user) { create(:user) }
-
before do
allow(helper).to receive(:current_user).and_return(user)
end
@@ -282,8 +256,6 @@ RSpec.describe ProjectsHelper do
end
describe '#show_no_password_message?' do
- let(:user) { create(:user) }
-
before do
allow(helper).to receive(:current_user).and_return(user)
end
@@ -424,7 +396,6 @@ RSpec.describe ProjectsHelper do
before do
allow(helper).to receive(:current_user).and_return(user)
- helper.instance_variable_set(:@project, project)
end
context 'when there is no current_user' do
@@ -444,9 +415,6 @@ RSpec.describe ProjectsHelper do
end
describe '#get_project_nav_tabs' do
- let_it_be(:user) { create(:user) }
- let(:project) { create(:project) }
-
before do
allow(helper).to receive(:can?) { true }
end
@@ -524,7 +492,14 @@ RSpec.describe ProjectsHelper do
subject { helper.send(:can_view_operations_tab?, user, project) }
- [:read_environment, :read_cluster, :metrics_dashboard].each do |ability|
+ [
+ :metrics_dashboard,
+ :read_alert_management_alert,
+ :read_environment,
+ :read_issue,
+ :read_sentry_issue,
+ :read_cluster
+ ].each do |ability|
it 'includes operations tab' do
allow(helper).to receive(:can?).and_return(false)
allow(helper).to receive(:can?).with(user, ability, project).and_return(true)
@@ -536,7 +511,6 @@ RSpec.describe ProjectsHelper do
describe '#show_projects' do
let(:projects) do
- create(:project)
Project.all
end
@@ -561,8 +535,8 @@ RSpec.describe ProjectsHelper do
end
end
- describe('#push_to_create_project_command') do
- let(:user) { create(:user, username: 'john') }
+ describe '#push_to_create_project_command' do
+ let(:user) { build_stubbed(:user, username: 'john') }
it 'returns the command to push to create project over HTTP' do
allow(Gitlab::CurrentSettings.current_application_settings).to receive(:enabled_git_access_protocol) { 'http' }
@@ -578,8 +552,6 @@ RSpec.describe ProjectsHelper do
end
describe '#any_projects?' do
- let!(:project) { create(:project) }
-
it 'returns true when projects will be returned' do
expect(helper.any_projects?(Project.all)).to eq(true)
end
@@ -609,7 +581,7 @@ RSpec.describe ProjectsHelper do
end
describe '#git_user_name' do
- let(:user) { double(:user, name: 'John "A" Doe53') }
+ let(:user) { build_stubbed(:user, name: 'John "A" Doe53') }
before do
allow(helper).to receive(:current_user).and_return(user)
@@ -632,8 +604,6 @@ RSpec.describe ProjectsHelper do
end
context 'user logged in' do
- let(:user) { create(:user) }
-
before do
allow(helper).to receive(:current_user).and_return(user)
end
@@ -658,7 +628,6 @@ RSpec.describe ProjectsHelper do
end
describe 'show_xcode_link' do
- let!(:project) { create(:project) }
let(:mac_ua) { 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36' }
let(:ios_ua) { 'Mozilla/5.0 (iPad; CPU OS 5_1_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3' }
@@ -799,7 +768,7 @@ RSpec.describe ProjectsHelper do
describe '#show_auto_devops_implicitly_enabled_banner?' do
using RSpec::Parameterized::TableSyntax
- let(:user) { create(:user) }
+ let_it_be_with_reload(:project_with_auto_devops) { create(:project, :repository, :auto_devops) }
let(:feature_visibilities) do
{
@@ -873,9 +842,9 @@ RSpec.describe ProjectsHelper do
with_them do
let(:project) do
if project_setting.nil?
- create(:project, :repository)
+ project_with_repo
else
- create(:project, :repository, :auto_devops)
+ project_with_auto_devops
end
end
@@ -896,14 +865,8 @@ RSpec.describe ProjectsHelper do
end
describe '#can_import_members?' do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
let(:owner) { project.owner }
- before do
- helper.instance_variable_set(:@project, project)
- end
-
it 'returns false if user cannot admin_project_member' do
allow(helper).to receive(:current_user) { user }
expect(helper.can_import_members?).to eq false
@@ -916,12 +879,6 @@ RSpec.describe ProjectsHelper do
end
describe '#metrics_external_dashboard_url' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
-
context 'metrics_setting exists' do
it 'returns external_dashboard_url' do
metrics_setting = create(:project_metrics_setting, project: project)
@@ -938,12 +895,6 @@ RSpec.describe ProjectsHelper do
end
describe '#grafana_integration_url' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
-
subject { helper.grafana_integration_url }
it { is_expected.to eq(nil) }
@@ -956,12 +907,6 @@ RSpec.describe ProjectsHelper do
end
describe '#grafana_integration_token' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
-
subject { helper.grafana_integration_masked_token }
it { is_expected.to eq(nil) }
@@ -974,12 +919,6 @@ RSpec.describe ProjectsHelper do
end
describe '#grafana_integration_enabled?' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
-
subject { helper.grafana_integration_enabled? }
it { is_expected.to eq(nil) }
@@ -992,7 +931,6 @@ RSpec.describe ProjectsHelper do
end
describe '#project_license_name(project)', :request_store do
- let_it_be(:project) { create(:project) }
let_it_be(:repository) { project.repository }
subject { project_license_name(project) }
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index f10a2ed8e60..704e8dc40cb 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -64,6 +64,7 @@ RSpec.describe ReleasesHelper do
describe '#data_for_edit_release_page' do
it 'has the needed data to display the "edit release" page' do
keys = %i(project_id
+ project_path
tag_name
markdown_preview_path
markdown_docs_path
@@ -80,6 +81,7 @@ RSpec.describe ReleasesHelper do
describe '#data_for_new_release_page' do
it 'has the needed data to display the "new release" page' do
keys = %i(project_id
+ project_path
releases_page_path
markdown_preview_path
markdown_docs_path
@@ -92,5 +94,15 @@ RSpec.describe ReleasesHelper do
expect(helper.data_for_new_release_page.keys).to match_array(keys)
end
end
+
+ describe '#data_for_show_page' do
+ it 'has the needed data to display the individual "release" page' do
+ keys = %i(project_id
+ project_path
+ tag_name)
+
+ expect(helper.data_for_show_page.keys).to match_array(keys)
+ end
+ end
end
end
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 594c5c11994..6fe071521cd 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe SearchHelper do
end
it "includes Help sections" do
- expect(search_autocomplete_opts("hel").size).to eq(9)
+ expect(search_autocomplete_opts("hel").size).to eq(8)
end
it "includes default sections" do
@@ -73,7 +73,7 @@ RSpec.describe SearchHelper do
expect(result.keys).to match_array(%i[category id label url avatar_url])
end
- it 'includes the first 5 of the users recent issues' do
+ it "includes the user's recently viewed issues" do
recent_issues = instance_double(::Gitlab::Search::RecentIssues)
expect(::Gitlab::Search::RecentIssues).to receive(:new).with(user: user).and_return(recent_issues)
project1 = create(:project, :with_avatar, namespace: user.namespace)
@@ -81,13 +81,11 @@ RSpec.describe SearchHelper do
issue1 = create(:issue, title: 'issue 1', project: project1)
issue2 = create(:issue, title: 'issue 2', project: project2)
- other_issues = create_list(:issue, 5)
-
- expect(recent_issues).to receive(:search).with('the search term').and_return(Issue.id_in_ordered([issue1.id, issue2.id, *other_issues.map(&:id)]))
+ expect(recent_issues).to receive(:search).with('the search term').and_return(Issue.id_in_ordered([issue1.id, issue2.id]))
results = search_autocomplete_opts("the search term")
- expect(results.count).to eq(5)
+ expect(results.count).to eq(2)
expect(results[0]).to include({
category: 'Recent issues',
@@ -106,7 +104,7 @@ RSpec.describe SearchHelper do
})
end
- it 'includes the first 5 of the users recent merge requests' do
+ it "includes the user's recently viewed merge requests" do
recent_merge_requests = instance_double(::Gitlab::Search::RecentMergeRequests)
expect(::Gitlab::Search::RecentMergeRequests).to receive(:new).with(user: user).and_return(recent_merge_requests)
project1 = create(:project, :with_avatar, namespace: user.namespace)
@@ -114,13 +112,11 @@ RSpec.describe SearchHelper do
merge_request1 = create(:merge_request, :unique_branches, title: 'Merge request 1', target_project: project1, source_project: project1)
merge_request2 = create(:merge_request, :unique_branches, title: 'Merge request 2', target_project: project2, source_project: project2)
- other_merge_requests = create_list(:merge_request, 5)
-
- expect(recent_merge_requests).to receive(:search).with('the search term').and_return(MergeRequest.id_in_ordered([merge_request1.id, merge_request2.id, *other_merge_requests.map(&:id)]))
+ expect(recent_merge_requests).to receive(:search).with('the search term').and_return(MergeRequest.id_in_ordered([merge_request1.id, merge_request2.id]))
results = search_autocomplete_opts("the search term")
- expect(results.count).to eq(5)
+ expect(results.count).to eq(2)
expect(results[0]).to include({
category: 'Recent merge requests',
@@ -357,14 +353,6 @@ RSpec.describe SearchHelper do
describe '#show_user_search_tab?' do
subject { show_user_search_tab? }
- context 'when users_search feature is disabled' do
- before do
- stub_feature_flags(users_search: false)
- end
-
- it { is_expected.to eq(false) }
- end
-
context 'when project search' do
before do
@project = :some_project
@@ -399,4 +387,94 @@ RSpec.describe SearchHelper do
end
end
end
+
+ describe '#repository_ref' do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:params) { { repository_ref: 'the-repository-ref-param' } }
+
+ subject { repository_ref(project) }
+
+ it { is_expected.to eq('the-repository-ref-param') }
+
+ context 'when the param :repository_ref is not set' do
+ let(:params) { { repository_ref: nil } }
+
+ it { is_expected.to eq(project.default_branch) }
+ end
+
+ context 'when the repository_ref param is a number' do
+ let(:params) { { repository_ref: 111111 } }
+
+ it { is_expected.to eq('111111') }
+ end
+ end
+
+ describe '#highlight_and_truncate_issue' do
+ let(:description) { 'hello world' }
+ let(:issue) { create(:issue, description: description) }
+ let(:user) { create(:user) }
+
+ before do
+ allow(self).to receive(:current_user).and_return(user)
+ end
+
+ subject { highlight_and_truncate_issue(issue, 'test', {}) }
+
+ context 'when description is not present' do
+ let(:description) { nil }
+
+ it 'does nothing' do
+ expect(self).not_to receive(:simple_search_highlight_and_truncate)
+
+ subject
+ end
+ end
+
+ context 'when description is present' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:description, :expected) do
+ 'test' | '<span class="gl-text-black-normal gl-font-weight-bold">test</span>'
+ '<span style="color: blue;">this test should not be blue</span>' | '<span>this <span class="gl-text-black-normal gl-font-weight-bold">test</span> should not be blue</span>'
+ '<a href="#" onclick="alert(\'XSS\')">Click Me test</a>' | '<a href="#">Click Me <span class="gl-text-black-normal gl-font-weight-bold">test</span></a>'
+ '<script type="text/javascript">alert(\'Another XSS\');</script> test' | ' <span class="gl-text-black-normal gl-font-weight-bold">test</span>'
+ 'Lorem test ipsum dolor sit amet, consectetuer adipiscing elit. Aenean commodo ligula eget dolor. Aenean massa. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Donec quam felis, ultricies nec, pellentesque eu, pretium quis, sem. Nulla consequat massa quis enim. Donec.' | 'Lorem <span class="gl-text-black-normal gl-font-weight-bold">test</span> ipsum dolor sit amet, consectetuer adipiscing elit. Aenean commodo ligula eget dolor. Aenean massa. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Don...'
+ end
+
+ with_them do
+ it 'sanitizes, truncates, and highlights the search term' do
+ expect(subject).to eq(expected)
+ end
+ end
+ end
+ end
+
+ describe '#search_service' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { search_service }
+
+ before do
+ allow(self).to receive(:current_user).and_return(:the_current_user)
+ end
+
+ where(:confidential, :expected) do
+ '0' | false
+ '1' | true
+ 'yes' | true
+ 'no' | false
+ true | true
+ false | false
+ end
+
+ let(:params) { { confidential: confidential } }
+
+ with_them do
+ it 'transforms confidentiality param' do
+ expect(::SearchService).to receive(:new).with(:the_current_user, { confidential: expected })
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb
index a3244bec56f..5a3c8e37e8c 100644
--- a/spec/helpers/snippets_helper_spec.rb
+++ b/spec/helpers/snippets_helper_spec.rb
@@ -63,32 +63,6 @@ RSpec.describe SnippetsHelper do
end
end
- describe '#snippet_embed_tag' do
- subject { snippet_embed_tag(snippet) }
-
- context 'personal snippets' do
- let(:snippet) { public_personal_snippet }
-
- context 'public' do
- it 'returns a script tag with the snippet full url' do
- expect(subject).to eq(script_embed("http://test.host/-/snippets/#{snippet.id}"))
- end
- end
- end
-
- context 'project snippets' do
- let(:snippet) { public_project_snippet }
-
- it 'returns a script tag with the snippet full url' do
- expect(subject).to eq(script_embed("http://test.host/#{snippet.project.path_with_namespace}/-/snippets/#{snippet.id}"))
- end
- end
-
- def script_embed(url)
- "<script src=\"#{url}.js\"></script>"
- end
- end
-
describe '#download_raw_snippet_button' do
subject { download_raw_snippet_button(snippet) }
@@ -142,28 +116,4 @@ RSpec.describe SnippetsHelper do
end
end
end
-
- describe '#snippet_embed_input' do
- subject { snippet_embed_input(snippet) }
-
- context 'with PersonalSnippet' do
- let(:snippet) { public_personal_snippet }
-
- it 'returns the input component' do
- expect(subject).to eq embed_input(snippet_url(snippet))
- end
- end
-
- context 'with ProjectSnippet' do
- let(:snippet) { public_project_snippet }
-
- it 'returns the input component' do
- expect(subject).to eq embed_input(project_snippet_url(snippet.project, snippet))
- end
- end
-
- def embed_input(url)
- "<input type=\"text\" readonly=\"readonly\" class=\"js-snippet-url-area snippet-embed-input form-control\" data-url=\"#{url}\" value=\"<script src=&quot;#{url}.js&quot;></script>\" autocomplete=\"off\"></input>"
- end
- end
end
diff --git a/spec/helpers/startupjs_helper_spec.rb b/spec/helpers/startupjs_helper_spec.rb
new file mode 100644
index 00000000000..6d61c38d4a5
--- /dev/null
+++ b/spec/helpers/startupjs_helper_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe StartupjsHelper do
+ describe '#page_startup_graphql_calls' do
+ let(:query_location) { 'repository/path_last_commit' }
+ let(:query_content) do
+ File.read(File.join(Rails.root, 'app/graphql/queries', "#{query_location}.query.graphql"))
+ end
+
+ it 'returns an array containing GraphQL Page Startup Calls' do
+ helper.add_page_startup_graphql_call(query_location, { ref: 'foo' })
+
+ startup_graphql_calls = helper.page_startup_graphql_calls
+
+ expect(startup_graphql_calls).to include({ query: query_content, variables: { ref: 'foo' } })
+ end
+ end
+end
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index 97b6802dde9..b5d356b985c 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -167,31 +167,60 @@ RSpec.describe TreeHelper do
end
end
- describe '#vue_ide_link_data' do
+ describe '#web_ide_button_data' do
+ let(:blob) { project.repository.blob_at('refs/heads/master', @path) }
+
before do
+ @path = ''
+ @project = project
+ @ref = sha
+
allow(helper).to receive(:current_user).and_return(nil)
allow(helper).to receive(:can_collaborate_with_project?).and_return(true)
allow(helper).to receive(:can?).and_return(true)
end
- subject { helper.vue_ide_link_data(project, sha) }
+ subject { helper.web_ide_button_data(blob: blob) }
it 'returns a list of attributes related to the project' do
expect(subject).to include(
- ide_base_path: project.full_path,
+ project_path: project.full_path,
+ ref: sha,
+
+ is_fork: false,
needs_to_fork: false,
+ gitpod_enabled: false,
+ is_blob: false,
+
+ show_edit_button: false,
show_web_ide_button: true,
show_gitpod_button: false,
- gitpod_url: "",
- gitpod_enabled: nil
+
+ edit_url: '',
+ web_ide_url: "/-/ide/project/#{project.full_path}/edit/#{sha}",
+ gitpod_url: ''
)
end
+ context 'a blob is passed' do
+ before do
+ @path = 'README.md'
+ end
+
+ it 'returns edit url and web ide url for the blob' do
+ expect(subject).to include(
+ show_edit_button: true,
+ edit_url: "/#{project.full_path}/-/edit/#{sha}/#{@path}",
+ web_ide_url: "/-/ide/project/#{project.full_path}/edit/#{sha}/-/#{@path}"
+ )
+ end
+ end
+
context 'user does not have write access but a personal fork exists' do
include ProjectForksHelper
let_it_be(:user) { create(:user) }
- let!(:forked_project) { create(:project, :repository, namespace: user.namespace) }
+ let(:forked_project) { create(:project, :repository, namespace: user.namespace) }
before do
project.add_guest(user)
@@ -200,9 +229,49 @@ RSpec.describe TreeHelper do
allow(helper).to receive(:current_user).and_return(user)
end
- it 'includes ide_base_path: forked_project.full_path' do
+ it 'includes forked project path as project_path' do
+ expect(subject).to include(
+ project_path: forked_project.full_path,
+ is_fork: true,
+ needs_to_fork: false,
+ show_edit_button: false,
+ web_ide_url: "/-/ide/project/#{forked_project.full_path}/edit/#{sha}"
+ )
+ end
+
+ context 'a blob is passed' do
+ before do
+ @path = 'README.md'
+ end
+
+ it 'returns edit url and web ide for the blob in the fork' do
+ expect(subject).to include(
+ is_blob: true,
+ show_edit_button: true,
+ # edit urls are automatically redirected to the fork
+ edit_url: "/#{project.full_path}/-/edit/#{sha}/#{@path}",
+ web_ide_url: "/-/ide/project/#{forked_project.full_path}/edit/#{sha}/-/#{@path}"
+ )
+ end
+ end
+ end
+
+ context 'for archived project' do
+ before do
+ allow(helper).to receive(:can_collaborate_with_project?).and_return(false)
+ allow(helper).to receive(:can?).and_return(false)
+
+ project.update!(archived: true)
+
+ @path = 'README.md'
+ end
+
+ it 'does not show any buttons' do
expect(subject).to include(
- ide_base_path: forked_project.full_path
+ is_blob: true,
+ show_edit_button: false,
+ show_web_ide_button: false,
+ show_gitpod_button: false
)
end
end
@@ -216,11 +285,32 @@ RSpec.describe TreeHelper do
allow(helper).to receive(:current_user).and_return(user)
end
- it 'includes ide_base_path: project.full_path' do
+ it 'includes original project path as project_path' do
expect(subject).to include(
- ide_base_path: project.full_path
+ project_path: project.full_path,
+
+ is_fork: false,
+ needs_to_fork: false,
+
+ show_edit_button: false,
+ web_ide_url: "/-/ide/project/#{project.full_path}/edit/#{sha}"
)
end
+
+ context 'a blob is passed' do
+ before do
+ @path = 'README.md'
+ end
+
+ it 'returns edit url and web ide url for the blob' do
+ expect(subject).to include(
+ is_blob: true,
+ show_edit_button: true,
+ edit_url: "/#{project.full_path}/-/edit/#{sha}/#{@path}",
+ web_ide_url: "/-/ide/project/#{project.full_path}/edit/#{sha}/-/#{@path}"
+ )
+ end
+ end
end
context 'gitpod feature is enabled' do
diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/user_callouts_helper_spec.rb
index a42be3c87fb..bcb0b5c51e7 100644
--- a/spec/helpers/user_callouts_helper_spec.rb
+++ b/spec/helpers/user_callouts_helper_spec.rb
@@ -139,4 +139,26 @@ RSpec.describe UserCalloutsHelper do
helper.render_flash_user_callout(:warning, 'foo', 'bar')
end
end
+
+ describe '.show_feature_flags_new_version?' do
+ subject { helper.show_feature_flags_new_version? }
+
+ let(:user) { create(:user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ context 'when the feature flags new version info has not been dismissed' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the feature flags new version has been dismissed' do
+ before do
+ create(:user_callout, user: user, feature_name: described_class::FEATURE_FLAGS_NEW_VERSION)
+ end
+
+ it { is_expected.to be_falsy }
+ end
+ end
end
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index 8dfdb23c64b..c9dc3fcff3f 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -126,6 +126,16 @@ RSpec.describe UsersHelper do
end
end
+ context 'with a pending approval user' do
+ it 'returns the pending approval badge' do
+ blocked_pending_approval_user = create(:user, :blocked_pending_approval)
+
+ badges = helper.user_badges_in_admin_section(blocked_pending_approval_user)
+
+ expect(filter_ee_badges(badges)).to eq([text: 'Pending approval', variant: 'info'])
+ end
+ end
+
context 'with an admin user' do
it "returns the admin badge" do
admin_user = create(:admin)
@@ -179,6 +189,20 @@ RSpec.describe UsersHelper do
end
end
+ describe '#can_force_email_confirmation?' do
+ subject { helper.can_force_email_confirmation?(user) }
+
+ context 'for a user that is already confirmed' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'for a user that is not confirmed' do
+ let(:user) { create(:user, :unconfirmed) }
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
describe '#work_information' do
subject { helper.work_information(user) }
diff --git a/spec/helpers/visibility_level_helper_spec.rb b/spec/helpers/visibility_level_helper_spec.rb
index 7ef911131ba..cd1fc70bbc1 100644
--- a/spec/helpers/visibility_level_helper_spec.rb
+++ b/spec/helpers/visibility_level_helper_spec.rb
@@ -47,13 +47,6 @@ RSpec.describe VisibilityLevelHelper do
.to match /group/i
end
end
-
- context 'called with a Snippet' do
- it 'delegates snippets to #snippet_visibility_level_description' do
- expect(visibility_level_description(Gitlab::VisibilityLevel::INTERNAL, project_snippet))
- .to match /snippet/i
- end
- end
end
describe "#project_visibility_level_description" do
@@ -68,23 +61,6 @@ RSpec.describe VisibilityLevelHelper do
end
end
- describe "#snippet_visibility_level_description" do
- it 'describes visibility only for me' do
- expect(snippet_visibility_level_description(Gitlab::VisibilityLevel::PRIVATE, personal_snippet))
- .to eq _('The snippet is visible only to me.')
- end
-
- it 'describes visibility for project members' do
- expect(snippet_visibility_level_description(Gitlab::VisibilityLevel::PRIVATE, project_snippet))
- .to eq _('The snippet is visible only to project members.')
- end
-
- it 'defaults to personal snippet' do
- expect(snippet_visibility_level_description(Gitlab::VisibilityLevel::PRIVATE))
- .to eq _('The snippet is visible only to me.')
- end
- end
-
describe "disallowed_visibility_level?" do
describe "forks" do
let(:project) { create(:project, :internal) }
diff --git a/spec/helpers/whats_new_helper_spec.rb b/spec/helpers/whats_new_helper_spec.rb
index db880163454..80d4ca8ddea 100644
--- a/spec/helpers/whats_new_helper_spec.rb
+++ b/spec/helpers/whats_new_helper_spec.rb
@@ -3,20 +3,49 @@
require 'spec_helper'
RSpec.describe WhatsNewHelper do
- describe '#whats_new_most_recent_release_items' do
- let(:fixture_dir_glob) { Dir.glob(File.join('spec', 'fixtures', 'whats_new', '*.yml')) }
+ describe '#whats_new_storage_key' do
+ subject { helper.whats_new_storage_key }
- it 'returns json from the most recent file' do
- allow(Dir).to receive(:glob).with(Rails.root.join('data', 'whats_new', '*.yml')).and_return(fixture_dir_glob)
+ before do
+ allow(helper).to receive(:whats_new_most_recent_version).and_return(version)
+ end
+
+ context 'when version exists' do
+ let(:version) { '84.0' }
+
+ it { is_expected.to eq('display-whats-new-notification-84.0') }
+ end
+
+ context 'when recent release items do NOT exist' do
+ let(:version) { nil }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#whats_new_most_recent_release_items_count' do
+ subject { helper.whats_new_most_recent_release_items_count }
- expect(helper.whats_new_most_recent_release_items).to include({ title: "bright and sunshinin' day" }.to_json)
+ context 'when recent release items exist' do
+ let(:fixture_dir_glob) { Dir.glob(File.join('spec', 'fixtures', 'whats_new', '*.yml')) }
+
+ it 'returns the count from the most recent file' do
+ expect(Dir).to receive(:glob).with(Rails.root.join('data', 'whats_new', '*.yml')).and_return(fixture_dir_glob)
+
+ expect(subject).to eq(1)
+ end
end
- it 'fails gracefully and logs an error' do
- allow(YAML).to receive(:load_file).and_raise
+ context 'when recent release items do NOT exist' do
+ before do
+ allow(YAML).to receive(:safe_load).and_raise
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ end
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- expect(helper.whats_new_most_recent_release_items).to eq(''.to_json)
+ it 'fails gracefully and logs an error' do
+ expect(subject).to be_nil
+ end
end
end
end
diff --git a/spec/helpers/wiki_helper_spec.rb b/spec/helpers/wiki_helper_spec.rb
index 65a52412f8c..45e1859893f 100644
--- a/spec/helpers/wiki_helper_spec.rb
+++ b/spec/helpers/wiki_helper_spec.rb
@@ -54,14 +54,18 @@ RSpec.describe WikiHelper do
end
describe '#wiki_attachment_upload_url' do
- it 'returns the upload endpoint for project wikis' do
- @wiki = build_stubbed(:project_wiki)
+ let_it_be(:wiki) { build_stubbed(:project_wiki) }
+
+ before do
+ @wiki = wiki
+ end
+ it 'returns the upload endpoint for project wikis' do
expect(helper.wiki_attachment_upload_url).to end_with("/api/v4/projects/#{@wiki.project.id}/wikis/attachments")
end
it 'raises an exception for unsupported wiki containers' do
- @wiki = Wiki.new(User.new)
+ allow(wiki).to receive(:container).and_return(User.new)
expect do
helper.wiki_attachment_upload_url
@@ -131,7 +135,8 @@ RSpec.describe WikiHelper do
'wiki-format' => :markdown,
'wiki-title-size' => 9,
'wiki-content-size' => 4,
- 'wiki-directory-nest-level' => 2
+ 'wiki-directory-nest-level' => 2,
+ 'wiki-container-type' => 'Project'
)
end
diff --git a/spec/initializers/sidekiq_spec.rb b/spec/initializers/sidekiq_spec.rb
new file mode 100644
index 00000000000..e34f59c3427
--- /dev/null
+++ b/spec/initializers/sidekiq_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'sidekiq' do
+ describe 'enable_reliable_fetch?' do
+ subject { enable_reliable_fetch? }
+
+ context 'when gitlab_sidekiq_reliable_fetcher is enabled' do
+ before do
+ stub_feature_flags(gitlab_sidekiq_reliable_fetcher: true)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when gitlab_sidekiq_reliable_fetcher is disabled' do
+ before do
+ stub_feature_flags(gitlab_sidekiq_reliable_fetcher: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe 'enable_semi_reliable_fetch_mode?' do
+ subject { enable_semi_reliable_fetch_mode? }
+
+ context 'when gitlab_sidekiq_enable_semi_reliable_fetcher is enabled' do
+ before do
+ stub_feature_flags(gitlab_sidekiq_enable_semi_reliable_fetcher: true)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when gitlab_sidekiq_enable_semi_reliable_fetcher is disabled' do
+ before do
+ stub_feature_flags(gitlab_sidekiq_enable_semi_reliable_fetcher: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+end
diff --git a/spec/lib/api/entities/snippet_spec.rb b/spec/lib/api/entities/snippet_spec.rb
index 068851f7f6c..090f09c9b61 100644
--- a/spec/lib/api/entities/snippet_spec.rb
+++ b/spec/lib/api/entities/snippet_spec.rb
@@ -21,16 +21,6 @@ RSpec.describe ::API::Entities::Snippet do
it { expect(subject[:visibility]).to eq snippet.visibility }
it { expect(subject).to include(:author) }
- context 'with snippet_multiple_files feature disabled' do
- before do
- stub_feature_flags(snippet_multiple_files: false)
- end
-
- it 'does not return files' do
- expect(subject).not_to include(:files)
- end
- end
-
describe 'file_name' do
it 'returns attribute from repository' do
expect(subject[:file_name]).to eq snippet.blobs.first.path
@@ -77,14 +67,6 @@ RSpec.describe ::API::Entities::Snippet do
let(:blob) { snippet.blobs.first }
let(:ref) { blob.repository.root_ref }
- context 'when repository does not exist' do
- it 'does not include the files attribute' do
- allow(snippet).to receive(:repository_exists?).and_return(false)
-
- expect(subject).not_to include(:files)
- end
- end
-
shared_examples 'snippet files' do
let(:file) { subject[:files].first }
@@ -99,6 +81,14 @@ RSpec.describe ::API::Entities::Snippet do
it 'has the raw url' do
expect(file[:raw_url]).to match(raw_url)
end
+
+ context 'when repository does not exist' do
+ it 'returns empty array' do
+ allow(snippet.repository).to receive(:empty?).and_return(true)
+
+ expect(subject[:files]).to be_empty
+ end
+ end
end
context 'with PersonalSnippet' do
diff --git a/spec/lib/api/github/entities_spec.rb b/spec/lib/api/github/entities_spec.rb
new file mode 100644
index 00000000000..00ea60c5d65
--- /dev/null
+++ b/spec/lib/api/github/entities_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Github::Entities do
+ describe API::Github::Entities::User do
+ let(:user) { create(:user, username: username) }
+ let(:username) { 'name_of_user' }
+ let(:gitlab_protocol_and_host) { "#{Gitlab.config.gitlab.protocol}://#{Gitlab.config.gitlab.host}" }
+ let(:expected_user_url) { "#{gitlab_protocol_and_host}/#{username}" }
+ let(:entity) { described_class.new(user) }
+
+ subject { entity.as_json }
+
+ specify :aggregate_failures do
+ expect(subject[:id]).to eq user.id
+ expect(subject[:login]).to eq 'name_of_user'
+ expect(subject[:url]).to eq expected_user_url
+ expect(subject[:html_url]).to eq expected_user_url
+ expect(subject[:avatar_url]).to include('https://www.gravatar.com/avatar')
+ end
+
+ context 'with avatar' do
+ let(:user) { create(:user, :with_avatar, username: username) }
+
+ specify do
+ expect(subject[:avatar_url]).to include("#{gitlab_protocol_and_host}/uploads/-/system/user/avatar/")
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
index ccf96bcbad6..6d06fc3618d 100644
--- a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
shared_examples 'executing redirect' do
it 'redirects to package registry' do
+ expect(helper).to receive(:track_event).with('npm_request_forward').once
expect(helper).to receive(:registry_url).once
expect(helper).to receive(:redirect).once
expect(helper).to receive(:fallback).never
@@ -63,6 +64,7 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
let(:package_type) { pkg_type }
it 'raises an error' do
+ allow(helper).to receive(:track_event)
expect { subject }.to raise_error(ArgumentError, "Can't build registry_url for package_type #{package_type}")
end
end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 51a45dff6a4..8e738af0fa3 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -191,41 +191,32 @@ RSpec.describe API::Helpers do
describe '#increment_unique_values' do
let(:value) { '9f302fea-f828-4ca9-aef4-e10bd723c0b3' }
- let(:event_name) { 'my_event' }
+ let(:event_name) { 'g_compliance_dashboard' }
let(:unknown_event) { 'unknown' }
let(:feature) { "usage_data_#{event_name}" }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
context 'with feature enabled' do
before do
stub_feature_flags(feature => true)
end
it 'tracks redis hll event' do
- stub_application_setting(usage_ping_enabled: true)
-
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(value, event_name)
subject.increment_unique_values(event_name, value)
end
- it 'does not track event usage ping is not enabled' do
- stub_application_setting(usage_ping_enabled: false)
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
-
- subject.increment_unique_values(event_name, value)
- end
-
it 'logs an exception for unknown event' do
- stub_application_setting(usage_ping_enabled: true)
-
expect(Gitlab::AppLogger).to receive(:warn).with("Redis tracking event failed for event: #{unknown_event}, message: Unknown event #{unknown_event}")
subject.increment_unique_values(unknown_event, value)
end
it 'does not track event for nil values' do
- stub_application_setting(usage_ping_enabled: true)
-
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
subject.increment_unique_values(unknown_event, nil)
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index c2dbaac7f15..45cc73974d6 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Backup::Files do
let(:timestamp) { Time.utc(2017, 3, 22) }
around do |example|
- Timecop.freeze(timestamp) { example.run }
+ travel_to(timestamp) { example.run }
end
describe 'folders with permission' do
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb
new file mode 100644
index 00000000000..9c139e9f954
--- /dev/null
+++ b/spec/lib/backup/repositories_spec.rb
@@ -0,0 +1,308 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::Repositories do
+ let(:progress) { StringIO.new }
+
+ subject { described_class.new(progress) }
+
+ before do
+ allow(progress).to receive(:puts)
+ allow(progress).to receive(:print)
+
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:progress).and_return(progress)
+ end
+ end
+
+ describe '#dump' do
+ let_it_be(:projects) { create_list(:project, 5, :repository) }
+
+ RSpec.shared_examples 'creates repository bundles' do
+ specify :aggregate_failures do
+ # Add data to the wiki, design repositories, and snippets, so they will be included in the dump.
+ create(:wiki_page, container: project)
+ create(:design, :with_file, issue: create(:issue, project: project))
+ project_snippet = create(:project_snippet, :repository, project: project)
+ personal_snippet = create(:personal_snippet, :repository, author: project.owner)
+
+ subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
+
+ expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
+ expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki' + '.bundle'))
+ expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design' + '.bundle'))
+ expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle'))
+ expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle'))
+ end
+ end
+
+ context 'hashed storage' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ it_behaves_like 'creates repository bundles'
+ end
+
+ context 'legacy storage' do
+ let_it_be(:project) { create(:project, :repository, :legacy_storage) }
+
+ it_behaves_like 'creates repository bundles'
+ end
+
+ context 'no concurrency' do
+ it 'creates the expected number of threads' do
+ expect(Thread).not_to receive(:new)
+
+ projects.each do |project|
+ expect(subject).to receive(:dump_project).with(project).and_call_original
+ end
+
+ subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
+ end
+
+ describe 'command failure' do
+ it 'dump_project raises an error' do
+ allow(subject).to receive(:dump_project).and_raise(IOError)
+
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(IOError)
+ end
+
+ it 'project query raises an error' do
+ allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
+
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(ActiveRecord::StatementTimeout)
+ end
+ end
+
+ it 'avoids N+1 database queries' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
+ end.count
+
+ create_list(:project, 2, :repository)
+
+ expect do
+ subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
+ end.not_to exceed_query_limit(control_count)
+ end
+ end
+
+ [4, 10].each do |max_storage_concurrency|
+ context "max_storage_concurrency #{max_storage_concurrency}", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/241701' do
+ let(:storage_keys) { %w[default test_second_storage] }
+
+ before do
+ allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(storage_keys)
+ end
+
+ it 'creates the expected number of threads' do
+ expect(Thread).to receive(:new)
+ .exactly(storage_keys.length * (max_storage_concurrency + 1)).times
+ .and_call_original
+
+ projects.each do |project|
+ expect(subject).to receive(:dump_project).with(project).and_call_original
+ end
+
+ subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
+ end
+
+ it 'creates the expected number of threads with extra max concurrency' do
+ expect(Thread).to receive(:new)
+ .exactly(storage_keys.length * (max_storage_concurrency + 1)).times
+ .and_call_original
+
+ projects.each do |project|
+ expect(subject).to receive(:dump_project).with(project).and_call_original
+ end
+
+ subject.dump(max_concurrency: 3, max_storage_concurrency: max_storage_concurrency)
+ end
+
+ describe 'command failure' do
+ it 'dump_project raises an error' do
+ allow(subject).to receive(:dump_project)
+ .and_raise(IOError)
+
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(IOError)
+ end
+
+ it 'project query raises an error' do
+ allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
+
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(ActiveRecord::StatementTimeout)
+ end
+
+ context 'misconfigured storages' do
+ let(:storage_keys) { %w[test_second_storage] }
+
+ it 'raises an error' do
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
+ end
+ end
+ end
+
+ it 'avoids N+1 database queries' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
+ end.count
+
+ create_list(:project, 2, :repository)
+
+ expect do
+ subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
+ end.not_to exceed_query_limit(control_count)
+ end
+ end
+ end
+ end
+
+ describe '#restore' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) }
+ let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) }
+
+ let(:next_path_to_bundle) do
+ [
+ Rails.root.join('spec/fixtures/lib/backup/project_repo.bundle'),
+ Rails.root.join('spec/fixtures/lib/backup/wiki_repo.bundle'),
+ Rails.root.join('spec/fixtures/lib/backup/design_repo.bundle'),
+ Rails.root.join('spec/fixtures/lib/backup/personal_snippet_repo.bundle'),
+ Rails.root.join('spec/fixtures/lib/backup/project_snippet_repo.bundle')
+ ].to_enum
+ end
+
+ it 'restores repositories from bundles', :aggregate_failures do
+ allow_next_instance_of(described_class::BackupRestore) do |backup_restore|
+ allow(backup_restore).to receive(:path_to_bundle).and_return(next_path_to_bundle.next)
+ end
+
+ subject.restore
+
+ collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
+
+ expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
+ expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
+ expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
+ expect(collect_commit_shas.call(personal_snippet.repository)).to eq(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e'])
+ expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1'])
+ end
+
+ describe 'command failure' do
+ before do
+ expect(Project).to receive(:find_each).and_yield(project)
+
+ allow_next_instance_of(DesignManagement::Repository) do |repository|
+ allow(repository).to receive(:create_repository) { raise 'Fail in tests' }
+ end
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:create_repository) { raise 'Fail in tests' }
+ end
+ end
+
+ context 'hashed storage' do
+ it 'shows the appropriate error' do
+ subject.restore
+
+ expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})")
+ end
+ end
+
+ context 'legacy storage' do
+ let_it_be(:project) { create(:project, :legacy_storage) }
+
+ it 'shows the appropriate error' do
+ subject.restore
+
+ expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})")
+ end
+ end
+ end
+
+ context 'restoring object pools' do
+ it 'schedules restoring of the pool', :sidekiq_might_not_need_inline do
+ pool_repository = create(:pool_repository, :failed)
+ pool_repository.delete_object_pool
+
+ subject.restore
+
+ pool_repository.reload
+ expect(pool_repository).not_to be_failed
+ expect(pool_repository.object_pool.exists?).to be(true)
+ end
+ end
+
+ it 'cleans existing repositories' do
+ success_response = ServiceResponse.success(message: "Valid Snippet Repo")
+ allow(Snippets::RepositoryValidationService).to receive_message_chain(:new, :execute).and_return(success_response)
+
+ expect_next_instance_of(DesignManagement::Repository) do |repository|
+ expect(repository).to receive(:remove)
+ end
+
+ # 4 times = project repo + wiki repo + project_snippet repo + personal_snippet repo
+ expect(Repository).to receive(:new).exactly(4).times.and_wrap_original do |method, *original_args|
+ repository = method.call(*original_args)
+
+ expect(repository).to receive(:remove)
+
+ repository
+ end
+
+ subject.restore
+ end
+
+ context 'restoring snippets' do
+ before do
+ create(:snippet_repository, snippet: personal_snippet)
+ create(:snippet_repository, snippet: project_snippet)
+
+ allow_next_instance_of(described_class::BackupRestore) do |backup_restore|
+ allow(backup_restore).to receive(:path_to_bundle).and_return(next_path_to_bundle.next)
+ end
+ end
+
+ context 'when the repository is valid' do
+ it 'restores the snippet repositories' do
+ subject.restore
+
+ expect(personal_snippet.snippet_repository.persisted?).to be true
+ expect(personal_snippet.repository).to exist
+
+ expect(project_snippet.snippet_repository.persisted?).to be true
+ expect(project_snippet.repository).to exist
+ end
+ end
+
+ context 'when repository is invalid' do
+ before do
+ error_response = ServiceResponse.error(message: "Repository has more than one branch")
+ allow(Snippets::RepositoryValidationService).to receive_message_chain(:new, :execute).and_return(error_response)
+ end
+
+ it 'shows the appropriate error' do
+ subject.restore
+
+ expect(progress).to have_received(:puts).with("Snippet #{personal_snippet.full_path} can't be restored: Repository has more than one branch")
+ expect(progress).to have_received(:puts).with("Snippet #{project_snippet.full_path} can't be restored: Repository has more than one branch")
+ end
+
+ it 'removes the snippets from the DB' do
+ expect { subject.restore }.to change(PersonalSnippet, :count).by(-1)
+ .and change(ProjectSnippet, :count).by(-1)
+ .and change(SnippetRepository, :count).by(-2)
+ end
+
+ it 'removes the repository from disk' do
+ gitlab_shell = Gitlab::Shell.new
+ shard_name = personal_snippet.repository.shard
+ path = personal_snippet.disk_path + '.git'
+
+ subject.restore
+
+ expect(gitlab_shell.repository_exists?(shard_name, path)).to eq false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb
deleted file mode 100644
index 718f38f9452..00000000000
--- a/spec/lib/backup/repository_spec.rb
+++ /dev/null
@@ -1,232 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Backup::Repository do
- let_it_be(:project) { create(:project, :wiki_repo) }
-
- let(:progress) { StringIO.new }
-
- subject { described_class.new(progress) }
-
- before do
- allow(progress).to receive(:puts)
- allow(progress).to receive(:print)
- allow(FileUtils).to receive(:mv).and_return(true)
-
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:progress).and_return(progress)
- end
- end
-
- describe '#dump' do
- before do
- allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(storage_keys)
- end
-
- let_it_be(:projects) { create_list(:project, 5, :wiki_repo) + [project] }
-
- let(:storage_keys) { %w[default test_second_storage] }
-
- context 'no concurrency' do
- it 'creates the expected number of threads' do
- expect(Thread).not_to receive(:new)
-
- projects.each do |project|
- expect(subject).to receive(:dump_project).with(project).and_call_original
- end
-
- subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
- end
-
- describe 'command failure' do
- it 'dump_project raises an error' do
- allow(subject).to receive(:dump_project).and_raise(IOError)
-
- expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(IOError)
- end
-
- it 'project query raises an error' do
- allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
-
- expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(ActiveRecord::StatementTimeout)
- end
- end
-
- it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new do
- subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
- end.count
-
- create_list(:project, 2, :wiki_repo)
-
- expect do
- subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
- end.not_to exceed_query_limit(control_count)
- end
- end
-
- [4, 10].each do |max_storage_concurrency|
- context "max_storage_concurrency #{max_storage_concurrency}", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/241701' do
- it 'creates the expected number of threads' do
- expect(Thread).to receive(:new)
- .exactly(storage_keys.length * (max_storage_concurrency + 1)).times
- .and_call_original
-
- projects.each do |project|
- expect(subject).to receive(:dump_project).with(project).and_call_original
- end
-
- subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
- end
-
- it 'creates the expected number of threads with extra max concurrency' do
- expect(Thread).to receive(:new)
- .exactly(storage_keys.length * (max_storage_concurrency + 1)).times
- .and_call_original
-
- projects.each do |project|
- expect(subject).to receive(:dump_project).with(project).and_call_original
- end
-
- subject.dump(max_concurrency: 3, max_storage_concurrency: max_storage_concurrency)
- end
-
- describe 'command failure' do
- it 'dump_project raises an error' do
- allow(subject).to receive(:dump_project)
- .and_raise(IOError)
-
- expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(IOError)
- end
-
- it 'project query raises an error' do
- allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
-
- expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(ActiveRecord::StatementTimeout)
- end
-
- context 'misconfigured storages' do
- let(:storage_keys) { %w[test_second_storage] }
-
- it 'raises an error' do
- expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
- end
- end
- end
-
- it 'avoids N+1 database queries' do
- control_count = ActiveRecord::QueryRecorder.new do
- subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
- end.count
-
- create_list(:project, 2, :wiki_repo)
-
- expect do
- subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
- end.not_to exceed_query_limit(control_count)
- end
- end
- end
- end
-
- describe '#restore' do
- let(:timestamp) { Time.utc(2017, 3, 22) }
- let(:temp_dirs) do
- Gitlab.config.repositories.storages.map do |name, storage|
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(storage.legacy_disk_path, '..', 'repositories.old.' + timestamp.to_i.to_s)
- end
- end
- end
-
- around do |example|
- Timecop.freeze(timestamp) { example.run }
- end
-
- after do
- temp_dirs.each { |path| FileUtils.rm_rf(path) }
- end
-
- describe 'command failure' do
- before do
- # Allow us to set expectations on the project directly
- expect(Project).to receive(:find_each).and_yield(project)
- expect(project.repository).to receive(:create_repository) { raise 'Fail in tests' }
- end
-
- context 'hashed storage' do
- it 'shows the appropriate error' do
- subject.restore
-
- expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} repository")
- end
- end
-
- context 'legacy storage' do
- let!(:project) { create(:project, :legacy_storage) }
-
- it 'shows the appropriate error' do
- subject.restore
-
- expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} repository")
- end
- end
- end
-
- context 'restoring object pools' do
- it 'schedules restoring of the pool', :sidekiq_might_not_need_inline do
- pool_repository = create(:pool_repository, :failed)
- pool_repository.delete_object_pool
-
- subject.restore
-
- pool_repository.reload
- expect(pool_repository).not_to be_failed
- expect(pool_repository.object_pool.exists?).to be(true)
- end
- end
-
- it 'cleans existing repositories' do
- wiki_repository_spy = spy(:wiki)
-
- allow_next_instance_of(ProjectWiki) do |project_wiki|
- allow(project_wiki).to receive(:repository).and_return(wiki_repository_spy)
- end
-
- expect_next_instance_of(Repository) do |repo|
- expect(repo).to receive(:remove)
- end
-
- subject.restore
-
- expect(wiki_repository_spy).to have_received(:remove)
- end
- end
-
- describe '#empty_repo?' do
- context 'for a wiki' do
- let(:wiki) { create(:project_wiki) }
-
- it 'invalidates the emptiness cache' do
- expect(wiki.repository).to receive(:expire_emptiness_caches).once
-
- subject.send(:empty_repo?, wiki)
- end
-
- context 'wiki repo has content' do
- let!(:wiki_page) { create(:wiki_page, wiki: wiki) }
-
- it 'returns true, regardless of bad cache value' do
- expect(subject.send(:empty_repo?, wiki)).to be(false)
- end
- end
-
- context 'wiki repo does not have content' do
- it 'returns true, regardless of bad cache value' do
- expect(subject.send(:empty_repo?, wiki)).to be_truthy
- end
- end
- end
- end
-end
diff --git a/spec/lib/banzai/filter/design_reference_filter_spec.rb b/spec/lib/banzai/filter/design_reference_filter_spec.rb
index 1b558754932..847c398964a 100644
--- a/spec/lib/banzai/filter/design_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/design_reference_filter_spec.rb
@@ -74,26 +74,6 @@ RSpec.describe Banzai::Filter::DesignReferenceFilter do
it_behaves_like 'a no-op filter'
end
-
- context 'design reference filter is not enabled' do
- before do
- stub_feature_flags(described_class::FEATURE_FLAG => false)
- end
-
- it_behaves_like 'a no-op filter'
-
- it 'issues no queries' do
- expect { process(input_text) }.not_to exceed_query_limit(0)
- end
- end
-
- context 'the filter is enabled for the context project' do
- before do
- stub_feature_flags(described_class::FEATURE_FLAG => project)
- end
-
- it_behaves_like 'a good link reference'
- end
end
end
diff --git a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
index e7b6c910b8a..35ef2abfa63 100644
--- a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
include FilterSpecHelper
+ let_it_be_with_refind(:project) { create(:project) }
+
shared_examples_for "external issue tracker" do
it_behaves_like 'a reference containing an element node'
@@ -116,7 +118,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
end
context "redmine project" do
- let(:project) { create(:redmine_project) }
+ let_it_be(:service) { create(:redmine_service, project: project) }
before do
project.update!(issues_enabled: false)
@@ -138,7 +140,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
end
context "youtrack project" do
- let(:project) { create(:youtrack_project) }
+ let_it_be(:service) { create(:youtrack_service, project: project) }
before do
project.update!(issues_enabled: false)
@@ -181,7 +183,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
end
context "jira project" do
- let(:project) { create(:jira_project) }
+ let_it_be(:service) { create(:jira_service, project: project) }
let(:reference) { issue.to_reference }
context "with right markdown" do
@@ -210,7 +212,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
end
context "ewm project" do
- let_it_be(:project) { create(:ewm_project) }
+ let_it_be(:service) { create(:ewm_service, project: project) }
before do
project.update!(issues_enabled: false)
diff --git a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
index 8bdb24ab08c..d29af311ee5 100644
--- a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Banzai::Filter::InlineGrafanaMetricsFilter do
it_behaves_like 'a metrics embed filter'
around do |example|
- Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
context 'when grafana is not configured' do
diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
index 447802d18a7..4b8b575c1f0 100644
--- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
@@ -296,6 +296,12 @@ RSpec.describe Banzai::Filter::IssueReferenceFilter do
.to eq reference
end
+ it 'links with a trailing slash' do
+ doc = reference_filter("Fixed (#{issue_url + "/"}.)")
+
+ expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(issue.to_reference(project))}</a>\.\)})
+ end
+
it 'links with adjacent text' do
doc = reference_filter("Fixed (#{reference}.)")
diff --git a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
index 62b1711ee57..276fa7952be 100644
--- a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
@@ -5,9 +5,11 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
include FilterSpecHelper
- let(:parent_group) { create(:group, :public) }
- let(:group) { create(:group, :public, parent: parent_group) }
- let(:project) { create(:project, :public, group: group) }
+ let_it_be(:parent_group) { create(:group, :public) }
+ let_it_be(:group) { create(:group, :public, parent: parent_group) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:another_project) { create(:project, :public, namespace: namespace) }
it 'requires project context' do
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
@@ -188,11 +190,9 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross-project / cross-namespace complete reference' do
- let(:namespace) { create(:namespace) }
- let(:another_project) { create(:project, :public, namespace: namespace) }
- let(:milestone) { create(:milestone, project: another_project) }
- let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
- let!(:result) { reference_filter("See #{reference}") }
+ let_it_be(:milestone) { create(:milestone, project: another_project) }
+ let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
+ let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@@ -226,12 +226,10 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross-project / same-namespace complete reference' do
- let(:namespace) { create(:namespace) }
- let(:project) { create(:project, :public, namespace: namespace) }
- let(:another_project) { create(:project, :public, namespace: namespace) }
- let(:milestone) { create(:milestone, project: another_project) }
- let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
- let!(:result) { reference_filter("See #{reference}") }
+ let_it_be(:project) { create(:project, :public, namespace: namespace) }
+ let_it_be(:milestone) { create(:milestone, project: another_project) }
+ let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
+ let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@@ -265,12 +263,10 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross project shorthand reference' do
- let(:namespace) { create(:namespace) }
- let(:project) { create(:project, :public, namespace: namespace) }
- let(:another_project) { create(:project, :public, namespace: namespace) }
- let(:milestone) { create(:milestone, project: another_project) }
- let(:reference) { "#{another_project.path}%#{milestone.iid}" }
- let!(:result) { reference_filter("See #{reference}") }
+ let_it_be(:project) { create(:project, :public, namespace: namespace) }
+ let_it_be(:milestone) { create(:milestone, project: another_project) }
+ let(:reference) { "#{another_project.path}%#{milestone.iid}" }
+ let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@@ -439,13 +435,13 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
context 'when milestone is open' do
context 'project milestones' do
- let(:milestone) { create(:milestone, project: project) }
+ let_it_be_with_reload(:milestone) { create(:milestone, project: project) }
include_context 'project milestones'
end
context 'group milestones' do
- let(:milestone) { create(:milestone, group: group) }
+ let_it_be_with_reload(:milestone) { create(:milestone, group: group) }
include_context 'group milestones'
end
@@ -453,13 +449,13 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
context 'when milestone is closed' do
context 'project milestones' do
- let(:milestone) { create(:milestone, :closed, project: project) }
+ let_it_be_with_reload(:milestone) { create(:milestone, :closed, project: project) }
include_context 'project milestones'
end
context 'group milestones' do
- let(:milestone) { create(:milestone, :closed, group: group) }
+ let_it_be_with_reload(:milestone) { create(:milestone, :closed, group: group) }
include_context 'group milestones'
end
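The let_it_be, let_it_be_with_reload, and let_it_be_with_refind helpers used throughout this hunk come from the test-prof gem: the record is created once per example group rather than once per example, and the _with_reload/_with_refind variants control how it is refreshed between examples. A minimal sketch of how such aliases are typically wired up (the configure block below is an assumption about the spec support code, not part of this diff):

require 'test_prof/recipes/rspec/let_it_be'

TestProf::LetItBe.configure do |config|
  # reload: call #reload on the shared record before each example;
  # refind: look the record up again with .find, dropping memoized state.
  config.alias_to :let_it_be_with_reload, reload: true
  config.alias_to :let_it_be_with_refind, refind: true
end

# Usage then mirrors the hunks above, e.g.:
#   let_it_be_with_reload(:milestone) { create(:milestone, project: project) }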
diff --git a/spec/lib/banzai/reference_redactor_spec.rb b/spec/lib/banzai/reference_redactor_spec.rb
index de774267b81..668e427cfa2 100644
--- a/spec/lib/banzai/reference_redactor_spec.rb
+++ b/spec/lib/banzai/reference_redactor_spec.rb
@@ -182,5 +182,12 @@ RSpec.describe Banzai::ReferenceRedactor do
expect(redactor.nodes_visible_to_user([node])).to eq(Set.new([node]))
end
+
+ it 'handles invalid references gracefully' do
+ doc = Nokogiri::HTML.fragment('<a data-reference-type="some_invalid_type"></a>')
+ node = doc.children[0]
+
+ expect(redactor.nodes_visible_to_user([node])).to be_empty
+ end
end
end
diff --git a/spec/lib/feature/definition_spec.rb b/spec/lib/feature/definition_spec.rb
index 49224cf4279..fa0207d829a 100644
--- a/spec/lib/feature/definition_spec.rb
+++ b/spec/lib/feature/definition_spec.rb
@@ -105,6 +105,7 @@ RSpec.describe Feature::Definition do
describe '.load_all!' do
let(:store1) { Dir.mktmpdir('path1') }
let(:store2) { Dir.mktmpdir('path2') }
+ let(:definitions) { {} }
before do
allow(described_class).to receive(:paths).and_return(
@@ -115,28 +116,30 @@ RSpec.describe Feature::Definition do
)
end
+ subject { described_class.send(:load_all!) }
+
it "when there's no feature flags a list of definitions is empty" do
- expect(described_class.load_all!).to be_empty
+ is_expected.to be_empty
end
it "when there's a single feature flag it properly loads them" do
write_feature_flag(store1, path, yaml_content)
- expect(described_class.load_all!).to be_one
+ is_expected.to be_one
end
it "when the same feature flag is stored multiple times raises exception" do
write_feature_flag(store1, path, yaml_content)
write_feature_flag(store2, path, yaml_content)
- expect { described_class.load_all! }
+ expect { subject }
.to raise_error(/Feature flag 'feature_flag' is already defined/)
end
it "when one of the YAMLs is invalid it does raise exception" do
write_feature_flag(store1, path, '{}')
- expect { described_class.load_all! }
+ expect { subject }
.to raise_error(/Feature flag is missing name/)
end
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index acd7d97ac85..5dff9dbd995 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Feature, stub_feature_flags: false do
before do
# reset Flipper AR-engine
Feature.reset
+ skip_feature_flags_yaml_validation
end
describe '.get' do
@@ -253,6 +254,9 @@ RSpec.describe Feature, stub_feature_flags: false do
end
before do
+ stub_env('LAZILY_CREATE_FEATURE_FLAG', '0')
+
+ allow(Feature::Definition).to receive(:valid_usage!).and_call_original
allow(Feature::Definition).to receive(:definitions) do
{ definition.key => definition }
end
diff --git a/spec/lib/forever_spec.rb b/spec/lib/forever_spec.rb
index 6f6b3055df5..c47c03d6780 100644
--- a/spec/lib/forever_spec.rb
+++ b/spec/lib/forever_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Forever do
subject { described_class.date }
it 'returns Postgresql future date' do
- Timecop.travel(Date.new(2999, 12, 31)) do
+ travel_to(Date.new(2999, 12, 31)) do
is_expected.to be > Date.today
end
end
diff --git a/spec/lib/gitlab/alert_management/alert_params_spec.rb b/spec/lib/gitlab/alert_management/alert_params_spec.rb
deleted file mode 100644
index c3171be5e29..00000000000
--- a/spec/lib/gitlab/alert_management/alert_params_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::AlertManagement::AlertParams do
- let_it_be(:project) { create(:project, :repository, :private) }
-
- describe '.from_generic_alert' do
- let(:started_at) { Time.current.change(usec: 0).rfc3339 }
- let(:default_payload) do
- {
- 'title' => 'Alert title',
- 'description' => 'Description',
- 'monitoring_tool' => 'Monitoring tool name',
- 'service' => 'Service',
- 'hosts' => ['gitlab.com'],
- 'start_time' => started_at,
- 'some' => { 'extra' => { 'payload' => 'here' } }
- }
- end
-
- let(:payload) { default_payload }
-
- subject { described_class.from_generic_alert(project: project, payload: payload) }
-
- it 'returns Alert compatible parameters' do
- is_expected.to eq(
- project_id: project.id,
- title: 'Alert title',
- description: 'Description',
- monitoring_tool: 'Monitoring tool name',
- service: 'Service',
- severity: 'critical',
- hosts: ['gitlab.com'],
- payload: payload,
- started_at: started_at,
- ended_at: nil,
- fingerprint: nil,
- environment: nil
- )
- end
-
- context 'when severity given' do
- let(:payload) { default_payload.merge(severity: 'low') }
-
- it 'returns Alert compatible parameters' do
- expect(subject[:severity]).to eq('low')
- end
- end
-
- context 'when there are no hosts in the payload' do
- let(:payload) { {} }
-
- it 'hosts param is an empty array' do
- expect(subject[:hosts]).to be_empty
- end
- end
- end
-
- describe '.from_prometheus_alert' do
- let(:payload) do
- {
- 'status' => 'firing',
- 'labels' => {
- 'alertname' => 'GitalyFileServerDown',
- 'channel' => 'gitaly',
- 'pager' => 'pagerduty',
- 'severity' => 's1'
- },
- 'annotations' => {
- 'description' => 'Alert description',
- 'runbook' => 'troubleshooting/gitaly-down.md',
- 'title' => 'Alert title'
- },
- 'startsAt' => '2020-04-27T10:10:22.265949279Z',
- 'endsAt' => '0001-01-01T00:00:00Z',
- 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1',
- 'fingerprint' => 'b6ac4d42057c43c1'
- }
- end
-
- let(:parsed_alert) { Gitlab::Alerting::Alert.new(project: project, payload: payload) }
-
- subject { described_class.from_prometheus_alert(project: project, parsed_alert: parsed_alert) }
-
- it 'returns Alert-compatible params' do
- is_expected.to eq(
- project_id: project.id,
- title: 'Alert title',
- description: 'Alert description',
- monitoring_tool: 'Prometheus',
- payload: payload,
- started_at: parsed_alert.starts_at,
- ended_at: parsed_alert.ends_at,
- fingerprint: parsed_alert.gitlab_fingerprint,
- environment: parsed_alert.environment,
- prometheus_alert: parsed_alert.gitlab_alert
- )
- end
- end
-end
diff --git a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
index a2b8f0aa8d4..fceda763717 100644
--- a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
+++ b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::AlertManagement::AlertStatusCounts do
expect(counts.open).to eq(0)
expect(counts.all).to eq(0)
- AlertManagement::Alert::STATUSES.each_key do |status|
+ ::AlertManagement::Alert.status_names.each do |status|
expect(counts.send(status)).to eq(0)
end
end
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::AlertManagement::AlertStatusCounts do
end
context 'when filtering params are included' do
- let(:params) { { status: AlertManagement::Alert::STATUSES[:resolved] } }
+ let(:params) { { status: :resolved } }
it 'returns the correct counts for each status' do
expect(counts.open).to eq(0)
diff --git a/spec/lib/gitlab/alert_management/payload/base_spec.rb b/spec/lib/gitlab/alert_management/payload/base_spec.rb
index e0f63bad05d..0c26e94e596 100644
--- a/spec/lib/gitlab/alert_management/payload/base_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/base_spec.rb
@@ -120,14 +120,107 @@ RSpec.describe Gitlab::AlertManagement::Payload::Base do
end
describe '#alert_params' do
- before do
- allow(parsed_payload).to receive(:title).and_return('title')
- allow(parsed_payload).to receive(:description).and_return('description')
+ subject { parsed_payload.alert_params }
+
+ context 'with every key' do
+ let_it_be(:raw_payload) { { 'key' => 'value' } }
+ let_it_be(:stubs) do
+ {
+ description: 'description',
+ ends_at: Time.current,
+ environment: create(:environment, project: project),
+ gitlab_fingerprint: 'gitlab_fingerprint',
+ hosts: 'hosts',
+ monitoring_tool: 'monitoring_tool',
+ gitlab_alert: create(:prometheus_alert, project: project),
+ service: 'service',
+ severity: 'critical',
+ starts_at: Time.current,
+ title: 'title'
+ }
+ end
+
+ let(:expected_result) do
+ {
+ description: stubs[:description],
+ ended_at: stubs[:ends_at],
+ environment: stubs[:environment],
+ fingerprint: stubs[:gitlab_fingerprint],
+ hosts: [stubs[:hosts]],
+ monitoring_tool: stubs[:monitoring_tool],
+ payload: raw_payload,
+ project_id: project.id,
+ prometheus_alert: stubs[:gitlab_alert],
+ service: stubs[:service],
+ severity: stubs[:severity],
+ started_at: stubs[:starts_at],
+ title: stubs[:title]
+ }
+ end
+
+ before do
+ allow(parsed_payload).to receive_messages(stubs)
+ end
+
+ it { is_expected.to eq(expected_result) }
+
+ it 'can generate a valid new alert' do
+ expect(::AlertManagement::Alert.new(subject.except(:ended_at))).to be_valid
+ end
end
- subject { parsed_payload.alert_params }
+ context 'with too-long strings' do
+ let_it_be(:stubs) do
+ {
+ description: 'a' * (::AlertManagement::Alert::DESCRIPTION_MAX_LENGTH + 1),
+ hosts: 'b' * (::AlertManagement::Alert::HOSTS_MAX_LENGTH + 1),
+ monitoring_tool: 'c' * (::AlertManagement::Alert::TOOL_MAX_LENGTH + 1),
+ service: 'd' * (::AlertManagement::Alert::SERVICE_MAX_LENGTH + 1),
+ title: 'e' * (::AlertManagement::Alert::TITLE_MAX_LENGTH + 1)
+ }
+ end
- it { is_expected.to eq({ description: 'description', project_id: project.id, title: 'title' }) }
+ before do
+ allow(parsed_payload).to receive_messages(stubs)
+ end
+
+ it do
+ is_expected.to eq({
+ description: stubs[:description].truncate(AlertManagement::Alert::DESCRIPTION_MAX_LENGTH),
+ hosts: ['b' * ::AlertManagement::Alert::HOSTS_MAX_LENGTH],
+ monitoring_tool: stubs[:monitoring_tool].truncate(::AlertManagement::Alert::TOOL_MAX_LENGTH),
+ service: stubs[:service].truncate(::AlertManagement::Alert::SERVICE_MAX_LENGTH),
+ project_id: project.id,
+ title: stubs[:title].truncate(::AlertManagement::Alert::TITLE_MAX_LENGTH)
+ })
+ end
+ end
+
+ context 'with too-long hosts array' do
+ let(:hosts) { %w(abc def ghij) }
+ let(:shortened_hosts) { %w(abc def ghi) }
+
+ before do
+ stub_const('::AlertManagement::Alert::HOSTS_MAX_LENGTH', 9)
+ allow(parsed_payload).to receive(:hosts).and_return(hosts)
+ end
+
+ it { is_expected.to eq(hosts: shortened_hosts, project_id: project.id) }
+
+ context 'with host cut off between elements' do
+ let(:hosts) { %w(abcde fghij) }
+ let(:shortened_hosts) { %w(abcde fghi) }
+
+ it { is_expected.to eq({ hosts: shortened_hosts, project_id: project.id }) }
+ end
+
+ context 'with nested hosts' do
+ let(:hosts) { ['abc', ['de', 'f'], 'g', 'hij'] } # rubocop:disable Style/WordArray
+ let(:shortened_hosts) { %w(abc de f g hi) }
+
+ it { is_expected.to eq({ hosts: shortened_hosts, project_id: project.id }) }
+ end
+ end
end
describe '#gitlab_fingerprint' do
diff --git a/spec/lib/gitlab/alert_management/payload/generic_spec.rb b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
index 538a822503e..b7660462b0d 100644
--- a/spec/lib/gitlab/alert_management/payload/generic_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Gitlab::AlertManagement::Payload::Generic do
subject { parsed_payload.starts_at }
around do |example|
- Timecop.freeze(current_time) { example.run }
+ travel_to(current_time) { example.run }
end
context 'without start_time' do
@@ -86,4 +86,34 @@ RSpec.describe Gitlab::AlertManagement::Payload::Generic do
it_behaves_like 'parsable alert payload field', 'gitlab_environment_name'
end
+
+ describe '#description' do
+ subject { parsed_payload.description }
+
+ it_behaves_like 'parsable alert payload field', 'description'
+ end
+
+ describe '#ends_at' do
+ let(:current_time) { Time.current.change(usec: 0).utc }
+
+ subject { parsed_payload.ends_at }
+
+ around do |example|
+ travel_to(current_time) { example.run }
+ end
+
+ context 'without end_time' do
+ it { is_expected.to be_nil }
+ end
+
+ context "with end_time" do
+ let(:value) { 10.minutes.ago.change(usec: 0).utc }
+
+ before do
+ raw_payload['end_time'] = value.to_s
+ end
+
+ it { is_expected.to eq(value) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/alerting/alert_spec.rb b/spec/lib/gitlab/alerting/alert_spec.rb
deleted file mode 100644
index b53b71e3f3e..00000000000
--- a/spec/lib/gitlab/alerting/alert_spec.rb
+++ /dev/null
@@ -1,299 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Alerting::Alert do
- let_it_be(:project) { create(:project) }
-
- let(:alert) { build(:alerting_alert, project: project, payload: payload) }
- let(:payload) { {} }
-
- shared_context 'gitlab alert' do
- let!(:gitlab_alert) { create(:prometheus_alert, project: project) }
- let(:gitlab_alert_id) { gitlab_alert.id }
-
- before do
- payload['labels'] = {
- 'gitlab_alert_id' => gitlab_alert.prometheus_metric_id.to_s,
- 'gitlab_prometheus_alert_id' => gitlab_alert_id
- }
- end
- end
-
- shared_context 'full query' do
- before do
- payload['generatorURL'] = 'http://localhost:9090/graph?g0.expr=vector%281%29'
- end
- end
-
- shared_examples 'invalid alert' do
- it 'is invalid' do
- expect(alert).not_to be_valid
- end
- end
-
- shared_examples 'parse payload' do |*pairs|
- context 'without payload' do
- it { is_expected.to be_nil }
- end
-
- pairs.each do |pair|
- context "with #{pair}" do
- let(:value) { 'some value' }
-
- before do
- section, name = pair.split('/')
- payload[section] = { name => value }
- end
-
- it { is_expected.to eq(value) }
- end
- end
- end
-
- describe '#gitlab_alert' do
- subject { alert.gitlab_alert }
-
- context 'without payload' do
- it { is_expected.to be_nil }
- end
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- it { is_expected.to eq(gitlab_alert) }
- end
-
- context 'with unknown gitlab alert' do
- include_context 'gitlab alert' do
- let(:gitlab_alert_id) { 'unknown' }
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'when two alerts with the same metric exist' do
- include_context 'gitlab alert'
-
- let!(:second_gitlab_alert) do
- create(:prometheus_alert,
- project: project,
- prometheus_metric_id: gitlab_alert.prometheus_metric_id
- )
- end
-
- context 'alert id given in params' do
- before do
- payload['labels'] = {
- 'gitlab_alert_id' => gitlab_alert.prometheus_metric_id.to_s,
- 'gitlab_prometheus_alert_id' => second_gitlab_alert.id
- }
- end
-
- it { is_expected.to eq(second_gitlab_alert) }
- end
-
- context 'metric id given in params' do
- # This tests the case when two alerts are found, as metric id
- # is not unique.
-
- # Note the metric id was incorrectly named as 'gitlab_alert_id'
- # in PrometheusAlert#to_param.
- before do
- payload['labels'] = { 'gitlab_alert_id' => gitlab_alert.prometheus_metric_id }
- end
-
- it { is_expected.to be_nil }
- end
- end
- end
-
- describe '#title' do
- subject { alert.title }
-
- it_behaves_like 'parse payload',
- 'annotations/title',
- 'annotations/summary',
- 'labels/alertname'
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- context 'with annotations/title' do
- let(:value) { 'annotation title' }
-
- before do
- payload['annotations'] = { 'title' => value }
- end
-
- it { is_expected.to eq(gitlab_alert.title) }
- end
- end
- end
-
- describe '#description' do
- subject { alert.description }
-
- it_behaves_like 'parse payload', 'annotations/description'
- end
-
- describe '#annotations' do
- subject { alert.annotations }
-
- context 'without payload' do
- it { is_expected.to eq([]) }
- end
-
- context 'with payload' do
- before do
- payload['annotations'] = { 'foo' => 'value1', 'bar' => 'value2' }
- end
-
- it 'parses annotations' do
- expect(subject.size).to eq(2)
- expect(subject.map(&:label)).to eq(%w[foo bar])
- expect(subject.map(&:value)).to eq(%w[value1 value2])
- end
- end
- end
-
- describe '#environment' do
- subject { alert.environment }
-
- context 'without gitlab_alert' do
- it { is_expected.to be_nil }
- end
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- it { is_expected.to eq(gitlab_alert.environment) }
- end
- end
-
- describe '#starts_at' do
- subject { alert.starts_at }
-
- context 'with empty startsAt' do
- before do
- payload['startsAt'] = nil
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'with invalid startsAt' do
- before do
- payload['startsAt'] = 'invalid'
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'with payload' do
- let(:time) { Time.current.change(usec: 0) }
-
- before do
- payload['startsAt'] = time.rfc3339
- end
-
- it { is_expected.to eq(time) }
- end
- end
-
- describe '#full_query' do
- using RSpec::Parameterized::TableSyntax
-
- subject { alert.full_query }
-
- where(:generator_url, :expected_query) do
- nil | nil
- 'http://localhost' | nil
- 'invalid url' | nil
- 'http://localhost:9090/graph?g1.expr=vector%281%29' | nil
- 'http://localhost:9090/graph?g0.expr=vector%281%29' | 'vector(1)'
- end
-
- with_them do
- before do
- payload['generatorURL'] = generator_url
- end
-
- it { is_expected.to eq(expected_query) }
- end
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
- include_context 'full query'
-
- it { is_expected.to eq(gitlab_alert.full_query) }
- end
- end
-
- describe '#y_label' do
- subject { alert.y_label }
-
- it_behaves_like 'parse payload', 'annotations/gitlab_y_label'
-
- context 'when y_label is not included in the payload' do
- it_behaves_like 'parse payload', 'annotations/title'
- end
- end
-
- describe '#alert_markdown' do
- subject { alert.alert_markdown }
-
- it_behaves_like 'parse payload', 'annotations/gitlab_incident_markdown'
- end
-
- describe '#gitlab_fingerprint' do
- subject { alert.gitlab_fingerprint }
-
- context 'when the alert is a GitLab managed alert' do
- include_context 'gitlab alert'
-
- it 'returns a fingerprint' do
- plain_fingerprint = [alert.metric_id, alert.starts_at_raw].join('/')
-
- is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
- end
- end
-
- context 'when the alert is from self managed Prometheus' do
- include_context 'full query'
-
- it 'returns a fingerprint' do
- plain_fingerprint = [alert.starts_at_raw, alert.title, alert.full_query].join('/')
-
- is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
- end
- end
- end
-
- describe '#valid?' do
- before do
- payload.update(
- 'annotations' => { 'title' => 'some title' },
- 'startsAt' => Time.current.rfc3339
- )
- end
-
- subject { alert }
-
- it { is_expected.to be_valid }
-
- context 'without project' do
- let(:project) { nil }
-
- it { is_expected.not_to be_valid }
- end
-
- context 'without starts_at' do
- before do
- payload['startsAt'] = nil
- end
-
- it { is_expected.not_to be_valid }
- end
- end
-end
diff --git a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
deleted file mode 100644
index ff5ab1116fa..00000000000
--- a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
+++ /dev/null
@@ -1,204 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Alerting::NotificationPayloadParser do
- let_it_be(:project) { build(:project) }
-
- describe '.call' do
- let(:starts_at) { Time.current.change(usec: 0) }
- let(:ends_at) { Time.current.change(usec: 0) }
- let(:payload) do
- {
- 'title' => 'alert title',
- 'start_time' => starts_at.rfc3339,
- 'end_time' => ends_at.rfc3339,
- 'description' => 'Description',
- 'monitoring_tool' => 'Monitoring tool name',
- 'service' => 'Service',
- 'hosts' => ['gitlab.com'],
- 'severity' => 'low'
- }
- end
-
- subject { described_class.call(payload, project) }
-
- it 'returns Prometheus-like payload' do
- is_expected.to eq(
- {
- 'annotations' => {
- 'title' => 'alert title',
- 'description' => 'Description',
- 'monitoring_tool' => 'Monitoring tool name',
- 'service' => 'Service',
- 'hosts' => ['gitlab.com'],
- 'severity' => 'low'
- },
- 'startsAt' => starts_at.rfc3339,
- 'endsAt' => ends_at.rfc3339
- }
- )
- end
-
- context 'when title is blank' do
- before do
- payload[:title] = ''
- end
-
- it 'sets a predefined title' do
- expect(subject.dig('annotations', 'title')).to eq('New: Incident')
- end
- end
-
- context 'when hosts attribute is a string' do
- before do
- payload[:hosts] = 'gitlab.com'
- end
-
- it 'returns hosts as an array of one element' do
- expect(subject.dig('annotations', 'hosts')).to eq(['gitlab.com'])
- end
- end
-
- context 'when the time is in unsupported format' do
- before do
- payload[:start_time] = 'invalid/date/format'
- end
-
- it 'sets startsAt to a current time in RFC3339 format' do
- expect(subject['startsAt']).to eq(starts_at.rfc3339)
- end
- end
-
- context 'when payload is blank' do
- let(:payload) { {} }
-
- it 'returns default parameters' do
- is_expected.to match(
- 'annotations' => {
- 'title' => described_class::DEFAULT_TITLE,
- 'severity' => described_class::DEFAULT_SEVERITY
- },
- 'startsAt' => starts_at.rfc3339
- )
- end
-
- context 'when severity is blank' do
- before do
- payload[:severity] = ''
- end
-
- it 'sets severity to the default ' do
- expect(subject.dig('annotations', 'severity')).to eq(described_class::DEFAULT_SEVERITY)
- end
- end
- end
-
- context 'with fingerprint' do
- before do
- payload[:fingerprint] = data
- end
-
- shared_examples 'fingerprint generation' do
- it 'generates the fingerprint correctly' do
- expect(result).to eq(Gitlab::AlertManagement::Fingerprint.generate(data))
- end
- end
-
- context 'with blank fingerprint' do
- it_behaves_like 'fingerprint generation' do
- let(:data) { ' ' }
- let(:result) { subject.dig('annotations', 'fingerprint') }
- end
- end
-
- context 'with fingerprint given' do
- it_behaves_like 'fingerprint generation' do
- let(:data) { 'fingerprint' }
- let(:result) { subject.dig('annotations', 'fingerprint') }
- end
- end
-
- context 'with array fingerprint given' do
- it_behaves_like 'fingerprint generation' do
- let(:data) { [1, 'fingerprint', 'given'] }
- let(:result) { subject.dig('annotations', 'fingerprint') }
- end
- end
- end
-
- context 'with environment' do
- let(:environment) { create(:environment, project: project) }
-
- before do
- payload[:gitlab_environment_name] = environment.name
- end
-
- it 'sets the environment ' do
- expect(subject.dig('annotations', 'environment')).to eq(environment)
- end
- end
-
- context 'when payload attributes have blank lines' do
- let(:payload) do
- {
- 'title' => '',
- 'start_time' => '',
- 'end_time' => '',
- 'description' => '',
- 'monitoring_tool' => '',
- 'service' => '',
- 'hosts' => ['']
- }
- end
-
- it 'returns default parameters' do
- is_expected.to eq(
- 'annotations' => {
- 'title' => 'New: Incident',
- 'severity' => described_class::DEFAULT_SEVERITY
- },
- 'startsAt' => starts_at.rfc3339
- )
- end
- end
-
- context 'when payload has secondary params' do
- let(:payload) do
- {
- 'description' => 'Description',
- 'additional' => {
- 'params' => {
- '1' => 'Some value 1',
- '2' => 'Some value 2',
- 'blank' => ''
- }
- }
- }
- end
-
- it 'adds secondary params to annotations' do
- is_expected.to eq(
- 'annotations' => {
- 'title' => 'New: Incident',
- 'severity' => described_class::DEFAULT_SEVERITY,
- 'description' => 'Description',
- 'additional.params.1' => 'Some value 1',
- 'additional.params.2' => 'Some value 2'
- },
- 'startsAt' => starts_at.rfc3339
- )
- end
- end
-
- context 'when secondary params hash is too big' do
- before do
- allow(Gitlab::Utils::SafeInlineHash).to receive(:merge_keys!).and_raise(ArgumentError)
- end
-
- it 'catches and re-raises an error' do
- expect { subject }.to raise_error Gitlab::Alerting::NotificationPayloadParser::BadPayloadError, 'The payload is too big'
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/analytics/unique_visits_spec.rb b/spec/lib/gitlab/analytics/unique_visits_spec.rb
index 1432c9ac58f..6ac58e13f4c 100644
--- a/spec/lib/gitlab/analytics/unique_visits_spec.rb
+++ b/spec/lib/gitlab/analytics/unique_visits_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
# Without freezing the time, the test may behave inconsistently
# depending on which day of the week test is run.
reference_time = Time.utc(2020, 6, 1)
- Timecop.freeze(reference_time) { example.run }
+ travel_to(reference_time) { example.run }
end
describe '#track_visit' do
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 1ac8ebe1369..2ebde145bfd 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -419,10 +419,30 @@ RSpec.describe Gitlab::Auth::AuthFinders do
expect(find_user_from_web_access_token(:ics)).to eq(user)
end
- it 'returns the user for API requests' do
- set_header('SCRIPT_NAME', '/api/endpoint')
+ context 'for API requests' do
+ it 'returns the user' do
+ set_header('SCRIPT_NAME', '/api/endpoint')
+
+ expect(find_user_from_web_access_token(:api)).to eq(user)
+ end
+
+ it 'returns nil if URL does not start with /api/' do
+ set_header('SCRIPT_NAME', '/relative_root/api/endpoint')
+
+ expect(find_user_from_web_access_token(:api)).to be_nil
+ end
- expect(find_user_from_web_access_token(:api)).to eq(user)
+ context 'when relative_url_root is set' do
+ before do
+ stub_config_setting(relative_url_root: '/relative_root')
+ end
+
+ it 'returns the user' do
+ set_header('SCRIPT_NAME', '/relative_root/api/endpoint')
+
+ expect(find_user_from_web_access_token(:api)).to eq(user)
+ end
+ end
end
end
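The new examples above pin down that a web access token is only honored for requests whose path starts with the API prefix, including when GitLab is served from a relative URL root. A hypothetical sketch of the kind of prefix check being exercised (the method name and shape are illustrative, not GitLab's actual implementation):

# Illustrative only: honor the token when the request path begins with
# "<relative_url_root>/api/".
def api_request?(script_name, relative_url_root = '')
  script_name.start_with?("#{relative_url_root}/api/")
end

api_request?('/api/endpoint')                                   # => true
api_request?('/relative_root/api/endpoint')                     # => false
api_request?('/relative_root/api/endpoint', '/relative_root')   # => true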
diff --git a/spec/lib/gitlab/auth/current_user_mode_spec.rb b/spec/lib/gitlab/auth/current_user_mode_spec.rb
index 60b403780c0..ffd7813190a 100644
--- a/spec/lib/gitlab/auth/current_user_mode_spec.rb
+++ b/spec/lib/gitlab/auth/current_user_mode_spec.rb
@@ -121,7 +121,7 @@ RSpec.describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode, :request_
subject.enable_admin_mode!(password: user.password)
expect(subject.admin_mode?).to be(true), 'admin mode is not active in the present'
- Timecop.freeze(Gitlab::Auth::CurrentUserMode::MAX_ADMIN_MODE_TIME.from_now) do
+ travel_to(Gitlab::Auth::CurrentUserMode::MAX_ADMIN_MODE_TIME.from_now) do
# in the future this will be a new request, simulate by clearing the RequestStore
Gitlab::SafeRequestStore.clear!
diff --git a/spec/lib/gitlab/auth/otp/strategies/devise_spec.rb b/spec/lib/gitlab/auth/otp/strategies/devise_spec.rb
new file mode 100644
index 00000000000..0c88421d456
--- /dev/null
+++ b/spec/lib/gitlab/auth/otp/strategies/devise_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Auth::Otp::Strategies::Devise do
+ let_it_be(:user) { create(:user) }
+ let(:otp_code) { 42 }
+
+ subject(:validate) { described_class.new(user).validate(otp_code) }
+
+ it 'calls Devise' do
+ expect(user).to receive(:validate_and_consume_otp!).with(otp_code)
+
+ validate
+ end
+end
diff --git a/spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb b/spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb
new file mode 100644
index 00000000000..18fd6d08057
--- /dev/null
+++ b/spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Auth::Otp::Strategies::FortiAuthenticator do
+ let_it_be(:user) { create(:user) }
+ let(:otp_code) { 42 }
+
+ let(:host) { 'forti_authenticator.example.com' }
+ let(:port) { '444' }
+ let(:api_username) { 'janedoe' }
+ let(:api_token) { 's3cr3t' }
+
+ let(:forti_authenticator_auth_url) { "https://#{host}:#{port}/api/v1/auth/" }
+
+ subject(:validate) { described_class.new(user).validate(otp_code) }
+
+ before do
+ stub_feature_flags(forti_authenticator: true)
+
+ stub_forti_authenticator_config(
+ host: host,
+ port: port,
+ username: api_username,
+ token: api_token
+ )
+
+ request_body = { username: user.username,
+ token_code: otp_code }
+
+ stub_request(:post, forti_authenticator_auth_url)
+ .with(body: JSON(request_body), headers: { 'Content-Type' => 'application/json' })
+ .to_return(status: response_status, body: '', headers: {})
+ end
+
+ context 'successful validation' do
+ let(:response_status) { 200 }
+
+ it 'returns success' do
+ expect(validate[:status]).to eq(:success)
+ end
+ end
+
+ context 'unsuccessful validation' do
+ let(:response_status) { 401 }
+
+ it 'returns error' do
+ expect(validate[:status]).to eq(:error)
+ end
+ end
+
+ def stub_forti_authenticator_config(forti_authenticator_settings)
+ allow(::Gitlab.config.forti_authenticator).to(receive_messages(forti_authenticator_settings))
+ end
+end
diff --git a/spec/lib/gitlab/auth/unique_ips_limiter_spec.rb b/spec/lib/gitlab/auth/unique_ips_limiter_spec.rb
index a08055ab852..b239de841b6 100644
--- a/spec/lib/gitlab/auth/unique_ips_limiter_spec.rb
+++ b/spec/lib/gitlab/auth/unique_ips_limiter_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::Auth::UniqueIpsLimiter, :clean_gitlab_redis_shared_state
expect(described_class.update_and_return_ips_count(user.id, 'ip2')).to eq(1)
expect(described_class.update_and_return_ips_count(user.id, 'ip3')).to eq(2)
- Timecop.travel(Time.now.utc + described_class.config.unique_ips_limit_time_window) do
+ travel_to(Time.now.utc + described_class.config.unique_ips_limit_time_window) do
expect(described_class.update_and_return_ips_count(user.id, 'ip4')).to eq(1)
expect(described_class.update_and_return_ips_count(user.id, 'ip5')).to eq(2)
end
diff --git a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
index 5cbd22827c9..d3c6cde5590 100644
--- a/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
+++ b/spec/lib/gitlab/auth/user_access_denied_reason_spec.rb
@@ -49,5 +49,13 @@ RSpec.describe Gitlab::Auth::UserAccessDeniedReason do
it { is_expected.to match /Your primary email address is not confirmed/ }
end
+
+ context 'when the user is blocked pending approval' do
+ before do
+ user.block_pending_approval!
+ end
+
+ it { is_expected.to eq('Your account is pending approval from your administrator and hence blocked.') }
+ end
end
end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 74360637897..1768ab41a71 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -726,6 +726,12 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
expect( gl_auth.find_with_user_password(username, password) ).not_to eql user
end
+ it 'does not find user in blocked_pending_approval state' do
+ user.block_pending_approval
+
+ expect( gl_auth.find_with_user_password(username, password) ).not_to eql user
+ end
+
context 'with increment_failed_attempts' do
wrong_password = 'incorrect_password'
diff --git a/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb b/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb
new file mode 100644
index 00000000000..81b8b5dde08
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::AddModifiedToApprovalMergeRequestRule, schema: 20200817195628 do
+ let(:determine_if_rules_are_modified) { described_class.new }
+
+ let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab') }
+ let(:projects) { table(:projects) }
+ let(:normal_project) { projects.create!(namespace_id: namespace.id) }
+ let(:overridden_project) { projects.create!(namespace_id: namespace.id) }
+ let(:rules) { table(:approval_merge_request_rules) }
+ let(:project_rules) { table(:approval_project_rules) }
+ let(:sources) { table(:approval_merge_request_rule_sources) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:groups) { table(:namespaces) }
+ let(:mr_groups) { table(:approval_merge_request_rules_groups) }
+ let(:project_groups) { table(:approval_project_rules_groups) }
+
+ before do
+ project_rule = project_rules.create!(project_id: normal_project.id, approvals_required: 3, name: 'test rule')
+ overridden_project_rule = project_rules.create!(project_id: overridden_project.id, approvals_required: 5, name: 'other test rule')
+ overridden_project_rule_two = project_rules.create!(project_id: overridden_project.id, approvals_required: 7, name: 'super cool rule')
+
+ merge_request = merge_requests.create!(target_branch: 'feature', source_branch: 'default', source_project_id: normal_project.id, target_project_id: normal_project.id)
+ overridden_merge_request = merge_requests.create!(target_branch: 'feature-2', source_branch: 'default', source_project_id: overridden_project.id, target_project_id: overridden_project.id)
+
+ merge_rule = rules.create!(merge_request_id: merge_request.id, approvals_required: 3, name: 'test rule')
+ overridden_merge_rule = rules.create!(merge_request_id: overridden_merge_request.id, approvals_required: 6, name: 'other test rule')
+ overridden_merge_rule_two = rules.create!(merge_request_id: overridden_merge_request.id, approvals_required: 7, name: 'super cool rule')
+
+ sources.create!(approval_project_rule_id: project_rule.id, approval_merge_request_rule_id: merge_rule.id)
+ sources.create!(approval_project_rule_id: overridden_project_rule.id, approval_merge_request_rule_id: overridden_merge_rule.id)
+ sources.create!(approval_project_rule_id: overridden_project_rule_two.id, approval_merge_request_rule_id: overridden_merge_rule_two.id)
+
+ group1 = groups.create!(name: "group1", path: "test_group1", type: 'Group')
+ group2 = groups.create!(name: "group2", path: "test_group2", type: 'Group')
+ group3 = groups.create!(name: "group3", path: "test_group3", type: 'Group')
+
+ project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group1.id)
+ project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group2.id)
+ project_groups.create!(approval_project_rule_id: overridden_project_rule_two.id, group_id: group3.id)
+
+ mr_groups.create!(approval_merge_request_rule_id: overridden_merge_rule.id, group_id: group1.id)
+ mr_groups.create!(approval_merge_request_rule_id: overridden_merge_rule_two.id, group_id: group2.id)
+ end
+
+ describe '#perform' do
+ it 'changes the correct rules' do
+ original_count = rules.all.count
+
+ determine_if_rules_are_modified.perform(rules.minimum(:id), rules.maximum(:id))
+
+ results = rules.where(modified_from_project_rule: true)
+
+ expect(results.count).to eq 2
+ expect(results.collect(&:name)).to eq(['other test rule', 'super cool rule'])
+ expect(rules.count).to eq original_count
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
index a3840e3a22e..85a9c88ebff 100644
--- a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
+++ b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgre
described_class.new.perform
- expect(Feature.enabled?(:multiple_merge_request_assignees)).to eq(true)
+ expect(Feature.enabled?(:multiple_merge_request_assignees, type: :licensed)).to eq(true)
end
end
diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
new file mode 100644
index 00000000000..33498ffa748
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20200925125321 do
+ let(:users) { table(:users) }
+
+ let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) }
+
+ let(:u2f_registrations) { table(:u2f_registrations) }
+ let(:webauthn_registrations) { table(:webauthn_registrations) }
+
+ let!(:u2f_registration_not_migrated) { create_u2f_registration(1, 'reg1') }
+ let!(:u2f_registration_not_migrated_no_name) { create_u2f_registration(2, nil, 2) }
+ let!(:u2f_registration_migrated) { create_u2f_registration(3, 'reg3') }
+
+ subject { described_class.new.perform(1, 3) }
+
+ before do
+ converted_credential = convert_credential_for(u2f_registration_migrated)
+ webauthn_registrations.create!(converted_credential)
+ end
+
+ it 'migrates all records' do
+ expect { subject }.to change { webauthn_registrations.count }.from(1).to(3)
+
+ all_webauthn_registrations = webauthn_registrations.all.map(&:attributes)
+
+ [u2f_registration_not_migrated, u2f_registration_not_migrated_no_name].each do |u2f_registration|
+ expected_credential = convert_credential_for(u2f_registration).except(:created_at).stringify_keys
+ expect(all_webauthn_registrations).to include(a_hash_including(expected_credential))
+ end
+ end
+
+ def create_u2f_registration(id, name, counter = 5)
+ device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
+ u2f_registrations.create!({ id: id,
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: Base64.strict_encode64(device.origin_public_key_raw),
+ counter: counter,
+ name: name,
+ user_id: user.id })
+ end
+
+ def convert_credential_for(u2f_registration)
+ converted_credential = WebAuthn::U2fMigrator.new(
+ app_id: Gitlab.config.gitlab.url,
+ certificate: u2f_registration.certificate,
+ key_handle: u2f_registration.key_handle,
+ public_key: u2f_registration.public_key,
+ counter: u2f_registration.counter
+ ).credential
+
+ {
+ credential_xid: Base64.strict_encode64(converted_credential.id),
+ public_key: Base64.strict_encode64(converted_credential.public_key),
+ counter: u2f_registration.counter,
+ name: u2f_registration.name || '',
+ user_id: u2f_registration.user_id,
+ u2f_registration_id: u2f_registration.id,
+ created_at: u2f_registration.created_at
+ }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb b/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb
index db3cbe7ccdc..3cec5cb4c35 100644
--- a/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb
@@ -82,21 +82,4 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateUsersBioToUserDetails, :migra
expect(user_detail).to be_nil
end
-
- context 'when `migrate_bio_to_user_details` feature flag is off' do
- before do
- stub_feature_flags(migrate_bio_to_user_details: false)
- end
-
- it 'does nothing' do
- already_existing_user_details = user_details.where(user_id: [
- user_has_different_details.id,
- user_already_has_details.id
- ])
-
- subject
-
- expect(user_details.all).to match_array(already_existing_user_details)
- end
- end
end
diff --git a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
new file mode 100644
index 00000000000..fa4f2d1fd88
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::ReplaceBlockedByLinks, schema: 20201015073808 do
+ let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id, name: 'gitlab') }
+ let(:issue1) { table(:issues).create!(project_id: project.id, title: 'a') }
+ let(:issue2) { table(:issues).create!(project_id: project.id, title: 'b') }
+ let(:issue3) { table(:issues).create!(project_id: project.id, title: 'c') }
+ let(:issue_links) { table(:issue_links) }
+ let!(:blocks_link) { issue_links.create!(source_id: issue1.id, target_id: issue2.id, link_type: 1) }
+ let!(:bidirectional_link) { issue_links.create!(source_id: issue2.id, target_id: issue1.id, link_type: 2) }
+ let!(:blocked_link) { issue_links.create!(source_id: issue1.id, target_id: issue3.id, link_type: 2) }
+
+ subject { described_class.new.perform(issue_links.minimum(:id), issue_links.maximum(:id)) }
+
+ it 'deletes issue links where opposite relation already exists' do
+ expect { subject }.to change { issue_links.count }.by(-1)
+ end
+
+ it 'ignores issue links other than blocked_by' do
+ subject
+
+ expect(blocks_link.reload.link_type).to eq(1)
+ end
+
+ it 'updates blocked_by issue links' do
+ subject
+
+ link = blocked_link.reload
+ expect(link.link_type).to eq(1)
+ expect(link.source_id).to eq(issue3.id)
+ expect(link.target_id).to eq(issue1.id)
+ end
+end
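This new migration spec pins down the intended rewrite: a "blocked by" link (link_type: 2) becomes a "blocks" link (link_type: 1) with source and target swapped, unless the opposite "blocks" link already exists, in which case the duplicate is deleted. A small in-memory sketch of that rule (plain Ruby over hashes, not the migration's actual SQL):

links = [
  { id: 1, source_id: 1, target_id: 2, link_type: 1 }, # blocks
  { id: 2, source_id: 2, target_id: 1, link_type: 2 }, # blocked_by, opposite already exists
  { id: 3, source_id: 1, target_id: 3, link_type: 2 }  # blocked_by, no opposite
]

blocks = ->(link) { link[:link_type] == 1 }

converted = links.reject(&blocks).filter_map do |link|
  opposite_exists = links.any? do |other|
    blocks.call(other) &&
      other[:source_id] == link[:target_id] &&
      other[:target_id] == link[:source_id]
  end
  next if opposite_exists # mirror of the "deletes issue links" example

  # mirror of the "updates blocked_by issue links" example
  link.merge(link_type: 1, source_id: link[:target_id], target_id: link[:source_id])
end

converted # => [{ id: 3, source_id: 3, target_id: 1, link_type: 1 }]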
diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
index 392b44d1a1f..2dae4a65eeb 100644
--- a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
+++ b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
@@ -74,14 +74,14 @@ RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMent
let(:user_mentions) { merge_request_user_mentions }
let(:resource) { merge_request }
- it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, MergeRequest
+ it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, 'MergeRequest'
context 'when FF disabled' do
before do
stub_feature_flags(migrate_user_mentions: false)
end
- it_behaves_like 'resource migration not run', MigrateMergeRequestMentionsToDb, MergeRequest
+ it_behaves_like 'resource migration not run', MigrateMergeRequestMentionsToDb, 'MergeRequest'
end
end
@@ -103,14 +103,14 @@ RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMent
let(:user_mentions) { commit_user_mentions }
let(:resource) { commit }
- it_behaves_like 'resource notes mentions migration', MigrateCommitNotesMentionsToDb, Commit
+ it_behaves_like 'resource notes mentions migration', MigrateCommitNotesMentionsToDb, 'Commit'
context 'when FF disabled' do
before do
stub_feature_flags(migrate_user_mentions: false)
end
- it_behaves_like 'resource notes migration not run', MigrateCommitNotesMentionsToDb, Commit
+ it_behaves_like 'resource notes migration not run', MigrateCommitNotesMentionsToDb, 'Commit'
end
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index d4483bf1754..b723c31c4aa 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -312,7 +312,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
# attributes later.
existing_label.reload
- Timecop.freeze(Time.now + 1.minute) do
+ travel_to(Time.now + 1.minute) do
importer.execute
label_after_import = project.labels.find(existing_label.id)
diff --git a/spec/lib/gitlab/bulk_import/client_spec.rb b/spec/lib/gitlab/bulk_import/client_spec.rb
new file mode 100644
index 00000000000..a6f8dd6d194
--- /dev/null
+++ b/spec/lib/gitlab/bulk_import/client_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BulkImport::Client do
+ include ImportSpecHelper
+
+ let(:uri) { 'http://gitlab.example' }
+ let(:token) { 'token' }
+ let(:resource) { 'resource' }
+
+ subject { described_class.new(uri: uri, token: token) }
+
+ describe '#get' do
+ let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
+
+ shared_examples 'performs network request' do
+ it 'performs network request' do
+ expect(Gitlab::HTTP).to receive(:get).with(*expected_args).and_return(response_double)
+
+ subject.get(resource)
+ end
+ end
+
+ describe 'parsed response' do
+ it 'returns parsed response' do
+ response_double = double(code: 200, success?: true, parsed_response: [{ id: 1 }, { id: 2 }])
+
+ allow(Gitlab::HTTP).to receive(:get).and_return(response_double)
+
+ expect(subject.get(resource)).to eq(response_double.parsed_response)
+ end
+ end
+
+ describe 'request query' do
+ include_examples 'performs network request' do
+ let(:expected_args) do
+ [
+ anything,
+ hash_including(
+ query: {
+ page: described_class::DEFAULT_PAGE,
+ per_page: described_class::DEFAULT_PER_PAGE
+ }
+ )
+ ]
+ end
+ end
+ end
+
+ describe 'request headers' do
+ include_examples 'performs network request' do
+ let(:expected_args) do
+ [
+ anything,
+ hash_including(
+ headers: {
+ 'Content-Type' => 'application/json',
+ 'Authorization' => "Bearer #{token}"
+ }
+ )
+ ]
+ end
+ end
+ end
+
+ describe 'request uri' do
+ include_examples 'performs network request' do
+ let(:expected_args) do
+ ['http://gitlab.example:80/api/v4/resource', anything]
+ end
+ end
+ end
+
+ context 'error handling' do
+ context 'when error occurred' do
+ it 'raises ConnectionError' do
+ allow(Gitlab::HTTP).to receive(:get).and_raise(Errno::ECONNREFUSED)
+
+ expect { subject.get(resource) }.to raise_exception(described_class::ConnectionError)
+ end
+ end
+
+ context 'when response is not success' do
+ it 'raises ConnectionError' do
+ response_double = double(code: 503, success?: false)
+
+ allow(Gitlab::HTTP).to receive(:get).and_return(response_double)
+
+ expect { subject.get(resource) }.to raise_exception(described_class::ConnectionError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
new file mode 100644
index 00000000000..ca7ee784ee3
--- /dev/null
+++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Checks::MatchingMergeRequest do
+ describe '#match?' do
+ let_it_be(:newrev) { '012345678' }
+ let_it_be(:target_branch) { 'feature' }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:locked_merge_request) do
+ create(:merge_request,
+ :locked,
+ source_project: project,
+ target_project: project,
+ target_branch: target_branch,
+ in_progress_merge_commit_sha: newrev)
+ end
+
+ subject { described_class.new(newrev, target_branch, project) }
+
+ it 'matches a merge request' do
+ expect(subject.match?).to be true
+ end
+
+ it 'does not match any merge request' do
+ matcher = described_class.new(newrev, 'test', project)
+
+ expect(matcher.match?).to be false
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/ansi2json/line_spec.rb b/spec/lib/gitlab/ci/ansi2json/line_spec.rb
index 8b1cd812a70..d681447a0e8 100644
--- a/spec/lib/gitlab/ci/ansi2json/line_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json/line_spec.rb
@@ -58,6 +58,15 @@ RSpec.describe Gitlab::Ci::Ansi2json::Line do
end
end
+ describe '#set_section_options' do
+ it 'sets the current section\'s options' do
+ options = { collapsed: true }
+ subject.set_section_options(options)
+
+ expect(subject.to_h[:section_options]).to eq(options)
+ end
+ end
+
describe '#set_as_section_header' do
it 'change the section_header to true' do
expect { subject.set_as_section_header }
diff --git a/spec/lib/gitlab/ci/ansi2json_spec.rb b/spec/lib/gitlab/ci/ansi2json_spec.rb
index cb6949fddc2..c9c0d1a744e 100644
--- a/spec/lib/gitlab/ci/ansi2json_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
expect(convert_json(trace)).to eq([
{
offset: 0,
- content: [{ text: "section_end:1:2<div>hello</div>" }],
+ content: [{ text: 'section_end:1:2<div>hello</div>' }],
section: 'prepare-script',
section_header: true
},
@@ -329,6 +329,32 @@ RSpec.describe Gitlab::Ci::Ansi2json do
])
end
end
+
+ context 'with section options' do
+ let(:option_section_start) { "section_start:#{section_start_time.to_i}:#{section_name}[collapsed=true,unused_option=123]\r\033[0K"}
+
+ it 'provides section options when set' do
+ trace = "#{option_section_start}hello#{section_end}"
+ expect(convert_json(trace)).to eq([
+ {
+ offset: 0,
+ content: [{ text: 'hello' }],
+ section: 'prepare-script',
+ section_header: true,
+ section_options: {
+ 'collapsed' => 'true',
+ 'unused_option' => '123'
+ }
+ },
+ {
+ offset: 83,
+ content: [],
+ section: 'prepare-script',
+ section_duration: '01:03'
+ }
+ ])
+ end
+ end
end
describe 'incremental updates' do
@@ -339,7 +365,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
context 'with split word' do
let(:pre_text) { "\e[1mHello " }
- let(:text) { "World" }
+ let(:text) { 'World' }
let(:lines) do
[
@@ -355,7 +381,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
context 'with split word on second line' do
let(:pre_text) { "Good\nmorning " }
- let(:text) { "World" }
+ let(:text) { 'World' }
let(:lines) do
[
@@ -514,7 +540,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
end
describe 'truncates' do
- let(:text) { "Hello World" }
+ let(:text) { 'Hello World' }
let(:stream) { StringIO.new(text) }
let(:subject) { described_class.convert(stream) }
@@ -522,11 +548,11 @@ RSpec.describe Gitlab::Ci::Ansi2json do
stream.seek(3, IO::SEEK_SET)
end
- it "returns truncated output" do
+ it 'returns truncated output' do
expect(subject.truncated).to be_truthy
end
- it "does not append output" do
+ it 'does not append output' do
expect(subject.append).to be_falsey
end
end
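The new "with section options" examples cover trace section markers that carry a bracketed option list, e.g. section_start:<timestamp>:<name>[collapsed=true,unused_option=123]. A hypothetical parser for that marker format (the regex and method name are illustrative; the real parsing lives in Gitlab::Ci::Ansi2json):

# Illustrative sketch: extract the timestamp, section name and optional
# key=value pairs from a section_start marker.
SECTION_START = /section_start:(?<time>\d+):(?<name>[^\[\r]+)(?:\[(?<opts>[^\]]*)\])?/.freeze

def parse_section_start(line)
  match = SECTION_START.match(line)
  return unless match

  options = (match[:opts] || '').split(',').to_h { |pair| pair.split('=', 2) }
  { time: match[:time].to_i, name: match[:name], options: options }
end

parse_section_start("section_start:1560896352:prepare-script[collapsed=true,unused_option=123]\r")
# => { time: 1560896352, name: "prepare-script",
#      options: { "collapsed" => "true", "unused_option" => "123" } }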
diff --git a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
index 83a37655ea9..e982f0eb015 100644
--- a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
+++ b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb
@@ -18,17 +18,6 @@ RSpec.describe Gitlab::Ci::ArtifactFileReader do
expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
end
- context 'when FF ci_new_artifact_file_reader is disabled' do
- before do
- stub_feature_flags(ci_new_artifact_file_reader: false)
- end
-
- it 'returns the content at the path' do
- is_expected.to be_present
- expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
- end
- end
-
context 'when path does not exist' do
let(:path) { 'file/does/not/exist.txt' }
let(:expected_error) do
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index f33176c3da3..8b2e0410474 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -228,4 +228,66 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
end
end
end
+
+ describe '#manual_action?' do
+ context 'when job is a manual action' do
+ let(:config) { { script: 'deploy', when: 'manual' } }
+
+ it { is_expected.to be_manual_action }
+ end
+
+ context 'when job is not a manual action' do
+ let(:config) { { script: 'deploy' } }
+
+ it { is_expected.not_to be_manual_action }
+ end
+ end
+
+ describe '#ignored?' do
+ context 'when job is a manual action' do
+ context 'when it is not specified if job is allowed to fail' do
+ let(:config) do
+ { script: 'deploy', when: 'manual' }
+ end
+
+ it { is_expected.to be_ignored }
+ end
+
+ context 'when job is allowed to fail' do
+ let(:config) do
+ { script: 'deploy', when: 'manual', allow_failure: true }
+ end
+
+ it { is_expected.to be_ignored }
+ end
+
+ context 'when job is not allowed to fail' do
+ let(:config) do
+ { script: 'deploy', when: 'manual', allow_failure: false }
+ end
+
+ it { is_expected.not_to be_ignored }
+ end
+ end
+
+ context 'when job is not a manual action' do
+ context 'when it is not specified if job is allowed to fail' do
+ let(:config) { { script: 'deploy' } }
+
+ it { is_expected.not_to be_ignored }
+ end
+
+ context 'when job is allowed to fail' do
+ let(:config) { { script: 'deploy', allow_failure: true } }
+
+ it { is_expected.to be_ignored }
+ end
+
+ context 'when job is not allowed to fail' do
+ let(:config) { { script: 'deploy', allow_failure: false } }
+
+ it { is_expected.not_to be_ignored }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 3501812b76e..80427eaa6ee 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -13,18 +13,23 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
context 'when entry config value is correct' do
let(:policy) { nil }
let(:key) { 'some key' }
+ let(:when_config) { nil }
let(:config) do
- { key: key,
+ {
+ key: key,
untracked: true,
- paths: ['some/path/'],
- policy: policy }
+ paths: ['some/path/']
+ }.tap do |config|
+ config[:policy] = policy if policy
+ config[:when] = when_config if when_config
+ end
end
describe '#value' do
shared_examples 'hash key value' do
it 'returns hash value' do
- expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push')
+ expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
end
end
@@ -49,6 +54,48 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
expect(entry.value).to match(a_hash_including(key: nil))
end
end
+
+ context 'with `policy`' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:policy, :result) do
+ 'pull-push' | 'pull-push'
+ 'push' | 'push'
+ 'pull' | 'pull'
+ 'unknown' | 'unknown' # invalid
+ end
+
+ with_them do
+ it { expect(entry.value).to include(policy: result) }
+ end
+ end
+
+ context 'without `policy`' do
+ it 'assigns policy to default' do
+ expect(entry.value).to include(policy: 'pull-push')
+ end
+ end
+
+ context 'with `when`' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:when_config, :result) do
+ 'on_success' | 'on_success'
+ 'on_failure' | 'on_failure'
+ 'always' | 'always'
+ 'unknown' | 'unknown' # invalid
+ end
+
+ with_them do
+ it { expect(entry.value).to include(when: result) }
+ end
+ end
+
+ context 'without `when`' do
+ it 'assigns when to default' do
+ expect(entry.value).to include(when: 'on_success')
+ end
+ end
end
describe '#valid?' do
@@ -61,28 +108,41 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
end
end
- context 'policy is pull-push' do
- let(:policy) { 'pull-push' }
+ context 'with `policy`' do
+ using RSpec::Parameterized::TableSyntax
- it { is_expected.to be_valid }
- it { expect(entry.value).to include(policy: 'pull-push') }
- end
-
- context 'policy is push' do
- let(:policy) { 'push' }
+ where(:policy, :valid) do
+ 'pull-push' | true
+ 'push' | true
+ 'pull' | true
+ 'unknown' | false
+ end
- it { is_expected.to be_valid }
- it { expect(entry.value).to include(policy: 'push') }
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
+ end
+ end
end
- context 'policy is pull' do
- let(:policy) { 'pull' }
+ context 'with `when`' do
+ using RSpec::Parameterized::TableSyntax
- it { is_expected.to be_valid }
- it { expect(entry.value).to include(policy: 'pull') }
+ where(:when_config, :valid) do
+ 'on_success' | true
+ 'on_failure' | true
+ 'always' | true
+ 'unknown' | false
+ end
+
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
+ end
+ end
end
- context 'when key is missing' do
+ context 'with key missing' do
let(:config) do
{ untracked: true,
paths: ['some/path/'] }
@@ -110,13 +170,21 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
end
context 'when policy is unknown' do
- let(:config) { { policy: "unknown" } }
+ let(:config) { { policy: 'unknown' } }
it 'reports error' do
is_expected.to include('cache policy should be pull-push, push, or pull')
end
end
+ context 'when `when` is unknown' do
+ let(:config) { { when: 'unknown' } }
+
+ it 'reports error' do
+ is_expected.to include('cache when should be on_success, on_failure or always')
+ end
+ end
+
context 'when descendants are invalid' do
context 'with invalid keys' do
let(:config) { { key: 1 } }
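The cache entry specs above exercise the new `when` key alongside `policy`; the hash below is a minimal sketch of a config in that shape (the key name and path are illustrative, not taken from the suite):

cache_config = {
  key: 'gems-and-packages',  # illustrative cache key
  paths: %w[vendor/ruby],    # illustrative path
  untracked: true,
  policy: 'pull-push',       # valid values: pull-push, push, pull (defaults to pull-push)
  when: 'on_success'         # valid values: on_success, on_failure, always (defaults to on_success)
}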
diff --git a/spec/lib/gitlab/ci/config/entry/include_spec.rb b/spec/lib/gitlab/ci/config/entry/include_spec.rb
index 3e816f70c03..59f0b0e7a48 100644
--- a/spec/lib/gitlab/ci/config/entry/include_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/include_spec.rb
@@ -61,6 +61,31 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Include do
end
end
end
+
+ context 'when using "project"' do
+ context 'and specifying "ref" and "file"' do
+ let(:config) { { project: 'my-group/my-pipeline-library', ref: 'master', file: 'test.yml' } }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'without "ref"' do
+ let(:config) { { project: 'my-group/my-pipeline-library', file: 'test.yml' } }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'without "file"' do
+ let(:config) { { project: 'my-group/my-pipeline-library' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'has specific error' do
+ expect(include_entry.errors)
+ .to include('include config must specify the file where to fetch the config from')
+ end
+ end
+ end
end
context 'when value is something else' do
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index ab760b107f8..e0e8bc93770 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -537,7 +537,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it 'overrides default config' do
expect(entry[:image].value).to eq(name: 'some_image')
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push')
+ expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
end
end
@@ -552,7 +552,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it 'uses config from default entry' do
expect(entry[:image].value).to eq 'specified'
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push')
+ expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
index 39697884e3b..3388ae0af2f 100644
--- a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require_dependency 'active_model'
RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
@@ -46,33 +46,140 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
end
end
- context 'when entry config has only one variable' do
- let(:config) do
- [
- {
- 'VAR_1' => %w[test]
- }
- ]
+ context 'with one_dimensional_matrix feature flag enabled' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: true)
+ matrix.compose!
end
- describe '#valid?' do
- it { is_expected.not_to be_valid }
- end
+ context 'when entry config has only one variable with multiple values' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[build test]
+ }
+ ]
+ end
- describe '#errors' do
- it 'returns error about too many jobs' do
- expect(matrix.errors)
- .to include('variables config requires at least 2 items')
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns no errors' do
+ expect(matrix.errors)
+ .to be_empty
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[build test] }])
+ end
end
+
+ context 'when entry config has only one variable with one value' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[test]
+ }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns no errors' do
+ expect(matrix.errors)
+ .to be_empty
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[test] }])
+ end
+ end
+ end
+ end
+ end
+
+ context 'with one_dimensional_matrix feature flag disabled' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: false)
+ matrix.compose!
end
- describe '#value' do
- before do
- matrix.compose!
+ context 'when entry config has only one variable with multiple values' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[build test]
+ }
+ ]
end
- it 'returns the value without raising an error' do
- expect(matrix.value).to eq([{ 'VAR_1' => ['test'] }])
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about too many jobs' do
+ expect(matrix.errors)
+ .to include('variables config requires at least 2 items')
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[build test] }])
+ end
+ end
+
+ context 'when entry config has only one variable with one value' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[test]
+ }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about too many jobs' do
+ expect(matrix.errors)
+ .to include('variables config requires at least 2 items')
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[test] }])
+ end
+ end
end
end
end
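The matrix specs above only accept a single-variable matrix while the one_dimensional_matrix feature flag is enabled; the fragment below is a hypothetical `.gitlab-ci.yml` excerpt in that shape (job and variable names are invented):

one_dimensional_matrix_yaml = <<~YAML
  deploy:
    script: echo "deploy"
    parallel:
      matrix:
        - PROVIDER: [aws, gcp]  # a single variable; rejected when the flag is disabled
YAML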
diff --git a/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
index 230b001d620..407efb438b5 100644
--- a/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+# After the one_dimensional_matrix feature flag is removed, this can be changed back to fast_spec_helper
+require 'spec_helper'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Product::Variables do
@@ -45,43 +46,71 @@ RSpec.describe Gitlab::Ci::Config::Entry::Product::Variables do
end
end
- context 'when entry value is not correct' do
- shared_examples 'invalid variables' do |message|
- describe '#errors' do
- it 'saves errors' do
- expect(entry.errors).to include(message)
- end
+ context 'with one_dimensional_matrix feature flag enabled' do
+ context 'with only one variable' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: true)
end
+ let(:config) { { VAR: 'test' } }
describe '#valid?' do
- it 'is not valid' do
- expect(entry).not_to be_valid
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(entry.errors).to be_empty
end
end
end
+ end
- context 'with array' do
- let(:config) { [:VAR, 'test'] }
+ context 'with one_dimensional_matrix feature flag disabled' do
+ context 'when entry value is not correct' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: false)
+ end
+ shared_examples 'invalid variables' do |message|
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors).to include(message)
+ end
+ end
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
- context 'with empty array' do
- let(:config) { { VAR: 'test', VAR2: [] } }
+ context 'with array' do
+ let(:config) { [:VAR, 'test'] }
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
- context 'with nested array' do
- let(:config) { { VAR: 'test', VAR2: [1, [2]] } }
+ context 'with empty array' do
+ let(:config) { { VAR: 'test', VAR2: [] } }
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
- context 'with only one variable' do
- let(:config) { { VAR: 'test' } }
+ context 'with nested array' do
+ let(:config) { { VAR: 'test', VAR2: [1, [2]] } }
+
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
- it_behaves_like 'invalid variables', /variables config requires at least 2 items/
+ context 'with one_dimensional_matrix feature flag disabled' do
+ context 'with only one variable' do
+ let(:config) { { VAR: 'test' } }
+
+ it_behaves_like 'invalid variables', /variables config requires at least 2 items/
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 252bda6461d..79716df6b60 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: { name: 'ruby:2.7' },
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push' },
+ cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
@@ -141,7 +141,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: { name: 'ruby:2.7' },
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push' },
+ cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
@@ -156,7 +156,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
image: { name: "ruby:2.7" },
services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
- cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push" },
+ cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' },
only: { refs: %w(branches tags) },
variables: { 'VAR' => 'job' },
after_script: [],
@@ -203,7 +203,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: { name: 'ruby:2.7' },
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: "pull-push" },
+ cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
variables: { 'VAR' => 'root' },
ignore: false,
after_script: ['make clean'],
@@ -215,7 +215,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: { name: 'ruby:2.7' },
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: "pull-push" },
+ cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
variables: { 'VAR' => 'job' },
ignore: false,
after_script: ['make clean'],
@@ -261,7 +261,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
describe '#cache_value' do
it 'returns correct cache definition' do
- expect(root.cache_value).to eq(key: 'a', policy: 'pull-push')
+ expect(root.cache_value).to eq(key: 'a', policy: 'pull-push', when: 'on_success')
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
index d6391092f63..ac33f858f43 100644
--- a/spec/lib/gitlab/ci/config/entry/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/variables_spec.rb
@@ -3,56 +3,109 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Variables do
- let(:entry) { described_class.new(config) }
+ subject { described_class.new(config) }
- describe 'validations' do
- context 'when entry config value is correct' do
- let(:config) do
- { 'VARIABLE_1' => 'value 1', 'VARIABLE_2' => 'value 2' }
+ shared_examples 'valid config' do
+ describe '#value' do
+ it 'returns hash with key value strings' do
+ expect(subject.value).to eq result
end
+ end
- describe '#value' do
- it 'returns hash with key value strings' do
- expect(entry.value).to eq config
- end
-
- context 'with numeric keys and values in the config' do
- let(:config) { { 10 => 20 } }
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(subject.errors).to be_empty
+ end
+ end
- it 'converts numeric key and numeric value into strings' do
- expect(entry.value).to eq('10' => '20')
- end
- end
+ describe '#valid?' do
+ it 'is valid' do
+ expect(subject).to be_valid
end
+ end
+ end
- describe '#errors' do
- it 'does not append errors' do
- expect(entry.errors).to be_empty
- end
+ shared_examples 'invalid config' do
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(subject).not_to be_valid
end
+ end
- describe '#valid?' do
- it 'is valid' do
- expect(entry).to be_valid
- end
+ describe '#errors' do
+ it 'saves errors' do
+ expect(subject.errors)
+ .to include /should be a hash of key value pairs/
end
end
+ end
- context 'when entry value is not correct' do
- let(:config) { [:VAR, 'test'] }
+ context 'when entry config value has key-value pairs' do
+ let(:config) do
+ { 'VARIABLE_1' => 'value 1', 'VARIABLE_2' => 'value 2' }
+ end
- describe '#errors' do
- it 'saves errors' do
- expect(entry.errors)
- .to include /should be a hash of key value pairs/
- end
- end
+ let(:result) do
+ { 'VARIABLE_1' => 'value 1', 'VARIABLE_2' => 'value 2' }
+ end
- describe '#valid?' do
- it 'is not valid' do
- expect(entry).not_to be_valid
- end
- end
+ it_behaves_like 'valid config'
+ end
+
+ context 'with numeric keys and values in the config' do
+ let(:config) { { 10 => 20 } }
+ let(:result) do
+ { '10' => '20' }
+ end
+
+ it_behaves_like 'valid config'
+ end
+
+ context 'when entry config value has key-value pair and hash' do
+ let(:config) do
+ { 'VARIABLE_1' => { value: 'value 1', description: 'variable 1' },
+ 'VARIABLE_2' => 'value 2' }
+ end
+
+ let(:result) do
+ { 'VARIABLE_1' => 'value 1', 'VARIABLE_2' => 'value 2' }
+ end
+
+ it_behaves_like 'valid config'
+ end
+
+ context 'when entry value is an array' do
+ let(:config) { [:VAR, 'test'] }
+
+ it_behaves_like 'invalid config'
+ end
+
+ context 'when entry value has hash with other key-pairs' do
+ let(:config) do
+ { 'VARIABLE_1' => { value: 'value 1', hello: 'variable 1' },
+ 'VARIABLE_2' => 'value 2' }
end
+
+ it_behaves_like 'invalid config'
+ end
+
+ context 'when entry config value has hash with nil description' do
+ let(:config) do
+ { 'VARIABLE_1' => { value: 'value 1', description: nil } }
+ end
+
+ it_behaves_like 'invalid config'
+ end
+
+ context 'when entry config value has hash without description' do
+ let(:config) do
+ { 'VARIABLE_1' => { value: 'value 1' } }
+ end
+
+ let(:result) do
+ { 'VARIABLE_1' => 'value 1' }
+ end
+
+ it_behaves_like 'valid config'
end
end
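The variables entry now accepts either a plain string or a hash with `value` and a non-nil `description`; the config below is an illustrative sketch of both forms (variable names are invented):

variables_config = {
  'DEPLOY_ENV' => { value: 'staging', description: 'Target environment' }, # hash form
  'VERBOSE'    => 'false'                                                  # plain string form
}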
diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb
index f724825a9cc..dd27b4045c9 100644
--- a/spec/lib/gitlab/ci/cron_parser_spec.rb
+++ b/spec/lib/gitlab/ci/cron_parser_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when PST (Pacific Standard Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 1, 1)) do
+ travel_to(Time.utc(2017, 1, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -90,7 +90,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when PDT (Pacific Daylight Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 6, 1)) do
+ travel_to(Time.utc(2017, 6, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -117,7 +117,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when CET (Central European Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 1, 1)) do
+ travel_to(Time.utc(2017, 1, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -125,7 +125,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when CEST (Central European Summer Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 6, 1)) do
+ travel_to(Time.utc(2017, 6, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -152,7 +152,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when EST (Eastern Standard Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 1, 1)) do
+ travel_to(Time.utc(2017, 1, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -160,7 +160,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when EDT (Eastern Daylight Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 6, 1)) do
+ travel_to(Time.utc(2017, 6, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -174,7 +174,7 @@ RSpec.describe Gitlab::Ci::CronParser do
# (e.g. America/Chicago) at the start of the test. Stubbing
# TZ doesn't appear to be enough.
it 'generates day without TZInfo::AmbiguousTime error' do
- Timecop.freeze(Time.utc(2020, 1, 1)) do
+ travel_to(Time.utc(2020, 1, 1)) do
expect(subject.year).to eq(year)
expect(subject.month).to eq(12)
expect(subject.day).to eq(1)
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
index 077c0fd3162..c67f8464123 100644
--- a/spec/lib/gitlab/ci/lint_spec.rb
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Lint do
- let_it_be(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:lint) { described_class.new(project: project, current_user: user) }
@@ -61,6 +61,43 @@ RSpec.describe Gitlab::Ci::Lint do
end
end
+ shared_examples 'sets merged yaml' do
+ let(:content) do
+ <<~YAML
+ :include:
+ :local: another-gitlab-ci.yml
+ :test_job:
+ :stage: test
+ :script: echo
+ YAML
+ end
+
+ let(:included_content) do
+ <<~YAML
+ :another_job:
+ :script: echo
+ YAML
+ end
+
+ before do
+ project.repository.create_file(
+ project.creator,
+ 'another-gitlab-ci.yml',
+ included_content,
+ message: 'Automatically created another-gitlab-ci.yml',
+ branch_name: 'master'
+ )
+ end
+
+ it 'sets merged_config' do
+ root_config = YAML.safe_load(content, [Symbol])
+ included_config = YAML.safe_load(included_content, [Symbol])
+ expected_config = included_config.merge(root_config).except(:include)
+
+ expect(subject.merged_yaml).to eq(expected_config.to_yaml)
+ end
+ end
+
shared_examples 'content with errors and warnings' do
context 'when content has errors' do
let(:content) do
@@ -173,6 +210,8 @@ RSpec.describe Gitlab::Ci::Lint do
end
end
+ it_behaves_like 'sets merged yaml'
+
include_context 'advanced validations' do
it 'does not catch advanced logical errors' do
expect(subject).to be_valid
@@ -203,6 +242,8 @@ RSpec.describe Gitlab::Ci::Lint do
end
end
+ it_behaves_like 'sets merged yaml'
+
include_context 'advanced validations' do
it 'runs advanced logical validations' do
expect(subject).not_to be_valid
diff --git a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
index 1f497dea2bf..7da602251a5 100644
--- a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
@@ -4,11 +4,12 @@ require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
describe '#parse!' do
- subject { described_class.new.parse!(junit, test_suite, args) }
+ subject { described_class.new.parse!(junit, test_suite, job: job) }
let(:test_suite) { Gitlab::Ci::Reports::TestSuite.new('rspec') }
let(:test_cases) { flattened_test_cases(test_suite) }
- let(:args) { { job: { id: 1, project: "project" } } }
+ let(:job) { double(max_test_cases_per_report: max_test_cases) }
+ let(:max_test_cases) { 0 }
context 'when data is JUnit style XML' do
context 'when there are no <testcases> in <testsuite>' do
@@ -43,7 +44,7 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
let(:junit) do
<<-EOF.strip_heredoc
<testsuites>
- <testsuite>
+ <testsuite name='Math'>
<testcase classname='Calculator' name='sumTest1' time='0.01'></testcase>
</testsuite>
</testsuites>
@@ -53,6 +54,7 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
it 'parses XML and adds a test case to a suite' do
expect { subject }.not_to raise_error
+ expect(test_cases[0].suite_name).to eq('Math')
expect(test_cases[0].classname).to eq('Calculator')
expect(test_cases[0].name).to eq('sumTest1')
expect(test_cases[0].execution_time).to eq(0.01)
@@ -62,7 +64,7 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
context 'when there is <testcase>' do
let(:junit) do
<<-EOF.strip_heredoc
- <testsuite>
+ <testsuite name='Math'>
<testcase classname='Calculator' name='sumTest1' time='0.01'>
#{testcase_content}
</testcase>
@@ -79,6 +81,7 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
shared_examples_for '<testcase> XML parser' do |status, output|
it 'parses XML and adds a test case to the suite' do
aggregate_failures do
+ expect(test_case.suite_name).to eq('Math')
expect(test_case.classname).to eq('Calculator')
expect(test_case.name).to eq('sumTest1')
expect(test_case.execution_time).to eq(0.01)
@@ -152,13 +155,15 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
expect { subject }.not_to raise_error
expect(test_cases.count).to eq(1)
+ expect(test_cases.first.suite_name).to eq("XXX\\FrontEnd\\WebBundle\\Tests\\Controller\\LogControllerTest")
+ expect(test_cases.first.name).to eq("testIndexAction")
end
end
context 'when there are two test cases' do
let(:junit) do
<<-EOF.strip_heredoc
- <testsuite>
+ <testsuite name='Math'>
<testcase classname='Calculator' name='sumTest1' time='0.01'></testcase>
<testcase classname='Calculator' name='sumTest2' time='0.02'></testcase>
</testsuite>
@@ -168,9 +173,11 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
it 'parses XML and adds test cases to a suite' do
expect { subject }.not_to raise_error
+ expect(test_cases[0].suite_name).to eq('Math')
expect(test_cases[0].classname).to eq('Calculator')
expect(test_cases[0].name).to eq('sumTest1')
expect(test_cases[0].execution_time).to eq(0.01)
+ expect(test_cases[1].suite_name).to eq('Math')
expect(test_cases[1].classname).to eq('Calculator')
expect(test_cases[1].name).to eq('sumTest2')
expect(test_cases[1].execution_time).to eq(0.02)
@@ -181,7 +188,7 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
let(:junit) do
<<-EOF.strip_heredoc
<testsuites>
- <testsuite>
+ <testsuite name='Math'>
<testcase classname='Calculator' name='sumTest1' time='0.01'></testcase>
<testcase classname='Calculator' name='sumTest2' time='0.02'></testcase>
</testsuite>
@@ -196,18 +203,81 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
it 'parses XML and adds test cases to a suite' do
expect { subject }.not_to raise_error
- expect(test_cases[0].classname).to eq('Calculator')
- expect(test_cases[0].name).to eq('sumTest1')
- expect(test_cases[0].execution_time).to eq(0.01)
- expect(test_cases[1].classname).to eq('Calculator')
- expect(test_cases[1].name).to eq('sumTest2')
- expect(test_cases[1].execution_time).to eq(0.02)
- expect(test_cases[2].classname).to eq('Statemachine')
- expect(test_cases[2].name).to eq('happy path')
- expect(test_cases[2].execution_time).to eq(100)
- expect(test_cases[3].classname).to eq('Statemachine')
- expect(test_cases[3].name).to eq('unhappy path')
- expect(test_cases[3].execution_time).to eq(200)
+ expect(test_cases).to contain_exactly(
+ have_attributes(
+ suite_name: 'Math',
+ classname: 'Calculator',
+ name: 'sumTest1',
+ execution_time: 0.01
+ ),
+ have_attributes(
+ suite_name: 'Math',
+ classname: 'Calculator',
+ name: 'sumTest2',
+ execution_time: 0.02
+ ),
+ have_attributes(
+ suite_name: test_suite.name, # Defaults to test suite instance's name
+ classname: 'Statemachine',
+ name: 'happy path',
+ execution_time: 100
+ ),
+ have_attributes(
+ suite_name: test_suite.name, # Defaults to test suite instance's name
+ classname: 'Statemachine',
+ name: 'unhappy path',
+ execution_time: 200
+ )
+ )
+ end
+ end
+
+ context 'when number of test cases exceeds the max_test_cases limit' do
+ let(:max_test_cases) { 1 }
+
+ shared_examples_for 'rejecting too many test cases' do
+ it 'attaches an error to the TestSuite object' do
+ expect { subject }.not_to raise_error
+ expect(test_suite.suite_error).to eq("JUnit data parsing failed: number of test cases exceeded the limit of #{max_test_cases}")
+ end
+ end
+
+ context 'and test cases are unique' do
+ let(:junit) do
+ <<-EOF.strip_heredoc
+ <testsuites>
+ <testsuite>
+ <testcase classname='Calculator' name='sumTest1' time='0.01'></testcase>
+ <testcase classname='Calculator' name='sumTest2' time='0.02'></testcase>
+ </testsuite>
+ <testsuite>
+ <testcase classname='Statemachine' name='happy path' time='100'></testcase>
+ <testcase classname='Statemachine' name='unhappy path' time='200'></testcase>
+ </testsuite>
+ </testsuites>
+ EOF
+ end
+
+ it_behaves_like 'rejecting too many test cases'
+ end
+
+ context 'and test cases are duplicates' do
+ let(:junit) do
+ <<-EOF.strip_heredoc
+ <testsuites>
+ <testsuite>
+ <testcase classname='Calculator' name='sumTest1' time='0.01'></testcase>
+ <testcase classname='Calculator' name='sumTest2' time='0.02'></testcase>
+ </testsuite>
+ <testsuite>
+ <testcase classname='Calculator' name='sumTest1' time='0.01'></testcase>
+ <testcase classname='Calculator' name='sumTest2' time='0.02'></testcase>
+ </testsuite>
+ </testsuites>
+ EOF
+ end
+
+ it_behaves_like 'rejecting too many test cases'
end
end
end
@@ -296,9 +366,7 @@ RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
expect(test_cases[0].has_attachment?).to be_truthy
expect(test_cases[0].attachment).to eq("some/path.png")
- expect(test_cases[0].job).to be_present
- expect(test_cases[0].job[:id]).to eq(1)
- expect(test_cases[0].job[:project]).to eq("project")
+ expect(test_cases[0].job).to eq(job)
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
index 74c014b6408..570706bfaac 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
@@ -224,7 +224,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
key: 'a-key',
paths: ['vendor/ruby'],
untracked: true,
- policy: 'push'
+ policy: 'push',
+ when: 'on_success'
}
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 34df0e86a18..0b961336f3f 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
- let(:project) { create(:project, :repository) }
- let(:head_sha) { project.repository.head_commit.id }
- let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: head_sha) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:head_sha) { project.repository.head_commit.id }
+ let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
let(:previous_stages) { [] }
@@ -503,7 +503,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
using RSpec::Parameterized
let(:pipeline) do
- build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source)
+ build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source, project: project)
end
context 'matches' do
@@ -766,7 +766,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'with a matching changes: rule' do
let(:pipeline) do
- create(:ci_pipeline, project: project).tap do |pipeline|
+ build(:ci_pipeline, project: project).tap do |pipeline|
stub_pipeline_modified_paths(pipeline, %w[app/models/ci/pipeline.rb spec/models/ci/pipeline_spec.rb .gitlab-ci.yml])
end
end
diff --git a/spec/lib/gitlab/ci/reports/test_case_spec.rb b/spec/lib/gitlab/ci/reports/test_case_spec.rb
index 7fb208213c1..a142846fc18 100644
--- a/spec/lib/gitlab/ci/reports/test_case_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_case_spec.rb
@@ -6,39 +6,26 @@ RSpec.describe Gitlab::Ci::Reports::TestCase do
describe '#initialize' do
let(:test_case) { described_class.new(params) }
- context 'when both classname and name are given' do
- context 'when test case is passed' do
- let(:job) { build(:ci_build) }
- let(:params) { attributes_for(:test_case).merge!(job: job) }
-
- it 'initializes an instance' do
- expect { test_case }.not_to raise_error
-
- expect(test_case.name).to eq('test-1')
- expect(test_case.classname).to eq('trace')
- expect(test_case.file).to eq('spec/trace_spec.rb')
- expect(test_case.execution_time).to eq(1.23)
- expect(test_case.status).to eq(described_class::STATUS_SUCCESS)
- expect(test_case.system_output).to be_nil
- expect(test_case.job).to be_present
- end
- end
+ context 'when required params are given' do
+ let(:job) { build(:ci_build) }
+ let(:params) { attributes_for(:test_case).merge!(job: job) }
- context 'when test case is failed' do
- let(:job) { build(:ci_build) }
- let(:params) { attributes_for(:test_case, :failed).merge!(job: job) }
-
- it 'initializes an instance' do
- expect { test_case }.not_to raise_error
-
- expect(test_case.name).to eq('test-1')
- expect(test_case.classname).to eq('trace')
- expect(test_case.file).to eq('spec/trace_spec.rb')
- expect(test_case.execution_time).to eq(1.23)
- expect(test_case.status).to eq(described_class::STATUS_FAILED)
- expect(test_case.system_output)
- .to eq('Failure/Error: is_expected.to eq(300) expected: 300 got: -100')
- end
+ it 'initializes an instance', :aggregate_failures do
+ expect { test_case }.not_to raise_error
+
+ expect(test_case).to have_attributes(
+ suite_name: params[:suite_name],
+ name: params[:name],
+ classname: params[:classname],
+ file: params[:file],
+ execution_time: params[:execution_time],
+ status: params[:status],
+ system_output: params[:system_output],
+ job: params[:job]
+ )
+
+ key = "#{test_case.suite_name}_#{test_case.classname}_#{test_case.name}"
+ expect(test_case.key).to eq(Digest::SHA256.hexdigest(key))
end
end
@@ -53,6 +40,10 @@ RSpec.describe Gitlab::Ci::Reports::TestCase do
end
end
+ context 'when suite_name is missing' do
+ it_behaves_like 'param is missing', :suite_name
+ end
+
context 'when classname is missing' do
it_behaves_like 'param is missing', :classname
end
diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
index 15fa78444e5..50d1595da73 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
@@ -229,6 +229,20 @@ RSpec.describe Gitlab::Ci::Reports::TestSuite do
end
end
+ describe '#each_test_case' do
+ before do
+ test_suite.add_test_case(test_case_success)
+ test_suite.add_test_case(test_case_failed)
+ test_suite.add_test_case(test_case_skipped)
+ test_suite.add_test_case(test_case_error)
+ end
+
+ it 'yields each test case to given block' do
+ expect { |b| test_suite.each_test_case(&b) }
+ .to yield_successive_args(test_case_success, test_case_failed, test_case_skipped, test_case_error)
+ end
+ end
+
Gitlab::Ci::Reports::TestCase::STATUS_TYPES.each do |status_type|
describe "##{status_type}_count" do
subject { test_suite.public_send("#{status_type}_count") }
diff --git a/spec/lib/gitlab/ci/runner/backoff_spec.rb b/spec/lib/gitlab/ci/runner/backoff_spec.rb
new file mode 100644
index 00000000000..f147d69f7cd
--- /dev/null
+++ b/spec/lib/gitlab/ci/runner/backoff_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+require 'active_support/testing/time_helpers'
+
+RSpec.describe Gitlab::Ci::Runner::Backoff do
+ include ActiveSupport::Testing::TimeHelpers
+
+ describe '#duration' do
+ it 'returns backoff duration from start' do
+ freeze_time do
+ described_class.new(5.minutes.ago).then do |backoff|
+ expect(backoff.duration).to eq 5.minutes
+ end
+ end
+ end
+
+ it 'returns an integer value' do
+ freeze_time do
+ described_class.new(5.seconds.ago).then do |backoff|
+ expect(backoff.duration).to be 5
+ end
+ end
+ end
+
+ it 'returns the smallest number greater than or equal to duration' do
+ freeze_time do
+ described_class.new(0.5.seconds.ago).then do |backoff|
+ expect(backoff.duration).to be 1
+ end
+ end
+ end
+ end
+
+ describe '#slot' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:started, :slot) do
+ 0 | 0
+ 0.1 | 0
+ 0.9 | 0
+ 1 | 0
+ 1.1 | 0
+ 1.9 | 0
+ 2 | 0
+ 2.9 | 0
+ 3 | 0
+ 4 | 1
+ 5 | 1
+ 6 | 1
+ 7 | 1
+ 8 | 2
+ 9 | 2
+ 9.9 | 2
+ 10 | 2
+ 15 | 2
+ 16 | 3
+ 31 | 3
+ 32 | 4
+ 63 | 4
+ 64 | 5
+ 127 | 5
+ 128 | 6
+ 250 | 6
+ 310 | 7
+ 520 | 8
+ 999 | 8
+ end
+
+ with_them do
+ it 'falls into an appropriate backoff slot' do
+ freeze_time do
+ backoff = described_class.new(started.seconds.ago)
+ expect(backoff.slot).to eq slot
+ end
+ end
+ end
+ end
+
+ describe '#to_seconds' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:started, :backoff) do
+ 0 | 1
+ 0.1 | 1
+ 0.9 | 1
+ 1 | 1
+ 1.1 | 1
+ 1.9 | 1
+ 2 | 1
+ 3 | 1
+ 4 | 2
+ 5 | 2
+ 6 | 2
+ 6.5 | 2
+ 7 | 2
+ 8 | 4
+ 9 | 4
+ 9.9 | 4
+ 10 | 4
+ 15 | 4
+ 16 | 8
+ 31 | 8
+ 32 | 16
+ 63 | 16
+ 64 | 32
+ 127 | 32
+ 128 | 64
+ 250 | 64
+ 310 | 64
+ 520 | 64
+ 999 | 64
+ end
+
+ with_them do
+ it 'calculates backoff based on an appropriate slot' do
+ freeze_time do
+ described_class.new(started.seconds.ago).then do |delay|
+ expect(delay.to_seconds).to eq backoff
+ end
+ end
+ end
+ end
+ end
+end
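The two tables above imply a backoff that rounds elapsed time up to whole seconds, maps it to a power-of-two slot and caps the delay at 64 seconds; the class below is a sketch that reproduces those tables, not the implementation under test:

class BackoffSketch
  MAX_DELAY = 64 # seconds, per the upper rows of the #to_seconds table

  def initialize(started)
    @started = started
  end

  def duration
    (Time.now - @started).ceil # smallest integer >= elapsed seconds
  end

  def slot
    return 0 if duration < 2

    [Math.log2(duration).floor - 1, 0].max
  end

  def to_seconds
    [2**slot, MAX_DELAY].min
  end
end

BackoffSketch.new(Time.now - 310).to_seconds # => 64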
diff --git a/spec/lib/gitlab/ci/status/bridge/common_spec.rb b/spec/lib/gitlab/ci/status/bridge/common_spec.rb
index 92600b21afc..37524afc83d 100644
--- a/spec/lib/gitlab/ci/status/bridge/common_spec.rb
+++ b/spec/lib/gitlab/ci/status/bridge/common_spec.rb
@@ -30,15 +30,6 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Common do
it { expect(subject).to have_details }
it { expect(subject.details_path).to include "pipelines/#{downstream_pipeline.id}" }
-
- context 'when ci_bridge_pipeline_details is disabled' do
- before do
- stub_feature_flags(ci_bridge_pipeline_details: false)
- end
-
- it { expect(subject).not_to have_details }
- it { expect(subject.details_path).to be_nil }
- end
end
context 'when user does not have access to read downstream pipeline' do
diff --git a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
index 021b777a0ff..d27bb98ba9a 100644
--- a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory do
end
context 'when bridge is created' do
- let(:bridge) { create(:ci_bridge) }
+ let(:bridge) { create_bridge(:created) }
it 'matches correct core status' do
expect(factory.core_status).to be_a Gitlab::Ci::Status::Created
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory do
end
context 'when bridge is failed' do
- let(:bridge) { create(:ci_bridge, :failed) }
+ let(:bridge) { create_bridge(:failed) }
it 'matches correct core status' do
expect(factory.core_status).to be_a Gitlab::Ci::Status::Failed
@@ -70,4 +70,61 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory do
end
end
end
+
+ context 'when bridge is a manual action' do
+ let(:bridge) { create_bridge(:playable) }
+
+ it 'matches correct core status' do
+ expect(factory.core_status).to be_a Gitlab::Ci::Status::Manual
+ end
+
+ it 'matches correct extended statuses' do
+ expect(factory.extended_statuses)
+ .to eq [Gitlab::Ci::Status::Bridge::Manual,
+ Gitlab::Ci::Status::Bridge::Play,
+ Gitlab::Ci::Status::Bridge::Action]
+ end
+
+ it 'fabricates action detailed status' do
+ expect(status).to be_a Gitlab::Ci::Status::Bridge::Action
+ end
+
+ it 'fabricates status with correct details' do
+ expect(status.text).to eq s_('CiStatusText|manual')
+ expect(status.group).to eq 'manual'
+ expect(status.icon).to eq 'status_manual'
+ expect(status.favicon).to eq 'favicon_status_manual'
+ expect(status.illustration).to include(:image, :size, :title, :content)
+ expect(status.label).to include 'manual play action'
+ expect(status).not_to have_details
+ expect(status.action_path).to include 'play'
+ end
+
+ context 'when user has ability to play action' do
+ before do
+ bridge.downstream_project.add_developer(user)
+ end
+
+ it 'fabricates status that has action' do
+ expect(status).to have_action
+ end
+ end
+
+ context 'when user does not have ability to play action' do
+ it 'fabricates status that has no action' do
+ expect(status).not_to have_action
+ end
+ end
+ end
+
+ private
+
+ def create_bridge(trait)
+ upstream_project = create(:project, :repository)
+ downstream_project = create(:project, :repository)
+ upstream_pipeline = create(:ci_pipeline, :running, project: upstream_project)
+ trigger = { trigger: { project: downstream_project.full_path, branch: 'feature' } }
+
+ create(:ci_bridge, trait, options: trigger, pipeline: upstream_pipeline)
+ end
end
diff --git a/spec/lib/gitlab/ci/status/canceled_spec.rb b/spec/lib/gitlab/ci/status/canceled_spec.rb
index a35efae5c57..7fae76f61ea 100644
--- a/spec/lib/gitlab/ci/status/canceled_spec.rb
+++ b/spec/lib/gitlab/ci/status/canceled_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Canceled do
describe '#group' do
it { expect(subject.group).to eq 'canceled' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/created_spec.rb b/spec/lib/gitlab/ci/status/created_spec.rb
index 1ddced923f6..1e54d1ed8c5 100644
--- a/spec/lib/gitlab/ci/status/created_spec.rb
+++ b/spec/lib/gitlab/ci/status/created_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Created do
describe '#group' do
it { expect(subject.group).to eq 'created' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/failed_spec.rb b/spec/lib/gitlab/ci/status/failed_spec.rb
index e8bd728b740..f3f3304b04d 100644
--- a/spec/lib/gitlab/ci/status/failed_spec.rb
+++ b/spec/lib/gitlab/ci/status/failed_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Failed do
describe '#group' do
it { expect(subject.group).to eq 'failed' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/pending_spec.rb b/spec/lib/gitlab/ci/status/pending_spec.rb
index 0e47b19d9c1..1c062a0133d 100644
--- a/spec/lib/gitlab/ci/status/pending_spec.rb
+++ b/spec/lib/gitlab/ci/status/pending_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Pending do
describe '#group' do
it { expect(subject.group).to eq 'pending' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/preparing_spec.rb b/spec/lib/gitlab/ci/status/preparing_spec.rb
index 6d33eb77560..ec1850c1959 100644
--- a/spec/lib/gitlab/ci/status/preparing_spec.rb
+++ b/spec/lib/gitlab/ci/status/preparing_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Preparing do
describe '#group' do
it { expect(subject.group).to eq 'preparing' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/running_spec.rb b/spec/lib/gitlab/ci/status/running_spec.rb
index fbc7bfd81b3..e40d696ee4d 100644
--- a/spec/lib/gitlab/ci/status/running_spec.rb
+++ b/spec/lib/gitlab/ci/status/running_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Running do
describe '#group' do
it { expect(subject.group).to eq 'running' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/scheduled_spec.rb b/spec/lib/gitlab/ci/status/scheduled_spec.rb
index 4a1dae937ca..8a923faf3f9 100644
--- a/spec/lib/gitlab/ci/status/scheduled_spec.rb
+++ b/spec/lib/gitlab/ci/status/scheduled_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Scheduled do
describe '#group' do
it { expect(subject.group).to eq 'scheduled' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/skipped_spec.rb b/spec/lib/gitlab/ci/status/skipped_spec.rb
index f402bbe5221..ac3c2f253f7 100644
--- a/spec/lib/gitlab/ci/status/skipped_spec.rb
+++ b/spec/lib/gitlab/ci/status/skipped_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Skipped do
describe '#group' do
it { expect(subject.group).to eq 'skipped' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/success_spec.rb b/spec/lib/gitlab/ci/status/success_spec.rb
index 2d1c50448d4..f2069334abd 100644
--- a/spec/lib/gitlab/ci/status/success_spec.rb
+++ b/spec/lib/gitlab/ci/status/success_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::Success do
describe '#group' do
it { expect(subject.group).to eq 'success' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb b/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb
index de18198c6c2..bb6139accaf 100644
--- a/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb
+++ b/spec/lib/gitlab/ci/status/waiting_for_resource_spec.rb
@@ -26,4 +26,8 @@ RSpec.describe Gitlab::Ci::Status::WaitingForResource do
describe '#group' do
it { expect(subject.group).to eq 'waiting-for-resource' }
end
+
+ describe '#details_path' do
+ it { expect(subject.details_path).to be_nil }
+ end
end
diff --git a/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
new file mode 100644
index 00000000000..8df739d9245
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/Terraform/base_gitlab_ci_yaml_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Terraform/Base.latest.gitlab-ci.yml' do
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform/Base.latest') }
+
+ describe 'the created pipeline' do
+ let(:user) { create(:admin) }
+ let(:default_branch) { 'master' }
+ let(:pipeline_branch) { default_branch }
+ let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch) }
+ let(:pipeline) { service.execute!(:push) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ allow(project).to receive(:default_branch).and_return(default_branch)
+ end
+
+ it 'does not create any jobs' do
+ expect(build_names).to be_empty
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
new file mode 100644
index 00000000000..5eec021b9d7
--- /dev/null
+++ b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
+ before do
+ allow(Gitlab::Template::GitlabCiYmlTemplate).to receive(:excluded_patterns).and_return([])
+ end
+
+ subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Terraform.latest') }
+
+ describe 'the created pipeline' do
+ let_it_be(:user) { create(:admin) }
+
+ let(:default_branch) { 'master' }
+ let(:pipeline_branch) { default_branch }
+ let(:project) { create(:project, :custom_repo, files: { 'README.md' => '' }) }
+ let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch) }
+ let(:pipeline) { service.execute!(:push) }
+ let(:build_names) { pipeline.builds.pluck(:name) }
+
+ before do
+ stub_ci_pipeline_yaml_file(template.content)
+ allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
+ allow(project).to receive(:default_branch).and_return(default_branch)
+ end
+
+ context 'on master branch' do
+ it 'creates init, validate, build and deploy jobs' do
+ expect(build_names).to include('init', 'validate', 'build', 'deploy')
+ end
+ end
+
+ context 'outside the master branch' do
+ let(:pipeline_branch) { 'patch-1' }
+
+ before do
+ project.repository.create_branch(pipeline_branch)
+ end
+
+ it 'does not create a deploy job' do
+ expect(build_names).not_to include('deploy')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace/checksum_spec.rb b/spec/lib/gitlab/ci/trace/checksum_spec.rb
new file mode 100644
index 00000000000..794794c3f69
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/checksum_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Trace::Checksum do
+ let(:build) { create(:ci_build, :running) }
+
+ subject { described_class.new(build) }
+
+ context 'when build pending state exists' do
+ before do
+ create(:ci_build_pending_state, build: build, trace_checksum: 'crc32:d4777540')
+ end
+
+ context 'when matching persisted trace chunks exist' do
+ before do
+ create_chunk(index: 0, data: 'a' * 128.kilobytes)
+ create_chunk(index: 1, data: 'b' * 128.kilobytes)
+ create_chunk(index: 2, data: 'ccccccccccccccccc')
+ end
+
+ it 'calculates combined trace chunks CRC32 correctly' do
+ expect(subject.chunks_crc32).to eq 3564598592
+ expect(subject).to be_valid
+ end
+ end
+
+ context 'when trace chunks were persisted in a wrong order' do
+ before do
+ create_chunk(index: 0, data: 'b' * 128.kilobytes)
+ create_chunk(index: 1, data: 'a' * 128.kilobytes)
+ create_chunk(index: 2, data: 'ccccccccccccccccc')
+ end
+
+ it 'makes trace checksum invalid' do
+ expect(subject).not_to be_valid
+ end
+ end
+
+ context 'when one of the trace chunks is missing' do
+ before do
+ create_chunk(index: 0, data: 'a' * 128.kilobytes)
+ create_chunk(index: 2, data: 'ccccccccccccccccc')
+ end
+
+ it 'makes trace checksum invalid' do
+ expect(subject).not_to be_valid
+ end
+ end
+
+ context 'when checksums of persisted trace chunks do not match' do
+ before do
+ create_chunk(index: 0, data: 'a' * 128.kilobytes)
+ create_chunk(index: 1, data: 'X' * 128.kilobytes)
+ create_chunk(index: 2, data: 'ccccccccccccccccc')
+ end
+
+ it 'makes trace checksum invalid' do
+ expect(subject).not_to be_valid
+ end
+ end
+
+ context 'when persisted trace chunks are missing' do
+ it 'makes trace checksum invalid' do
+ expect(subject.state_crc32).to eq 3564598592
+ expect(subject).not_to be_valid
+ end
+ end
+ end
+
+ context 'when build pending state is missing' do
+ describe '#state_crc32' do
+ it 'returns nil' do
+ expect(subject.state_crc32).to be_nil
+ end
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+ end
+
+ describe '#trace_chunks' do
+ before do
+ create_chunk(index: 0, data: 'abcdefg')
+ end
+
+ it 'does not load raw_data from a database store' do
+ subject.trace_chunks.first.then do |chunk|
+ expect(chunk).to be_database
+ expect { chunk.raw_data }
+ .to raise_error ActiveModel::MissingAttributeError
+ end
+ end
+ end
+
+ describe '#last_chunk' do
+ context 'when there are no chunks' do
+ it 'returns nil' do
+ expect(subject.last_chunk).to be_nil
+ end
+ end
+
+ context 'when there are multiple chunks' do
+ before do
+ create_chunk(index: 1, data: '1234')
+ create_chunk(index: 0, data: 'abcd')
+ end
+
+ it 'returns chunk with the highest index' do
+ expect(subject.last_chunk.chunk_index).to eq 1
+ end
+ end
+ end
+
+ def create_chunk(index:, data:)
+ create(:ci_build_trace_chunk, :persisted, build: build,
+ chunk_index: index,
+ initial_data: data)
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace/metrics_spec.rb b/spec/lib/gitlab/ci/trace/metrics_spec.rb
new file mode 100644
index 00000000000..6518d0ab075
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/metrics_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Trace::Metrics, :prometheus do
+ describe '#increment_trace_bytes' do
+ context 'when incrementing by more than one' do
+ it 'increments a single counter' do
+ subject.increment_trace_bytes(10)
+ subject.increment_trace_bytes(20)
+ subject.increment_trace_bytes(30)
+
+ expect(described_class.trace_bytes.get).to eq 60
+ expect(described_class.trace_bytes.values.count).to eq 1
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 171877dbaee..92bf2519588 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -2,8 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state do
- let(:build) { create(:ci_build) }
+RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_default: :keep do
+ let_it_be(:project) { create_default(:project) }
+ let_it_be_with_reload(:build) { create(:ci_build) }
let(:trace) { described_class.new(build) }
describe "associations" do
@@ -32,6 +33,16 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state do
expect(artifact2.job.trace.raw).to eq(test_data)
end
+
+ it 'reloads the trace in case of a chunk error' do
+ chunk_error = described_class::ChunkedIO::FailedToGetChunkError
+
+ allow_any_instance_of(described_class::Stream)
+ .to receive(:raw).and_raise(chunk_error)
+
+ expect(build).to receive(:reset).and_return(build)
+ expect { trace.raw }.to raise_error(chunk_error)
+ end
end
context 'when live trace feature is disabled' do
@@ -111,4 +122,13 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state do
end
end
end
+
+ describe '#lock' do
+ it 'acquires an exclusive lease on the trace' do
+ trace.lock do
+ expect { trace.lock }
+ .to raise_error described_class::LockedError
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
new file mode 100644
index 00000000000..7e3cd7ec254
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module Gitlab
+ module Ci
+ class YamlProcessor
+ RSpec.describe Result do
+ include StubRequests
+
+ let(:user) { create(:user) }
+ let(:ci_config) { Gitlab::Ci::Config.new(config_content, user: user) }
+ let(:result) { described_class.new(ci_config: ci_config, warnings: ci_config&.warnings) }
+
+ describe '#merged_yaml' do
+ subject(:merged_yaml) { result.merged_yaml }
+
+ let(:config_content) do
+ YAML.dump(
+ include: { remote: 'https://example.com/sample.yml' },
+ test: { stage: 'test', script: 'echo' }
+ )
+ end
+
+ let(:included_yml) do
+ YAML.dump(
+ another_test: { stage: 'test', script: 'echo 2' }
+ )
+ end
+
+ before do
+ stub_full_request('https://example.com/sample.yml').to_return(body: included_yml)
+ end
+
+ it 'returns expanded yaml config' do
+ expanded_config = YAML.safe_load(merged_yaml, [Symbol])
+ included_config = YAML.safe_load(included_yml, [Symbol])
+
+ expect(expanded_config).to include(*included_config.keys)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index d596494a987..fb6395e888a 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1361,7 +1361,8 @@ module Gitlab
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
- policy: 'pull-push'
+ policy: 'pull-push',
+ when: 'on_success'
)
end
@@ -1383,7 +1384,8 @@ module Gitlab
paths: ["logs/", "binaries/"],
untracked: true,
key: { files: ['file'] },
- policy: 'pull-push'
+ policy: 'pull-push',
+ when: 'on_success'
)
end
@@ -1402,7 +1404,8 @@ module Gitlab
paths: ['logs/', 'binaries/'],
untracked: true,
key: 'key',
- policy: 'pull-push'
+ policy: 'pull-push',
+ when: 'on_success'
)
end
@@ -1425,7 +1428,8 @@ module Gitlab
paths: ['logs/', 'binaries/'],
untracked: true,
key: { files: ['file'] },
- policy: 'pull-push'
+ policy: 'pull-push',
+ when: 'on_success'
)
end
@@ -1448,7 +1452,8 @@ module Gitlab
paths: ['logs/', 'binaries/'],
untracked: true,
key: { files: ['file'], prefix: 'prefix' },
- policy: 'pull-push'
+ policy: 'pull-push',
+ when: 'on_success'
)
end
@@ -1468,7 +1473,8 @@ module Gitlab
paths: ["test/"],
untracked: false,
key: 'local',
- policy: 'pull-push'
+ policy: 'pull-push',
+ when: 'on_success'
)
end
end
@@ -2240,47 +2246,49 @@ module Gitlab
end
describe 'with parent-child pipeline' do
+ let(:config) do
+ YAML.dump({
+ build1: { stage: 'build', script: 'test' },
+ test1: {
+ stage: 'test',
+ trigger: {
+ include: includes
+ }
+ }
+ })
+ end
+
context 'when artifact and job are specified' do
- let(:config) do
- YAML.dump({
- build1: { stage: 'build', script: 'test' },
- test1: { stage: 'test', trigger: {
- include: [{ artifact: 'generated.yml', job: 'build1' }]
- } }
- })
- end
+ let(:includes) { [{ artifact: 'generated.yml', job: 'build1' }] }
it { is_expected.to be_valid }
end
- context 'when job is not specified specified while artifact is' do
- let(:config) do
- YAML.dump({
- build1: { stage: 'build', script: 'test' },
- test1: { stage: 'test', trigger: {
- include: [{ artifact: 'generated.yml' }]
- } }
- })
- end
+ context 'when job is not specified while artifact is' do
+ let(:includes) { [{ artifact: 'generated.yml' }] }
it_behaves_like 'returns errors', /include config must specify the job where to fetch the artifact from/
end
- context 'when include is a string' do
- let(:config) do
- YAML.dump({
- build1: { stage: 'build', script: 'test' },
- test1: {
- stage: 'test',
- trigger: {
- include: 'generated.yml'
- }
- }
- })
+ context 'when project and file are specified' do
+ let(:includes) do
+ [{ file: 'generated.yml', project: 'my-namespace/my-project' }]
end
it { is_expected.to be_valid }
end
+
+ context 'when file is not specified while project is' do
+ let(:includes) { [{ project: 'something' }] }
+
+ it_behaves_like 'returns errors', /include config must specify the file where to fetch the config from/
+ end
+
+ context 'when include is a string' do
+ let(:includes) { 'generated.yml' }
+
+ it { is_expected.to be_valid }
+ end
end
describe "Error handling" do
@@ -2457,13 +2465,13 @@ module Gitlab
context 'returns errors if variables is not a map' do
let(:config) { YAML.dump({ variables: "test", rspec: { script: "test" } }) }
- it_behaves_like 'returns errors', 'variables config should be a hash of key value pairs'
+ it_behaves_like 'returns errors', 'variables config should be a hash of key value pairs, value can be a hash'
end
context 'returns errors if variables is not a map of key-value strings' do
let(:config) { YAML.dump({ variables: { test: false }, rspec: { script: "test" } }) }
- it_behaves_like 'returns errors', 'variables config should be a hash of key value pairs'
+ it_behaves_like 'returns errors', 'variables config should be a hash of key value pairs, value can be a hash'
end
context 'returns errors if job when is not on_success, on_failure or always' do
diff --git a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
index efdfc0a980b..6b568320953 100644
--- a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
+++ b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
@@ -42,12 +42,24 @@ RSpec.describe Gitlab::Cleanup::OrphanLfsFileReferences do
expect(null_logger).to receive(:info).with("Looking for orphan LFS files for project #{project.name_with_namespace}")
expect(null_logger).to receive(:info).with("Removed invalid references: 1")
expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:lfs_objects_size])
+ expect(service).to receive(:remove_orphan_references).and_call_original
expect { service.run! }.to change { project.lfs_objects.count }.from(2).to(1)
expect(LfsObjectsProject.exists?(invalid_reference.id)).to be_falsey
end
+ it 'does nothing if the project has no LFS objects' do
+ expect(null_logger).to receive(:info).with(/Looking for orphan LFS files/)
+ expect(null_logger).to receive(:info).with(/Nothing to do/)
+
+ project.lfs_objects_projects.delete_all
+
+ expect(service).not_to receive(:remove_orphan_references)
+
+ service.run!
+ end
+
context 'LFS object is in design repository' do
before do
expect(project.design_repository).to receive(:exists?).and_return(true)
diff --git a/spec/lib/gitlab/closing_issue_extractor_spec.rb b/spec/lib/gitlab/closing_issue_extractor_spec.rb
index f2bc6390032..37349c30224 100644
--- a/spec/lib/gitlab/closing_issue_extractor_spec.rb
+++ b/spec/lib/gitlab/closing_issue_extractor_spec.rb
@@ -3,18 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::ClosingIssueExtractor do
- let(:project) { create(:project) }
- let(:project2) { create(:project) }
- let(:forked_project) { Projects::ForkService.new(project, project2.creator).execute }
- let(:issue) { create(:issue, project: project) }
- let(:issue2) { create(:issue, project: project2) }
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project2) }
let(:reference) { issue.to_reference }
let(:cross_reference) { issue2.to_reference(project) }
- let(:fork_cross_reference) { issue.to_reference(forked_project) }
subject { described_class.new(project, project.creator) }
- before do
+ before_all do
project.add_developer(project.creator)
project.add_developer(project2.creator)
project2.add_maintainer(project.creator)
@@ -325,6 +323,9 @@ RSpec.describe Gitlab::ClosingIssueExtractor do
end
context "with a cross-project fork reference" do
+ let(:forked_project) { Projects::ForkService.new(project, project2.creator).execute }
+ let(:fork_cross_reference) { issue.to_reference(forked_project) }
+
subject { described_class.new(forked_project, forked_project.creator) }
it do
@@ -348,8 +349,8 @@ RSpec.describe Gitlab::ClosingIssueExtractor do
end
context 'with multiple references' do
- let(:other_issue) { create(:issue, project: project) }
- let(:third_issue) { create(:issue, project: project) }
+ let_it_be(:other_issue) { create(:issue, project: project) }
+ let_it_be(:third_issue) { create(:issue, project: project) }
let(:reference2) { other_issue.to_reference }
let(:reference3) { third_issue.to_reference }
diff --git a/spec/lib/gitlab/code_navigation_path_spec.rb b/spec/lib/gitlab/code_navigation_path_spec.rb
index 4dc864b158d..206541f7c0d 100644
--- a/spec/lib/gitlab/code_navigation_path_spec.rb
+++ b/spec/lib/gitlab/code_navigation_path_spec.rb
@@ -16,10 +16,6 @@ RSpec.describe Gitlab::CodeNavigationPath do
subject { described_class.new(project, commit_sha).full_json_path_for(path) }
- before do
- stub_feature_flags(code_navigation: project)
- end
-
context 'when a pipeline exist for a sha' do
it 'returns path to a file in the artifact' do
expect(subject).to eq(lsif_path)
@@ -41,15 +37,5 @@ RSpec.describe Gitlab::CodeNavigationPath do
expect(subject).to eq(lsif_path)
end
end
-
- context 'when code_navigation feature is disabled' do
- before do
- stub_feature_flags(code_navigation: false)
- end
-
- it 'returns nil' do
- expect(subject).to be_nil
- end
- end
end
end
diff --git a/spec/lib/gitlab/config/entry/composable_array_spec.rb b/spec/lib/gitlab/config/entry/composable_array_spec.rb
new file mode 100644
index 00000000000..77766cb3b0a
--- /dev/null
+++ b/spec/lib/gitlab/config/entry/composable_array_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Config::Entry::ComposableArray, :aggregate_failures do
+ let(:valid_config) do
+ [
+ {
+ DATABASE_SECRET: 'passw0rd'
+ },
+ {
+ API_TOKEN: 'passw0rd2'
+ }
+ ]
+ end
+
+ let(:config) { valid_config }
+ let(:entry) { described_class.new(config) }
+
+ before do
+ allow(entry).to receive(:composable_class).and_return(Gitlab::Config::Entry::Node)
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ context 'is invalid' do
+ let(:config) { { hello: :world } }
+
+ it { expect(entry).not_to be_valid }
+ end
+ end
+
+ describe '#compose!' do
+ before do
+ entry.compose!
+ end
+
+ it 'composes child entry with configured value' do
+ expect(entry.value).to eq(config)
+ end
+
+ it 'composes child entries with configured values' do
+ expect(entry[0]).to be_a(Gitlab::Config::Entry::Node)
+ expect(entry[0].description).to eq('node definition')
+ expect(entry[0].key).to eq('node')
+ expect(entry[0].metadata).to eq({})
+ expect(entry[0].parent.class).to eq(Gitlab::Config::Entry::ComposableArray)
+ expect(entry[0].value).to eq(DATABASE_SECRET: 'passw0rd')
+ expect(entry[1]).to be_a(Gitlab::Config::Entry::Node)
+ expect(entry[1].description).to eq('node definition')
+ expect(entry[1].key).to eq('node')
+ expect(entry[1].metadata).to eq({})
+ expect(entry[1].parent.class).to eq(Gitlab::Config::Entry::ComposableArray)
+ expect(entry[1].value).to eq(API_TOKEN: 'passw0rd2')
+ end
+
+ describe '#descendants' do
+ it 'creates descendant nodes' do
+ expect(entry.descendants.first).to be_a(Gitlab::Config::Entry::Node)
+ expect(entry.descendants.first.value).to eq(DATABASE_SECRET: 'passw0rd')
+ expect(entry.descendants.second).to be_a(Gitlab::Config::Entry::Node)
+ expect(entry.descendants.second.value).to eq(API_TOKEN: 'passw0rd2')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/config/entry/composable_hash_spec.rb b/spec/lib/gitlab/config/entry/composable_hash_spec.rb
new file mode 100644
index 00000000000..15bbf2047c5
--- /dev/null
+++ b/spec/lib/gitlab/config/entry/composable_hash_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Config::Entry::ComposableHash, :aggregate_failures do
+ let(:valid_config) do
+ {
+ DATABASE_SECRET: 'passw0rd',
+ API_TOKEN: 'passw0rd2'
+ }
+ end
+
+ let(:config) { valid_config }
+
+ shared_examples 'composes a hash' do
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ context 'is invalid' do
+ let(:config) { %w[one two] }
+
+ it { expect(entry).not_to be_valid }
+ end
+ end
+
+ describe '#value' do
+ context 'when config is a hash' do
+ it 'returns key value' do
+ expect(entry.value).to eq config
+ end
+ end
+ end
+
+ describe '#compose!' do
+ before do
+ entry.compose!
+ end
+
+ it 'composes child entry with configured value' do
+ expect(entry.value).to eq(config)
+ end
+
+ it 'composes child entries with configured values' do
+ expect(entry[:DATABASE_SECRET]).to be_a(Gitlab::Config::Entry::Node)
+ expect(entry[:DATABASE_SECRET].description).to eq('DATABASE_SECRET node definition')
+ expect(entry[:DATABASE_SECRET].key).to eq(:DATABASE_SECRET)
+ expect(entry[:DATABASE_SECRET].metadata).to eq(name: :DATABASE_SECRET)
+ expect(entry[:DATABASE_SECRET].parent.class).to eq(Gitlab::Config::Entry::ComposableHash)
+ expect(entry[:DATABASE_SECRET].value).to eq('passw0rd')
+ expect(entry[:API_TOKEN]).to be_a(Gitlab::Config::Entry::Node)
+ expect(entry[:API_TOKEN].description).to eq('API_TOKEN node definition')
+ expect(entry[:API_TOKEN].key).to eq(:API_TOKEN)
+ expect(entry[:API_TOKEN].metadata).to eq(name: :API_TOKEN)
+ expect(entry[:API_TOKEN].parent.class).to eq(Gitlab::Config::Entry::ComposableHash)
+ expect(entry[:API_TOKEN].value).to eq('passw0rd2')
+ end
+
+ describe '#descendants' do
+ it 'creates descendant nodes' do
+ expect(entry.descendants.first).to be_a(Gitlab::Config::Entry::Node)
+ expect(entry.descendants.first.value).to eq('passw0rd')
+ expect(entry.descendants.second).to be_a(Gitlab::Config::Entry::Node)
+ expect(entry.descendants.second.value).to eq('passw0rd2')
+ end
+ end
+ end
+ end
+
+ context 'when ComposableHash is instantiated' do
+ let(:entry) { described_class.new(config) }
+
+ before do
+ allow(entry).to receive(:composable_class).and_return(Gitlab::Config::Entry::Node)
+ end
+
+ it_behaves_like 'composes a hash'
+ end
+
+ context 'when ComposableHash entry is configured in the parent class' do
+ let(:composable_hash_parent_class) do
+ Class.new(Gitlab::Config::Entry::Node) do
+ include ::Gitlab::Config::Entry::Configurable
+
+ entry :secrets, ::Gitlab::Config::Entry::ComposableHash,
+ description: 'Configured secrets for this job',
+ inherit: false,
+ default: { hello: :world },
+ metadata: { composable_class: Gitlab::Config::Entry::Node }
+ end
+ end
+
+ let(:entry) do
+ parent_entry = composable_hash_parent_class.new(secrets: config)
+ parent_entry.compose!
+
+ parent_entry[:secrets]
+ end
+
+ it_behaves_like 'composes a hash'
+
+ it 'creates entry with configuration from parent class' do
+ expect(entry.default).to eq({ hello: :world })
+ expect(entry.metadata).to eq(composable_class: Gitlab::Config::Entry::Node)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index b54fe40bb5f..80bd517ec92 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -262,7 +262,7 @@ RSpec.describe Gitlab::Conflict::File do
end
it 'includes the blob icon for the file' do
- expect(conflict_file.as_json[:blob_icon]).to eq('file-text-o')
+ expect(conflict_file.as_json[:blob_icon]).to eq('doc-text')
end
context 'with the full_content option passed' do
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
index e0a8e2c17a3..a31f34d82d7 100644
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb
@@ -2,16 +2,20 @@
require 'spec_helper'
-RSpec.describe 'cycle analytics events' do
- let(:project) { create(:project, :repository) }
+RSpec.describe 'cycle analytics events', :aggregate_failures do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, :admin) }
let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
let(:events) do
- CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user })[stage].events
+ CycleAnalytics::ProjectLevel
+ .new(project, options: { from: from_date, current_user: user })[stage]
+ .events
end
+ let(:event) { events.first }
+
before do
setup(context)
end
@@ -19,36 +23,15 @@ RSpec.describe 'cycle analytics events' do
describe '#issue_events' do
let(:stage) { :issue }
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq(context.title)
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).to end_with('ago')
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(context.author.name)
+ it 'has correct attributes' do
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:title]).to eq(context.title)
+ expect(event[:url]).not_to be_nil
+ expect(event[:iid]).to eq(context.iid.to_s)
+ expect(event[:created_at]).to end_with('ago')
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(context.author.name)
end
end
@@ -59,36 +42,15 @@ RSpec.describe 'cycle analytics events' do
create_commit_referencing_issue(context)
end
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq(context.title)
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).to end_with('ago')
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(context.author.name)
+ it 'has correct attributes' do
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:title]).to eq(context.title)
+ expect(event[:url]).not_to be_nil
+ expect(event[:iid]).to eq(context.iid.to_s)
+ expect(event[:created_at]).to end_with('ago')
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(context.author.name)
end
end
@@ -100,32 +62,14 @@ RSpec.describe 'cycle analytics events' do
create_commit_referencing_issue(context)
end
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq('Awesome merge_request')
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).to end_with('ago')
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
+ it 'has correct attributes' do
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:title]).to eq('Awesome merge_request')
+ expect(event[:iid]).to eq(context.iid.to_s)
+ expect(event[:created_at]).to end_with('ago')
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
@@ -152,40 +96,16 @@ RSpec.describe 'cycle analytics events' do
merge_merge_requests_closing_issue(user, project, context)
end
- it 'has the name' do
- expect(events.first[:name]).not_to be_nil
- end
-
- it 'has the ID' do
- expect(events.first[:id]).not_to be_nil
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has the branch name' do
- expect(events.first[:branch]).not_to be_nil
- end
-
- it 'has the branch URL' do
- expect(events.first[:branch][:url]).not_to be_nil
- end
-
- it 'has the short SHA' do
- expect(events.first[:short_sha]).not_to be_nil
- end
-
- it 'has the commit URL' do
- expect(events.first[:commit_url]).not_to be_nil
- end
-
- it 'has the date' do
- expect(events.first[:date]).not_to be_nil
- end
-
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
+ it 'has correct attributes' do
+ expect(event[:name]).not_to be_nil
+ expect(event[:id]).not_to be_nil
+ expect(event[:url]).not_to be_nil
+ expect(event[:branch]).not_to be_nil
+ expect(event[:branch][:url]).not_to be_nil
+ expect(event[:short_sha]).not_to be_nil
+ expect(event[:commit_url]).not_to be_nil
+ expect(event[:date]).not_to be_nil
+ expect(event[:total_time]).not_to be_empty
end
end
@@ -197,40 +117,16 @@ RSpec.describe 'cycle analytics events' do
merge_merge_requests_closing_issue(user, project, context)
end
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq('Awesome merge_request')
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has a state' do
- expect(events.first[:state]).not_to be_nil
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).not_to be_nil
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
+ it 'has correct attributes' do
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:title]).to eq('Awesome merge_request')
+ expect(event[:iid]).to eq(context.iid.to_s)
+ expect(event[:url]).not_to be_nil
+ expect(event[:state]).not_to be_nil
+ expect(event[:created_at]).not_to be_nil
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
@@ -257,58 +153,25 @@ RSpec.describe 'cycle analytics events' do
deploy_master(user, project)
end
- it 'has the name' do
- expect(events.first[:name]).not_to be_nil
- end
-
- it 'has the ID' do
- expect(events.first[:id]).not_to be_nil
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has the branch name' do
- expect(events.first[:branch]).not_to be_nil
- end
-
- it 'has the branch URL' do
- expect(events.first[:branch][:url]).not_to be_nil
- end
-
- it 'has the short SHA' do
- expect(events.first[:short_sha]).not_to be_nil
- end
-
- it 'has the commit URL' do
- expect(events.first[:commit_url]).not_to be_nil
- end
-
- it 'has the date' do
- expect(events.first[:date]).not_to be_nil
- end
-
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
+ it 'has correct attributes' do
+ expect(event[:name]).not_to be_nil
+ expect(event[:id]).not_to be_nil
+ expect(event[:url]).not_to be_nil
+ expect(event[:branch]).not_to be_nil
+ expect(event[:branch][:url]).not_to be_nil
+ expect(event[:short_sha]).not_to be_nil
+ expect(event[:commit_url]).not_to be_nil
+ expect(event[:date]).not_to be_nil
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
def setup(context)
milestone = create(:milestone, project: project)
- context.update(milestone: milestone)
+ context.update!(milestone: milestone)
mr = create_merge_request_closing_issue(user, project, context, commit_message: "References #{context.to_reference}")
ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)
diff --git a/spec/lib/gitlab/danger/commit_linter_spec.rb b/spec/lib/gitlab/danger/commit_linter_spec.rb
index c31522c538d..882cede759b 100644
--- a/spec/lib/gitlab/danger/commit_linter_spec.rb
+++ b/spec/lib/gitlab/danger/commit_linter_spec.rb
@@ -323,6 +323,16 @@ RSpec.describe Gitlab::Danger::CommitLinter do
end
end
+ context 'when message includes a value that is surrounded by backticks' do
+ let(:commit_message) { "A commit message `%20`" }
+
+ it 'does not add a problem' do
+ expect(commit_linter).not_to receive(:add_problem)
+
+ commit_linter.lint
+ end
+ end
+
context 'when message includes a short reference' do
[
'A commit message to fix #1234',
@@ -336,7 +346,9 @@ RSpec.describe Gitlab::Danger::CommitLinter do
'A commit message to fix gitlab-org/gitlab#1234',
'A commit message to fix gitlab-org/gitlab!1234',
'A commit message to fix gitlab-org/gitlab&1234',
- 'A commit message to fix gitlab-org/gitlab%1234'
+ 'A commit message to fix gitlab-org/gitlab%1234',
+ 'A commit message to fix "gitlab-org/gitlab%1234"',
+ 'A commit message to fix `gitlab-org/gitlab%1234'
].each do |message|
let(:commit_message) { message }
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index c7d55c396ef..509649f08c6 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -284,7 +284,8 @@ RSpec.describe Gitlab::Danger::Helper do
'.codeclimate.yml' | [:engineering_productivity]
'.gitlab/CODEOWNERS' | [:engineering_productivity]
- 'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | [:backend]
+ 'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | [:ci_template]
+ 'lib/gitlab/ci/templates/dotNET-Core.yml' | [:ci_template]
'ee/FOO_VERSION' | [:unknown]
@@ -376,6 +377,7 @@ RSpec.describe Gitlab::Danger::Helper do
:none | ''
:qa | '~QA'
:engineering_productivity | '~"Engineering Productivity" for CI, Danger'
+ :ci_template | '~"ci::templates"'
end
with_them do
@@ -435,6 +437,28 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
+ describe '#draft_mr?' do
+ it 'returns false when `gitlab_helper` is unavailable' do
+ expect(helper).to receive(:gitlab_helper).and_return(nil)
+
+ expect(helper).not_to be_draft_mr
+ end
+
+ it 'returns true for a draft MR' do
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => 'Draft: My MR title')
+
+ expect(helper).to be_draft_mr
+ end
+
+ it 'returns false for a non-draft MR' do
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => 'My MR title')
+
+ expect(helper).not_to be_draft_mr
+ end
+ end
+
describe '#cherry_pick_mr?' do
it 'returns false when `gitlab_helper` is unavailable' do
expect(helper).to receive(:gitlab_helper).and_return(nil)
diff --git a/spec/lib/gitlab/danger/roulette_spec.rb b/spec/lib/gitlab/danger/roulette_spec.rb
index b471e17e2e7..1a900dfba22 100644
--- a/spec/lib/gitlab/danger/roulette_spec.rb
+++ b/spec/lib/gitlab/danger/roulette_spec.rb
@@ -4,10 +4,13 @@ require 'webmock/rspec'
require 'timecop'
require 'gitlab/danger/roulette'
+require 'active_support/testing/time_helpers'
RSpec.describe Gitlab::Danger::Roulette do
+ include ActiveSupport::Testing::TimeHelpers
+
around do |example|
- Timecop.freeze(Time.utc(2020, 06, 22, 10)) { example.run }
+ travel_to(Time.utc(2020, 06, 22, 10)) { example.run }
end
let(:backend_available) { true }
@@ -67,14 +70,30 @@ RSpec.describe Gitlab::Danger::Roulette do
)
end
- let(:teammate_json) do
+ let(:ci_template_reviewer) do
+ Gitlab::Danger::Teammate.new(
+ 'username' => 'ci-template-maintainer',
+ 'name' => 'CI Template engineer',
+ 'role' => '~"ci::templates"',
+ 'projects' => { 'gitlab' => 'reviewer ci_template' },
+ 'available' => true,
+ 'tz_offset_hours' => 2.0
+ )
+ end
+
+ let(:teammates) do
[
backend_maintainer.to_h,
frontend_maintainer.to_h,
frontend_reviewer.to_h,
software_engineer_in_test.to_h,
- engineering_productivity_reviewer.to_h
- ].to_json
+ engineering_productivity_reviewer.to_h,
+ ci_template_reviewer.to_h
+ ]
+ end
+
+ let(:teammate_json) do
+ teammates.to_json
end
subject(:roulette) { Object.new.extend(described_class) }
@@ -162,6 +181,14 @@ RSpec.describe Gitlab::Danger::Roulette do
end
end
+ context 'when change contains CI/CD Template category' do
+ let(:categories) { [:ci_template] }
+
+ it 'assigns the CI/CD Template reviewer and falls back to the backend maintainer' do
+ expect(spins).to eq([described_class::Spin.new(:ci_template, ci_template_reviewer, backend_maintainer, false, false)])
+ end
+ end
+
context 'when change contains test category' do
let(:categories) { [:test] }
@@ -210,6 +237,69 @@ RSpec.describe Gitlab::Danger::Roulette do
end
end
end
+
+ describe 'reviewer suggestion probability' do
+ let(:reviewer) { teammate_with_capability('reviewer', 'reviewer backend') }
+ let(:hungry_reviewer) { teammate_with_capability('hungry_reviewer', 'reviewer backend', hungry: true) }
+ let(:traintainer) { teammate_with_capability('traintainer', 'trainee_maintainer backend') }
+ let(:hungry_traintainer) { teammate_with_capability('hungry_traintainer', 'trainee_maintainer backend', hungry: true) }
+ let(:teammates) do
+ [
+ reviewer.to_h,
+ hungry_reviewer.to_h,
+ traintainer.to_h,
+ hungry_traintainer.to_h
+ ]
+ end
+
+ let(:categories) { [:backend] }
+
+ # This test exercises probability with inherent randomness.
+ # The variance is inversely related to the sample size:
+ # with a large enough sample size the variance is smaller,
+ # but the test takes longer; with a smaller sample size
+ # the variance is larger, but the test takes less time.
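+ # For example, with sample_size = 500 and a true pick probability of 0.4,
+ # the standard error is about sqrt(0.4 * 0.6 / 500), i.e. roughly 0.022,
+ # so a tolerance (variance) of 0.1 leaves a comfortable margin.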
+ let!(:sample_size) { 500 }
+ let!(:variance) { 0.1 }
+
+ before do
+ # This test needs actual randomness to simulate probabilities
+ allow(subject).to receive(:new_random).and_return(Random.new)
+ WebMock
+ .stub_request(:get, described_class::ROULETTE_DATA_URL)
+ .to_return(body: teammate_json)
+ end
+
+ it 'has 1:2:3:4 probability of picking reviewer, hungry_reviewer, traintainer, hungry_traintainer' do
+ picks = Array.new(sample_size).map do
+ spins = subject.spin(project, categories, timezone_experiment: timezone_experiment)
+ spins.first.reviewer.name
+ end
+
+ expect(probability(picks, 'reviewer')).to be_within(variance).of(0.1)
+ expect(probability(picks, 'hungry_reviewer')).to be_within(variance).of(0.2)
+ expect(probability(picks, 'traintainer')).to be_within(variance).of(0.3)
+ expect(probability(picks, 'hungry_traintainer')).to be_within(variance).of(0.4)
+ end
+
+ def probability(picks, role)
+ picks.count(role).to_f / picks.length
+ end
+
+ def teammate_with_capability(name, capability, hungry: false)
+ Gitlab::Danger::Teammate.new(
+ {
+ 'name' => name,
+ 'projects' => {
+ 'gitlab' => capability
+ },
+ 'available' => true,
+ 'hungry' => hungry
+ }
+ )
+ end
+ end
end
RSpec::Matchers.define :match_teammates do |expected|
@@ -265,7 +355,8 @@ RSpec.describe Gitlab::Danger::Roulette do
frontend_reviewer,
frontend_maintainer,
software_engineer_in_test,
- engineering_productivity_reviewer
+ engineering_productivity_reviewer,
+ ci_template_reviewer
])
end
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
index 6fd32493d6b..eebe14ed5e1 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -4,6 +4,7 @@ require 'timecop'
require 'rspec-parameterized'
require 'gitlab/danger/teammate'
+require 'active_support/testing/time_helpers'
RSpec.describe Gitlab::Danger::Teammate do
using RSpec::Parameterized::TableSyntax
@@ -148,8 +149,10 @@ RSpec.describe Gitlab::Danger::Teammate do
end
describe '#local_hour' do
+ include ActiveSupport::Testing::TimeHelpers
+
around do |example|
- Timecop.freeze(Time.utc(2020, 6, 23, 10)) { example.run }
+ travel_to(Time.utc(2020, 6, 23, 10)) { example.run }
end
context 'when author is given' do
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index 155e66e2fcd..8fb7ab25b17 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
deployment = create(:deployment, status: :failed, environment: environment, sha: commit.sha, project: project)
deployable = deployment.deployable
expected_deployable_url = Gitlab::Routing.url_helpers.project_job_url(deployable.project, deployable)
- expected_user_url = Gitlab::Routing.url_helpers.user_url(deployment.user)
+ expected_user_url = Gitlab::Routing.url_helpers.user_url(deployment.deployed_by)
expected_commit_url = Gitlab::UrlBuilder.build(commit)
data = described_class.build(deployment)
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
expect(data[:environment]).to eq("somewhere")
expect(data[:project]).to eq(project.hook_attrs)
expect(data[:short_sha]).to eq(deployment.short_sha)
- expect(data[:user]).to eq(deployment.user.hook_attrs)
+ expect(data[:user]).to eq(deployment.deployed_by.hook_attrs)
expect(data[:user_url]).to eq(expected_user_url)
expect(data[:commit_url]).to eq(expected_commit_url)
expect(data[:commit_title]).to eq(commit.title)
diff --git a/spec/lib/gitlab/database/background_migration_job_spec.rb b/spec/lib/gitlab/database/background_migration_job_spec.rb
index dd5bf8b512f..42695925a1c 100644
--- a/spec/lib/gitlab/database/background_migration_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration_job_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::Database::BackgroundMigrationJob do
let!(:job1) { create(:background_migration_job, :succeeded, created_at: initial_time, updated_at: initial_time) }
it 'does not update non-pending jobs' do
- Timecop.freeze(initial_time + 1.day) do
+ travel_to(initial_time + 1.day) do
expect { described_class.mark_all_as_succeeded('TestJob', [1, 100]) }
.to change { described_class.succeeded.count }.from(1).to(2)
end
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 71d3666602f..31a8b4afa03 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::BatchCount do
let_it_be(:fallback) { ::Gitlab::Database::BatchCounter::FALLBACK }
- let_it_be(:small_batch_size) { ::Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE - 1 }
+ let_it_be(:small_batch_size) { calculate_batch_size(::Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE) }
let(:model) { Issue }
let(:column) { :author_id }
@@ -22,6 +22,12 @@ RSpec.describe Gitlab::Database::BatchCount do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction)
end
+ def calculate_batch_size(batch_size)
+ zero_offset_modifier = -1
+
+ batch_size + zero_offset_modifier
+ end
+
shared_examples 'disallowed configurations' do |method|
it 'returns fallback if start is bigger than finish' do
expect(described_class.public_send(method, *args, start: 1, finish: 0)).to eq(fallback)
@@ -45,6 +51,46 @@ RSpec.describe Gitlab::Database::BatchCount do
end
end
+ shared_examples 'when batch fetch query is canceled' do
+ let(:batch_size) { 22_000 }
+ let(:relation) { instance_double(ActiveRecord::Relation) }
+
+ it 'reduces the batch size by half and retries the fetch' do
+ too_big_batch_relation_mock = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ allow(relation).to receive(:where).with("id" => 0..calculate_batch_size(batch_size)).and_return(too_big_batch_relation_mock)
+ allow(too_big_batch_relation_mock).to receive(:send).and_raise(ActiveRecord::QueryCanceled)
+
+ expect(relation).to receive(:where).with("id" => 0..calculate_batch_size(batch_size / 2)).and_return(double(send: 1))
+
+ subject.call(model, column, batch_size: batch_size, start: 0)
+ end
+
+ context 'when all retries fail' do
+ let(:batch_count_query) { 'SELECT COUNT(id) FROM relation WHERE id BETWEEN 0 and 1' }
+
+ before do
+ allow(model).to receive_message_chain(:select, :public_send, where: relation)
+ allow(relation).to receive(:send).and_raise(ActiveRecord::QueryCanceled.new('query timed out'))
+ allow(relation).to receive(:to_sql).and_return(batch_count_query)
+ end
+
+ it 'logs failing query' do
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(
+ event: 'batch_count',
+ relation: model.table_name,
+ operation: operation,
+ operation_args: operation_args,
+ start: 0,
+ mode: mode,
+ query: batch_count_query,
+ message: 'Query has been canceled with message: query timed out'
+ )
+ expect(subject.call(model, column, batch_size: batch_size, start: 0)).to eq(-1)
+ end
+ end
+ end
+
describe '#batch_count' do
it 'counts table' do
expect(described_class.batch_count(model)).to eq(5)
@@ -86,10 +132,11 @@ RSpec.describe Gitlab::Database::BatchCount do
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE}" do
min_id = model.minimum(:id)
+ relation = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE)
- expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
- expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE + min_id, :itself).once.and_call_original
- end
+ expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1))
described_class.batch_count(model)
end
@@ -98,6 +145,15 @@ RSpec.describe Gitlab::Database::BatchCount do
subject { described_class.batch_count(model) }
end
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :itself }
+ let(:operation) { :count }
+ let(:operation_args) { nil }
+ let(:column) { nil }
+
+ subject { described_class.method(:batch_count) }
+ end
+
context 'disallowed_configurations' do
include_examples 'disallowed configurations', :batch_count do
let(:args) { [Issue] }
@@ -108,6 +164,24 @@ RSpec.describe Gitlab::Database::BatchCount do
expect { described_class.batch_count(model.distinct(column)) }.to raise_error 'Use distinct count for optimized distinct counting'
end
end
+
+ context 'when a relation is grouped' do
+ let!(:one_more_issue) { create(:issue, author: user, project: model.first.project) }
+
+ before do
+ stub_const('Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE', 1)
+ end
+
+ context 'count by default column' do
+ let(:count) do
+ described_class.batch_count(model.group(column), batch_size: 2)
+ end
+
+ it 'counts grouped records' do
+ expect(count).to eq({ user.id => 4, another_user.id => 2 })
+ end
+ end
+ end
end
describe '#batch_distinct_count' do
@@ -151,10 +225,11 @@ RSpec.describe Gitlab::Database::BatchCount do
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE}" do
min_id = model.minimum(:id)
+ relation = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE)
- expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
- expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE + min_id, :distinct).once.and_call_original
- end
+ expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1))
described_class.batch_distinct_count(model)
end
@@ -175,6 +250,33 @@ RSpec.describe Gitlab::Database::BatchCount do
end.to raise_error 'Use distinct count only with non id fields'
end
end
+
+ context 'when a relation is grouped' do
+ let!(:one_more_issue) { create(:issue, author: user, project: model.first.project) }
+
+ before do
+ stub_const('Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE', 1)
+ end
+
+ context 'distinct count by non-unique column' do
+ let(:count) do
+ described_class.batch_distinct_count(model.group(column), :project_id, batch_size: 2)
+ end
+
+ it 'counts grouped records' do
+ expect(count).to eq({ user.id => 3, another_user.id => 2 })
+ end
+ end
+ end
+
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :distinct }
+ let(:operation) { :count }
+ let(:operation_args) { nil }
+ let(:column) { nil }
+
+ subject { described_class.method(:batch_distinct_count) }
+ end
end
describe '#batch_sum' do
@@ -209,10 +311,11 @@ RSpec.describe Gitlab::Database::BatchCount do
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE}" do
min_id = model.minimum(:id)
+ relation = instance_double(ActiveRecord::Relation)
+ allow(model).to receive_message_chain(:select, public_send: relation)
+ batch_end_id = min_id + calculate_batch_size(Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE)
- expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
- expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE + min_id, :itself).once.and_call_original
- end
+ expect(relation).to receive(:where).with("id" => min_id..batch_end_id).and_return(double(send: 1))
described_class.batch_sum(model, column)
end
@@ -226,5 +329,13 @@ RSpec.describe Gitlab::Database::BatchCount do
let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE }
let(:small_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE - 1 }
end
+
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :itself }
+ let(:operation) { :sum }
+ let(:operation_args) { [column] }
+
+ subject { described_class.method(:batch_sum) }
+ end
end
end
diff --git a/spec/lib/gitlab/database/bulk_update_spec.rb b/spec/lib/gitlab/database/bulk_update_spec.rb
new file mode 100644
index 00000000000..f2a7d6e69d8
--- /dev/null
+++ b/spec/lib/gitlab/database/bulk_update_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BulkUpdate do
+ describe 'error states' do
+ let(:columns) { %i[title] }
+
+ let_it_be(:mapping) do
+ create_default(:user)
+ create_default(:project)
+
+ i_a, i_b = create_list(:issue, 2)
+
+ {
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
+ }
+ end
+
+ it 'does not raise errors on valid inputs' do
+ expect { described_class.execute(columns, mapping) }.not_to raise_error
+ end
+
+ it 'expects a non-empty list of column names' do
+ expect { described_class.execute([], mapping) }.to raise_error(ArgumentError)
+ end
+
+ it 'expects all columns to be symbols' do
+ expect { described_class.execute([1], mapping) }.to raise_error(ArgumentError)
+ end
+
+ it 'expects all columns to be valid columns on the tables' do
+ expect { described_class.execute([:foo], mapping) }.to raise_error(ArgumentError)
+ end
+
+ it 'refuses to set ID' do
+ expect { described_class.execute([:id], mapping) }.to raise_error(ArgumentError)
+ end
+
+ it 'expects a non-empty mapping' do
+ expect { described_class.execute(columns, []) }.to raise_error(ArgumentError)
+ end
+
+ it 'expects all map values to be Hash instances' do
+ bad_map = mapping.merge(build(:issue) => 2)
+
+ expect { described_class.execute(columns, bad_map) }.to raise_error(ArgumentError)
+ end
+ end
+
+ it 'is possible to update all objects in a single query' do
+ users = create_list(:user, 3)
+ mapping = users.zip(%w(foo bar baz)).to_h do |u, name|
+ [u, { username: name, admin: true }]
+ end
+
+ expect do
+ described_class.execute(%i[username admin], mapping)
+ end.not_to exceed_query_limit(1)
+
+ # We have optimistically updated the values
+ expect(users).to all(be_admin)
+ expect(users.map(&:username)).to eq(%w(foo bar baz))
+
+ users.each(&:reset)
+
+ # The values are correct on reset
+ expect(users).to all(be_admin)
+ expect(users.map(&:username)).to eq(%w(foo bar baz))
+ end
+
+ it 'is possible to update heterogeneous sets' do
+ create_default(:user)
+ create_default(:project)
+
+ mr_a = create(:merge_request)
+ i_a, i_b = create_list(:issue, 2)
+
+ mapping = {
+ mr_a => { title: 'MR a' },
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
+ }
+
+ expect do
+ described_class.execute(%i[title], mapping)
+ end.not_to exceed_query_limit(2)
+
+ expect([mr_a, i_a, i_b].map { |x| x.reset.title })
+ .to eq(['MR a', 'Issue a', 'Issue b'])
+ end
+
+ shared_examples 'basic functionality' do
+ it 'sets multiple values' do
+ create_default(:user)
+ create_default(:project)
+
+ i_a, i_b = create_list(:issue, 2)
+
+ mapping = {
+ i_a => { title: 'Issue a' },
+ i_b => { title: 'Issue b' }
+ }
+
+ described_class.execute(%i[title], mapping)
+
+ expect([i_a, i_b].map { |x| x.reset.title })
+ .to eq(['Issue a', 'Issue b'])
+ end
+ end
+
+ include_examples 'basic functionality'
+
+ context 'when prepared statements are configured differently to the normal test environment' do
+ # rubocop: disable RSpec/LeakyConstantDeclaration
+ # This cop is disabled because you cannot call establish_connection on
+ # an anonymous class.
+ class ActiveRecordBasePreparedStatementsInverted < ActiveRecord::Base
+ def self.abstract_class?
+ true # So it gets its own connection
+ end
+ end
+ # rubocop: enable RSpec/LeakyConstantDeclaration
+
+ before_all do
+ c = ActiveRecord::Base.connection.instance_variable_get(:@config)
+ inverted = c.merge(prepared_statements: !ActiveRecord::Base.connection.prepared_statements)
+ ActiveRecordBasePreparedStatementsInverted.establish_connection(inverted)
+ end
+
+ before do
+ allow(ActiveRecord::Base).to receive(:connection_specification_name)
+ .and_return(ActiveRecordBasePreparedStatementsInverted.connection_specification_name)
+ end
+
+ include_examples 'basic functionality'
+ end
+end
diff --git a/spec/lib/gitlab/database/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/concurrent_reindex_spec.rb
deleted file mode 100644
index 4e2c3f547d4..00000000000
--- a/spec/lib/gitlab/database/concurrent_reindex_spec.rb
+++ /dev/null
@@ -1,207 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::ConcurrentReindex, '#execute' do
- subject { described_class.new(index_name, logger: logger) }
-
- let(:table_name) { '_test_reindex_table' }
- let(:column_name) { '_test_column' }
- let(:index_name) { '_test_reindex_index' }
- let(:logger) { double('logger', debug: nil, info: nil, error: nil ) }
- let(:connection) { ActiveRecord::Base.connection }
-
- before do
- connection.execute(<<~SQL)
- CREATE TABLE #{table_name} (
- id serial NOT NULL PRIMARY KEY,
- #{column_name} integer NOT NULL);
-
- CREATE INDEX #{index_name} ON #{table_name} (#{column_name});
- SQL
- end
-
- context 'when the index does not exist' do
- before do
- connection.execute(<<~SQL)
- DROP INDEX #{index_name}
- SQL
- end
-
- it 'raises an error' do
- expect { subject.execute }.to raise_error(described_class::ReindexError, /does not exist/)
- end
- end
-
- context 'when the index is unique' do
- before do
- connection.execute(<<~SQL)
- DROP INDEX #{index_name};
- CREATE UNIQUE INDEX #{index_name} ON #{table_name} (#{column_name})
- SQL
- end
-
- it 'raises an error' do
- expect do
- subject.execute
- end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
- end
- end
-
- context 'replacing the original index with a rebuilt copy' do
- let(:replacement_name) { 'tmp_reindex__test_reindex_index' }
- let(:replaced_name) { 'old_reindex__test_reindex_index' }
-
- let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
- let(:drop_index) { "DROP INDEX CONCURRENTLY IF EXISTS #{replacement_name}" }
-
- let!(:original_index) { find_index_create_statement }
-
- before do
- allow(subject).to receive(:connection).and_return(connection)
- allow(subject).to receive(:disable_statement_timeout).and_yield
- end
-
- it 'replaces the existing index with an identical index' do
- expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
-
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_to_execute_in_order("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- expect_to_execute_in_order("ALTER INDEX #{replacement_name} RENAME TO #{index_name}")
- expect_to_execute_in_order("ALTER INDEX #{replaced_name} RENAME TO #{replacement_name}")
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- subject.execute
-
- check_index_exists
- end
-
- context 'when a dangling index is left from a previous run' do
- before do
- connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
- end
-
- it 'replaces the existing index with an identical index' do
- expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
-
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_to_execute_in_order("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- expect_to_execute_in_order("ALTER INDEX #{replacement_name} RENAME TO #{index_name}")
- expect_to_execute_in_order("ALTER INDEX #{replaced_name} RENAME TO #{replacement_name}")
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- subject.execute
-
- check_index_exists
- end
- end
-
- context 'when it fails to create the replacement index' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect(connection).to receive(:execute).with(create_index).ordered
- .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when the replacement index is not valid' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect(subject).to receive(:replacement_index_valid?).and_return(false)
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
-
- check_index_exists
- end
- end
-
- context 'when a database error occurs while swapping the indexes' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect(connection).to receive(:execute).ordered
- .with("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when with_lock_retries fails to acquire the lock' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true)
- .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /exhausted/)
-
- check_index_exists
- end
- end
- end
-
- def expect_to_execute_concurrently_in_order(sql)
- # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
- # verify the original call but pass through the non-concurrent form.
- expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
- method.call(sql.sub(/CONCURRENTLY/, ''))
- end
- end
-
- def expect_to_execute_in_order(sql)
- expect(connection).to receive(:execute).with(sql).ordered.and_call_original
- end
-
- def find_index_create_statement
- ActiveRecord::Base.connection.select_value(<<~SQL)
- SELECT indexdef
- FROM pg_indexes
- WHERE schemaname = 'public'
- AND indexname = #{ActiveRecord::Base.connection.quote(index_name)}
- SQL
- end
-
- def check_index_exists
- expect(find_index_create_statement).to eq(original_index)
- end
-end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 0bdcca630aa..a8edcc5f7e5 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -699,6 +699,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:copy_indexes).with(:users, :old, :new)
expect(model).to receive(:copy_foreign_keys).with(:users, :old, :new)
+ expect(model).to receive(:copy_check_constraints).with(:users, :old, :new)
model.rename_column_concurrently(:users, :old, :new)
end
@@ -761,6 +762,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:change_column_default)
.with(:users, :new, old_column.default)
+ expect(model).to receive(:copy_check_constraints)
+ .with(:users, :old, :new)
+
model.rename_column_concurrently(:users, :old, :new)
end
end
@@ -856,6 +860,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:copy_indexes).with(:users, :new, :old)
expect(model).to receive(:copy_foreign_keys).with(:users, :new, :old)
+ expect(model).to receive(:copy_check_constraints).with(:users, :new, :old)
model.undo_cleanup_concurrent_column_rename(:users, :old, :new)
end
@@ -894,6 +899,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:change_column_default)
.with(:users, :old, new_column.default)
+ expect(model).to receive(:copy_check_constraints)
+ .with(:users, :new, :old)
+
model.undo_cleanup_concurrent_column_rename(:users, :old, :new)
end
end
@@ -925,6 +933,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#undo_change_column_type_concurrently' do
+ it 'reverses the operations of change_column_type_concurrently' do
+ expect(model).to receive(:check_trigger_permissions!).with(:users)
+
+ expect(model).to receive(:remove_rename_triggers_for_postgresql)
+ .with(:users, /trigger_.{12}/)
+
+ expect(model).to receive(:remove_column).with(:users, "old_for_type_change")
+
+ model.undo_change_column_type_concurrently(:users, :old)
+ end
+ end
+
describe '#cleanup_concurrent_column_type_change' do
it 'cleans up the type changing procedure' do
expect(model).to receive(:cleanup_concurrent_column_rename)
@@ -937,6 +958,94 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#undo_cleanup_concurrent_column_type_change' do
+ context 'in a transaction' do
+ it 'raises RuntimeError' do
+ allow(model).to receive(:transaction_open?).and_return(true)
+
+ expect { model.undo_cleanup_concurrent_column_type_change(:users, :old, :new) }
+ .to raise_error(RuntimeError)
+ end
+ end
+
+ context 'outside a transaction' do
+ let(:temp_column) { "old_for_type_change" }
+
+ let(:temp_undo_cleanup_column) do
+ identifier = "users_old_for_type_change"
+ hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
+ "tmp_undo_cleanup_column_#{hashed_identifier}"
+ end
+
+ let(:trigger_name) { model.rename_trigger_name(:users, :old, :old_for_type_change) }
+
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'reverses the operations of cleanup_concurrent_column_type_change' do
+ expect(model).to receive(:check_trigger_permissions!).with(:users)
+
+ expect(model).to receive(:create_column_from).with(
+ :users,
+ :old,
+ temp_undo_cleanup_column,
+ type: :string,
+ batch_column_name: :id,
+ type_cast_function: nil
+ ).and_return(true)
+
+ expect(model).to receive(:rename_column)
+ .with(:users, :old, temp_column)
+
+ expect(model).to receive(:rename_column)
+ .with(:users, temp_undo_cleanup_column, :old)
+
+ expect(model).to receive(:install_rename_triggers_for_postgresql)
+ .with(trigger_name, '"users"', '"old"', '"old_for_type_change"')
+
+ model.undo_cleanup_concurrent_column_type_change(:users, :old, :string)
+ end
+
+ it 'passes the type_cast_function and batch_column_name' do
+ expect(model).to receive(:column_exists?).with(:users, :other_batch_column).and_return(true)
+ expect(model).to receive(:check_trigger_permissions!).with(:users)
+
+ expect(model).to receive(:create_column_from).with(
+ :users,
+ :old,
+ temp_undo_cleanup_column,
+ type: :string,
+ batch_column_name: :other_batch_column,
+ type_cast_function: :custom_type_cast_function
+ ).and_return(true)
+
+ expect(model).to receive(:rename_column)
+ .with(:users, :old, temp_column)
+
+ expect(model).to receive(:rename_column)
+ .with(:users, temp_undo_cleanup_column, :old)
+
+ expect(model).to receive(:install_rename_triggers_for_postgresql)
+ .with(trigger_name, '"users"', '"old"', '"old_for_type_change"')
+
+ model.undo_cleanup_concurrent_column_type_change(
+ :users,
+ :old,
+ :string,
+ type_cast_function: :custom_type_cast_function,
+ batch_column_name: :other_batch_column
+ )
+ end
+
+ it 'raises an error with invalid batch_column_name' do
+ expect do
+ model.undo_cleanup_concurrent_column_type_change(:users, :old, :new, batch_column_name: :invalid)
+ end.to raise_error(RuntimeError, /Column invalid does not exist on users/)
+ end
+ end
+ end
+
describe '#install_rename_triggers_for_postgresql' do
it 'installs the triggers for PostgreSQL' do
expect(model).to receive(:execute)
@@ -1128,7 +1237,65 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
name: 'index_on_issues_gl_project_id',
length: [],
order: [],
- opclasses: { 'gl_project_id' => 'bar' })
+ opclass: { 'gl_project_id' => 'bar' })
+
+ model.copy_indexes(:issues, :project_id, :gl_project_id)
+ end
+ end
+
+ context 'using an index with multiple columns and custom operator classes' do
+ it 'copies the index' do
+ index = double(:index,
+ columns: %w(project_id foobar),
+ name: 'index_on_issues_project_id_foobar',
+ using: :gin,
+ where: nil,
+ opclasses: { 'project_id' => 'bar', 'foobar' => :gin_trgm_ops },
+ unique: false,
+ lengths: [],
+ orders: [])
+
+ allow(model).to receive(:indexes_for).with(:issues, 'project_id')
+ .and_return([index])
+
+ expect(model).to receive(:add_concurrent_index)
+ .with(:issues,
+ %w(gl_project_id foobar),
+ unique: false,
+ name: 'index_on_issues_gl_project_id_foobar',
+ length: [],
+ order: [],
+ opclass: { 'gl_project_id' => 'bar', 'foobar' => :gin_trgm_ops },
+ using: :gin)
+
+ model.copy_indexes(:issues, :project_id, :gl_project_id)
+ end
+ end
+
+ context 'using an index with multiple columns and a custom operator class on the unaffected column' do
+ it 'copies the index' do
+ index = double(:index,
+ columns: %w(project_id foobar),
+ name: 'index_on_issues_project_id_foobar',
+ using: :gin,
+ where: nil,
+ opclasses: { 'foobar' => :gin_trgm_ops },
+ unique: false,
+ lengths: [],
+ orders: [])
+
+ allow(model).to receive(:indexes_for).with(:issues, 'project_id')
+ .and_return([index])
+
+ expect(model).to receive(:add_concurrent_index)
+ .with(:issues,
+ %w(gl_project_id foobar),
+ unique: false,
+ name: 'index_on_issues_gl_project_id_foobar',
+ length: [],
+ order: [],
+ opclass: { 'foobar' => :gin_trgm_ops },
+ using: :gin)
model.copy_indexes(:issues, :project_id, :gl_project_id)
end
@@ -1400,15 +1567,32 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
)
end
- after do
- 'DROP INDEX IF EXISTS test_index;'
- end
-
it 'returns true if an index exists' do
expect(model.index_exists_by_name?(:projects, 'test_index'))
.to be_truthy
end
end
+
+ context 'when an index exists for a table with the same name in another schema' do
+ before do
+ ActiveRecord::Base.connection.execute(
+ 'CREATE SCHEMA new_test_schema'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE INDEX test_index_on_name ON new_test_schema.projects (LOWER(name));'
+ )
+ end
+
+ it 'returns false if the index does not exist in the current schema' do
+ expect(model.index_exists_by_name?(:projects, 'test_index_on_name'))
+ .to be_falsy
+ end
+ end
end
describe '#create_or_update_plan_limit' do
@@ -1863,11 +2047,17 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
ActiveRecord::Base.connection.execute(
'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
)
- end
- after do
ActiveRecord::Base.connection.execute(
- 'ALTER TABLE projects DROP CONSTRAINT IF EXISTS check_1'
+ 'CREATE SCHEMA new_test_schema'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
)
end
@@ -1885,6 +2075,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model.check_constraint_exists?(:users, 'check_1'))
.to be_falsy
end
+
+ it 'returns false if a constraint with the same name exists for the same table in another schema' do
+ expect(model.check_constraint_exists?(:projects, 'check_2'))
+ .to be_falsy
+ end
end
describe '#add_check_constraint' do
@@ -2086,6 +2281,138 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#copy_check_constraints' do
+ context 'inside a transaction' do
+ it 'raises an error' do
+ expect(model).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ allow(model).to receive(:column_exists?).and_return(true)
+ end
+
+ let(:old_column_constraints) do
+ [
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'check_d7d49d475d',
+ 'constraint_def' => 'CHECK ((old_column IS NOT NULL))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'check_48560e521e',
+ 'constraint_def' => 'CHECK ((char_length(old_column) <= 255))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'custom_check_constraint',
+ 'constraint_def' => 'CHECK (((old_column IS NOT NULL) AND (another_column IS NULL)))'
+ },
+ {
+ 'schema_name' => 'public',
+ 'table_name' => 'test_table',
+ 'column_name' => 'old_column',
+ 'constraint_name' => 'not_valid_check_constraint',
+ 'constraint_def' => 'CHECK ((old_column IS NOT NULL)) NOT VALID'
+ }
+ ]
+ end
+
+ it 'copies check constraints from one column to another' do
+ allow(model).to receive(:check_constraints_for)
+ .with(:test_table, :old_column, schema: nil)
+ .and_return(old_column_constraints)
+
+ allow(model).to receive(:not_null_constraint_name).with(:test_table, :new_column)
+ .and_return('check_1')
+
+ allow(model).to receive(:text_limit_name).with(:test_table, :new_column)
+ .and_return('check_2')
+
+ allow(model).to receive(:check_constraint_name)
+ .with(:test_table, :new_column, 'copy_check_constraint')
+ .and_return('check_3')
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(new_column IS NOT NULL)',
+ 'check_1',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(char_length(new_column) <= 255)',
+ 'check_2',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '((new_column IS NOT NULL) AND (another_column IS NULL))',
+ 'check_3',
+ validate: true
+ ).once
+
+ expect(model).to receive(:add_check_constraint)
+ .with(
+ :test_table,
+ '(new_column IS NOT NULL)',
+ 'check_1',
+ validate: false
+ ).once
+
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end
+
+ it 'does nothing if there are no constraints defined for the old column' do
+ allow(model).to receive(:check_constraints_for)
+ .with(:test_table, :old_column, schema: nil)
+ .and_return([])
+
+ expect(model).not_to receive(:add_check_constraint)
+
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end
+
+ it 'raises an error when the originating column does not exist' do
+ allow(model).to receive(:column_exists?).with(:test_table, :old_column).and_return(false)
+
+ error_message = /Column old_column does not exist on test_table/
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError, error_message)
+ end
+
+ it 'raises an error when the target column does not exist' do
+ allow(model).to receive(:column_exists?).with(:test_table, :new_column).and_return(false)
+
+ error_message = /Column new_column does not exist on test_table/
+
+ expect do
+ model.copy_check_constraints(:test_table, :old_column, :new_column)
+ end.to raise_error(RuntimeError, error_message)
+ end
+ end
+ end
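+
+ # Usage sketch (illustrative, reusing the call exercised above): after adding a
+ # replacement column, the existing CHECK constraints can be mirrored onto it with
+ #
+ #   copy_check_constraints(:test_table, :old_column, :new_column)
+ #
+ # where, as the first example asserts, NOT VALID constraints are copied with
+ # validate: false.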
+
describe '#add_text_limit' do
context 'when it is called with the default options' do
it 'calls add_check_constraint with an inferred constraint name and validate: true' do
diff --git a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
index 034bf966db7..8a35d8149ad 100644
--- a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
+++ b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns do
describe '#execute' do
it 'returns a list of class names and columns pairs' do
- Timecop.freeze(REMOVE_DATE) do
+ travel_to(REMOVE_DATE) do
expect(subject.execute).to eq([
['Testing::A', {
'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0'),
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index 334cac653cf..885eef5723e 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
let(:partitioning_key) { :created_at }
around do |example|
- Timecop.freeze(Date.parse('2020-08-22')) { example.run }
+ travel_to(Date.parse('2020-08-22')) { example.run }
end
context 'with existing partitions' do
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
index ec3d0a6dbcb..c43b51e10a0 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
@@ -116,23 +116,6 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
expect(jobs_updated).to eq(1)
end
- context 'when the feature flag is disabled' do
- let(:mock_connection) { double('connection') }
-
- before do
- allow(subject).to receive(:connection).and_return(mock_connection)
- stub_feature_flags(backfill_partitioned_audit_events: false)
- end
-
- it 'exits without attempting to copy data' do
- expect(mock_connection).not_to receive(:execute)
-
- subject.perform(1, 100, source_table, destination_table, unique_key)
-
- expect(destination_model.count).to eq(0)
- end
- end
-
context 'when the job is run within an explicit transaction block' do
let(:mock_connection) { double('connection') }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 44ef0b307fe..147637cf471 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -213,7 +213,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
it 'creates partitions including the next month from today' do
today = Date.new(2020, 5, 8)
- Timecop.freeze(today) do
+ travel_to(today) do
migration.partition_table_by_date source_table, partition_column, min_date: min_date
expect_range_partitions_for(partitioned_table, {
@@ -233,7 +233,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
context 'without min_date, max_date' do
it 'creates partitions for the current and next month' do
current_date = Date.new(2020, 05, 22)
- Timecop.freeze(current_date.to_time) do
+ travel_to(current_date.to_time) do
migration.partition_table_by_date source_table, partition_column
expect_range_partitions_for(partitioned_table, {
@@ -514,6 +514,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
allow(migration).to receive(:table_exists?).with(partitioned_table).and_return(true)
allow(migration).to receive(:copy_missed_records)
allow(migration).to receive(:execute).with(/VACUUM/)
+ allow(migration).to receive(:execute).with(/^(RE)?SET/)
end
it 'finishes remaining jobs for the correct table' do
@@ -567,6 +568,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
allow(Gitlab::BackgroundMigration).to receive(:steal)
allow(migration).to receive(:execute).with(/VACUUM/)
+ allow(migration).to receive(:execute).with(/^(RE)?SET/)
end
it 'idempotently cleans up after failed background migrations' do
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
new file mode 100644
index 00000000000..1da67a5a6c0
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresIndex do
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE INDEX foo_idx ON public.users (name);
+ CREATE UNIQUE INDEX bar_key ON public.users (id);
+
+ CREATE TABLE example_table (id serial primary key);
+ SQL
+ end
+
+ def find(name)
+ described_class.by_identifier(name)
+ end
+
+ describe '.by_identifier' do
+ it 'finds the index' do
+ expect(find('public.foo_idx')).to be_a(Gitlab::Database::PostgresIndex)
+ end
+
+ it 'raises an error if not found' do
+ expect { find('public.idontexist') }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'raises ArgumentError if given a non-fully qualified index name' do
+ expect { find('foo') }.to raise_error(ArgumentError, /not fully qualified/)
+ end
+ end
+
+ describe '.regular' do
+ it 'returns only non-unique indexes' do
+ expect(described_class.regular).to all(have_attributes(unique: false))
+ end
+
+ it 'returns only non-partitioned indexes' do
+ expect(described_class.regular).to all(have_attributes(partitioned: false))
+ end
+
+ it 'returns only indexes that do not serve an exclusion constraint' do
+ expect(described_class.regular).to all(have_attributes(exclusion: false))
+ end
+ end
+
+ describe '.not_match' do
+ it 'excludes indexes matching the given regex' do
+ expect(described_class.not_match('^bar_k').map(&:name)).to all(match(/^(?!bar_k).*/))
+ end
+
+ it 'returns indexes that do not match the given prefix regex' do
+ expect(described_class.not_match('^bar_k')).not_to be_empty
+ end
+ end
+
+ describe '.random_few' do
+ it 'limits the result to the requested number of records' do
+ expect(described_class.random_few(2).size).to eq(2)
+ end
+ end
+
+ describe '#unique?' do
+ it 'returns true for a unique index' do
+ expect(find('public.bar_key')).to be_unique
+ end
+
+ it 'returns false for a regular, non-unique index' do
+ expect(find('public.foo_idx')).not_to be_unique
+ end
+
+ it 'returns true for a primary key index' do
+ expect(find('public.example_table_pkey')).to be_unique
+ end
+ end
+
+ describe '#valid_index?' do
+ it 'returns true if the index is valid' do
+ expect(find('public.foo_idx')).to be_valid_index
+ end
+
+ it 'returns false if the index is marked as invalid' do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ UPDATE pg_index SET indisvalid=false
+ FROM pg_class
+ WHERE pg_class.relname = 'foo_idx' AND pg_index.indexrelid = pg_class.oid
+ SQL
+
+ expect(find('public.foo_idx')).not_to be_valid_index
+ end
+ end
+
+ describe '#to_s' do
+ it 'returns the index name' do
+ expect(find('public.foo_idx').to_s).to eq('foo_idx')
+ end
+ end
+
+ describe '#name' do
+ it 'returns the name' do
+ expect(find('public.foo_idx').name).to eq('foo_idx')
+ end
+ end
+
+ describe '#schema' do
+ it 'returns the index schema' do
+ expect(find('public.foo_idx').schema).to eq('public')
+ end
+ end
+
+ describe '#definition' do
+ it 'returns the index definition' do
+ expect(find('public.foo_idx').definition).to eq('CREATE INDEX foo_idx ON public.users USING btree (name)')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
new file mode 100644
index 00000000000..2d6765aac2e
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
@@ -0,0 +1,255 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
+ subject { described_class.new(index, logger: logger) }
+
+ let(:table_name) { '_test_reindex_table' }
+ let(:column_name) { '_test_column' }
+ let(:index_name) { '_test_reindex_index' }
+ let(:index) { instance_double(Gitlab::Database::PostgresIndex, indexrelid: 42, name: index_name, schema: 'public', partitioned?: false, unique?: false, exclusion?: false, definition: 'CREATE INDEX _test_reindex_index ON public._test_reindex_table USING btree (_test_column)') }
+ let(:logger) { double('logger', debug: nil, info: nil, error: nil) }
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ #{column_name} integer NOT NULL);
+
+ CREATE INDEX #{index.name} ON #{table_name} (#{column_name});
+ SQL
+ end
+
+ context 'when the index is unique' do
+ before do
+ allow(index).to receive(:unique?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
+ end
+ end
+
+ context 'when the index is partitioned' do
+ before do
+ allow(index).to receive(:partitioned?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /partitioned indexes are currently not supported/)
+ end
+ end
+
+ context 'when the index serves an exclusion constraint' do
+ before do
+ allow(index).to receive(:exclusion?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /indexes serving an exclusion constraint are currently not supported/)
+ end
+ end
+
+ context 'when the index is a lingering temporary index from a previous reindexing run' do
+ context 'with the temporary index prefix' do
+ let(:index_name) { 'tmp_reindex_something' }
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+
+ context 'with the replaced index prefix' do
+ let(:index_name) { 'old_reindex_something' }
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+ end
+
+ context 'replacing the original index with a rebuilt copy' do
+ let(:replacement_name) { 'tmp_reindex_42' }
+ let(:replaced_name) { 'old_reindex_42' }
+
+ let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
+ let(:drop_index) do
+ <<~SQL
+ DROP INDEX CONCURRENTLY
+ IF EXISTS "public"."#{replacement_name}"
+ SQL
+ end
+
+ let!(:original_index) { find_index_create_statement }
+
+ it 'integration test: executing full index replacement without mocks' do
+ allow(connection).to receive(:execute).and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+
+ subject.perform
+
+ check_index_exists
+ end
+
+ context 'mocked specs' do
+ before do
+ allow(subject).to receive(:connection).and_return(connection)
+ allow(connection).to receive(:execute).and_call_original
+ end
+
+ it 'replaces the existing index with an identical index' do
+ expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').twice
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name)
+ expect_index_rename(replacement_name, index.name)
+ expect_index_rename(replaced_name, replacement_name)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ subject.perform
+
+ check_index_exists
+ end
+
+ context 'when a dangling index is left from a previous run' do
+ before do
+ connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
+ end
+
+ it 'replaces the existing index with an identical index' do
+ expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').exactly(3).times
+
+ expect_to_execute_concurrently_in_order(drop_index)
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name)
+ expect_index_rename(replacement_name, index.name)
+ expect_index_rename(replaced_name, replacement_name)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ subject.perform
+
+ check_index_exists
+ end
+ end
+
+ context 'when it fails to create the replacement index' do
+ it 'safely cleans up and signals the error' do
+ expect(connection).to receive(:execute).with(create_index).ordered
+ .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when the replacement index is not valid' do
+ it 'safely cleans up and signals the error' do
+ replacement_index = double('replacement index', valid_index?: false)
+ allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when a database error occurs while swapping the indexes' do
+ it 'safely cleans up and signals the error' do
+ replacement_index = double('replacement index', valid_index?: true)
+ allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when with_lock_retries fails to acquire the lock' do
+ it 'safely cleans up and signals the error' do
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true)
+ .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
+ end
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, /exhausted/)
+
+ check_index_exists
+ end
+ end
+ end
+ end
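+
+ # The expectations above encode the swap sequence under test: build the replacement
+ # index CONCURRENTLY under a temporary name, then, inside with_lock_retries, rename
+ # the original index to old_reindex_42, the replacement to the original name, and
+ # old_reindex_42 to the temporary name, and finally drop the leftover temporary
+ # index CONCURRENTLY.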
+
+ def expect_to_execute_concurrently_in_order(sql)
+ # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
+ # verify the original call but pass through the non-concurrent form.
+ expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+ end
+
+ def expect_index_rename(from, to)
+ expect(connection).to receive(:execute).with(<<~SQL).ordered
+ ALTER INDEX "public"."#{from}"
+ RENAME TO "#{to}"
+ SQL
+ end
+
+ def find_index_create_statement
+ ActiveRecord::Base.connection.select_value(<<~SQL)
+ SELECT indexdef
+ FROM pg_indexes
+ WHERE schemaname = 'public'
+ AND indexname = #{ActiveRecord::Base.connection.quote(index.name)}
+ SQL
+ end
+
+ def check_index_exists
+ expect(find_index_create_statement).to eq(original_index)
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
new file mode 100644
index 00000000000..f45d959c0de
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::Coordinator do
+ include ExclusiveLeaseHelpers
+
+ describe '#perform' do
+ subject { described_class.new(indexes).perform }
+
+ let(:indexes) { [instance_double(Gitlab::Database::PostgresIndex), instance_double(Gitlab::Database::PostgresIndex)] }
+ let(:reindexers) { [instance_double(Gitlab::Database::Reindexing::ConcurrentReindex), instance_double(Gitlab::Database::Reindexing::ConcurrentReindex)] }
+
+ let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
+ let(:lease_key) { 'gitlab/database/reindexing/coordinator' }
+ let(:lease_timeout) { 1.day }
+ let(:uuid) { 'uuid' }
+
+ before do
+ allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:keep_track_of).and_yield
+
+ indexes.zip(reindexers).each do |index, reindexer|
+ allow(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).and_return(reindexer)
+ allow(reindexer).to receive(:perform)
+ end
+ end
+
+ it 'performs concurrent reindexing for each index' do
+ indexes.zip(reindexers).each do |index, reindexer|
+ expect(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).ordered.and_return(reindexer)
+ expect(reindexer).to receive(:perform)
+ end
+
+ subject
+ end
+
+ it 'keeps track of actions and creates ReindexAction records' do
+ indexes.each do |index|
+ expect(Gitlab::Database::Reindexing::ReindexAction).to receive(:keep_track_of).with(index).and_yield
+ end
+
+ subject
+ end
+
+ context 'locking' do
+ it 'acquires a lock while reindexing' do
+ indexes.each do |index|
+ expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
+ action = instance_double(Gitlab::Database::Reindexing::ConcurrentReindex)
+ expect(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).ordered.with(index).and_return(action)
+ expect(action).to receive(:perform).ordered
+ expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
+ end
+
+ subject
+ end
+
+ it 'does not perform reindexing actions if the lease is not granted' do
+ indexes.each do |index|
+ expect(lease).to receive(:try_obtain).ordered.and_return(false)
+ expect(Gitlab::Database::Reindexing::ConcurrentReindex).not_to receive(:new)
+ end
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
new file mode 100644
index 00000000000..efb5b8463a1
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::ReindexAction, '.keep_track_of' do
+ let(:index) { double('index', identifier: 'public.something', ondisk_size_bytes: 10240, reload: nil) }
+ let(:size_after) { 512 }
+
+ it 'yields to the caller' do
+ expect { |b| described_class.keep_track_of(index, &b) }.to yield_control
+ end
+
+ def find_record
+ described_class.find_by(index_identifier: index.identifier)
+ end
+
+ it 'creates the record with a start time and updates its end time' do
+ freeze_time do
+ described_class.keep_track_of(index) do
+ expect(find_record.action_start).to be_within(1.second).of(Time.zone.now)
+
+ travel(10.seconds)
+ end
+
+ duration = find_record.action_end - find_record.action_start
+
+ expect(duration).to be_within(1.second).of(10.seconds)
+ end
+ end
+
+ it 'creates the record with its state set to :started and updates it to :finished' do
+ described_class.keep_track_of(index) do
+ expect(find_record).to be_started
+ end
+
+ expect(find_record).to be_finished
+ end
+
+ it "creates the record with the index's start size and updates its end size" do
+ described_class.keep_track_of(index) do
+ expect(find_record.ondisk_size_bytes_start).to eq(index.ondisk_size_bytes)
+
+ expect(index).to receive(:reload).once
+ allow(index).to receive(:ondisk_size_bytes).and_return(size_after)
+ end
+
+ expect(find_record.ondisk_size_bytes_end).to eq(size_after)
+ end
+
+ context 'in case of errors' do
+ it 'sets the state to failed' do
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record).to be_failed
+ end
+
+ it 'records the end time' do
+ freeze_time do
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record.action_end).to be_within(1.second).of(Time.zone.now)
+ end
+ end
+
+ it 'records the resulting index size' do
+ expect(index).to receive(:reload).once
+ allow(index).to receive(:ondisk_size_bytes).and_return(size_after)
+
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record.ondisk_size_bytes_end).to eq(size_after)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
new file mode 100644
index 00000000000..86b3c029944
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing do
+ include ExclusiveLeaseHelpers
+
+ describe '.perform' do
+ subject { described_class.perform(indexes) }
+
+ let(:coordinator) { instance_double(Gitlab::Database::Reindexing::Coordinator) }
+ let(:indexes) { double }
+
+ it 'delegates to Coordinator' do
+ expect(Gitlab::Database::Reindexing::Coordinator).to receive(:new).with(indexes).and_return(coordinator)
+ expect(coordinator).to receive(:perform)
+
+ subject
+ end
+ end
+
+ describe '.candidate_indexes' do
+ subject { described_class.candidate_indexes }
+
+ it 'retrieves regular indexes that are not left-overs from previous runs' do
+ result = double
+ expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.not_match.not_match').with(no_args).with('^tmp_reindex_').with('^old_reindex_').and_return(result)
+
+ expect(subject).to eq(result)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/similarity_score_spec.rb b/spec/lib/gitlab/database/similarity_score_spec.rb
index e36a4f610e1..cf75e5a72d9 100644
--- a/spec/lib/gitlab/database/similarity_score_spec.rb
+++ b/spec/lib/gitlab/database/similarity_score_spec.rb
@@ -90,4 +90,15 @@ RSpec.describe Gitlab::Database::SimilarityScore do
expect(subject).to eq(%w[different same gitlab-danger])
end
end
+
+ describe 'annotation' do
+ it 'annotates the generated SQL expression' do
+ expression = Gitlab::Database::SimilarityScore.build_expression(search: 'test', rules: [
+ { column: Arel.sql('path'), multiplier: 1 },
+ { column: Arel.sql('name'), multiplier: 0.8 }
+ ])
+
+ expect(Gitlab::Database::SimilarityScore).to be_order_by_similarity(expression)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 2cc6e175500..220ae705e71 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -104,9 +104,69 @@ RSpec.describe Gitlab::Database::WithLockRetries do
end
context 'after 3 iterations' do
- let(:retry_count) { 4 }
+ it_behaves_like 'retriable exclusive lock on `projects`' do
+ let(:retry_count) { 4 }
+ end
+
+ context 'setting the idle transaction timeout' do
+ context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
+ it 'does not disable the idle transaction timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_transaction).once
+
+ expect(subject).not_to receive(:disable_idle_in_transaction_timeout)
+
+ subject.run {}
+ end
+ end
- it_behaves_like 'retriable exclusive lock on `projects`'
+ context 'when there is an outer transaction: disable_ddl_transaction! is not set in the migration' do
+ it 'disables the idle transaction timeout so the code can sleep and retry' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+
+ n = 0
+ allow(subject).to receive(:run_block_with_transaction).twice do
+ n += 1
+ raise(ActiveRecord::LockWaitTimeout) if n == 1
+ end
+
+ expect(subject).to receive(:disable_idle_in_transaction_timeout).once
+
+ subject.run {}
+ end
+ end
+ end
+ end
+
+ context 'after the retries are exhausted' do
+ let(:timing_configuration) do
+ [
+ [1.second, 1.second]
+ ]
+ end
+
+ context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
+ it 'does not disable the lock_timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+
+ expect(subject).not_to receive(:disable_lock_timeout)
+
+ subject.run {}
+ end
+ end
+
+ context 'when there is an outer transaction: disable_ddl_transaction! is not set in the migration' do
+ it 'disables the lock_timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+
+ expect(subject).to receive(:disable_lock_timeout)
+
+ subject.run {}
+ end
+ end
end
context 'after the retries, without setting lock_timeout' do
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 420aa0a8df6..3175040167b 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -39,6 +39,12 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.system_id' do
+ it 'returns the PostgreSQL system identifier' do
+ expect(described_class.system_id).to be_an_instance_of(Integer)
+ end
+ end
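+
+ # Only the Integer contract is asserted here; the underlying query (presumably the
+ # system_identifier exposed by pg_control_system()) is an implementation detail and
+ # an assumption not verified by this example.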
+
describe '.postgresql?' do
subject { described_class.postgresql? }
@@ -70,25 +76,6 @@ RSpec.describe Gitlab::Database do
end
end
- describe '.postgresql_9_or_less?' do
- it 'returns true when using postgresql 8.4' do
- allow(described_class).to receive(:version).and_return('8.4')
- expect(described_class.postgresql_9_or_less?).to eq(true)
- end
-
- it 'returns true when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.postgresql_9_or_less?).to eq(true)
- end
-
- it 'returns false when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.postgresql_9_or_less?).to eq(false)
- end
- end
-
describe '.postgresql_minimum_supported_version?' do
it 'returns false when using PostgreSQL 10' do
allow(described_class).to receive(:version).and_return('10')
@@ -150,68 +137,6 @@ RSpec.describe Gitlab::Database do
end
end
- describe '.pg_wal_lsn_diff' do
- it 'returns old name when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.pg_wal_lsn_diff).to eq('pg_xlog_location_diff')
- end
-
- it 'returns new name when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.pg_wal_lsn_diff).to eq('pg_wal_lsn_diff')
- end
- end
-
- describe '.pg_current_wal_insert_lsn' do
- it 'returns old name when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.pg_current_wal_insert_lsn).to eq('pg_current_xlog_insert_location')
- end
-
- it 'returns new name when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.pg_current_wal_insert_lsn).to eq('pg_current_wal_insert_lsn')
- end
- end
-
- describe '.pg_last_wal_receive_lsn' do
- it 'returns old name when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.pg_last_wal_receive_lsn).to eq('pg_last_xlog_receive_location')
- end
-
- it 'returns new name when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.pg_last_wal_receive_lsn).to eq('pg_last_wal_receive_lsn')
- end
- end
-
- describe '.pg_last_wal_replay_lsn' do
- it 'returns old name when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.pg_last_wal_replay_lsn).to eq('pg_last_xlog_replay_location')
- end
-
- it 'returns new name when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.pg_last_wal_replay_lsn).to eq('pg_last_wal_replay_lsn')
- end
- end
-
- describe '.pg_last_xact_replay_timestamp' do
- it 'returns pg_last_xact_replay_timestamp' do
- expect(described_class.pg_last_xact_replay_timestamp).to eq('pg_last_xact_replay_timestamp')
- end
- end
-
describe '.nulls_last_order' do
it { expect(described_class.nulls_last_order('column', 'ASC')).to eq 'column ASC NULLS LAST'}
it { expect(described_class.nulls_last_order('column', 'DESC')).to eq 'column DESC NULLS LAST'}
@@ -433,6 +358,20 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.get_write_location' do
+ it 'returns a string' do
+ connection = ActiveRecord::Base.connection
+
+ expect(described_class.get_write_location(connection)).to be_a(String)
+ end
+
+ it 'returns nil if there are no results' do
+ connection = double(select_all: [])
+
+ expect(described_class.get_write_location(connection)).to be_nil
+ end
+ end
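+
+ # Only the String/nil contract is asserted here; the write location presumably comes
+ # from a WAL position query such as SELECT pg_current_wal_insert_lsn()::text, with
+ # nil returned when the query yields no rows, but the exact SQL is an assumption.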
+
describe '#true_value' do
it 'returns correct value' do
expect(described_class.true_value).to eq "'t'"
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
index bd60c24859c..72a66b0451e 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
@@ -120,7 +120,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
described_class.new(merge_request.merge_request_diff,
batch_page,
batch_size,
- collection_default_args)
+ **collection_default_args)
end
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 7e926f86096..f6810d7a966 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -43,7 +43,8 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
describe '#decorate' do
# Manually creates a Diff::File object to avoid triggering the cache on
- # the FileCollection::MergeRequestDiff
+ # the FileCollection::MergeRequestDiff
+ #
let(:diff_file) do
diffs = merge_request.diffs
raw_diff = diffs.diffable.raw_diffs(diffs.diff_options.merge(paths: ['CHANGELOG'])).first
@@ -73,6 +74,37 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
expect(rich_texts).to all(be_html_safe)
end
+
+ context "when diff_file is uncached due to default_max_patch_bytes change" do
+ before do
+ expect(cache).to receive(:read_file).at_least(:once).and_return([])
+
+ # Stub out the application's default and current patch size limits. We
+ # want them to be different, and the diff file to be sized between
+ # the 2 values.
+ #
+ diff_file_size_bytes = (diff_file.diff.diff.bytesize * 10)
+
+ stub_const("#{diff_file.diff.class}::DEFAULT_MAX_PATCH_BYTES", diff_file_size_bytes - 1)
+ expect(diff_file.diff.class).to receive(:patch_safe_limit_bytes).and_return(diff_file_size_bytes + 1)
+ expect(diff_file.diff.class)
+ .to receive(:patch_safe_limit_bytes)
+ .with(diff_file.diff.class::DEFAULT_MAX_PATCH_BYTES)
+ .and_call_original
+ end
+
+ it "manually writes highlighted lines to the cache" do
+ expect(cache).to receive(:write_to_redis_hash).and_call_original
+
+ cache.decorate(diff_file)
+ end
+
+ it "assigns highlighted diff lines to the DiffFile" do
+ expect(diff_file.highlighted_diff_lines.size).to be > 5
+
+ cache.decorate(diff_file)
+ end
+ end
end
shared_examples 'caches missing entries' do
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index 07b8070be30..ef448ee96a4 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -65,24 +65,15 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
- [true, false].each do |state_tracking_enabled|
- context "and current user can update noteable #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
-
- project.add_developer(user)
- end
+ context "and current user can update noteable" do
+ before do
+ project.add_developer(user)
+ end
- it 'does not raise an error' do
- if state_tracking_enabled
- expect { receiver.execute }.to change { noteable.resource_state_events.count }.by(1)
- else
- # One system note is created for the 'close' event
- expect { receiver.execute }.to change { noteable.notes.count }.by(1)
- end
+ it 'does not raise an error' do
+ expect { receiver.execute }.to change { noteable.resource_state_events.count }.by(1)
- expect(noteable.reload).to be_closed
- end
+ expect(noteable.reload).to be_closed
end
end
end
diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
index 01e2fe8ce17..40669f06371 100644
--- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
@@ -25,13 +25,17 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
let!(:lease) { stub_exclusive_lease(unique_key, 'uuid') }
it 'calls the given block' do
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(false, an_instance_of(described_class::SleepingLock))
end
it 'calls the given block continuously' do
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(false, an_instance_of(described_class::SleepingLock))
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(false, an_instance_of(described_class::SleepingLock))
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(false, an_instance_of(described_class::SleepingLock))
end
it 'cancels the exclusive lease after the block' do
@@ -74,7 +78,8 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
expect(lease).to receive(:try_obtain).exactly(3).times { nil }
expect(lease).to receive(:try_obtain).once { unique_key }
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(true)
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(true, an_instance_of(described_class::SleepingLock))
end
end
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 9bc865f4d29..e93593d348f 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Experimentation do
+RSpec.describe Gitlab::Experimentation, :snowplow do
before do
stub_const('Gitlab::Experimentation::EXPERIMENTS', {
test_experiment: {
@@ -69,12 +69,26 @@ RSpec.describe Gitlab::Experimentation do
end
end
+ describe '#push_frontend_experiment' do
+ it 'pushes an experiment to the frontend' do
+ gon = instance_double('gon')
+ experiments = { experiments: { 'myExperiment' => true } }
+
+ stub_experiment_for_user(my_experiment: true)
+ allow(controller).to receive(:gon).and_return(gon)
+
+ expect(gon).to receive(:push).with(experiments, true)
+
+ controller.push_frontend_experiment(:my_experiment)
+ end
+ end
+
describe '#experiment_enabled?' do
subject { controller.experiment_enabled?(:test_experiment) }
context 'cookie is not present' do
- it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of nil' do
- expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, nil)
+ it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of nil' do
+ expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, nil)
controller.experiment_enabled?(:test_experiment)
end
end
@@ -85,22 +99,22 @@ RSpec.describe Gitlab::Experimentation do
get :index
end
- it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of the modulo 100 of the hex value of the uuid' do
+ it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of the modulo 100 of the hex value of the uuid' do
# 'abcd1234'.hex % 100 = 76
- expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, 76)
+ expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, 76)
controller.experiment_enabled?(:test_experiment)
end
end
it 'returns true when DNT: 0 is set in the request' do
- allow(Gitlab::Experimentation).to receive(:enabled_for_user?) { true }
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
controller.request.headers['DNT'] = '0'
is_expected.to be_truthy
end
it 'returns false when DNT: 1 is set in the request' do
- allow(Gitlab::Experimentation).to receive(:enabled_for_user?) { true }
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
controller.request.headers['DNT'] = '1'
is_expected.to be_falsy
@@ -127,13 +141,14 @@ RSpec.describe Gitlab::Experimentation do
end
it 'tracks the event with the right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Team',
- 'start',
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
property: 'experimental_group',
- value: 'team_id'
+ value: 1
)
- controller.track_experiment_event(:test_experiment, 'start', 'team_id')
end
end
@@ -143,13 +158,43 @@ RSpec.describe Gitlab::Experimentation do
end
it 'tracks the event with the right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Team',
- 'start',
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
+ property: 'control_group',
+ value: 1
+ )
+ end
+ end
+
+ context 'do not track is disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'does track the event' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
property: 'control_group',
- value: 'team_id'
+ value: 1
)
- controller.track_experiment_event(:test_experiment, 'start', 'team_id')
+ end
+ end
+
+ context 'do not track enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not track the event' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_no_snowplow_event
end
end
end
@@ -160,8 +205,9 @@ RSpec.describe Gitlab::Experimentation do
end
it 'does not track the event' do
- expect(Gitlab::Tracking).not_to receive(:event)
controller.track_experiment_event(:test_experiment, 'start')
+
+ expect_no_snowplow_event
end
end
end
@@ -220,6 +266,36 @@ RSpec.describe Gitlab::Experimentation do
)
end
end
+
+ context 'do not track disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'control_group'
+ }
+ )
+ end
+ end
+
+ context 'do not track enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not push data to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+
+ expect(Gon.method_defined?(:tracking_data)).to be_falsey
+ end
+ end
end
context 'when the experiment is disabled' do
@@ -294,6 +370,39 @@ RSpec.describe Gitlab::Experimentation do
controller.record_experiment_user(:test_experiment)
end
end
+
+ context 'do not track' do
+ before do
+ allow(controller).to receive(:current_user).and_return(user)
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+ end
+
+ context 'is disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'is enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+ end
end
describe '#experiment_tracking_category_and_group' do
@@ -336,8 +445,8 @@ RSpec.describe Gitlab::Experimentation do
end
end
- describe '.enabled_for_user?' do
- subject { described_class.enabled_for_user?(:test_experiment, experimentation_subject_index) }
+ describe '.enabled_for_value?' do
+ subject { described_class.enabled_for_value?(:test_experiment, experimentation_subject_index) }
let(:experimentation_subject_index) { 9 }
@@ -377,4 +486,32 @@ RSpec.describe Gitlab::Experimentation do
end
end
end
+
+ describe '.enabled_for_attribute?' do
+ subject { described_class.enabled_for_attribute?(:test_experiment, attribute) }
+
+ let(:attribute) { 'abcd' } # Digest::SHA1.hexdigest('abcd').hex % 100 = 7
+
+ context 'experiment is disabled' do
+ before do
+ allow(described_class).to receive(:enabled?).and_return(false)
+ end
+
+ it { is_expected.to be false }
+ end
+
+ context 'experiment is enabled' do
+ before do
+ allow(described_class).to receive(:enabled?).and_return(true)
+ end
+
+ it { is_expected.to be true }
+
+ context 'outside enabled ratio' do
+ let(:attribute) { 'abc' } # Digest::SHA1.hexdigest('abc').hex % 100 = 17
+
+ it { is_expected.to be false }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index e1bcf4aeeb1..9271f635b14 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -85,9 +85,9 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
}
end
- let(:stale_sha) { Timecop.freeze(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago - 5.days) { create_commit } }
- let(:active_sha) { Timecop.freeze(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago + 5.days) { create_commit } }
- let(:future_sha) { Timecop.freeze(100.days.since) { create_commit } }
+ let(:stale_sha) { travel_to(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago - 5.days) { create_commit } }
+ let(:active_sha) { travel_to(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago + 5.days) { create_commit } }
+ let(:future_sha) { travel_to(100.days.since) { create_commit } }
before do
repository.create_branch('stale-1', stale_sha)
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index b202015464f..1a3c332a21b 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -9,8 +9,11 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
MutatingConstantIterator.class_eval do
include Enumerable
+ attr_reader :size
+
def initialize(count, value)
@count = count
+ @size = count
@value = value
end
@@ -517,21 +520,39 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
.to yield_with_args(an_instance_of(Gitlab::Git::Diff))
end
- it 'prunes diffs that are quite big' do
- diff = nil
+ context 'single-file collections' do
+ it 'does not prune diffs' do
+ diff = nil
- subject.each do |d|
- diff = d
+ subject.each do |d|
+ diff = d
+ end
+
+ expect(diff.diff).not_to eq('')
end
+ end
+
+ context 'multi-file collections' do
+ let(:iterator) { [{ diff: 'b' }, { diff: 'a' * 20480 }]}
+
+ it 'prunes diffs that are quite big' do
+ diff = nil
- expect(diff.diff).to eq('')
+ subject.each do |d|
+ diff = d
+ end
+
+ expect(diff.diff).to eq('')
+ end
end
context 'when going over safe limits on files' do
let(:iterator) { [fake_diff(1, 1)] * 4 }
before do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: 2, max_lines: max_lines })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: 2, max_lines: max_lines })
end
it 'prunes diffs by default even little ones' do
@@ -556,7 +577,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
before do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: max_files, max_lines: 80 })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: max_files, max_lines: 80 })
end
it 'prunes diffs by default even little ones' do
@@ -581,7 +604,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
before do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: max_files, max_lines: 80 })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: max_files, max_lines: 80 })
end
it 'prunes diffs by default even little ones' do
@@ -665,8 +690,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
before do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS',
- { max_files: max_files, max_lines: 80 })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: max_files, max_lines: 80 })
end
it 'considers the size of diffs before the offset for pruning' do
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 117c519e98d..980a52bb61e 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -284,13 +284,21 @@ EOT
end
describe '#line_count' do
- it 'returns the correct number of lines' do
- diff = described_class.new(gitaly_diff)
+ let(:diff) { described_class.new(gitaly_diff) }
+ it 'returns the correct number of lines' do
expect(diff.line_count).to eq(7)
end
end
+ describe "#diff_bytesize" do
+ let(:diff) { described_class.new(gitaly_diff) }
+
+ it "returns the size of the diff in bytes" do
+ expect(diff.diff_bytesize).to eq(diff.diff.bytesize)
+ end
+ end
+
describe '#too_large?' do
it 'returns true for a diff that is too large' do
diff = described_class.new(diff: 'a' * 204800)
diff --git a/spec/lib/gitlab/git/object_pool_spec.rb b/spec/lib/gitlab/git/object_pool_spec.rb
index c8fbc674c73..e1873c6ddb5 100644
--- a/spec/lib/gitlab/git/object_pool_spec.rb
+++ b/spec/lib/gitlab/git/object_pool_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Git::ObjectPool do
describe '#create' do
before do
- subject.create
+ subject.create # rubocop:disable Rails/SaveBang
end
context "when the pool doesn't exist yet" do
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Git::ObjectPool do
context 'when the pool already exists' do
it 'raises an FailedPrecondition' do
expect do
- subject.create
+ subject.create # rubocop:disable Rails/SaveBang
end.to raise_error(GRPC::FailedPrecondition)
end
end
diff --git a/spec/lib/gitlab/git/remote_mirror_spec.rb b/spec/lib/gitlab/git/remote_mirror_spec.rb
index 423c4aa9620..92504b7aafe 100644
--- a/spec/lib/gitlab/git/remote_mirror_spec.rb
+++ b/spec/lib/gitlab/git/remote_mirror_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Git::RemoteMirror do
.to receive(:update_remote_mirror)
.with(ref_name, ['master'], ssh_key: 'KEY', known_hosts: 'KNOWN HOSTS', keep_divergent_refs: true)
- remote_mirror.update
+ remote_mirror.update # rubocop:disable Rails/SaveBang
end
it 'wraps gitaly errors' do
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Git::RemoteMirror do
.to receive(:update_remote_mirror)
.and_raise(StandardError)
- expect { remote_mirror.update }.to raise_error(StandardError)
+ expect { remote_mirror.update }.to raise_error(StandardError) # rubocop:disable Rails/SaveBang
end
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 73eecd3401a..6dfa791f70b 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -120,7 +120,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:expected_extension) { 'tar.gz' }
let(:expected_filename) { "#{expected_prefix}.#{expected_extension}" }
- let(:expected_path) { File.join(storage_path, cache_key, expected_filename) }
+ let(:expected_path) { File.join(storage_path, cache_key, "@v2", expected_filename) }
let(:expected_prefix) { "gitlab-git-test-#{ref}-#{SeedRepo::LastCommit::ID}" }
subject(:metadata) { repository.archive_metadata(ref, storage_path, 'gitlab-git-test', format, append_sha: append_sha, path: path) }
@@ -133,12 +133,32 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
expect(metadata['ArchivePrefix']).to eq(expected_prefix)
end
- it 'sets ArchivePath to the expected globally-unique path' do
- # This is really important from a security perspective. Think carefully
- # before changing it: https://gitlab.com/gitlab-org/gitlab-foss/issues/45689
- expect(expected_path).to include(File.join(repository.gl_repository, SeedRepo::LastCommit::ID))
+ context 'when :include_lfs_blobs_in_archive feature flag is disabled' do
+ let(:expected_path) { File.join(storage_path, cache_key, expected_filename) }
- expect(metadata['ArchivePath']).to eq(expected_path)
+ before do
+ stub_feature_flags(include_lfs_blobs_in_archive: false)
+ end
+
+ it 'sets ArchivePath to the expected globally-unique path' do
+ # This is really important from a security perspective. Think carefully
+ # before changing it: https://gitlab.com/gitlab-org/gitlab-foss/issues/45689
+ expect(expected_path).to include(File.join(repository.gl_repository, SeedRepo::LastCommit::ID))
+
+ expect(metadata['ArchivePath']).to eq(expected_path)
+ end
+ end
+
+ context 'when :include_lfs_blobs_in_archive feature flag is enabled' do
+ before do
+ stub_feature_flags(include_lfs_blobs_in_archive: true)
+ end
+
+ it 'sets ArchivePath to the expected globally-unique path' do
+ expect(expected_path).to include(File.join(repository.gl_repository, SeedRepo::LastCommit::ID))
+
+ expect(metadata['ArchivePath']).to eq(expected_path)
+ end
end
context 'path is set' do
@@ -1630,13 +1650,14 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:right_branch) { 'test-master' }
let(:first_parent_ref) { 'refs/heads/test-master' }
let(:target_ref) { 'refs/merge-requests/999/merge' }
+ let(:allow_conflicts) { false }
before do
repository.create_branch(right_branch, branch_head) unless repository.ref_exists?(first_parent_ref)
end
def merge_to_ref
- repository.merge_to_ref(user, left_sha, right_branch, target_ref, 'Merge message', first_parent_ref)
+ repository.merge_to_ref(user, left_sha, right_branch, target_ref, 'Merge message', first_parent_ref, allow_conflicts)
end
it 'generates a commit in the target_ref' do
@@ -2079,7 +2100,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:object_pool_rugged) { Rugged::Repository.new(object_pool_path) }
before do
- object_pool.create
+ object_pool.create # rubocop:disable Rails/SaveBang
end
it 'does not raise an error when disconnecting a non-linked repository' do
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index 4f6a3fb823e..16cea1dc1a3 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -7,7 +7,7 @@ require 'tempfile'
RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
- let(:feature_flag_name) { 'feature-flag-name' }
+ let(:feature_flag_name) { wrapper.rugged_feature_keys.first }
let(:temp_gitaly_metadata_file) { create_temporary_gitaly_metadata_file }
before(:all) do
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end
end
- context 'when feature flag is not persisted' do
+ context 'when feature flag is not persisted', stub_feature_flags: false do
context 'when running puma with multiple threads' do
before do
allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(true)
diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb
index a88097705f6..36bff42d937 100644
--- a/spec/lib/gitlab/git/wiki_spec.rb
+++ b/spec/lib/gitlab/git/wiki_spec.rb
@@ -51,6 +51,11 @@ RSpec.describe Gitlab::Git::Wiki do
expect(subject.page(title: 'page1', dir: '').url_path).to eq 'page1'
expect(subject.page(title: 'page1', dir: 'foo').url_path).to eq 'foo/page1'
end
+
+ it 'returns nil for invalid arguments' do
+ expect(subject.page(title: '')).to be_nil
+ expect(subject.page(title: 'foo', version: ':')).to be_nil
+ end
end
describe '#delete_page' do
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index 3b8b5fd82c6..8c481cdee08 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -232,29 +232,6 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
end
- context 'when geo is enabled', if: Gitlab.ee? do
- let(:user) { snippet.author }
- let!(:primary_node) { FactoryBot.create(:geo_node, :primary) }
-
- before do
- allow(::Gitlab::Database).to receive(:read_only?).and_return(true)
- allow(::Gitlab::Geo).to receive(:secondary_with_primary?).and_return(true)
- end
-
- # Without override, push access would return Gitlab::GitAccessResult::CustomAction
- it 'skips geo for snippet' do
- expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/)
- end
-
- context 'when user is migration bot' do
- let(:user) { migration_bot }
-
- it 'skips geo for snippet' do
- expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/)
- end
- end
- end
-
context 'when changes are specific' do
let(:changes) { "2d1db523e11e777e49377cfb22d368deec3f0793 ddd0f15ae83993f5cb66a927a28673882e99100b master" }
let(:user) { snippet.author }
@@ -283,7 +260,7 @@ RSpec.describe Gitlab::GitAccessSnippet do
service = double
expect(service).to receive(:validate!).and_return(nil)
- expect(Snippet).to receive(:max_file_limit).with(user).and_return(5)
+ expect(Snippet).to receive(:max_file_limit).and_return(5)
expect(Gitlab::Checks::PushFileCountCheck).to receive(:new).with(anything, hash_including(limit: 5)).and_return(service)
push_access_check
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 85567ab2e55..21607edbc32 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -420,6 +420,13 @@ RSpec.describe Gitlab::GitAccess do
expect { pull_access_check }.to raise_forbidden('Your account has been blocked.')
end
+ it 'disallows users that are blocked pending approval to pull' do
+ project.add_maintainer(user)
+ user.block_pending_approval
+
+ expect { pull_access_check }.to raise_forbidden('Your account is pending approval from your administrator and hence blocked.')
+ end
+
it 'disallows deactivated users to pull' do
project.add_maintainer(user)
user.deactivate!
@@ -428,14 +435,12 @@ RSpec.describe Gitlab::GitAccess do
end
context 'when the project repository does not exist' do
- it 'returns not found' do
+ before do
project.add_guest(user)
- repo = project.repository
- Gitlab::GitalyClient::StorageSettings.allow_disk_access { FileUtils.rm_rf(repo.path) }
-
- # Sanity check for rm_rf
- expect(repo.exists?).to eq(false)
+ allow(project.repository).to receive(:exists?).and_return(false)
+ end
+ it 'returns not found' do
expect { pull_access_check }.to raise_error(Gitlab::GitAccess::NotFoundError, 'A repository for this project does not exist yet.')
end
end
@@ -917,6 +922,12 @@ RSpec.describe Gitlab::GitAccess do
project.add_developer(user)
end
+ it 'disallows users that are blocked pending approval to push' do
+ user.block_pending_approval
+
+ expect { push_access_check }.to raise_forbidden('Your account is pending approval from your administrator and hence blocked.')
+ end
+
it 'does not allow deactivated users to push' do
user.deactivate!
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index 688089f4862..b78d99269d3 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -3,17 +3,17 @@
require 'spec_helper'
RSpec.describe Gitlab::GitAccessWiki do
- let(:access) { described_class.new(user, project, 'web', authentication_abilities: authentication_abilities, redirected_path: redirected_path) }
- let_it_be(:project) { create(:project, :wiki_repo) }
let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :wiki_repo) }
+ let_it_be(:wiki) { create(:project_wiki, project: project) }
let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] }
+ let(:authentication_abilities) { %i[read_project download_code push_code] }
let(:redirected_path) { nil }
- let(:authentication_abilities) do
- [
- :read_project,
- :download_code,
- :push_code
- ]
+
+ let(:access) do
+ described_class.new(user, wiki, 'web',
+ authentication_abilities: authentication_abilities,
+ redirected_path: redirected_path)
end
describe '#push_access_check' do
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::GitAccessWiki do
context 'when the repository does not exist' do
before do
- allow(project.wiki).to receive(:repository).and_return(double('Repository', exists?: false))
+ allow(wiki.repository).to receive(:exists?).and_return(false)
end
it_behaves_like 'not-found git access' do
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 9581b017839..f977fe1638f 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -13,6 +13,10 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
let(:client) { described_class.new(repository) }
describe '#diff_from_parent' do
+ before do
+ stub_feature_flags(increased_diff_limits: false)
+ end
+
context 'when a commit has a parent' do
it 'sends an RPC request with the parent ID as left commit' do
request = Gitaly::CommitDiffRequest.new(
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index b974f456914..ce01566b870 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -88,9 +88,10 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
let(:ref) { 'refs/merge-requests/x/merge' }
let(:message) { 'validación' }
+ let(:allow_conflicts) { false }
let(:response) { Gitaly::UserMergeToRefResponse.new(commit_id: 'new-commit-id') }
- subject { client.user_merge_to_ref(user, source_sha, nil, ref, message, first_parent_ref) }
+ subject { client.user_merge_to_ref(user, source_sha, nil, ref, message, first_parent_ref, allow_conflicts) }
it 'sends a user_merge_to_ref message' do
expect_any_instance_of(Gitaly::OperationService::Stub)
diff --git a/spec/lib/gitlab/gitpod_spec.rb b/spec/lib/gitlab/gitpod_spec.rb
index f4dda42aeb4..717e396f942 100644
--- a/spec/lib/gitlab/gitpod_spec.rb
+++ b/spec/lib/gitlab/gitpod_spec.rb
@@ -4,30 +4,29 @@ require 'spec_helper'
RSpec.describe Gitlab::Gitpod do
let_it_be(:user) { create(:user) }
- let(:feature_scope) { true }
before do
stub_feature_flags(gitpod: feature_scope)
end
- describe '.feature_conditional?' do
- subject { described_class.feature_conditional? }
-
- context 'when feature is enabled globally' do
- it { is_expected.to be_falsey }
- end
+ describe '.feature_available?' do
+ subject { described_class.feature_available? }
- context 'when feature is enabled only to a resource' do
- let(:feature_scope) { user }
+ context 'when feature has not been set' do
+ let(:feature_scope) { nil }
it { is_expected.to be_truthy }
end
- end
- describe '.feature_available?' do
- subject { described_class.feature_available? }
+ context 'when feature is disabled' do
+ let(:feature_scope) { false }
+
+ it { is_expected.to be_falsey }
+ end
context 'when feature is enabled globally' do
+ let(:feature_scope) { true }
+
it { is_expected.to be_truthy }
end
@@ -43,7 +42,15 @@ RSpec.describe Gitlab::Gitpod do
subject { described_class.feature_enabled?(current_user) }
+ context 'when feature has not been set' do
+ let(:feature_scope) { nil }
+
+ it { is_expected.to be_truthy }
+ end
+
context 'when feature is enabled globally' do
+ let(:feature_scope) { true }
+
it { is_expected.to be_truthy }
end
diff --git a/spec/lib/gitlab/gl_repository/identifier_spec.rb b/spec/lib/gitlab/gl_repository/identifier_spec.rb
index e95aaaa6690..e0622e30e7a 100644
--- a/spec/lib/gitlab/gl_repository/identifier_spec.rb
+++ b/spec/lib/gitlab/gl_repository/identifier_spec.rb
@@ -35,14 +35,14 @@ RSpec.describe Gitlab::GlRepository::Identifier do
it_behaves_like 'parsing gl_repository identifier' do
let(:record_id) { project.id }
let(:identifier) { "wiki-#{record_id}" }
- let(:expected_container) { project }
+ let(:expected_container) { project.wiki }
let(:expected_type) { Gitlab::GlRepository::WIKI }
end
it_behaves_like 'parsing gl_repository identifier' do
let(:record_id) { project.id }
let(:identifier) { "project-#{record_id}-wiki" }
- let(:expected_container) { project }
+ let(:expected_container) { project.wiki }
let(:expected_type) { Gitlab::GlRepository::WIKI }
end
end
@@ -87,7 +87,8 @@ RSpec.describe Gitlab::GlRepository::Identifier do
'project-wibble-wiki',
'wiki-1-project',
'snippet',
- 'project-1-wiki-bar'
+ 'project-1-wiki-bar',
+ 'project-1-project'
]
end
@@ -96,10 +97,5 @@ RSpec.describe Gitlab::GlRepository::Identifier do
expect { described_class.parse(identifier) }.to raise_error(described_class::InvalidIdentifier)
end
end
-
- it 'raises InvalidIdentifier on project-1-project' do
- pending 'https://gitlab.com/gitlab-org/gitlab/-/issues/219192'
- expect { described_class.parse('project-1-project') }.to raise_error(described_class::InvalidIdentifier)
- end
end
end
diff --git a/spec/lib/gitlab/gl_repository/repo_type_spec.rb b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
index 3fa636a1cf0..629e6c96858 100644
--- a/spec/lib/gitlab/gl_repository/repo_type_spec.rb
+++ b/spec/lib/gitlab/gl_repository/repo_type_spec.rb
@@ -41,12 +41,14 @@ RSpec.describe Gitlab::GlRepository::RepoType do
end
describe Gitlab::GlRepository::WIKI do
+ let(:wiki) { project.wiki }
+
it_behaves_like 'a repo type' do
- let(:expected_id) { project.id }
+ let(:expected_id) { wiki.project.id }
let(:expected_identifier) { "wiki-#{expected_id}" }
let(:expected_suffix) { '.wiki' }
- let(:expected_container) { project }
- let(:expected_repository) { ::Repository.new(project.wiki.full_path, project, shard: project.wiki.repository_storage, disk_path: project.wiki.disk_path, repo_type: Gitlab::GlRepository::WIKI) }
+ let(:expected_container) { wiki }
+ let(:expected_repository) { ::Repository.new(wiki.full_path, wiki, shard: wiki.repository_storage, disk_path: wiki.disk_path, repo_type: Gitlab::GlRepository::WIKI) }
end
it 'knows its type' do
diff --git a/spec/lib/gitlab/gl_repository_spec.rb b/spec/lib/gitlab/gl_repository_spec.rb
index 3733d545155..05914f92c01 100644
--- a/spec/lib/gitlab/gl_repository_spec.rb
+++ b/spec/lib/gitlab/gl_repository_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe ::Gitlab::GlRepository do
end
it 'parses a project wiki gl_repository' do
- expect(described_class.parse("wiki-#{project.id}")).to eq([project, project, Gitlab::GlRepository::WIKI])
+ expect(described_class.parse("wiki-#{project.id}")).to eq([project.wiki, project, Gitlab::GlRepository::WIKI])
end
it 'parses a snippet gl_repository' do
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index 95db6b2b4e0..3d3f381b6d2 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -10,6 +10,10 @@ RSpec.describe Gitlab::GonHelper do
end
describe '#push_frontend_feature_flag' do
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
it 'pushes a feature flag to the frontend' do
gon = instance_double('gon')
thing = stub_feature_flag_gate('thing')
diff --git a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
index e68c1446502..9538c4bae2b 100644
--- a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
end
it 'returns the correct duration in seconds' do
- Timecop.freeze(start_time) do
+ travel_to(start_time) do
subject.before
expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration_s': 1.hour.to_f })
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
index efe6c27c463..7576523ce52 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
@@ -19,24 +19,29 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
options.reverse_merge!(null: true)
field :test_field, field_type,
authorize: field_authorizations,
- resolve: -> (_, _, _) { resolved_value },
**options
+
+ define_method :test_field do
+ resolved_value
+ end
end
end
- let(:current_user) { double(:current_user) }
-
subject(:service) { described_class.new(field) }
describe '#authorized_resolve' do
- let(:presented_object) { double('presented object') }
- let(:presented_type) { double('parent type', object: presented_object) }
- let(:query_type) { GraphQL::ObjectType.new }
- let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:query_context) { OpenStruct.new(schema: schema) }
- let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema, context: query_context), values: { current_user: current_user }, object: nil) }
+ let_it_be(:current_user) { build(:user) }
+ let_it_be(:presented_object) { 'presented object' }
+ let_it_be(:query_type) { GraphQL::ObjectType.new }
+    let_it_be(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil) }
+ let_it_be(:query) { GraphQL::Query.new(schema, document: nil, context: {}, variables: {}) }
+ let_it_be(:context) { GraphQL::Query::Context.new(query: query, values: { current_user: current_user }, object: nil) }
+
+ let(:type_class) { type_with_field(custom_type, :read_field, presented_object) }
+ let(:type_instance) { type_class.authorized_new(presented_object, context) }
+ let(:field) { type_class.fields['testField'].to_graphql }
- subject(:resolved) { service.authorized_resolve.call(presented_type, {}, context) }
+ subject(:resolved) { service.authorized_resolve.call(type_instance, {}, context) }
context 'scalar types' do
shared_examples 'checking permissions on the presented object' do
@@ -48,7 +53,7 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
expect(resolved).to eq('Resolved value')
end
- it "returns nil if the value wasn't authorized" do
+ it 'returns nil if the value was not authorized' do
allow(Ability).to receive(:allowed?).and_return false
expect(resolved).to be_nil
@@ -56,28 +61,28 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
end
context 'when the field is a built-in scalar type' do
- let(:field) { type_with_field(GraphQL::STRING_TYPE, :read_field).fields['testField'].to_graphql }
+ let(:type_class) { type_with_field(GraphQL::STRING_TYPE, :read_field) }
let(:expected_permissions) { [:read_field] }
it_behaves_like 'checking permissions on the presented object'
end
context 'when the field is a list of scalar types' do
- let(:field) { type_with_field([GraphQL::STRING_TYPE], :read_field).fields['testField'].to_graphql }
+ let(:type_class) { type_with_field([GraphQL::STRING_TYPE], :read_field) }
let(:expected_permissions) { [:read_field] }
it_behaves_like 'checking permissions on the presented object'
end
context 'when the field is sub-classed scalar type' do
- let(:field) { type_with_field(Types::TimeType, :read_field).fields['testField'].to_graphql }
+ let(:type_class) { type_with_field(Types::TimeType, :read_field) }
let(:expected_permissions) { [:read_field] }
it_behaves_like 'checking permissions on the presented object'
end
context 'when the field is a list of sub-classed scalar types' do
- let(:field) { type_with_field([Types::TimeType], :read_field).fields['testField'].to_graphql }
+ let(:type_class) { type_with_field([Types::TimeType], :read_field) }
let(:expected_permissions) { [:read_field] }
it_behaves_like 'checking permissions on the presented object'
@@ -86,7 +91,7 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
context 'when the field is a connection' do
context 'when it resolves to nil' do
- let(:field) { type_with_field(Types::QueryType.connection_type, :read_field, nil).fields['testField'].to_graphql }
+ let(:type_class) { type_with_field(Types::QueryType.connection_type, :read_field, nil) }
it 'does not fail when authorizing' do
expect(resolved).to be_nil
@@ -97,7 +102,11 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
context 'when the field is a specific type' do
let(:custom_type) { type(:read_type) }
let(:object_in_field) { double('presented in field') }
- let(:field) { type_with_field(custom_type, :read_field, object_in_field).fields['testField'].to_graphql }
+
+ let(:type_class) { type_with_field(custom_type, :read_field, object_in_field) }
+ let(:type_instance) { type_class.authorized_new(object_in_field, context) }
+
+ subject(:resolved) { service.authorized_resolve.call(type_instance, {}, context) }
it 'checks both field & type permissions' do
spy_ability_check_for(:read_field, object_in_field, passed: true)
@@ -114,7 +123,7 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
end
context 'when the field is not nullable' do
- let(:field) { type_with_field(custom_type, [], object_in_field, null: false).fields['testField'].to_graphql }
+ let(:type_class) { type_with_field(custom_type, :read_field, object_in_field, null: false) }
it 'returns nil when viewing is not allowed' do
spy_ability_check_for(:read_type, object_in_field, passed: false)
@@ -127,7 +136,9 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
let(:object_1) { double('presented in field 1') }
let(:object_2) { double('presented in field 2') }
let(:presented_types) { [double(object: object_1), double(object: object_2)] }
- let(:field) { type_with_field([custom_type], :read_field, presented_types).fields['testField'].to_graphql }
+
+ let(:type_class) { type_with_field([custom_type], :read_field, presented_types) }
+ let(:type_instance) { type_class.authorized_new(presented_types, context) }
it 'checks all permissions' do
allow(Ability).to receive(:allowed?) { true }
diff --git a/spec/lib/gitlab/graphql/markdown_field/resolver_spec.rb b/spec/lib/gitlab/graphql/markdown_field/resolver_spec.rb
deleted file mode 100644
index af604e1c7d5..00000000000
--- a/spec/lib/gitlab/graphql/markdown_field/resolver_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::MarkdownField::Resolver do
- include Gitlab::Routing
- let(:resolver) { described_class.new(:note) }
-
- describe '#proc' do
- let(:project) { create(:project, :public) }
- let(:issue) { create(:issue, project: project) }
- let(:note) do
- create(:note,
- note: "Referencing #{issue.to_reference(full: true)}")
- end
-
- it 'renders markdown correctly' do
- expect(resolver.proc.call(note, {}, {})).to include(issue_path(issue))
- end
-
- context 'when the issue is not publicly accessible' do
- let(:project) { create(:project, :private) }
-
- it 'hides the references from users that are not allowed to see the reference' do
- expect(resolver.proc.call(note, {}, {})).not_to include(issue_path(issue))
- end
-
- it 'shows the reference to users that are allowed to see it' do
- expect(resolver.proc.call(note, {}, { current_user: project.owner }))
- .to include(issue_path(issue))
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb
index e3da925376e..82090f992eb 100644
--- a/spec/lib/gitlab/graphql/markdown_field_spec.rb
+++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb
@@ -2,6 +2,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Graphql::MarkdownField do
+ include Gitlab::Routing
+
describe '.markdown_field' do
it 'creates the field with some default attributes' do
field = class_with_markdown_field(:test_html, null: true, method: :hello).fields['testHtml']
@@ -13,7 +15,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
context 'developer warnings' do
- let(:expected_error) { /Only `method` is allowed to specify the markdown field/ }
+ let_it_be(:expected_error) { /Only `method` is allowed to specify the markdown field/ }
it 'raises when passing a resolver' do
expect { class_with_markdown_field(:test_html, null: true, resolver: 'not really') }
@@ -27,30 +29,61 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
context 'resolving markdown' do
- let(:note) { build(:note, note: '# Markdown!') }
- let(:thing_with_markdown) { double('markdown thing', object: note) }
- let(:expected_markdown) { '<h1 data-sourcepos="1:1-1:11" dir="auto">Markdown!</h1>' }
- let(:query_type) { GraphQL::ObjectType.new }
- let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
+ let_it_be(:note) { build(:note, note: '# Markdown!') }
+ let_it_be(:expected_markdown) { '<h1 data-sourcepos="1:1-1:11" dir="auto">Markdown!</h1>' }
+ let_it_be(:query_type) { GraphQL::ObjectType.new }
+    let_it_be(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil) }
+ let_it_be(:query) { GraphQL::Query.new(schema, document: nil, context: {}, variables: {}) }
+ let_it_be(:context) { GraphQL::Query::Context.new(query: query, values: {}, object: nil) }
+
+ let(:type_class) { class_with_markdown_field(:note_html, null: false) }
+ let(:type_instance) { type_class.authorized_new(note, context) }
+ let(:field) { type_class.fields['noteHtml'] }
it 'renders markdown from the same property as the field name without the `_html` suffix' do
- field = class_with_markdown_field(:note_html, null: false).fields['noteHtml']
+ expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ end
+
+ context 'when a `method` argument is passed' do
+ let(:type_class) { class_with_markdown_field(:test_html, null: false, method: :note) }
+ let(:field) { type_class.fields['testHtml'] }
- expect(field.to_graphql.resolve(thing_with_markdown, {}, context)).to eq(expected_markdown)
+ it 'renders markdown from a specific property' do
+ expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ end
end
- it 'renders markdown from a specific property when a `method` argument is passed' do
- field = class_with_markdown_field(:test_html, null: false, method: :note).fields['testHtml']
+ describe 'basic verification that references work' do
+ let_it_be(:project) { create(:project, :public) }
+ let(:issue) { create(:issue, project: project) }
+ let(:note) { build(:note, note: "Referencing #{issue.to_reference(full: true)}") }
+
+ it 'renders markdown correctly' do
+ expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
+ end
+
+ context 'when the issue is not publicly accessible' do
+ let_it_be(:project) { create(:project, :private) }
+
+ it 'hides the references from users that are not allowed to see the reference' do
+ expect(field.to_graphql.resolve(type_instance, {}, context)).not_to include(issue_path(issue))
+ end
+
+ it 'shows the reference to users that are allowed to see it' do
+ context = GraphQL::Query::Context.new(query: query, values: { current_user: project.owner }, object: nil)
+ type_instance = type_class.authorized_new(note, context)
- expect(field.to_graphql.resolve(thing_with_markdown, {}, context)).to eq(expected_markdown)
+ expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
+ end
+ end
end
end
end
def class_with_markdown_field(name, **args)
- Class.new(GraphQL::Schema::Object) do
+ Class.new(Types::BaseObject) do
prepend Gitlab::Graphql::MarkdownField
+ graphql_name 'MarkdownFieldTest'
markdown_field name, **args
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
new file mode 100644
index 00000000000..b45bb8b79d9
--- /dev/null
+++ b/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::Pagination::Keyset::LastItems do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let(:scope) { MergeRequest.order_merged_at_asc.with_order_id_desc }
+
+ subject { described_class.take_items(*args) }
+
+ context 'when the `count` parameter is nil' do
+ let(:args) { [scope, nil] }
+
+ it 'returns a single record' do
+ expect(subject).to eq(merge_request)
+ end
+ end
+
+ context 'when the `count` parameter is given' do
+ let(:args) { [scope, 1] }
+
+ it 'returns an array' do
+ expect(subject).to eq([merge_request])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
index 444c10074a0..eb28e6c8c0a 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
@@ -63,6 +63,29 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::OrderInfo do
expect(order_list.first.sort_direction).to eq :desc
end
end
+
+    context 'when ordering by CASE', :aggregate_failures do
+ let(:relation) { Project.order(Arel::Nodes::Case.new(Project.arel_table[:pending_delete]).when(true).then(100).else(1000).asc) }
+
+ it 'assigns the right attribute name, named function, and direction' do
+ expect(order_list.count).to eq 1
+ expect(order_list.first.attribute_name).to eq 'case_order_value'
+ expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::Case)
+ expect(order_list.first.sort_direction).to eq :asc
+ end
+ end
+
+    context 'when ordering by ARRAY_POSITION', :aggregate_failures do
+ let(:array_position) { Arel::Nodes::NamedFunction.new('ARRAY_POSITION', [Arel.sql("ARRAY[1,0]::smallint[]"), Project.arel_table[:auto_cancel_pending_pipelines]]) }
+ let(:relation) { Project.order(array_position.asc) }
+
+ it 'assigns the right attribute name, named function, and direction' do
+ expect(order_list.count).to eq 1
+ expect(order_list.first.attribute_name).to eq 'array_position'
+ expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::NamedFunction)
+ expect(order_list.first.sort_direction).to eq :asc
+ end
+ end
end
describe '#validate_ordering' do
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
index c7e7db4d535..fa631aa5666 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
@@ -136,11 +136,12 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::QueryBuilder do
let(:relation) { Project.sorted_by_similarity_desc('test', include_in_select: true) }
let(:arel_table) { Project.arel_table }
let(:decoded_cursor) { { 'similarity' => 0.5, 'id' => 100 } }
+ let(:similarity_function_call) { Gitlab::Database::SimilarityScore::SIMILARITY_FUNCTION_CALL_WITH_ANNOTATION }
let(:similarity_sql) do
[
- '(SIMILARITY(COALESCE("projects"."path", \'\'), \'test\') * CAST(\'1\' AS numeric))',
- '(SIMILARITY(COALESCE("projects"."name", \'\'), \'test\') * CAST(\'0.7\' AS numeric))',
- '(SIMILARITY(COALESCE("projects"."description", \'\'), \'test\') * CAST(\'0.2\' AS numeric))'
+ "(#{similarity_function_call}(COALESCE(\"projects\".\"path\", ''), 'test') * CAST('1' AS numeric))",
+ "(#{similarity_function_call}(COALESCE(\"projects\".\"name\", ''), 'test') * CAST('0.7' AS numeric))",
+ "(#{similarity_function_call}(COALESCE(\"projects\".\"description\", ''), 'test') * CAST('0.2' AS numeric))"
].join(' + ')
end
diff --git a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
index 89d2ab8bb87..c8432513185 100644
--- a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
+++ b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do
end
it 'returns a duration in seconds' do
- allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2])
+ allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2, [[], []]])
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
allow(Gitlab::GraphqlLogger).to receive(:info)
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 045c922783a..009f66d2108 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -17,10 +17,17 @@ RSpec.describe Gitlab::GroupSearchResults do
describe 'issues search' do
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
+ let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
+
let(:query) { 'foo' }
let(:scope) { 'issues' }
+ before do
+ project.add_developer(user)
+ end
+
include_examples 'search results filtered by state'
+ include_examples 'search results filtered by confidential'
end
describe 'merge_requests search' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 3126d87a0d6..5ee7fb2adbf 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -30,6 +30,7 @@ issues:
- metrics
- timelogs
- issuable_severity
+- issuable_sla
- issue_assignees
- closed_by
- epic_issue
@@ -51,6 +52,7 @@ issues:
- status_page_published_incident
- namespace
- note_authors
+- issue_email_participants
events:
- author
- project
@@ -158,6 +160,8 @@ merge_requests:
- assignees
- reviews
- approval_rules
+- approval_merge_request_rule_sources
+- approval_project_rules
- approvals
- approvers
- approver_users
@@ -242,6 +246,7 @@ ci_pipelines:
- latest_builds_report_results
- messages
- pipeline_artifacts
+- latest_statuses
ci_refs:
- project
- ci_pipelines
@@ -300,6 +305,7 @@ protected_branches:
- push_access_levels
- unprotect_access_levels
- approval_project_rules
+- required_code_owners_sections
protected_tags:
- project
- create_access_levels
@@ -408,6 +414,7 @@ project:
- stages
- ci_refs
- builds
+- processables
- runner_projects
- runners
- variables
@@ -465,6 +472,8 @@ project:
- feature_usage
- approval_rules
- approval_merge_request_rules
+- approval_merge_request_rule_sources
+- approval_project_rules
- approvers
- approver_users
- audit_events
@@ -536,6 +545,8 @@ project:
- vulnerability_historical_statistics
- product_analytics_events
- pipeline_artifacts
+- terraform_states
+- alert_management_http_integrations
award_emoji:
- awardable
- user
@@ -703,3 +714,5 @@ system_note_metadata:
- description_version
status_page_published_incident:
- issue
+issuable_sla:
+ - issue
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index 93b6f93f0ec..d084b9d7f7e 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -10,14 +10,17 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
# all items are properly serialized while traversing the simple hash.
subject { Gitlab::Json.parse(Gitlab::Json.generate(described_class.new(project, tree).execute)) }
- let!(:project) { setup_project }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { setup_project }
let(:shared) { project.import_export_shared }
let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
let(:tree) { reader.project_tree }
- before do
+ before_all do
project.add_maintainer(user)
+ end
+
+ before do
allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
end
@@ -224,7 +227,6 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
group: group,
approvals_before_merge: 1
)
- allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
issue = create(:issue, assignees: [user], project: project)
snippet = create(:project_snippet, project: project)
diff --git a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
index eb9a3fa9bd8..6b2f80cc80a 100644
--- a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
@@ -5,16 +5,19 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
let(:group) { create(:group) }
let(:members_mapper) { double('members_mapper').as_null_object }
- let(:user) { create(:admin) }
+ let(:admin) { create(:admin) }
+ let(:importer_user) { admin }
let(:excluded_keys) { [] }
let(:created_object) do
- described_class.create(relation_sym: relation_sym,
- relation_hash: relation_hash,
- members_mapper: members_mapper,
- object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
- user: user,
- importable: group,
- excluded_keys: excluded_keys)
+ described_class.create(
+ relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ members_mapper: members_mapper,
+ object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
+ user: importer_user,
+ importable: group,
+ excluded_keys: excluded_keys
+ )
end
context 'label object' do
@@ -24,18 +27,18 @@ RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
let(:relation_hash) do
{
- 'id' => 123456,
- 'title' => 'Bruffefunc',
- 'color' => '#1d2da4',
- 'project_id' => nil,
- 'created_at' => '2019-11-20T17:02:20.546Z',
- 'updated_at' => '2019-11-20T17:02:20.546Z',
- 'template' => false,
+ 'id' => 123456,
+ 'title' => 'Bruffefunc',
+ 'color' => '#1d2da4',
+ 'project_id' => nil,
+ 'created_at' => '2019-11-20T17:02:20.546Z',
+ 'updated_at' => '2019-11-20T17:02:20.546Z',
+ 'template' => false,
'description' => 'Description',
- 'group_id' => original_group_id,
- 'type' => 'GroupLabel',
- 'priorities' => [],
- 'textColor' => '#FFFFFF'
+ 'group_id' => original_group_id,
+ 'type' => 'GroupLabel',
+ 'priorities' => [],
+ 'textColor' => '#FFFFFF'
}
end
@@ -60,58 +63,28 @@ RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
end
end
- context 'Notes user references' do
- let(:relation_sym) { :notes }
- let(:new_user) { create(:user) }
- let(:exported_member) do
- {
- 'id' => 111,
- 'access_level' => 30,
- 'source_id' => 1,
- 'source_type' => 'Namespace',
- 'user_id' => 3,
- 'notification_level' => 3,
- 'created_at' => '2016-11-18T09:29:42.634Z',
- 'updated_at' => '2016-11-18T09:29:42.634Z',
- 'user' => {
- 'id' => 999,
- 'email' => new_user.email,
- 'username' => new_user.username
- }
- }
- end
-
+ it_behaves_like 'Notes user references' do
+ let(:importable) { group }
let(:relation_hash) do
{
- 'id' => 4947,
- 'note' => 'note',
+ 'id' => 4947,
+ 'note' => 'note',
'noteable_type' => 'Epic',
- 'author_id' => 999,
- 'created_at' => '2016-11-18T09:29:42.634Z',
- 'updated_at' => '2016-11-18T09:29:42.634Z',
- 'project_id' => 1,
- 'attachment' => {
+ 'author_id' => 999,
+ 'created_at' => '2016-11-18T09:29:42.634Z',
+ 'updated_at' => '2016-11-18T09:29:42.634Z',
+ 'project_id' => 1,
+ 'attachment' => {
'url' => nil
},
- 'noteable_id' => 377,
- 'system' => true,
- 'author' => {
+ 'noteable_id' => 377,
+ 'system' => true,
+ 'author' => {
'name' => 'Administrator'
},
'events' => []
}
end
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(
- exported_members: [exported_member],
- user: user,
- importable: group)
- end
-
- it 'maps the right author to the imported note' do
- expect(created_object.author).to eq(new_user)
- end
end
def random_id
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 9737a0f39fc..7a9e7d8afba 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe 'Test coverage of the Project Import' do
project.issues.notes.events
project.issues.notes.events.push_event_payload
project.issues.milestone.events.push_event_payload
+ project.issues.issuable_sla
project.issues.issue_milestones
project.issues.issue_milestones.milestone
project.issues.resource_label_events.label.priorities
diff --git a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
index a347d835428..e208a1c383c 100644
--- a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
+++ b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
@@ -102,4 +102,14 @@ RSpec.describe Gitlab::ImportExport::JSON::NdjsonReader do
end
end
end
+
+ describe '#clear_consumed_relations' do
+ let(:dir_path) { fixture }
+
+ subject { ndjson_reader.clear_consumed_relations }
+
+    it 'returns an empty set' do
+ expect(subject).to be_empty
+ end
+ end
end
diff --git a/spec/lib/gitlab/import_export/lfs_saver_spec.rb b/spec/lib/gitlab/import_export/lfs_saver_spec.rb
index db76eb9538b..55b4f7479b8 100644
--- a/spec/lib/gitlab/import_export/lfs_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/lfs_saver_spec.rb
@@ -74,14 +74,6 @@ RSpec.describe Gitlab::ImportExport::LfsSaver do
}
)
end
-
- it 'does not save a json file if feature is disabled' do
- stub_feature_flags(export_lfs_objects_projects: false)
-
- saver.save
-
- expect(File.exist?(lfs_json_file)).to eq(false)
- end
end
end
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 31cf2362628..50bc6a30044 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -3,19 +3,22 @@
require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
- let(:group) { create(:group) }
+ let(:group) { create(:group) }
let(:project) { create(:project, :repository, group: group) }
let(:members_mapper) { double('members_mapper').as_null_object }
- let(:user) { create(:admin) }
+ let(:admin) { create(:admin) }
+ let(:importer_user) { admin }
let(:excluded_keys) { [] }
let(:created_object) do
- described_class.create(relation_sym: relation_sym,
- relation_hash: relation_hash,
- object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
- members_mapper: members_mapper,
- user: user,
- importable: project,
- excluded_keys: excluded_keys)
+ described_class.create(
+ relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
+ members_mapper: members_mapper,
+ user: importer_user,
+ importable: project,
+ excluded_keys: excluded_keys
+ )
end
before do
@@ -113,9 +116,9 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
"created_at" => "2016-11-18T09:29:42.634Z",
"updated_at" => "2016-11-18T09:29:42.634Z",
"user" => {
- "id" => user.id,
- "email" => user.email,
- "username" => user.username
+ "id" => admin.id,
+ "email" => admin.email,
+ "username" => admin.username
}
}
end
@@ -123,7 +126,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
let(:members_mapper) do
Gitlab::ImportExport::MembersMapper.new(
exported_members: [exported_member],
- user: user,
+ user: importer_user,
importable: project)
end
@@ -134,9 +137,9 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
'source_branch' => "feature_conflict",
'source_project_id' => project.id,
'target_project_id' => project.id,
- 'author_id' => user.id,
- 'assignee_id' => user.id,
- 'updated_by_id' => user.id,
+ 'author_id' => admin.id,
+ 'assignee_id' => admin.id,
+ 'updated_by_id' => admin.id,
'title' => "MR1",
'created_at' => "2016-06-14T15:02:36.568Z",
'updated_at' => "2016-06-14T15:02:56.815Z",
@@ -151,11 +154,11 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
end
it 'has preloaded author' do
- expect(created_object.author).to equal(user)
+ expect(created_object.author).to equal(admin)
end
it 'has preloaded updated_by' do
- expect(created_object.updated_by).to equal(user)
+ expect(created_object.updated_by).to equal(admin)
end
it 'has preloaded source project' do
@@ -264,27 +267,8 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
end
end
- context 'Notes user references' do
- let(:relation_sym) { :notes }
- let(:new_user) { create(:user) }
- let(:exported_member) do
- {
- "id" => 111,
- "access_level" => 30,
- "source_id" => 1,
- "source_type" => "Project",
- "user_id" => 3,
- "notification_level" => 3,
- "created_at" => "2016-11-18T09:29:42.634Z",
- "updated_at" => "2016-11-18T09:29:42.634Z",
- "user" => {
- "id" => 999,
- "email" => new_user.email,
- "username" => new_user.username
- }
- }
- end
-
+ it_behaves_like 'Notes user references' do
+ let(:importable) { project }
let(:relation_hash) do
{
"id" => 4947,
@@ -305,17 +289,6 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
"events" => []
}
end
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(
- exported_members: [exported_member],
- user: user,
- importable: project)
- end
-
- it 'maps the right author to the imported note' do
- expect(created_object.author).to eq(new_user)
- end
end
context 'encrypted attributes' do
diff --git a/spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb b/spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb
new file mode 100644
index 00000000000..82f59245519
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/sample/date_calculator_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::Project::Sample::DateCalculator do
+  describe '#closest_date_to_average' do
+ subject { described_class.new(dates).closest_date_to_average }
+
+ context 'when dates are empty' do
+ let(:dates) { [] }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when dates are not empty' do
+ let(:dates) { [[nil, '2020-01-01 00:00:00 +0000'], [nil, '2021-01-01 00:00:00 +0000'], [nil, '2022-01-01 23:59:59 +0000']] }
+
+ it { is_expected.to eq(Time.zone.parse('2021-01-01 00:00:00 +0000')) }
+ end
+ end
+
+ describe '#calculate_by_closest_date_to_average' do
+ let(:calculator) { described_class.new([]) }
+ let(:date) { Time.current }
+
+ subject { calculator.calculate_by_closest_date_to_average(date) }
+
+ context 'when average date is nil' do
+ before do
+ allow(calculator).to receive(:closest_date_to_average).and_return(nil)
+ end
+
+ it { is_expected.to eq(date) }
+ end
+
+ context 'when average date is in the past' do
+ before do
+ allow(calculator).to receive(:closest_date_to_average).and_return(date - 365.days)
+ allow(Time).to receive(:current).and_return(date)
+ end
+
+ it { is_expected.to eq(date + 365.days) }
+ end
+
+ context 'when average date is in the future' do
+ before do
+ allow(calculator).to receive(:closest_date_to_average).and_return(date + 10.days)
+ end
+
+ it { is_expected.to eq(date) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/sample/sample_data_relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/sample/sample_data_relation_tree_restorer_spec.rb
new file mode 100644
index 00000000000..f173345a4c6
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/sample/sample_data_relation_tree_restorer_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+# This spec is a lightweight version of:
+# * project/tree_restorer_spec.rb
+#
+# In-depth testing is being done in the above specs.
+# This spec tests that restore of the sample project works
+# but does not have 100% relation coverage.
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::Project::Sample::SampleDataRelationTreeRestorer do
+ include_context 'relation tree restorer shared context'
+
+ let(:sample_data_relation_tree_restorer) do
+ described_class.new(
+ user: user,
+ shared: shared,
+ relation_reader: relation_reader,
+ object_builder: object_builder,
+ members_mapper: members_mapper,
+ relation_factory: relation_factory,
+ reader: reader,
+ importable: importable,
+ importable_path: importable_path,
+ importable_attributes: attributes
+ )
+ end
+
+ subject { sample_data_relation_tree_restorer.restore }
+
+ shared_examples 'import project successfully' do
+ it 'restores project tree' do
+ expect(subject).to eq(true)
+ end
+
+ describe 'imported project' do
+ let(:project) { Project.find_by_path('project') }
+
+ before do
+ subject
+ end
+
+ it 'has the project attributes and relations', :aggregate_failures do
+ expect(project.description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.')
+ expect(project.issues.count).to eq(10)
+ expect(project.milestones.count).to eq(3)
+ expect(project.labels.count).to eq(2)
+ expect(project.project_feature).not_to be_nil
+ end
+
+ it 'has issues with correctly updated due dates' do
+ due_dates = due_dates(project.issues)
+
+ expect(due_dates).to match_array([Date.today - 7.days, Date.today, Date.today + 7.days])
+ end
+
+ it 'has milestones with correctly updated due dates' do
+ due_dates = due_dates(project.milestones)
+
+ expect(due_dates).to match_array([Date.today - 7.days, Date.today, Date.today + 7.days])
+ end
+
+ def due_dates(relations)
+ due_dates = relations.map { |relation| relation['due_date'] }
+ due_dates.compact!
+ due_dates.sort
+ end
+ end
+ end
+
+ context 'when restoring a project' do
+ let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:importable_name) { 'project' }
+ let(:importable_path) { 'project' }
+ let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder }
+ let(:relation_factory) { Gitlab::ImportExport::Project::RelationFactory }
+ let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
+
+ context 'using ndjson reader' do
+ let(:path) { 'spec/fixtures/lib/gitlab/import_export/sample_data/tree' }
+ let(:relation_reader) { Gitlab::ImportExport::JSON::NdjsonReader.new(path) }
+
+ it_behaves_like 'import project successfully'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index f75494aa7c7..c05968c9a85 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -1040,6 +1040,41 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
it_behaves_like 'project tree restorer work properly', :legacy_reader, true
it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
+
+ context 'Sample Data JSON' do
+ let(:user) { create(:user) }
+ let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
+ let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
+
+ before do
+ setup_import_export_config('sample_data')
+ setup_reader(:ndjson_reader)
+ end
+
+ context 'with sample_data_template' do
+ before do
+ allow(project).to receive_message_chain(:import_data, :data, :dig).with('sample_data') { true }
+ end
+
+      it 'initializes SampleDataRelationTreeRestorer' do
+ expect_next_instance_of(Gitlab::ImportExport::Project::Sample::SampleDataRelationTreeRestorer) do |restorer|
+ expect(restorer).to receive(:restore).and_return(true)
+ end
+
+ expect(project_tree_restorer.restore).to eq(true)
+ end
+ end
+
+ context 'without sample_data_template' do
+      it 'initializes RelationTreeRestorer' do
+ expect_next_instance_of(Gitlab::ImportExport::RelationTreeRestorer) do |restorer|
+ expect(restorer).to receive(:restore).and_return(true)
+ end
+
+ expect(project_tree_restorer.restore).to eq(true)
+ end
+ end
+ end
end
context 'disable ndjson import' do
diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
index ddc96b83208..bd9ac6d6697 100644
--- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb
@@ -10,15 +10,7 @@
require 'spec_helper'
RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do
- include ImportExport::CommonUtil
-
- let(:user) { create(:user) }
- let(:shared) { Gitlab::ImportExport::Shared.new(importable) }
- let(:attributes) { relation_reader.consume_attributes(importable_name) }
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(exported_members: {}, user: user, importable: importable)
- end
+ include_context 'relation tree restorer shared context'
let(:relation_tree_restorer) do
described_class.new(
diff --git a/spec/lib/gitlab/import_export/repo_restorer_spec.rb b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
index ace4449042e..b32ae60fbcc 100644
--- a/spec/lib/gitlab/import_export/repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_restorer_spec.rb
@@ -36,21 +36,20 @@ RSpec.describe Gitlab::ImportExport::RepoRestorer do
expect(subject.restore).to be_truthy
end
- context 'when the repository creation fails' do
- before do
- allow_next_instance_of(Repositories::DestroyService) do |instance|
+ context 'when the repository already exists' do
+ it 'deletes the existing repository before importing' do
+ allow(project.repository).to receive(:exists?).and_return(true)
+ allow(project.repository).to receive(:path).and_return('repository_path')
+
+ expect_next_instance_of(Repositories::DestroyService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
- end
-
- it 'logs the error' do
- allow(project.repository)
- .to receive(:create_from_bundle)
- .and_raise('9:CreateRepositoryFromBundle: target directory is non-empty')
- expect(shared).to receive(:error).and_call_original
+ expect(shared.logger).to receive(:info).with(
+ message: 'Deleting existing "repository_path" to re-import it.'
+ )
- expect(subject.restore).to be_falsey
+ expect(subject.restore).to be_truthy
end
end
end
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 5ca7c5b7a91..e3d1f2c9368 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -855,3 +855,6 @@ ProjectSecuritySetting:
- auto_fix_sast
- created_at
- updated_at
+IssuableSla:
+ - issue_id
+ - due_at
diff --git a/spec/lib/gitlab/issuables_count_for_state_spec.rb b/spec/lib/gitlab/issuables_count_for_state_spec.rb
index d96152e47ea..a6170c146ab 100644
--- a/spec/lib/gitlab/issuables_count_for_state_spec.rb
+++ b/spec/lib/gitlab/issuables_count_for_state_spec.rb
@@ -4,14 +4,15 @@ require 'spec_helper'
RSpec.describe Gitlab::IssuablesCountForState do
let(:finder) do
- double(:finder, count_by_state: { opened: 2, closed: 1 })
+ double(:finder, current_user: nil, params: {}, count_by_state: { opened: 2, closed: 1 })
end
- let(:counter) { described_class.new(finder) }
+ let(:project) { nil }
+ let(:fast_fail) { nil }
+ let(:counter) { described_class.new(finder, project, fast_fail: fast_fail) }
describe 'project given' do
let(:project) { build(:project) }
- let(:counter) { described_class.new(finder, project) }
it 'provides the project' do
expect(counter.project).to eq(project)
@@ -50,5 +51,19 @@ RSpec.describe Gitlab::IssuablesCountForState do
it 'returns 0 when using an invalid state name as a String' do
expect(counter['kittens']).to be_zero
end
+
+ context 'fast_fail enabled' do
+ let(:fast_fail) { true }
+
+ it 'returns the expected value' do
+ expect(counter[:closed]).to eq(1)
+ end
+
+ it 'returns -1 when the database times out' do
+ expect(finder).to receive(:count_by_state).and_raise(ActiveRecord::QueryCanceled)
+
+ expect(counter[:closed]).to eq(-1)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb
index 7aa0a3485fb..a9edb2b530b 100644
--- a/spec/lib/gitlab/job_waiter_spec.rb
+++ b/spec/lib/gitlab/job_waiter_spec.rb
@@ -2,23 +2,26 @@
require 'spec_helper'
-RSpec.describe Gitlab::JobWaiter do
+RSpec.describe Gitlab::JobWaiter, :redis do
describe '.notify' do
it 'pushes the jid to the named queue' do
- key = 'gitlab:job_waiter:foo'
- jid = 1
+ key = described_class.new.key
- redis = double('redis')
- expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
- expect(redis).to receive(:lpush).with(key, jid)
+ described_class.notify(key, 123)
- described_class.notify(key, jid)
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(key)).to be > 0
+ end
end
end
describe '#wait' do
let(:waiter) { described_class.new(2) }
+ before do
+ allow_any_instance_of(described_class).to receive(:wait).and_call_original
+ end
+
it 'returns when all jobs have been completed' do
described_class.notify(waiter.key, 'a')
described_class.notify(waiter.key, 'b')
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index 90c11f29855..7b6d143dda9 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
let(:api_url) { 'https://kubernetes.example.com/prefix' }
let(:kubeclient_options) { { auth_options: { bearer_token: 'xyz' } } }
- let(:client) { described_class.new(api_url, kubeclient_options) }
+ let(:client) { described_class.new(api_url, **kubeclient_options) }
before do
stub_kubeclient_discover(api_url)
@@ -133,7 +133,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
end
it 'falls back to default options, but allows overriding' do
- client = Gitlab::Kubernetes::KubeClient.new(api_url, {})
+ client = described_class.new(api_url)
defaults = Gitlab::Kubernetes::KubeClient::DEFAULT_KUBECLIENT_OPTIONS
expect(client.kubeclient_options[:timeouts]).to eq(defaults[:timeouts])
@@ -347,6 +347,34 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
end
end
+ describe '#get_ingresses' do
+ let(:extensions_client) { client.extensions_client }
+ let(:networking_client) { client.networking_client }
+
+ include_examples 'redirection not allowed', 'get_ingresses'
+ include_examples 'dns rebinding not allowed', 'get_ingresses'
+
+ it 'delegates to the extensions client' do
+ expect(extensions_client).to receive(:get_ingresses)
+
+ client.get_ingresses
+ end
+
+ context 'extensions does not have ingresses for Kubernetes 1.22+ clusters' do
+ before do
+ WebMock
+ .stub_request(:get, api_url + '/apis/extensions/v1beta1')
+ .to_return(kube_response(kube_1_22_extensions_v1beta1_discovery_body))
+ end
+
+ it 'delegates to the networking client' do
+ expect(networking_client).to receive(:get_ingresses)
+
+ client.get_ingresses
+ end
+ end
+ end
+
describe 'istio API group' do
let(:istio_client) { client.istio_client }
diff --git a/spec/lib/gitlab/lfs/client_spec.rb b/spec/lib/gitlab/lfs/client_spec.rb
index 03563a632d6..1c50a2a7500 100644
--- a/spec/lib/gitlab/lfs/client_spec.rb
+++ b/spec/lib/gitlab/lfs/client_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe Gitlab::Lfs::Client do
let(:username) { 'user' }
let(:password) { 'password' }
let(:credentials) { { user: username, password: password, auth_method: 'password' } }
+ let(:git_lfs_content_type) { 'application/vnd.git-lfs+json' }
+ let(:git_lfs_user_agent) { "GitLab #{Gitlab::VERSION} LFS client" }
let(:basic_auth_headers) do
{ 'Authorization' => "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
@@ -21,6 +23,18 @@ RSpec.describe Gitlab::Lfs::Client do
}
end
+ let(:verify_action) do
+ {
+ "href" => "#{base_url}/some/file/verify",
+ "header" => {
+ "Key" => "value"
+ }
+ }
+ end
+
+ let(:authorized_upload_action) { upload_action.tap { |action| action['header']['Authorization'] = 'foo' } }
+ let(:authorized_verify_action) { verify_action.tap { |action| action['header']['Authorization'] = 'foo' } }
+
subject(:lfs_client) { described_class.new(base_url, credentials: credentials) }
describe '#batch' do
@@ -34,10 +48,10 @@ RSpec.describe Gitlab::Lfs::Client do
).to_return(
status: 200,
body: { 'objects' => 'anything', 'transfer' => 'basic' }.to_json,
- headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
+ headers: { 'Content-Type' => git_lfs_content_type }
)
- result = lfs_client.batch('upload', objects)
+ result = lfs_client.batch!('upload', objects)
expect(stub).to have_been_requested
expect(result).to eq('objects' => 'anything', 'transfer' => 'basic')
@@ -48,7 +62,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_batch(objects: objects, headers: basic_auth_headers).to_return(status: 400)
- expect { lfs_client.batch('upload', objects) }.to raise_error(/Failed/)
+ expect { lfs_client.batch!('upload', objects) }.to raise_error(/Failed/)
end
end
@@ -56,7 +70,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_batch(objects: objects, headers: basic_auth_headers).to_return(status: 400)
- expect { lfs_client.batch('upload', objects) }.to raise_error(/Failed/)
+ expect { lfs_client.batch!('upload', objects) }.to raise_error(/Failed/)
end
end
@@ -68,17 +82,23 @@ RSpec.describe Gitlab::Lfs::Client do
).to_return(
status: 200,
body: { 'transfer' => 'carrier-pigeon' }.to_json,
- headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
+ headers: { 'Content-Type' => git_lfs_content_type }
)
- expect { lfs_client.batch('upload', objects) }.to raise_error(/Unsupported transfer/)
+ expect { lfs_client.batch!('upload', objects) }.to raise_error(/Unsupported transfer/)
end
end
def stub_batch(objects:, headers:, operation: 'upload', transfer: 'basic')
- objects = objects.map { |o| { oid: o.oid, size: o.size } }
+ objects = objects.as_json(only: [:oid, :size])
body = { operation: operation, 'transfers': [transfer], objects: objects }.to_json
+ headers = {
+ 'Accept' => git_lfs_content_type,
+ 'Content-Type' => git_lfs_content_type,
+ 'User-Agent' => git_lfs_user_agent
+ }.merge(headers)
+
stub_request(:post, base_url + '/info/lfs/objects/batch').with(body: body, headers: headers)
end
end
@@ -90,7 +110,7 @@ RSpec.describe Gitlab::Lfs::Client do
it "makes an HTTP PUT with expected parameters" do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 200)
- lfs_client.upload(object, upload_action, authenticated: true)
+ lfs_client.upload!(object, upload_action, authenticated: true)
end
end
@@ -101,7 +121,20 @@ RSpec.describe Gitlab::Lfs::Client do
headers: basic_auth_headers.merge(upload_action['header'])
).to_return(status: 200)
- lfs_client.upload(object, upload_action, authenticated: false)
+ lfs_client.upload!(object, upload_action, authenticated: false)
+
+ expect(stub).to have_been_requested
+ end
+ end
+
+ context 'request is not marked as authenticated but includes an authorization header' do
+ it 'prefers the provided authorization header' do
+ stub = stub_upload(
+ object: object,
+ headers: authorized_upload_action['header']
+ ).to_return(status: 200)
+
+ lfs_client.upload!(object, authorized_upload_action, authenticated: false)
expect(stub).to have_been_requested
end
@@ -110,13 +143,13 @@ RSpec.describe Gitlab::Lfs::Client do
context 'LFS object has no file' do
let(:object) { LfsObject.new }
- it 'makes an HJTT PUT with expected parameters' do
+ it 'makes an HTTP PUT with expected parameters' do
stub = stub_upload(
object: object,
headers: upload_action['header']
).to_return(status: 200)
- lfs_client.upload(object, upload_action, authenticated: true)
+ lfs_client.upload!(object, upload_action, authenticated: true)
expect(stub).to have_been_requested
end
@@ -126,7 +159,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 400)
- expect { lfs_client.upload(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
+ expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
end
end
@@ -134,15 +167,88 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 500)
- expect { lfs_client.upload(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
+ expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
end
end
def stub_upload(object:, headers:)
+ headers = {
+ 'Content-Type' => 'application/octet-stream',
+ 'Content-Length' => object.size.to_s,
+ 'User-Agent' => git_lfs_user_agent
+ }.merge(headers)
+
stub_request(:put, upload_action['href']).with(
body: object.file.read,
headers: headers.merge('Content-Length' => object.size.to_s)
)
end
end
+
+ describe "#verify" do
+ let_it_be(:object) { create(:lfs_object) }
+
+ context 'server returns 200 OK to an authenticated request' do
+ it "makes an HTTP POST with expected parameters" do
+ stub_verify(object: object, headers: verify_action['header']).to_return(status: 200)
+
+ lfs_client.verify!(object, verify_action, authenticated: true)
+ end
+ end
+
+ context 'server returns 200 OK to an unauthenticated request' do
+ it "makes an HTTP POST with expected parameters" do
+ stub = stub_verify(
+ object: object,
+ headers: basic_auth_headers.merge(upload_action['header'])
+ ).to_return(status: 200)
+
+ lfs_client.verify!(object, verify_action, authenticated: false)
+
+ expect(stub).to have_been_requested
+ end
+ end
+
+ context 'request is not marked as authenticated but includes an authorization header' do
+ it 'prefers the provided authorization header' do
+ stub = stub_verify(
+ object: object,
+ headers: authorized_verify_action['header']
+ ).to_return(status: 200)
+
+ lfs_client.verify!(object, authorized_verify_action, authenticated: false)
+
+ expect(stub).to have_been_requested
+ end
+ end
+
+ context 'server returns 400 error' do
+ it 'raises an error' do
+ stub_verify(object: object, headers: verify_action['header']).to_return(status: 400)
+
+ expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed/)
+ end
+ end
+
+ context 'server returns 500 error' do
+ it 'raises an error' do
+ stub_verify(object: object, headers: verify_action['header']).to_return(status: 500)
+
+ expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed/)
+ end
+ end
+
+ def stub_verify(object:, headers:)
+ headers = {
+ 'Accept' => git_lfs_content_type,
+ 'Content-Type' => git_lfs_content_type,
+ 'User-Agent' => git_lfs_user_agent
+ }.merge(headers)
+
+ stub_request(:post, verify_action['href']).with(
+ body: object.to_json(only: [:oid, :size]),
+ headers: headers
+ )
+ end
+ end
end
diff --git a/spec/lib/gitlab/lfs_token_spec.rb b/spec/lib/gitlab/lfs_token_spec.rb
index 9b8b2c1417a..4b40e8960b2 100644
--- a/spec/lib/gitlab/lfs_token_spec.rb
+++ b/spec/lib/gitlab/lfs_token_spec.rb
@@ -104,7 +104,7 @@ RSpec.describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
# Needs to be at least LfsToken::DEFAULT_EXPIRE_TIME + 60 seconds
# in order to check whether it is valid 1 minute after it has expired
- Timecop.freeze(Time.now + described_class::DEFAULT_EXPIRE_TIME + 60) do
+ travel_to(Time.now + described_class::DEFAULT_EXPIRE_TIME + 60) do
expect(lfs_token.token_valid?(expired_token)).to be false
end
end
diff --git a/spec/lib/gitlab/manifest_import/manifest_spec.rb b/spec/lib/gitlab/manifest_import/manifest_spec.rb
index 2e8753b0880..352120c079d 100644
--- a/spec/lib/gitlab/manifest_import/manifest_spec.rb
+++ b/spec/lib/gitlab/manifest_import/manifest_spec.rb
@@ -12,19 +12,7 @@ RSpec.describe Gitlab::ManifestImport::Manifest do
end
context 'missing or invalid attributes' do
- let(:file) { Tempfile.new('foo') }
-
- before do
- content = <<~EOS
- <manifest>
- <remote review="invalid-url" />
- <project name="platform/build"/>
- </manifest>
- EOS
-
- file.write(content)
- file.rewind
- end
+ let(:file) { File.open(Rails.root.join('spec/fixtures/invalid_manifest.xml')) }
it { expect(manifest.valid?).to be false }
diff --git a/spec/lib/gitlab/manifest_import/metadata_spec.rb b/spec/lib/gitlab/manifest_import/metadata_spec.rb
new file mode 100644
index 00000000000..c8158d3e148
--- /dev/null
+++ b/spec/lib/gitlab/manifest_import/metadata_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ManifestImport::Metadata, :clean_gitlab_redis_shared_state do
+ let(:user) { double(id: 1) }
+ let(:repositories) do
+ [
+ { id: 'test1', url: 'http://demo.host/test1' },
+ { id: 'test2', url: 'http://demo.host/test2' }
+ ]
+ end
+
+ describe '#save' do
+ it 'stores data in Redis with an expiry of EXPIRY_TIME' do
+ status = described_class.new(user)
+ repositories_key = 'manifest_import:metadata:user:1:repositories'
+ group_id_key = 'manifest_import:metadata:user:1:group_id'
+
+ status.save(repositories, 2)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(repositories_key)).to be_within(5).of(described_class::EXPIRY_TIME)
+ expect(redis.ttl(group_id_key)).to be_within(5).of(described_class::EXPIRY_TIME)
+ end
+ end
+ end
+
+ describe '#repositories' do
+ it 'allows repositories to round-trip with symbol keys' do
+ status = described_class.new(user)
+
+ status.save(repositories, 2)
+
+ expect(status.repositories).to eq(repositories)
+ end
+
+ it 'uses the fallback when there is nothing in Redis' do
+ fallback = { manifest_import_repositories: repositories }
+ status = described_class.new(user, fallback: fallback)
+
+ expect(status.repositories).to eq(repositories)
+ end
+ end
+
+ describe '#group_id' do
+ it 'returns the group ID as an integer' do
+ status = described_class.new(user)
+
+ status.save(repositories, 2)
+
+ expect(status.group_id).to eq(2)
+ end
+
+ it 'uses the fallback when there is nothing in Redis' do
+ fallback = { manifest_import_group_id: 3 }
+ status = described_class.new(user, fallback: fallback)
+
+ expect(status.group_id).to eq(3)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb b/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
index 09d5e048f6a..ff8f5797f9d 100644
--- a/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
@@ -8,9 +8,16 @@ RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
describe '#execute' do
let(:project) { create(:project) }
let(:dashboard_path) { 'path/to/dashboard.yml' }
+ let(:prometheus_adapter) { double('adapter', clear_prometheus_reactive_cache!: nil) }
subject { described_class.new(dashboard_hash, project: project, dashboard_path: dashboard_path) }
+ before do
+ allow_next_instance_of(::Clusters::Applications::ScheduleUpdateService) do |update_service|
+ allow(update_service).to receive(:execute)
+ end
+ end
+
context 'valid dashboard' do
let(:dashboard_hash) { load_sample_dashboard }
@@ -21,20 +28,32 @@ RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
end
context 'with existing metrics' do
+ let(:existing_metric_attributes) do
+ {
+ project: project,
+ identifier: 'metric_b',
+ title: 'overwrite',
+ y_label: 'overwrite',
+ query: 'overwrite',
+ unit: 'overwrite',
+ legend: 'overwrite',
+ dashboard_path: dashboard_path
+ }
+ end
+
let!(:existing_metric) do
- create(:prometheus_metric, {
- project: project,
- identifier: 'metric_b',
- title: 'overwrite',
- y_label: 'overwrite',
- query: 'overwrite',
- unit: 'overwrite',
- legend: 'overwrite'
- })
+ create(:prometheus_metric, existing_metric_attributes)
+ end
+
+ let!(:existing_alert) do
+ alert = create(:prometheus_alert, project: project, prometheus_metric: existing_metric)
+ existing_metric.prometheus_alerts << alert
+
+ alert
end
it 'updates existing PrometheusMetrics' do
- described_class.new(dashboard_hash, project: project, dashboard_path: dashboard_path).execute
+ subject.execute
expect(existing_metric.reload.attributes.with_indifferent_access).to include({
title: 'Super Chart B',
@@ -49,6 +68,15 @@ RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
expect { subject.execute }.to change { PrometheusMetric.count }.by(2)
end
+ it 'updates affected environments' do
+ expect(::Clusters::Applications::ScheduleUpdateService).to receive(:new).with(
+ existing_alert.environment.cluster_prometheus_adapter,
+ project
+ ).and_return(double('ScheduleUpdateService', execute: true))
+
+ subject.execute
+ end
+
context 'with stale metrics' do
let!(:stale_metric) do
create(:prometheus_metric,
@@ -59,11 +87,45 @@ RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
)
end
+ let!(:stale_alert) do
+ alert = create(:prometheus_alert, project: project, prometheus_metric: stale_metric)
+ stale_metric.prometheus_alerts << alert
+
+ alert
+ end
+
+ it 'updates existing PrometheusMetrics' do
+ subject.execute
+
+ expect(existing_metric.reload.attributes.with_indifferent_access).to include({
+ title: 'Super Chart B',
+ y_label: 'y_label',
+ query: 'query',
+ unit: 'unit',
+ legend: 'Legend Label'
+ })
+ end
+
it 'deletes stale metrics' do
subject.execute
expect { stale_metric.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
+
+ it 'deletes stale alert' do
+ subject.execute
+
+ expect { stale_alert.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'updates affected environments' do
+ expect(::Clusters::Applications::ScheduleUpdateService).to receive(:new).with(
+ existing_alert.environment.cluster_prometheus_adapter,
+ project
+ ).and_return(double('ScheduleUpdateService', execute: true))
+
+ subject.execute
+ end
end
end
end
diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
index 69b779d36eb..631325402d9 100644
--- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
end
it 'increments requests count' do
- expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get')
+ expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get', status: 200, feature_category: 'unknown')
subject.call(env)
end
@@ -32,75 +32,55 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
end
it 'measures execution time' do
- expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ status: '200', method: 'get' }, a_positive_execution_time)
+ expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ method: 'get' }, a_positive_execution_time)
Timecop.scale(3600) { subject.call(env) }
end
context 'request is a health check endpoint' do
- it 'increments health endpoint counter' do
- env['PATH_INFO'] = '/-/liveness'
+ ['/-/liveness', '/-/liveness/', '/-/%6D%65%74%72%69%63%73'].each do |path|
+ context "when path is #{path}" do
+ before do
+ env['PATH_INFO'] = path
+ end
- expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get')
+ it 'increments health endpoint counter rather than overall counter' do
+ expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get', status: 200)
+ expect(described_class).not_to receive(:http_request_total)
- subject.call(env)
- end
-
- context 'with trailing slash' do
- before do
- env['PATH_INFO'] = '/-/liveness/'
- end
-
- it 'increments health endpoint counter' do
- expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get')
-
- subject.call(env)
- end
- end
-
- context 'with percent encoded values' do
- before do
- env['PATH_INFO'] = '/-/%6D%65%74%72%69%63%73' # /-/metrics
- end
+ subject.call(env)
+ end
- it 'increments health endpoint counter' do
- expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get')
+ it 'does not record the request duration' do
+ expect(described_class).not_to receive(:http_request_duration_seconds)
- subject.call(env)
+ subject.call(env)
+ end
end
end
end
context 'request is not a health check endpoint' do
- it 'does not increment health endpoint counter' do
- env['PATH_INFO'] = '/-/ordinary-requests'
-
- expect(described_class).not_to receive(:http_health_requests_total)
-
- subject.call(env)
- end
-
- context 'path info is a root path' do
- before do
- env['PATH_INFO'] = '/-/'
- end
-
- it 'does not increment health endpoint counter' do
- expect(described_class).not_to receive(:http_health_requests_total)
-
- subject.call(env)
- end
- end
-
- context 'path info is a subpath' do
- before do
- env['PATH_INFO'] = '/-/health/subpath'
- end
-
- it 'does not increment health endpoint counter' do
- expect(described_class).not_to receive(:http_health_requests_total)
-
- subject.call(env)
+ ['/-/ordinary-requests', '/-/', '/-/health/subpath'].each do |path|
+ context "when path is #{path}" do
+ before do
+ env['PATH_INFO'] = path
+ end
+
+ it 'increments overall counter rather than health endpoint counter' do
+ expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get', status: 200, feature_category: 'unknown')
+ expect(described_class).not_to receive(:http_health_requests_total)
+
+ subject.call(env)
+ end
+
+ it 'records the request duration' do
+ expect(described_class)
+ .to receive_message_chain(:http_request_duration_seconds, :observe)
+ .with({ method: 'get' }, a_positive_execution_time)
+
+ subject.call(env)
+ end
end
end
end
@@ -121,7 +101,7 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
end
it 'increments requests count' do
- expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get')
+ expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get', status: 'undefined', feature_category: 'unknown')
expect { subject.call(env) }.to raise_error(StandardError)
end
@@ -133,13 +113,32 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
end
end
+ context 'when a feature category header is present' do
+ before do
+ allow(app).to receive(:call).and_return([200, { described_class::FEATURE_CATEGORY_HEADER => 'issue_tracking' }, nil])
+ end
+
+ it 'adds the feature category to the labels for http_request_total' do
+ expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get', status: 200, feature_category: 'issue_tracking')
+
+ subject.call(env)
+ end
+
+ it 'does not record a feature category for health check endpoints' do
+ env['PATH_INFO'] = '/-/liveness'
+
+ expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get', status: 200)
+ expect(described_class).not_to receive(:http_request_total)
+
+ subject.call(env)
+ end
+ end
+
describe '.initialize_http_request_duration_seconds' do
it "sets labels" do
expected_labels = []
- described_class::HTTP_METHODS.each do |method, statuses|
- statuses.each do |status|
- expected_labels << { method: method, status: status.to_s }
- end
+ described_class::HTTP_METHODS.each do |method|
+ expected_labels << { method: method }
end
described_class.initialize_http_request_duration_seconds
diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb
index 1fffef53a82..7bac041cd65 100644
--- a/spec/lib/gitlab/middleware/go_spec.rb
+++ b/spec/lib/gitlab/middleware/go_spec.rb
@@ -135,6 +135,17 @@ RSpec.describe Gitlab::Middleware::Go do
it_behaves_like 'unauthorized'
end
+
+ context 'with a blacklisted ip' do
+ it 'returns forbidden' do
+ expect(Gitlab::Auth).to receive(:find_for_git_client).and_raise(Gitlab::Auth::IpBlacklisted)
+ response = go
+
+ expect(response[0]).to eq(403)
+ expect(response[1]['Content-Length']).to be_nil
+ expect(response[2]).to eq([''])
+ end
+ end
end
end
end
@@ -176,10 +187,11 @@ RSpec.describe Gitlab::Middleware::Go do
it 'returns 404' do
response = go
+
expect(response[0]).to eq(404)
expect(response[1]['Content-Type']).to eq('text/html')
expected_body = %{<html><body>go get #{Gitlab.config.gitlab.url}/#{project.full_path}</body></html>}
- expect(response[2].body).to eq([expected_body])
+ expect(response[2]).to eq([expected_body])
end
end
@@ -251,7 +263,7 @@ RSpec.describe Gitlab::Middleware::Go do
expect(response[0]).to eq(200)
expect(response[1]['Content-Type']).to eq('text/html')
expected_body = %{<html><head><meta name="go-import" content="#{Gitlab.config.gitlab.host}/#{path} git #{repository_url}" /><meta name="go-source" content="#{Gitlab.config.gitlab.host}/#{path} #{project_url} #{project_url}/-/tree/#{branch}{/dir} #{project_url}/-/blob/#{branch}{/dir}/{file}#L{line}" /></head><body>go get #{Gitlab.config.gitlab.url}/#{path}</body></html>}
- expect(response[2].body).to eq([expected_body])
+ expect(response[2]).to eq([expected_body])
end
end
end
diff --git a/spec/lib/gitlab/middleware/handle_null_bytes_spec.rb b/spec/lib/gitlab/middleware/handle_null_bytes_spec.rb
new file mode 100644
index 00000000000..76a5174817e
--- /dev/null
+++ b/spec/lib/gitlab/middleware/handle_null_bytes_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require "rack/test"
+
+RSpec.describe Gitlab::Middleware::HandleNullBytes do
+ let(:null_byte) { "\u0000" }
+ let(:error_400) { [400, {}, ["Bad Request"]] }
+ let(:app) { double(:app) }
+
+ subject { described_class.new(app) }
+
+ before do
+ allow(app).to receive(:call) do |args|
+ args
+ end
+ end
+
+ def env_for(params = {})
+ Rack::MockRequest.env_for('/', { params: params })
+ end
+
+ context 'with null bytes in params' do
+ it 'rejects null bytes in a top level param' do
+ env = env_for(name: "null#{null_byte}byte")
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it "responds with 400 BadRequest for hashes with strings" do
+ env = env_for(name: { inner_key: "I am #{null_byte} bad" })
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it "responds with 400 BadRequest for arrays with strings" do
+ env = env_for(name: ["I am #{null_byte} bad"])
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it "responds with 400 BadRequest for arrays containing hashes with string values" do
+ env = env_for(name: [
+ {
+ inner_key: "I am #{null_byte} bad"
+ }
+ ])
+
+ expect(subject.call(env)).to eq error_400
+ end
+
+ it "gives up and does not 400 with too deeply nested params" do
+ env = env_for(name: [
+ {
+ inner_key: { deeper_key: [{ hash_inside_array_key: "I am #{null_byte} bad" }] }
+ }
+ ])
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+ end
+
+ context 'without null bytes in params' do
+ it "does not respond with a 400 for strings" do
+ env = env_for(name: "safe name")
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+
+ it "does not respond with a 400 with no params" do
+ env = env_for
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+ end
+
+ context 'when disabled via env flag' do
+ before do
+ stub_env('REJECT_NULL_BYTES', '1')
+ end
+
+ it 'does not respond with a 400 no matter what' do
+ env = env_for(name: "null#{null_byte}byte")
+
+ expect(subject.call(env)).not_to eq error_400
+ end
+ end
+end
diff --git a/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb b/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
index cdb48024531..a9dae72f4db 100644
--- a/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
+++ b/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Middleware::RailsQueueDuration do
expect(transaction).to receive(:observe).with(:gitlab_rails_queue_duration_seconds, 1)
- Timecop.freeze(Time.at(3)) do
+ travel_to(Time.at(3)) do
expect(middleware.call(env)).to eq('yay')
end
end
diff --git a/spec/lib/gitlab/middleware/same_site_cookies_spec.rb b/spec/lib/gitlab/middleware/same_site_cookies_spec.rb
index 2d1a9b2eee2..18342fd78ac 100644
--- a/spec/lib/gitlab/middleware/same_site_cookies_spec.rb
+++ b/spec/lib/gitlab/middleware/same_site_cookies_spec.rb
@@ -60,12 +60,12 @@ RSpec.describe Gitlab::Middleware::SameSiteCookies do
end
context 'with no cookies' do
- let(:cookies) { nil }
+ let(:cookies) { "" }
it 'does not add headers' do
response = do_request
- expect(response['Set-Cookie']).to be_nil
+ expect(response['Set-Cookie']).to eq("")
end
end
diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
index be20f0194f7..c9a23170137 100644
--- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb
+++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
let(:request_context) { double("request_context") }
- subject do
+ subject(:paginator) do
described_class.new(request_context)
end
@@ -119,6 +119,34 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
subject.paginate(resource)
end
end
+
+ it 'does not return the total headers when excluding them' do
+ expect_no_header('X-Total')
+ expect_no_header('X-Total-Pages')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '1')
+
+ paginator.paginate(resource, exclude_total_headers: true)
+ end
+ end
+
+ context 'when resource is a paginatable array' do
+ let(:resource) { Kaminari.paginate_array(Project.all.to_a) }
+
+ it_behaves_like 'response with pagination headers'
+
+ it 'only returns the requested resources' do
+ expect(paginator.paginate(resource).count).to eq(2)
+ end
+
+ it 'does not return total headers when excluding them' do
+ expect_no_header('X-Total')
+ expect_no_header('X-Total-Pages')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '1')
+
+ paginator.paginate(resource, exclude_total_headers: true)
+ end
end
end
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index fe0735b8043..a76ad1f6f4c 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -265,9 +265,15 @@ RSpec.describe Gitlab::ProjectSearchResults do
let_it_be(:project) { create(:project, :public) }
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
+ let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
let(:query) { 'foo' }
+ before do
+ project.add_developer(user)
+ end
+
include_examples 'search results filtered by state'
+ include_examples 'search results filtered by confidential'
end
end
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index fa45c605b1b..98bd2efdbc6 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -8,9 +8,9 @@ RSpec.describe Gitlab::ProjectTemplate do
expected = %w[
rails spring express iosswift dotnetcore android
gomicro gatsby hugo jekyll plainhtml gitbook
- hexo sse_middleman nfhugo nfjekyll nfplainhtml
- nfgitbook nfhexo salesforcedx serverless_framework
- jsonnet cluster_management
+ hexo sse_middleman gitpod_spring_petclinic nfhugo
+ nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx
+ serverless_framework jsonnet cluster_management
]
expect(described_class.all).to be_an(Array)
diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
index 8abc944eeb1..b2350eff9f9 100644
--- a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do
around do |example|
- Timecop.freeze(Time.local(2008, 9, 1, 12, 0, 0)) { example.run }
+ travel_to(Time.local(2008, 9, 1, 12, 0, 0)) { example.run }
end
include_examples 'additional metrics query' do
diff --git a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
index 4683c4eae28..66b93d0dd72 100644
--- a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Prometheus::Queries::DeploymentQuery do
around do |example|
time_without_subsecond_values = Time.local(2008, 9, 1, 12, 0, 0)
- Timecop.freeze(time_without_subsecond_values) { example.run }
+ travel_to(time_without_subsecond_values) { example.run }
end
it 'sends appropriate queries to prometheus' do
diff --git a/spec/lib/gitlab/prometheus/query_variables_spec.rb b/spec/lib/gitlab/prometheus/query_variables_spec.rb
index 1422d48152a..1dbdb892a5d 100644
--- a/spec/lib/gitlab/prometheus/query_variables_spec.rb
+++ b/spec/lib/gitlab/prometheus/query_variables_spec.rb
@@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe Gitlab::Prometheus::QueryVariables do
describe '.call' do
+ let_it_be_with_refind(:environment) { create(:environment) }
let(:project) { environment.project }
- let(:environment) { create(:environment) }
let(:slug) { environment.slug }
let(:params) { {} }
- subject { described_class.call(environment, params) }
+ subject { described_class.call(environment, **params) }
it { is_expected.to include(ci_environment_slug: slug) }
it { is_expected.to include(ci_project_name: project.name) }
diff --git a/spec/lib/gitlab/redis/hll_spec.rb b/spec/lib/gitlab/redis/hll_spec.rb
index cbf78f23036..e452e5b2f52 100644
--- a/spec/lib/gitlab/redis/hll_spec.rb
+++ b/spec/lib/gitlab/redis/hll_spec.rb
@@ -39,6 +39,24 @@ RSpec.describe Gitlab::Redis::HLL, :clean_gitlab_redis_shared_state do
end
end
end
+
+ context 'when adding entries' do
+ let(:metric) { 'test-{metric}' }
+
+ it 'supports single value' do
+ track_event(metric, 1)
+
+ expect(count_unique_events([metric])).to eq(1)
+ end
+
+ it 'supports multiple values' do
+ stub_const("#{described_class.name}::HLL_BATCH_SIZE", 2)
+
+ track_event(metric, [1, 2, 3, 4, 5])
+
+ expect(count_unique_events([metric])).to eq(5)
+ end
+ end
end
describe '.count' do
@@ -94,13 +112,13 @@ RSpec.describe Gitlab::Redis::HLL, :clean_gitlab_redis_shared_state do
expect(unique_counts).to eq(4)
end
+ end
- def track_event(key, value, expiry = 1.day)
- described_class.add(key: key, value: value, expiry: expiry)
- end
+ def track_event(key, value, expiry = 1.day)
+ described_class.add(key: key, value: value, expiry: expiry)
+ end
- def count_unique_events(keys)
- described_class.count(keys: keys)
- end
+ def count_unique_events(keys)
+ described_class.count(keys: keys)
end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 88c3315150b..1c56e489a94 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -99,6 +99,36 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('foo-') }
end
+ describe '.build_trace_section_regex' do
+ subject { described_class.build_trace_section_regex }
+
+ context 'without options' do
+ example = "section_start:1600445393032:NAME\r\033\[0K"
+
+ it { is_expected.to match(example) }
+ it { is_expected.to match("section_end:12345678:aBcDeFg1234\r\033\[0K") }
+ it { is_expected.to match("section_start:0:sect_for_alpha-v1.0\r\033\[0K") }
+ it { is_expected.not_to match("section_start:section:0\r\033\[0K") }
+ it { is_expected.not_to match("section_:1600445393032:NAME\r\033\[0K") }
+ it { is_expected.not_to match(example.upcase) }
+ end
+
+ context 'with options' do
+ it { is_expected.to match("section_start:1600445393032:NAME[collapsed=true]\r\033\[0K") }
+ it { is_expected.to match("section_start:1600445393032:NAME[collapsed=true, example_option=false]\r\033\[0K") }
+ it { is_expected.to match("section_start:1600445393032:NAME[collapsed=true,example_option=false]\r\033\[0K") }
+ it { is_expected.to match("section_start:1600445393032:NAME[numeric_option=1234567]\r\033\[0K") }
+ # Without splitting the regex into separate section_start and section_end variants,
+ # this also matches; the options are simply ignored for section_end.
+ it { is_expected.to match("section_end:1600445393032:NAME[collapsed=true]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[collapsed=[]]]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[collapsed = true]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[collapsed = true, example_option=false]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[collapsed=true, example_option=false]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[]\r\033\[0K") }
+ end
+ end
+
describe '.container_repository_name_regex' do
subject { described_class.container_repository_name_regex }
@@ -317,6 +347,22 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
+ describe '.nuget_version_regex' do
+ subject { described_class.nuget_version_regex }
+
+ it { is_expected.to match('1.2.3') }
+ it { is_expected.to match('1.2.3.4') }
+ it { is_expected.to match('1.2.3.4-stable.1') }
+ it { is_expected.to match('1.2.3-beta') }
+ it { is_expected.to match('1.2.3-alpha.3') }
+ it { is_expected.to match('1.0.7+r3456') }
+ it { is_expected.not_to match('1') }
+ it { is_expected.not_to match('1.2') }
+ it { is_expected.not_to match('1./2.3') }
+ it { is_expected.not_to match('../../../../../1.2.3') }
+ it { is_expected.not_to match('%2e%2e%2f1.2.3') }
+ end
+
describe '.pypi_version_regex' do
subject { described_class.pypi_version_regex }
@@ -384,6 +430,140 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
+ describe '.debian_package_name_regex' do
+ subject { described_class.debian_package_name_regex }
+
+ it { is_expected.to match('0ad') }
+ it { is_expected.to match('g++') }
+ it { is_expected.to match('lua5.1') }
+ it { is_expected.to match('samba') }
+
+ # may not be empty string
+ it { is_expected.not_to match('') }
+ # must start with an alphanumeric character
+ it { is_expected.not_to match('-a') }
+ it { is_expected.not_to match('+a') }
+ it { is_expected.not_to match('.a') }
+ it { is_expected.not_to match('_a') }
+ # only letters, digits and characters '-+._'
+ it { is_expected.not_to match('a~') }
+ it { is_expected.not_to match('aé') }
+
+ # More strict Lintian regex
+ # at least 2 chars
+ it { is_expected.not_to match('a') }
+ # lowercase only
+ it { is_expected.not_to match('Aa') }
+ it { is_expected.not_to match('aA') }
+ # No underscore
+ it { is_expected.not_to match('a_b') }
+ end
+
+ describe '.debian_version_regex' do
+ subject { described_class.debian_version_regex }
+
+ context 'valid versions' do
+ it { is_expected.to match('1.0') }
+ it { is_expected.to match('1.0~alpha1') }
+ it { is_expected.to match('2:4.9.5+dfsg-5+deb10u1') }
+ end
+
+ context 'dpkg errors' do
+ # version string is empty
+ it { is_expected.not_to match('') }
+ # version string has embedded spaces
+ it { is_expected.not_to match('1 0') }
+ # epoch in version is empty
+ it { is_expected.not_to match(':1.0') }
+ # epoch in version is not number
+ it { is_expected.not_to match('a:1.0') }
+ # epoch in version is negative
+ it { is_expected.not_to match('-1:1.0') }
+ # epoch in version is too big
+ it { is_expected.not_to match('9999999999:1.0') }
+ # nothing after colon in version number
+ it { is_expected.not_to match('2:') }
+ # revision number is empty
+ # Note: we are less strict here
+ # it { is_expected.not_to match('1.0-') }
+ # version number is empty
+ it { is_expected.not_to match('-1') }
+ it { is_expected.not_to match('2:-1') }
+ end
+
+ context 'dpkg warnings' do
+ # version number does not start with digit
+ it { is_expected.not_to match('a') }
+ it { is_expected.not_to match('a1.0') }
+ # invalid character in version number
+ it { is_expected.not_to match('1_0') }
+ # invalid character in revision number
+ it { is_expected.not_to match('1.0-1_0') }
+ end
+
+ context 'dpkg accepts' do
+ # dpkg accepts leading or trailing space
+ it { is_expected.not_to match(' 1.0') }
+ it { is_expected.not_to match('1.0 ') }
+ # dpkg accepts multiple colons
+ it { is_expected.not_to match('1:2:3') }
+ end
+ end
+
+ describe '.debian_architecture_regex' do
+ subject { described_class.debian_architecture_regex }
+
+ it { is_expected.to match('amd64') }
+ it { is_expected.to match('kfreebsd-i386') }
+
+ # may not be empty string
+ it { is_expected.not_to match('') }
+ # must start with an alphanumeric
+ it { is_expected.not_to match('-a') }
+ it { is_expected.not_to match('+a') }
+ it { is_expected.not_to match('.a') }
+ it { is_expected.not_to match('_a') }
+ # only letters, digits and characters '-'
+ it { is_expected.not_to match('a+b') }
+ it { is_expected.not_to match('a.b') }
+ it { is_expected.not_to match('a_b') }
+ it { is_expected.not_to match('a~') }
+ it { is_expected.not_to match('aé') }
+
+ # More strict
+ # Enforce lowercase
+ it { is_expected.not_to match('AMD64') }
+ it { is_expected.not_to match('Amd64') }
+ it { is_expected.not_to match('aMD64') }
+ end
+
+ describe '.debian_distribution_regex' do
+ subject { described_class.debian_distribution_regex }
+
+ it { is_expected.to match('buster') }
+ it { is_expected.to match('buster-updates') }
+ it { is_expected.to match('Debian10.5') }
+
+ # Do not allow slash, even if this exists in the wild
+ it { is_expected.not_to match('jessie/updates') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+
+ describe '.debian_component_regex' do
+ subject { described_class.debian_component_regex }
+
+ it { is_expected.to match('main') }
+ it { is_expected.to match('non-free') }
+
+ # Do not allow slash
+ it { is_expected.not_to match('non/free') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+
describe '.semver_regex' do
subject { described_class.semver_regex }
@@ -434,4 +614,45 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
it { is_expected.not_to match('') }
end
+
+ describe '.generic_package_name_regex' do
+ subject { described_class.generic_package_name_regex }
+
+ it { is_expected.to match('123') }
+ it { is_expected.to match('foo') }
+ it { is_expected.to match('foo.bar.baz-2.0-20190901.47283-1') }
+ it { is_expected.not_to match('../../foo') }
+ it { is_expected.not_to match('..\..\foo') }
+ it { is_expected.not_to match('%2f%2e%2e%2f%2essh%2fauthorized_keys') }
+ it { is_expected.not_to match('$foo/bar') }
+ it { is_expected.not_to match('my file name') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.generic_package_file_name_regex' do
+ subject { described_class.generic_package_file_name_regex }
+
+ it { is_expected.to match('123') }
+ it { is_expected.to match('foo') }
+ it { is_expected.to match('foo.bar.baz-2.0-20190901.47283-1.jar') }
+ it { is_expected.not_to match('../../foo') }
+ it { is_expected.not_to match('..\..\foo') }
+ it { is_expected.not_to match('%2f%2e%2e%2f%2essh%2fauthorized_keys') }
+ it { is_expected.not_to match('$foo/bar') }
+ it { is_expected.not_to match('my file name') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.prefixed_semver_regex' do
+ subject { described_class.prefixed_semver_regex }
+
+ it { is_expected.to match('v1.2.3') }
+ it { is_expected.to match('v1.2.3-beta') }
+ it { is_expected.to match('v1.2.3-alpha.3') }
+ it { is_expected.not_to match('v1') }
+ it { is_expected.not_to match('v1.2') }
+ it { is_expected.not_to match('v1./2.3') }
+ it { is_expected.not_to match('v../../../../../1.2.3') }
+ it { is_expected.not_to match('v%2e%2e%2f1.2.3') }
+ end
end
diff --git a/spec/lib/gitlab/relative_positioning/mover_spec.rb b/spec/lib/gitlab/relative_positioning/mover_spec.rb
index c49230c2415..dafd34585a8 100644
--- a/spec/lib/gitlab/relative_positioning/mover_spec.rb
+++ b/spec/lib/gitlab/relative_positioning/mover_spec.rb
@@ -37,18 +37,11 @@ RSpec.describe RelativePositioning::Mover do
end
def set_positions(positions)
- vals = issues.zip(positions).map do |issue, pos|
- issue.relative_position = pos
- "(#{issue.id}, #{pos})"
- end.join(', ')
-
- Issue.connection.exec_query(<<~SQL, 'set-positions')
- WITH cte(cte_id, new_pos) AS (
- SELECT * FROM (VALUES #{vals}) as t (id, pos)
- )
- UPDATE issues SET relative_position = new_pos FROM cte WHERE id = cte_id
- ;
- SQL
+ mapping = issues.zip(positions).to_h do |issue, pos|
+ [issue, { relative_position: pos }]
+ end
+
+ ::Gitlab::Database::BulkUpdate.execute([:relative_position], mapping)
end
def ids_in_position_order
diff --git a/spec/lib/gitlab/repo_path_spec.rb b/spec/lib/gitlab/repo_path_spec.rb
index 05f32459164..912efa6a5db 100644
--- a/spec/lib/gitlab/repo_path_spec.rb
+++ b/spec/lib/gitlab/repo_path_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe ::Gitlab::RepoPath do
end
it 'parses a full wiki project path' do
- expect(described_class.parse(project.wiki.repository.full_path)).to eq([project, project, Gitlab::GlRepository::WIKI, nil])
+ expect(described_class.parse(project.wiki.repository.full_path)).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, nil])
end
it 'parses a personal snippet repository path' do
@@ -36,7 +36,7 @@ RSpec.describe ::Gitlab::RepoPath do
end
it 'parses a relative wiki path' do
- expect(described_class.parse(project.full_path + '.wiki.git')).to eq([project, project, Gitlab::GlRepository::WIKI, nil])
+ expect(described_class.parse(project.full_path + '.wiki.git')).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, nil])
end
it 'parses a relative path starting with /' do
@@ -49,7 +49,7 @@ RSpec.describe ::Gitlab::RepoPath do
end
it 'parses a relative wiki path' do
- expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project, project, Gitlab::GlRepository::WIKI, redirect_route])
+ expect(described_class.parse(redirect.path + '.wiki.git')).to eq([project.wiki, project, Gitlab::GlRepository::WIKI, redirect_route])
end
it 'parses a relative path starting with /' do
diff --git a/spec/lib/gitlab/repository_size_checker_spec.rb b/spec/lib/gitlab/repository_size_checker_spec.rb
index 9b2c02b1190..bd030d81d97 100644
--- a/spec/lib/gitlab/repository_size_checker_spec.rb
+++ b/spec/lib/gitlab/repository_size_checker_spec.rb
@@ -3,14 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::RepositorySizeChecker do
+ let_it_be(:namespace) { nil }
let(:current_size) { 0 }
let(:limit) { 50 }
let(:enabled) { true }
subject do
described_class.new(
- current_size_proc: -> { current_size },
- limit: limit,
+ current_size_proc: -> { current_size.megabytes },
+ limit: limit.megabytes,
+ namespace: namespace,
enabled: enabled
)
end
@@ -18,7 +20,7 @@ RSpec.describe Gitlab::RepositorySizeChecker do
describe '#enabled?' do
context 'when enabled' do
it 'returns true' do
- expect(subject.enabled?).to be_truthy
+ expect(subject.enabled?).to eq(true)
end
end
@@ -26,7 +28,7 @@ RSpec.describe Gitlab::RepositorySizeChecker do
let(:limit) { 0 }
it 'returns false' do
- expect(subject.enabled?).to be_falsey
+ expect(subject.enabled?).to eq(false)
end
end
end
@@ -35,59 +37,20 @@ RSpec.describe Gitlab::RepositorySizeChecker do
let(:current_size) { 49 }
it 'returns true when changes go over' do
- expect(subject.changes_will_exceed_size_limit?(2)).to be_truthy
+ expect(subject.changes_will_exceed_size_limit?(2.megabytes)).to eq(true)
end
it 'returns false when changes do not go over' do
- expect(subject.changes_will_exceed_size_limit?(1)).to be_falsey
+ expect(subject.changes_will_exceed_size_limit?(1.megabytes)).to eq(false)
end
end
describe '#above_size_limit?' do
- context 'when size is above the limit' do
- let(:current_size) { 100 }
-
- it 'returns true' do
- expect(subject.above_size_limit?).to be_truthy
- end
- end
-
- it 'returns false when not over the limit' do
- expect(subject.above_size_limit?).to be_falsey
- end
+ include_examples 'checker size above limit'
+ include_examples 'checker size not over limit'
end
describe '#exceeded_size' do
- context 'when current size is below or equal to the limit' do
- let(:current_size) { 50 }
-
- it 'returns zero' do
- expect(subject.exceeded_size).to eq(0)
- end
- end
-
- context 'when current size is over the limit' do
- let(:current_size) { 51 }
-
- it 'returns zero' do
- expect(subject.exceeded_size).to eq(1)
- end
- end
-
- context 'when change size will be over the limit' do
- let(:current_size) { 50 }
-
- it 'returns zero' do
- expect(subject.exceeded_size(1)).to eq(1)
- end
- end
-
- context 'when change size will not be over the limit' do
- let(:current_size) { 49 }
-
- it 'returns zero' do
- expect(subject.exceeded_size(1)).to eq(0)
- end
- end
+ include_examples 'checker size exceeded'
end
end
diff --git a/spec/lib/gitlab/repository_size_error_message_spec.rb b/spec/lib/gitlab/repository_size_error_message_spec.rb
index b6b975143c9..53b5ed5518f 100644
--- a/spec/lib/gitlab/repository_size_error_message_spec.rb
+++ b/spec/lib/gitlab/repository_size_error_message_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
RSpec.describe Gitlab::RepositorySizeErrorMessage do
+ let_it_be(:namespace) { build(:namespace) }
let(:checker) do
Gitlab::RepositorySizeChecker.new(
current_size_proc: -> { 15.megabytes },
+ namespace: namespace,
limit: 10.megabytes
)
end
@@ -13,6 +15,10 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
let(:message) { checker.error_message }
let(:base_message) { 'because this repository has exceeded its size limit of 10 MB by 5 MB' }
+ before do
+ allow(namespace).to receive(:total_repository_size_excess).and_return(0)
+ end
+
describe 'error messages' do
describe '#commit_error' do
it 'returns the correct message' do
diff --git a/spec/lib/gitlab/sample_data_template_spec.rb b/spec/lib/gitlab/sample_data_template_spec.rb
new file mode 100644
index 00000000000..7d0d415b3af
--- /dev/null
+++ b/spec/lib/gitlab/sample_data_template_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SampleDataTemplate do
+ describe '.all' do
+ it 'returns all templates' do
+ expected = %w[
+ basic
+ serenity_valley
+ ]
+
+ expect(described_class.all).to be_an(Array)
+ expect(described_class.all.map(&:name)).to match_array(expected)
+ end
+ end
+
+ describe '.find' do
+ subject { described_class.find(query) }
+
+ context 'when there is a match' do
+ let(:query) { :basic }
+
+ it { is_expected.to be_a(described_class) }
+ end
+
+ context 'when there is no match' do
+ let(:query) { 'no-match' }
+
+ it { is_expected.to be(nil) }
+ end
+ end
+
+ describe '.archive_directory' do
+ subject { described_class.archive_directory }
+
+ it { is_expected.to be_a Pathname }
+ end
+
+ describe 'validate all templates' do
+ let_it_be(:admin) { create(:admin) }
+
+ described_class.all.each do |template|
+ it "#{template.name} has a valid archive" do
+ archive = template.archive_path
+
+ expect(File.exist?(archive)).to be(true)
+ end
+
+ context 'with valid parameters' do
+ it 'can be imported' do
+ params = {
+ template_name: template.name,
+ namespace_id: admin.namespace.id,
+ path: template.name
+ }
+
+ project = Projects::CreateFromTemplateService.new(admin, params).execute
+
+ expect(project).to be_valid
+ expect(project).to be_persisted
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/search/recent_issues_spec.rb b/spec/lib/gitlab/search/recent_issues_spec.rb
index 19a41d2aa38..c6d93173dc0 100644
--- a/spec/lib/gitlab/search/recent_issues_spec.rb
+++ b/spec/lib/gitlab/search/recent_issues_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe ::Gitlab::Search::RecentIssues do
- def create_item(content:, project:)
- create(:issue, title: content, project: project)
+ let(:parent_type) { :project }
+
+ def create_item(content:, parent:)
+ create(:issue, title: content, project: parent)
end
it_behaves_like 'search recent items'
diff --git a/spec/lib/gitlab/search/recent_merge_requests_spec.rb b/spec/lib/gitlab/search/recent_merge_requests_spec.rb
index c6678ce0342..1da3e1425d9 100644
--- a/spec/lib/gitlab/search/recent_merge_requests_spec.rb
+++ b/spec/lib/gitlab/search/recent_merge_requests_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe ::Gitlab::Search::RecentMergeRequests do
- def create_item(content:, project:)
- create(:merge_request, :unique_branches, title: content, target_project: project, source_project: project)
+ let(:parent_type) { :project }
+
+ def create_item(content:, parent:)
+ create(:merge_request, :unique_branches, title: content, target_project: parent, source_project: parent)
end
it_behaves_like 'search recent items'
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index b4cf6a568b4..57be9e93af2 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -11,9 +11,11 @@ RSpec.describe Gitlab::SearchResults do
let_it_be(:issue) { create(:issue, project: project, title: 'foo') }
let_it_be(:milestone) { create(:milestone, project: project, title: 'foo') }
let(:merge_request) { create(:merge_request, source_project: project, title: 'foo') }
+ let(:query) { 'foo' }
let(:filters) { {} }
+ let(:sort) { nil }
- subject(:results) { described_class.new(user, 'foo', Project.order(:id), filters: filters) }
+ subject(:results) { described_class.new(user, query, Project.order(:id), sort: sort, filters: filters) }
context 'as a user with access' do
before do
@@ -58,6 +60,25 @@ RSpec.describe Gitlab::SearchResults do
end
end
+ describe '#highlight_map' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:scope, :expected) do
+ 'projects' | {}
+ 'issues' | {}
+ 'merge_requests' | {}
+ 'milestones' | {}
+ 'users' | {}
+ 'unknown' | {}
+ end
+
+ with_them do
+ it 'returns the expected highlight_map' do
+ expect(results.highlight_map(scope)).to eq(expected)
+ end
+ end
+ end
+
describe '#formatted_limited_count' do
using RSpec::Parameterized::TableSyntax
@@ -137,10 +158,12 @@ RSpec.describe Gitlab::SearchResults do
end
describe '#merge_requests' do
+ let(:scope) { 'merge_requests' }
+
it 'includes project filter by default' do
expect(results).to receive(:project_ids_relation).and_call_original
- results.objects('merge_requests')
+ results.objects(scope)
end
it 'skips project filter if default project context is used' do
@@ -148,24 +171,34 @@ RSpec.describe Gitlab::SearchResults do
expect(results).not_to receive(:project_ids_relation)
- results.objects('merge_requests')
+ results.objects(scope)
end
context 'filtering' do
let!(:opened_result) { create(:merge_request, :opened, source_project: project, title: 'foo opened') }
let!(:closed_result) { create(:merge_request, :closed, source_project: project, title: 'foo closed') }
- let(:scope) { 'merge_requests' }
let(:query) { 'foo' }
include_examples 'search results filtered by state'
end
+
+ context 'ordering' do
+ let(:query) { 'sorted' }
+ let!(:old_result) { create(:merge_request, :opened, source_project: project, source_branch: 'old-1', title: 'sorted old', created_at: 1.month.ago) }
+ let!(:new_result) { create(:merge_request, :opened, source_project: project, source_branch: 'new-1', title: 'sorted recent', created_at: 1.day.ago) }
+ let!(:very_old_result) { create(:merge_request, :opened, source_project: project, source_branch: 'very-old-1', title: 'sorted very old', created_at: 1.year.ago) }
+
+ include_examples 'search results sorted'
+ end
end
describe '#issues' do
+ let(:scope) { 'issues' }
+
it 'includes project filter by default' do
expect(results).to receive(:project_ids_relation).and_call_original
- results.objects('issues')
+ results.objects(scope)
end
it 'skips project filter if default project context is used' do
@@ -173,16 +206,25 @@ RSpec.describe Gitlab::SearchResults do
expect(results).not_to receive(:project_ids_relation)
- results.objects('issues')
+ results.objects(scope)
end
context 'filtering' do
- let(:scope) { 'issues' }
-
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo open') }
+ let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
include_examples 'search results filtered by state'
+ include_examples 'search results filtered by confidential'
+ end
+
+ context 'ordering' do
+ let(:query) { 'sorted' }
+ let!(:old_result) { create(:issue, project: project, title: 'sorted old', created_at: 1.month.ago) }
+ let!(:new_result) { create(:issue, project: project, title: 'sorted recent', created_at: 1.day.ago) }
+ let!(:very_old_result) { create(:issue, project: project, title: 'sorted very old', created_at: 1.year.ago) }
+
+ include_examples 'search results sorted'
end
end
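
Note on the pattern above: the new #highlight_map block relies on RSpec::Parameterized::TableSyntax, where each row of the where table becomes its own generated example and the column values are exposed as methods inside with_them. A minimal standalone sketch of the syntax (hypothetical spec, not part of this diff):

  require 'spec_helper'

  RSpec.describe 'table syntax sketch' do
    using RSpec::Parameterized::TableSyntax

    # Each row below yields one example; :input and :upcased become reader
    # methods inside the with_them block.
    where(:input, :upcased) do
      'foo' | 'FOO'
      'Bar' | 'BAR'
    end

    with_them do
      it 'upcases the input' do
        expect(input.upcase).to eq(upcased)
      end
    end
  end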
diff --git a/spec/lib/gitlab/sidekiq_cluster_spec.rb b/spec/lib/gitlab/sidekiq_cluster_spec.rb
index 5dd913aebb0..5517abe1010 100644
--- a/spec/lib/gitlab/sidekiq_cluster_spec.rb
+++ b/spec/lib/gitlab/sidekiq_cluster_spec.rb
@@ -99,7 +99,7 @@ RSpec.describe Gitlab::SidekiqCluster do
allow(Process).to receive(:spawn).and_return(1)
expect(described_class).to receive(:wait_async).with(1)
- expect(described_class.start_sidekiq(%w(foo), options)).to eq(1)
+ expect(described_class.start_sidekiq(%w(foo), **options)).to eq(1)
end
it 'handles duplicate queue names' do
@@ -109,7 +109,7 @@ RSpec.describe Gitlab::SidekiqCluster do
.and_return(1)
expect(described_class).to receive(:wait_async).with(1)
- expect(described_class.start_sidekiq(%w(foo foo bar baz), options)).to eq(1)
+ expect(described_class.start_sidekiq(%w(foo foo bar baz), **options)).to eq(1)
end
it 'runs the sidekiq process in a new process group' do
@@ -119,7 +119,7 @@ RSpec.describe Gitlab::SidekiqCluster do
.and_return(1)
allow(described_class).to receive(:wait_async)
- expect(described_class.start_sidekiq(%w(foo bar baz), options)).to eq(1)
+ expect(described_class.start_sidekiq(%w(foo bar baz), **options)).to eq(1)
end
end
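
The switch from options to **options above reflects Ruby's keyword-argument separation: from Ruby 2.7 a bare hash passed to a method that declares keyword parameters emits a deprecation warning, and on Ruby 3 it is no longer converted at all. A minimal sketch with a hypothetical method (not the real start_sidekiq signature):

  def start(queues, env: 'production', directory: Dir.pwd)
    [queues, env, directory]
  end

  options = { env: 'test', directory: '/tmp' }

  start(%w[foo], **options) # => [["foo"], "test", "/tmp"]
  start(%w[foo], options)   # ArgumentError on Ruby 3.x: the hash stays a positional argument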
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
index bde19fa7552..ca473462d2e 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
include ApplicationWorker
+ feature_category :foo
worker_context user: nil
def perform(identifier, *args)
@@ -56,6 +57,12 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
expect(TestWorker.contexts['identifier'].keys).not_to include('meta.user')
end
+ it 'takes the feature category from the worker' do
+ TestWorker.perform_async('identifier', 1)
+
+ expect(TestWorker.contexts['identifier']).to include('meta.feature_category' => 'foo')
+ end
+
it "doesn't fail for unknown workers" do
expect { OtherWorker.perform_async }.not_to raise_error
end
diff --git a/spec/lib/gitlab/snippet_search_results_spec.rb b/spec/lib/gitlab/snippet_search_results_spec.rb
index e1ae26a4d9e..2177b2be6d6 100644
--- a/spec/lib/gitlab/snippet_search_results_spec.rb
+++ b/spec/lib/gitlab/snippet_search_results_spec.rb
@@ -21,6 +21,12 @@ RSpec.describe Gitlab::SnippetSearchResults do
end
end
+ describe '#highlight_map' do
+ it 'returns the expected highlight map' do
+ expect(results.highlight_map('snippet_titles')).to eq({})
+ end
+ end
+
describe '#objects' do
it 'uses page and per_page to paginate results' do
snippet2 = create(:snippet, :public, content: 'foo', file_name: 'foo')
diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb
index 220ac2ff6da..9bf6f0b82bc 100644
--- a/spec/lib/gitlab/sql/pattern_spec.rb
+++ b/spec/lib/gitlab/sql/pattern_spec.rb
@@ -3,6 +3,43 @@
require 'spec_helper'
RSpec.describe Gitlab::SQL::Pattern do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '.fuzzy_search' do
+ let_it_be(:issue1) { create(:issue, title: 'noise foo noise', description: 'noise bar noise') }
+ let_it_be(:issue2) { create(:issue, title: 'noise baz noise', description: 'noise foo noise') }
+ let_it_be(:issue3) { create(:issue, title: 'Oh', description: 'Ah') }
+
+ subject(:fuzzy_search) { Issue.fuzzy_search(query, columns) }
+
+ where(:query, :columns, :expected) do
+ 'foo' | [Issue.arel_table[:title]] | %i[issue1]
+
+ 'foo' | %i[title] | %i[issue1]
+ 'foo' | %w[title] | %i[issue1]
+ 'foo' | %i[description] | %i[issue2]
+ 'foo' | %i[title description] | %i[issue1 issue2]
+ 'bar' | %i[title description] | %i[issue1]
+ 'baz' | %i[title description] | %i[issue2]
+ 'qux' | %i[title description] | []
+
+ 'oh' | %i[title description] | %i[issue3]
+ 'OH' | %i[title description] | %i[issue3]
+ 'ah' | %i[title description] | %i[issue3]
+ 'AH' | %i[title description] | %i[issue3]
+ 'oh' | %i[title] | %i[issue3]
+ 'ah' | %i[description] | %i[issue3]
+ end
+
+ with_them do
+ let(:expected_issues) { expected.map { |sym| send(sym) } }
+
+ it 'finds the expected issues' do
+ expect(fuzzy_search).to match_array(expected_issues)
+ end
+ end
+ end
+
describe '.to_pattern' do
subject(:to_pattern) { User.to_pattern(query) }
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb
new file mode 100644
index 00000000000..9ce6007165b
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb
@@ -0,0 +1,245 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Global do
+ let(:global) { described_class.new(hash) }
+ let(:default_image_upload_path_value) { 'source/images' }
+
+ let(:default_mounts_value) do
+ [
+ {
+ source: 'source',
+ target: ''
+ }
+ ]
+ end
+
+ let(:default_static_site_generator_value) { 'middleman' }
+
+ shared_examples_for 'valid default configuration' do
+ describe '#compose!' do
+ before do
+ global.compose!
+ end
+
+ it 'creates nodes hash' do
+ expect(global.descendants).to be_an Array
+ end
+
+ it 'creates node object for each entry' do
+ expect(global.descendants.count).to eq 3
+ end
+
+ it 'creates node object using valid class' do
+ expect(global.descendants.map(&:class)).to match_array(expected_node_object_classes)
+ end
+
+ it 'sets a description containing "Static Site Editor" for all nodes' do
+ expect(global.descendants.map(&:description)).to all(match(/Static Site Editor/))
+ end
+
+ describe '#leaf?' do
+ it 'is not leaf' do
+ expect(global).not_to be_leaf
+ end
+ end
+ end
+
+ context 'when not composed' do
+ describe '#static_site_generator_value' do
+ it 'returns nil' do
+ expect(global.static_site_generator_value).to be nil
+ end
+ end
+
+ describe '#leaf?' do
+ it 'is leaf' do
+ expect(global).to be_leaf
+ end
+ end
+ end
+
+ context 'when composed' do
+ before do
+ global.compose!
+ end
+
+ describe '#errors' do
+ it 'has no errors' do
+ expect(global.errors).to be_empty
+ end
+ end
+
+ describe '#image_upload_path_value' do
+ it 'returns correct values' do
+ expect(global.image_upload_path_value).to eq(default_image_upload_path_value)
+ end
+ end
+
+ describe '#mounts_value' do
+ it 'returns correct values' do
+ expect(global.mounts_value).to eq(default_mounts_value)
+ end
+ end
+
+ describe '#static_site_generator_value' do
+ it 'returns correct values' do
+ expect(global.static_site_generator_value).to eq(default_static_site_generator_value)
+ end
+ end
+ end
+ end
+
+ describe '.nodes' do
+ it 'returns a hash' do
+ expect(described_class.nodes).to be_a(Hash)
+ end
+
+ context 'when filtering all the entry/node names' do
+ it 'contains the expected node names' do
+ expected_node_names = %i[
+ image_upload_path
+ mounts
+ static_site_generator
+ ]
+ expect(described_class.nodes.keys).to match_array(expected_node_names)
+ end
+ end
+ end
+
+ context 'when configuration is valid' do
+ context 'when some entries are defined' do
+ let(:expected_node_object_classes) do
+ [
+ Gitlab::StaticSiteEditor::Config::FileConfig::Entry::ImageUploadPath,
+ Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mounts,
+ Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator
+ ]
+ end
+
+ let(:hash) do
+ {
+ image_upload_path: default_image_upload_path_value,
+ mounts: default_mounts_value,
+ static_site_generator: default_static_site_generator_value
+ }
+ end
+
+ it_behaves_like 'valid default configuration'
+ end
+ end
+
+ context 'when value is an empty hash' do
+ let(:expected_node_object_classes) do
+ [
+ Gitlab::Config::Entry::Unspecified,
+ Gitlab::Config::Entry::Unspecified,
+ Gitlab::Config::Entry::Unspecified
+ ]
+ end
+
+ let(:hash) { {} }
+
+ it_behaves_like 'valid default configuration'
+ end
+
+ context 'when configuration is not valid' do
+ before do
+ global.compose!
+ end
+
+ context 'when a single entry is invalid' do
+ let(:hash) do
+ { image_upload_path: { not_a_string: true } }
+ end
+
+ describe '#errors' do
+ it 'reports errors' do
+ expect(global.errors)
+ .to include 'image_upload_path config should be a string'
+ end
+ end
+ end
+
+ context 'when multiple entries are invalid' do
+ let(:hash) do
+ {
+ image_upload_path: { not_a_string: true },
+ static_site_generator: { not_a_string: true }
+ }
+ end
+
+ describe '#errors' do
+ it 'reports errors' do
+ expect(global.errors)
+ .to match_array([
+ 'image_upload_path config should be a string',
+ 'static_site_generator config should be a string',
+ "static_site_generator config should be 'middleman'"
+ ])
+ end
+ end
+ end
+
+ context 'when there is an invalid key' do
+ let(:hash) do
+ { invalid_key: true }
+ end
+
+ describe '#errors' do
+ it 'reports errors' do
+ expect(global.errors)
+ .to include 'global config contains unknown keys: invalid_key'
+ end
+ end
+ end
+ end
+
+ context 'when value is not a hash' do
+ let(:hash) { [] }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(global).not_to be_valid
+ end
+ end
+
+ describe '#errors' do
+ it 'returns error about invalid type' do
+ expect(global.errors.first).to match(/should be a hash/)
+ end
+ end
+ end
+
+ describe '#specified?' do
+ it 'is concrete entry that is defined' do
+ expect(global.specified?).to be true
+ end
+ end
+
+ describe '#[]' do
+ before do
+ global.compose!
+ end
+
+ let(:hash) do
+ { static_site_generator: default_static_site_generator_value }
+ end
+
+ context 'when entry exists' do
+ it 'returns correct entry' do
+ expect(global[:static_site_generator])
+ .to be_an_instance_of Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator
+ expect(global[:static_site_generator].value).to eq default_static_site_generator_value
+ end
+ end
+
+ context 'when entry does not exist' do
+ it 'always returns an unspecified node' do
+ expect(global[:some][:unknown][:node])
+ .not_to be_specified
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb
new file mode 100644
index 00000000000..c2b7fbf6f98
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::ImageUploadPath do
+ subject(:image_upload_path_entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'with a valid config' do
+ let(:config) { 'an-image-upload-path' }
+
+ it { is_expected.to be_valid }
+
+ describe '#value' do
+ it 'returns an image_upload_path key' do
+ expect(image_upload_path_entry.value).to eq config
+ end
+ end
+ end
+
+ context 'with an invalid config' do
+ let(:config) { { not_a_string: true } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports errors about wrong type' do
+ expect(image_upload_path_entry.errors)
+ .to include 'image upload path config should be a string'
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default image_upload_path' do
+ expect(described_class.default).to eq 'source/images'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb
new file mode 100644
index 00000000000..04248fc60a5
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mount do
+ subject(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'with a valid config' do
+ context 'and target is a non-empty string' do
+ let(:config) do
+ {
+ source: 'source',
+ target: 'sub-site'
+ }
+ end
+
+ it { is_expected.to be_valid }
+
+ describe '#value' do
+ it 'returns mount configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+ end
+
+ context 'and target is an empty string' do
+ let(:config) do
+ {
+ source: 'source',
+ target: ''
+ }
+ end
+
+ it { is_expected.to be_valid }
+
+ describe '#value' do
+ it 'returns mount configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+ end
+ end
+
+ context 'with an invalid config' do
+ context 'when source is not a string' do
+ let(:config) { { source: 123, target: 'target' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'mount source should be a string'
+ end
+ end
+
+ context 'when source is not present' do
+ let(:config) { { target: 'target' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "mount source can't be blank"
+ end
+ end
+
+ context 'when target is not a string' do
+ let(:config) { { source: 'source', target: 123 } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'mount target should be a string'
+ end
+ end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { test: 100 } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'mount config contains unknown keys: test'
+ end
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default mount' do
+ expect(described_class.default)
+ .to eq({
+ source: 'source',
+ target: ''
+ })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb
new file mode 100644
index 00000000000..0ae2ece9474
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mounts do
+ subject(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'with a valid config' do
+ let(:config) do
+ [
+ {
+ source: 'source',
+ target: ''
+ },
+ {
+ source: 'sub-site/source',
+ target: 'sub-site'
+ }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+
+ describe '#value' do
+ it 'returns mounts configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+ end
+
+ context 'with an invalid config' do
+ let(:config) { { not_an_array: true } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports errors about wrong type' do
+ expect(entry.errors)
+ .to include 'mounts config should be a array'
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default mounts' do
+ expect(described_class.default)
+ .to eq([{
+ source: 'source',
+ target: ''
+ }])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb
new file mode 100644
index 00000000000..a9c730218cf
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator do
+ let(:static_site_generator) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'when value is valid' do
+ let(:config) { 'middleman' }
+
+ describe '#value' do
+ it 'returns a static_site_generator key' do
+ expect(static_site_generator.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(static_site_generator).to be_valid
+ end
+ end
+ end
+
+ context 'when value is invalid' do
+ let(:config) { 'not-a-valid-generator' }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(static_site_generator).not_to be_valid
+ end
+ end
+ end
+
+ context 'when value has a wrong type' do
+ let(:config) { { not_a_string: true } }
+
+ it 'reports errors about wrong type' do
+ expect(static_site_generator.errors)
+ .to include 'static site generator config should be a string'
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default static_site_generator' do
+ expect(described_class.default).to eq 'middleman'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
index 594425c2dab..d444d4f1df7 100644
--- a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
@@ -3,13 +3,85 @@
require 'spec_helper'
RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig do
- subject(:config) { described_class.new }
+ let(:config) do
+ described_class.new(yml)
+ end
+
+ context 'when config is valid' do
+ context 'when config has valid values' do
+ let(:yml) do
+ <<-EOS
+ static_site_generator: middleman
+ EOS
+ end
+
+ describe '#to_hash_with_defaults' do
+ it 'returns hash created from string' do
+ expect(config.to_hash_with_defaults.fetch(:static_site_generator)).to eq 'middleman'
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+
+ it 'has no errors' do
+ expect(config.errors).to be_empty
+ end
+ end
+ end
+ end
+
+ context 'when a config entry has an empty value' do
+ let(:yml) { 'static_site_generator: ' }
+
+ describe '#to_hash_with_defaults' do
+ it 'returns default value' do
+ expect(config.to_hash_with_defaults.fetch(:static_site_generator)).to eq 'middleman'
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+
+ it 'has no errors' do
+ expect(config.errors).to be_empty
+ end
+ end
+ end
+
+ context 'when config is invalid' do
+ context 'when yml is incorrect' do
+ let(:yml) { '// invalid' }
+
+ describe '.new' do
+ it 'raises error' do
+ expect { config }.to raise_error(described_class::ConfigError, /Invalid configuration format/)
+ end
+ end
+ end
+
+ context 'when config value exists but is not a valid value' do
+ let(:yml) { 'static_site_generator: "unsupported-generator"' }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(config).not_to be_valid
+ end
- describe '#data' do
- subject { config.data }
+ it 'has errors' do
+ expect(config.errors).not_to be_empty
+ end
+ end
- it 'returns hardcoded data for now' do
- is_expected.to match(static_site_generator: 'middleman')
+ describe '#errors' do
+ it 'returns an array of strings' do
+ expect(config.errors).to all(be_an_instance_of(String))
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
index 3433a54be9c..2f761b69e60 100644
--- a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
project: 'project',
project_id: project.id,
return_url: 'http://example.com',
- is_supported_content: 'true',
+ is_supported_content: true,
base_url: '/namespace/project/-/sse/master%2FREADME.md',
merge_requests_illustration_path: %r{illustrations/merge_requests}
})
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
stub_feature_flags(sse_erb_support: project)
end
- it { is_expected.to include(is_supported_content: 'true') }
+ it { is_expected.to include(is_supported_content: true) }
end
context 'when feature flag is disabled' do
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
stub_feature_flags(sse_erb_support: false)
end
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
end
@@ -88,31 +88,31 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
context 'when branch is not master' do
let(:ref) { 'my-branch' }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when file does not have a markdown extension' do
let(:path) { 'README.txt' }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when file does not have an extension' do
let(:path) { 'README' }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when file does not exist' do
let(:path) { 'UNKNOWN.md' }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when repository is empty' do
let(:repository) { create(:project_empty_repo).repository }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when return_url is not a valid URL' do
@@ -132,5 +132,11 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
it { is_expected.to include(return_url: nil) }
end
+
+ context 'when a commit for the ref cannot be found' do
+ let(:ref) { 'nonexistent-ref' }
+
+ it { is_expected.to include(commit_id: nil) }
+ end
end
end
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
new file mode 100644
index 00000000000..351af3c07d2
--- /dev/null
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::SubscriptionPortal do
+ describe '.default_subscriptions_url' do
+ subject { described_class.default_subscriptions_url }
+
+ context 'on non-test and non-dev environments' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
+ end
+
+ it 'returns production subscriptions app URL' do
+ is_expected.to eq('https://customers.gitlab.com')
+ end
+ end
+
+ context 'on dev environment' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(true)
+ end
+
+ it 'returns staging subscriptions app url' do
+ is_expected.to eq('https://customers.stg.gitlab.com')
+ end
+ end
+
+ context 'on test environment' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(true)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
+ end
+
+ it 'returns staging subscriptions app url' do
+ is_expected.to eq('https://customers.stg.gitlab.com')
+ end
+ end
+ end
+end
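
The spec above stubs the whole Rails.env.test? / Rails.env.development? chain in one call with receive_message_chain from rspec-mocks. A small self-contained sketch of the same mechanism against a plain double (hypothetical example, not GitLab code):

  require 'rspec/autorun'

  RSpec.describe 'message chain sketch' do
    let(:rails_like) { double('Rails') }

    it 'stubs a whole call chain at once' do
      # One allow(...) covers both .env and the .development? call on its result.
      allow(rails_like).to receive_message_chain(:env, :development?).and_return(true)

      expect(rails_like.env.development?).to be(true)
    end
  end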
diff --git a/spec/lib/gitlab/themes_spec.rb b/spec/lib/gitlab/themes_spec.rb
index 68ff28becfa..6d03cf496b8 100644
--- a/spec/lib/gitlab/themes_spec.rb
+++ b/spec/lib/gitlab/themes_spec.rb
@@ -47,4 +47,18 @@ RSpec.describe Gitlab::Themes, lib: true do
expect(ids).not_to be_empty
end
end
+
+ describe 'theme.css_filename' do
+ described_class.each do |theme|
+ next unless theme.css_filename
+
+ context "for #{theme.name}" do
+ it 'returns an existing CSS filename' do
+ css_file_path = Rails.root.join('app/assets/stylesheets/themes', theme.css_filename + '.scss')
+
+ expect(File.exist?(css_file_path)).to eq(true)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index f0bf7b9964f..6ddeaf98370 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Tracking do
end
around do |example|
- Timecop.freeze(timestamp) { example.run }
+ travel_to(timestamp) { example.run }
end
before do
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 2a674557b76..f2c1d8718d7 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -41,11 +41,11 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
context 'for web IDE edit actions' do
it_behaves_like 'tracks and counts action' do
def track_action(params)
- described_class.track_web_ide_edit_action(params)
+ described_class.track_web_ide_edit_action(**params)
end
def count_unique(params)
- described_class.count_web_ide_edit_actions(params)
+ described_class.count_web_ide_edit_actions(**params)
end
end
end
@@ -53,11 +53,11 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
context 'for SFE edit actions' do
it_behaves_like 'tracks and counts action' do
def track_action(params)
- described_class.track_sfe_edit_action(params)
+ described_class.track_sfe_edit_action(**params)
end
def count_unique(params)
- described_class.count_sfe_edit_actions(params)
+ described_class.count_sfe_edit_actions(**params)
end
end
end
@@ -65,11 +65,11 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
context 'for snippet editor edit actions' do
it_behaves_like 'tracks and counts action' do
def track_action(params)
- described_class.track_snippet_editor_edit_action(params)
+ described_class.track_snippet_editor_edit_action(**params)
end
def count_unique(params)
- described_class.count_snippet_editor_edit_actions(params)
+ described_class.count_snippet_editor_edit_actions(**params)
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index f881da71251..e84c3c17274 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -15,12 +15,12 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
# depending on which day of the week the test is run.
# Monday 1st of June
reference_time = Time.utc(2020, 6, 1)
- Timecop.freeze(reference_time) { example.run }
+ travel_to(reference_time) { example.run }
end
describe '.categories' do
it 'gets all unique category names' do
- expect(described_class.categories).to contain_exactly('analytics', 'compliance', 'ide_edit', 'search', 'source_code', 'incident_management', 'issues_edit')
+ expect(described_class.categories).to contain_exactly('analytics', 'compliance', 'ide_edit', 'search', 'source_code', 'incident_management', 'issues_edit', 'testing')
end
end
@@ -238,16 +238,20 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'returns the number of unique events for all known events' do
results = {
- 'category1' => {
- 'event1_slot' => 1,
- 'event2_slot' => 1,
- 'category1_total_unique_counts_weekly' => 2,
- 'category1_total_unique_counts_monthly' => 3
- },
- 'category2' => {
- 'event3' => 1,
- 'event4' => 1
- }
+ "category1" => {
+ "event1_slot_weekly" => 1,
+ "event1_slot_monthly" => 1,
+ "event2_slot_weekly" => 1,
+ "event2_slot_monthly" => 2,
+ "category1_total_unique_counts_weekly" => 2,
+ "category1_total_unique_counts_monthly" => 3
+ },
+ "category2" => {
+ "event3_weekly" => 1,
+ "event3_monthly" => 1,
+ "event4_weekly" => 1,
+ "event4_monthly" => 1
+ }
}
expect(subject.unique_events_data).to eq(results)
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
index 479fe36bcdd..e08dc41d0cc 100644
--- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_TITLE_CHANGED }
def track_action(params)
- described_class.track_issue_title_changed_action(params)
+ described_class.track_issue_title_changed_action(**params)
end
end
end
@@ -57,7 +57,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_DESCRIPTION_CHANGED }
def track_action(params)
- described_class.track_issue_description_changed_action(params)
+ described_class.track_issue_description_changed_action(**params)
end
end
end
@@ -67,7 +67,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_ASSIGNEE_CHANGED }
def track_action(params)
- described_class.track_issue_assignee_changed_action(params)
+ described_class.track_issue_assignee_changed_action(**params)
end
end
end
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_MADE_CONFIDENTIAL }
def track_action(params)
- described_class.track_issue_made_confidential_action(params)
+ described_class.track_issue_made_confidential_action(**params)
end
end
end
@@ -87,7 +87,207 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_MADE_VISIBLE }
def track_action(params)
- described_class.track_issue_made_visible_action(params)
+ described_class.track_issue_made_visible_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue created actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_CREATED }
+
+ def track_action(params)
+ described_class.track_issue_created_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue closed actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_CLOSED }
+
+ def track_action(params)
+ described_class.track_issue_closed_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue reopened actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_REOPENED }
+
+ def track_action(params)
+ described_class.track_issue_reopened_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue label changed actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_LABEL_CHANGED }
+
+ def track_action(params)
+ described_class.track_issue_label_changed_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue cross-referenced actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_CROSS_REFERENCED }
+
+ def track_action(params)
+ described_class.track_issue_cross_referenced_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue moved actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_MOVED }
+
+ def track_action(params)
+ described_class.track_issue_moved_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue relate actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_RELATED }
+
+ def track_action(params)
+ described_class.track_issue_related_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue unrelate actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_UNRELATED }
+
+ def track_action(params)
+ described_class.track_issue_unrelated_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue marked as duplicate actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_MARKED_AS_DUPLICATE }
+
+ def track_action(params)
+ described_class.track_issue_marked_as_duplicate_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue locked actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_LOCKED }
+
+ def track_action(params)
+ described_class.track_issue_locked_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue unlocked actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_UNLOCKED }
+
+ def track_action(params)
+ described_class.track_issue_unlocked_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue added to epic actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_ADDED_TO_EPIC }
+
+ def track_action(params)
+ described_class.track_issue_added_to_epic_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue removed from epic actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_REMOVED_FROM_EPIC }
+
+ def track_action(params)
+ described_class.track_issue_removed_from_epic_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue changed epic actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_CHANGED_EPIC }
+
+ def track_action(params)
+ described_class.track_issue_changed_epic_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue designs added actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_DESIGNS_ADDED }
+
+ def track_action(params)
+ described_class.track_issue_designs_added_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue designs modified actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_DESIGNS_MODIFIED }
+
+ def track_action(params)
+ described_class.track_issue_designs_modified_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue designs removed actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_DESIGNS_REMOVED }
+
+ def track_action(params)
+ described_class.track_issue_designs_removed_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue due date changed actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_DUE_DATE_CHANGED }
+
+ def track_action(params)
+ described_class.track_issue_due_date_changed_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue time estimate changed actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_TIME_ESTIMATE_CHANGED }
+
+ def track_action(params)
+ described_class.track_issue_time_estimate_changed_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue time spent changed actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_TIME_SPENT_CHANGED }
+
+ def track_action(params)
+ described_class.track_issue_time_spent_changed_action(**params)
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
new file mode 100644
index 00000000000..aaa576865f6
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/static_site_editor_counter_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::StaticSiteEditorCounter do
+ it_behaves_like 'a redis usage counter', 'StaticSiteEditor', :views
+
+ it_behaves_like 'a redis usage counter with totals', :static_site_editor,
+ views: 3
+end
diff --git a/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb b/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb
index 8f5f1347ce8..d1144dd0bc5 100644
--- a/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb
@@ -8,11 +8,11 @@ RSpec.describe Gitlab::UsageDataCounters::TrackUniqueEvents, :clean_gitlab_redis
let(:time) { Time.zone.now }
def track_event(params)
- track_unique_events.track_event(params)
+ track_unique_events.track_event(**params)
end
def count_unique(params)
- track_unique_events.count_unique_events(params)
+ track_unique_events.count_unique_events(**params)
end
context 'tracking an event' do
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 6631a0d3cc6..f64fa2b868d 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
before do
stub_usage_data_connections
stub_object_store_settings
+ clear_memoized_values(described_class::CE_MEMOIZED_VALUES)
end
describe '.uncached_data' do
@@ -24,17 +25,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
it 'clears memoized values' do
- values = %i(issue_minimum_id issue_maximum_id
- project_minimum_id project_maximum_id
- user_minimum_id user_maximum_id unique_visit_service
- deployment_minimum_id deployment_maximum_id
- approval_merge_request_rule_minimum_id
- approval_merge_request_rule_maximum_id)
- values.each do |key|
- expect(described_class).to receive(:clear_memoization).with(key)
- end
+ allow(described_class).to receive(:clear_memoization)
subject
+
+ described_class::CE_MEMOIZED_VALUES.each do |key|
+ expect(described_class).to have_received(:clear_memoization).with(key)
+ end
end
it 'merge_requests_users is included only in monthly counters' do
@@ -174,21 +171,29 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
for_defined_days_back do
user = create(:user)
+ user2 = create(:user)
create(:event, author: user)
create(:group_member, user: user)
+ create(:authentication_event, user: user, provider: :ldapmain, result: :success)
+ create(:authentication_event, user: user2, provider: :ldapsecondary, result: :success)
+ create(:authentication_event, user: user2, provider: :group_saml, result: :success)
+ create(:authentication_event, user: user2, provider: :group_saml, result: :success)
+ create(:authentication_event, user: user, provider: :group_saml, result: :failed)
end
expect(described_class.usage_activity_by_stage_manage({})).to include(
events: 2,
groups: 2,
- users_created: 4,
- omniauth_providers: ['google_oauth2']
+ users_created: 6,
+ omniauth_providers: ['google_oauth2'],
+ user_auth_by_provider: { 'group_saml' => 2, 'ldap' => 4 }
)
expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
events: 1,
groups: 1,
- users_created: 2,
- omniauth_providers: ['google_oauth2']
+ users_created: 3,
+ omniauth_providers: ['google_oauth2'],
+ user_auth_by_provider: { 'group_saml' => 1, 'ldap' => 2 }
)
end
@@ -244,6 +249,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
)
end
+ it 'includes group imports usage data' do
+ for_defined_days_back do
+ user = create(:user)
+ group = create(:group)
+ group.add_owner(user)
+ create(:group_import_state, group: group, user: user)
+ end
+
+ expect(described_class.usage_activity_by_stage_manage({}))
+ .to include(groups_imported: 2)
+ expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period))
+ .to include(groups_imported: 1)
+ end
+
def omniauth_providers
[
OpenStruct.new(name: 'google_oauth2'),
@@ -260,17 +279,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
cluster = create(:cluster, user: user)
create(:project, creator: user)
create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ create(:project_tracing_setting)
end
expect(described_class.usage_activity_by_stage_monitor({})).to include(
clusters: 2,
clusters_applications_prometheus: 2,
- operations_dashboard_default_dashboard: 2
+ operations_dashboard_default_dashboard: 2,
+ projects_with_tracing_enabled: 2
)
expect(described_class.usage_activity_by_stage_monitor(described_class.last_28_days_time_period)).to include(
clusters: 1,
clusters_applications_prometheus: 1,
- operations_dashboard_default_dashboard: 1
+ operations_dashboard_default_dashboard: 1,
+ projects_with_tracing_enabled: 1
)
end
end
@@ -415,11 +437,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:projects_slack_slash_commands_active]).to eq(1)
expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
expect(count_data[:projects_mattermost_active]).to eq(1)
+ expect(count_data[:groups_mattermost_active]).to eq(1)
expect(count_data[:templates_mattermost_active]).to eq(1)
expect(count_data[:instances_mattermost_active]).to eq(1)
- expect(count_data[:projects_inheriting_instance_mattermost_active]).to eq(1)
+ expect(count_data[:projects_inheriting_mattermost_active]).to eq(1)
+ expect(count_data[:groups_inheriting_slack_active]).to eq(1)
expect(count_data[:projects_with_repositories_enabled]).to eq(3)
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
+ expect(count_data[:projects_with_tracing_enabled]).to eq(1)
expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
expect(count_data[:projects_with_prometheus_alerts]).to eq(2)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
@@ -472,8 +497,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:personal_snippets]).to eq(2)
expect(count_data[:project_snippets]).to eq(4)
+ expect(count_data[:projects_creating_incidents]).to eq(2)
expect(count_data[:projects_with_packages]).to eq(2)
expect(count_data[:packages]).to eq(4)
+ expect(count_data[:user_preferences_user_gitpod_enabled]).to eq(1)
end
it 'gathers object store usage correctly' do
@@ -549,8 +576,17 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
describe '.system_usage_data_monthly' do
+ let_it_be(:project) { create(:project) }
let!(:ud) { build(:usage_data) }
+ before do
+ stub_application_setting(self_monitoring_project: project)
+
+ for_defined_days_back do
+ create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
+ end
+ end
+
subject { described_class.system_usage_data_monthly }
it 'gathers monthly usage counts correctly' do
@@ -563,6 +599,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(counts_monthly[:personal_snippets]).to eq(1)
expect(counts_monthly[:project_snippets]).to eq(2)
expect(counts_monthly[:packages]).to eq(3)
+ expect(counts_monthly[:promoted_issues]).to eq(1)
end
end
@@ -570,6 +607,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.usage_counters }
it { is_expected.to include(:kubernetes_agent_gitops_sync) }
+ it { is_expected.to include(:static_site_editor_views) }
end
describe '.usage_data_counters' do
@@ -628,6 +666,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?)
expect(subject[:grafana_link_enabled]).to eq(Gitlab::CurrentSettings.grafana_enabled?)
+ expect(subject[:gitpod_enabled]).to eq(Gitlab::CurrentSettings.gitpod_enabled?)
end
context 'with embedded Prometheus' do
@@ -657,6 +696,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:grafana_link_enabled]).to eq(false)
end
end
+
+ context 'with Gitpod' do
+ it 'returns true when it is enabled' do
+ stub_application_setting(gitpod_enabled: true)
+
+ expect(subject[:gitpod_enabled]).to eq(true)
+ end
+
+ it 'returns false when it is disabled' do
+ stub_application_setting(gitpod_enabled: false)
+
+ expect(subject[:gitpod_enabled]).to eq(false)
+ end
+ end
end
describe '.components_usage_data' do
@@ -670,6 +723,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:git][:version]).to eq(Gitlab::Git.version)
expect(subject[:database][:adapter]).to eq(Gitlab::Database.adapter_name)
expect(subject[:database][:version]).to eq(Gitlab::Database.version)
+ expect(subject[:database][:pg_system_id]).to eq(Gitlab::Database.system_id)
expect(subject[:mail][:smtp_server]).to eq(ActionMailer::Base.smtp_settings[:address])
expect(subject[:gitaly][:version]).to be_present
expect(subject[:gitaly][:servers]).to be >= 1
@@ -979,9 +1033,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- def for_defined_days_back(days: [29, 2])
+ def for_defined_days_back(days: [31, 3])
days.each do |n|
- Timecop.travel(n.days.ago) do
+ travel_to(n.days.ago) do
yield
end
end
@@ -1078,8 +1132,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.compliance_unique_visits_data }
before do
- described_class.clear_memoization(:unique_visit_service)
-
allow_next_instance_of(::Gitlab::Analytics::UniqueVisits) do |instance|
::Gitlab::Analytics::UniqueVisits.compliance_events.each do |target|
allow(instance).to receive(:unique_visits_for).with(targets: target).and_return(123)
@@ -1110,7 +1162,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.search_unique_visits_data }
before do
- described_class.clear_memoization(:unique_visit_service)
events = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category('search')
events.each do |event|
allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).with(event_names: event, start_date: 7.days.ago.to_date, end_date: Date.current).and_return(123)
@@ -1136,9 +1187,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.redis_hll_counters }
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
- let(:ineligible_total_categories) { ['source_code'] }
+ let(:ineligible_total_categories) { %w[source_code testing] }
- it 'has all know_events' do
+ it 'has all known_events' do
expect(subject).to have_key(:redis_hll_counters)
expect(subject[:redis_hll_counters].keys).to match_array(categories)
@@ -1146,11 +1197,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
categories.each do |category|
keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category)
+ metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" }
+
if ineligible_total_categories.exclude?(category)
- keys.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
+ metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
end
- expect(subject[:redis_hll_counters][category].keys).to match_array(keys)
+ expect(subject[:redis_hll_counters][category].keys).to match_array(metrics)
end
end
end
@@ -1169,6 +1222,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
describe '.snowplow_event_counts' do
+ let_it_be(:time_period) { { collector_tstamp: 8.days.ago..1.day.ago } }
+
context 'when self-monitoring project exists' do
let_it_be(:project) { create(:project) }
@@ -1181,14 +1236,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
stub_feature_flags(product_analytics: project)
create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
- create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 28.days.ago)
+ create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 2.days.ago)
+ create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 9.days.ago)
+
+ create(:product_analytics_event, project: project, se_category: 'foo', se_action: 'bar', collector_tstamp: 2.days.ago)
end
it 'returns promoted_issues for the time period' do
- expect(described_class.snowplow_event_counts[:promoted_issues]).to eq(2)
- expect(described_class.snowplow_event_counts(
- time_period: described_class.last_28_days_time_period(column: :collector_tstamp)
- )[:promoted_issues]).to eq(1)
+ expect(described_class.snowplow_event_counts(time_period)[:promoted_issues]).to eq(1)
end
end
@@ -1198,14 +1253,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
it 'returns an empty hash' do
- expect(described_class.snowplow_event_counts).to eq({})
+ expect(described_class.snowplow_event_counts(time_period)).to eq({})
end
end
end
context 'when self-monitoring project does not exist' do
it 'returns an empty hash' do
- expect(described_class.snowplow_event_counts).to eq({})
+ expect(described_class.snowplow_event_counts(time_period)).to eq({})
end
end
end
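
The for_defined_days_back helper above, like the Timecop.freeze call sites elsewhere in this diff, now uses travel_to from ActiveSupport::Testing::TimeHelpers, which GitLab mixes into its specs. A standalone sketch of the block form, which restores real time automatically when the block returns (assumes only ActiveSupport is available):

  require 'active_support'
  require 'active_support/testing/time_helpers'

  include ActiveSupport::Testing::TimeHelpers

  travel_to(Time.utc(2020, 6, 1)) do
    # Time.now and Date.today are stubbed for the duration of the block.
    Time.now.utc.strftime('%Y-%m-%d') # => "2020-06-01", regardless of the real clock
  end
  # Outside the block the stubs are removed again; no explicit unfreeze is needed.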
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 362cbaa78e9..9c0dc69ccd1 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -212,33 +212,26 @@ RSpec.describe Gitlab::Utils::UsageData do
describe '#track_usage_event' do
let(:value) { '9f302fea-f828-4ca9-aef4-e10bd723c0b3' }
- let(:event_name) { 'my_event' }
+ let(:event_name) { 'incident_management_alert_status_changed' }
let(:unknown_event) { 'unknown' }
let(:feature) { "usage_data_#{event_name}" }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
context 'with feature enabled' do
before do
stub_feature_flags(feature => true)
end
it 'tracks redis hll event' do
- stub_application_setting(usage_ping_enabled: true)
-
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(value, event_name)
described_class.track_usage_event(event_name, value)
end
- it 'does not track event when usage ping is not enabled' do
- stub_application_setting(usage_ping_enabled: false)
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
-
- described_class.track_usage_event(event_name, value)
- end
-
it 'raise an error for unknown event' do
- stub_application_setting(usage_ping_enabled: true)
-
expect { described_class.track_usage_event(unknown_event, value) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
end
diff --git a/spec/lib/gitlab/visibility_level_checker_spec.rb b/spec/lib/gitlab/visibility_level_checker_spec.rb
index 833021a22ca..38a7d967c33 100644
--- a/spec/lib/gitlab/visibility_level_checker_spec.rb
+++ b/spec/lib/gitlab/visibility_level_checker_spec.rb
@@ -5,16 +5,13 @@ require 'spec_helper'
RSpec.describe Gitlab::VisibilityLevelChecker do
let(:user) { create(:user) }
let(:project) { create(:project) }
- let(:visibility_level_checker) { }
let(:override_params) { {} }
- subject { described_class.new(user, project, project_params: override_params) }
-
describe '#level_restricted?' do
+ subject(:result) { described_class.new(user, project, project_params: override_params).level_restricted? }
+
context 'when visibility level is allowed' do
it 'returns false with nil for visibility level' do
- result = subject.level_restricted?
-
expect(result.restricted?).to eq(false)
expect(result.visibility_level).to be_nil
end
@@ -25,12 +22,26 @@ RSpec.describe Gitlab::VisibilityLevelChecker do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
end
- it 'returns true and visibility name' do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- result = subject.level_restricted?
+ context 'for public project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ context 'for non-admin user' do
+ it 'returns true and visibility name' do
+ expect(result.restricted?).to eq(true)
+ expect(result.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ end
+ end
+
+ context 'for admin user' do
+ let(:user) { create(:user, :admin) }
- expect(result.restricted?).to eq(true)
- expect(result.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ it 'returns false and a nil visibility level' do
+ expect(result.restricted?).to eq(false)
+ expect(result.visibility_level).to be_nil
+ end
+ end
end
context 'overridden visibility' do
@@ -50,8 +61,6 @@ RSpec.describe Gitlab::VisibilityLevelChecker do
let(:override_visibility) { 'public' }
it 'returns true and visibility name' do
- result = subject.level_restricted?
-
expect(result.restricted?).to eq(true)
expect(result.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
end
@@ -61,8 +70,6 @@ RSpec.describe Gitlab::VisibilityLevelChecker do
let(:override_visibility) { 'publik' }
it 'returns false with nil for visibility level' do
- result = subject.level_restricted?
-
expect(result.restricted?).to eq(false)
expect(result.visibility_level).to be_nil
end
@@ -72,8 +79,6 @@ RSpec.describe Gitlab::VisibilityLevelChecker do
let(:override_params) { {} }
it 'returns false with nil for visibility level' do
- result = subject.level_restricted?
-
expect(result.restricted?).to eq(false)
expect(result.visibility_level).to be_nil
end
diff --git a/spec/lib/gitlab/webpack/manifest_spec.rb b/spec/lib/gitlab/webpack/manifest_spec.rb
new file mode 100644
index 00000000000..1427bdd7d4f
--- /dev/null
+++ b/spec/lib/gitlab/webpack/manifest_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'json'
+
+RSpec.describe Gitlab::Webpack::Manifest do
+ let(:manifest) do
+ <<-EOF
+ {
+ "errors": [],
+ "assetsByChunkName": {
+ "entry1": [ "entry1.js", "entry1-a.js" ],
+ "entry2": "entry2.js"
+ }
+ }
+ EOF
+ end
+
+ around do |example|
+ Gitlab::Webpack::Manifest.clear_manifest!
+
+ example.run
+
+ Gitlab::Webpack::Manifest.clear_manifest!
+ end
+
+ shared_examples_for "a valid manifest" do
+ it "returns single entry asset paths from the manifest" do
+ expect(Gitlab::Webpack::Manifest.asset_paths("entry2")).to eq(["/public_path/entry2.js"])
+ end
+
+ it "returns multiple entry asset paths from the manifest" do
+ expect(Gitlab::Webpack::Manifest.asset_paths("entry1")).to eq(["/public_path/entry1.js", "/public_path/entry1-a.js"])
+ end
+
+ it "errors on a missing entry point" do
+ expect { Gitlab::Webpack::Manifest.asset_paths("herp") }.to raise_error(Gitlab::Webpack::Manifest::AssetMissingError)
+ end
+ end
+
+ before do
+ # Test that config variables work while we're here
+ allow(Gitlab.config.webpack.dev_server).to receive_messages(host: 'hostname', port: 2000, https: false)
+ allow(Gitlab.config.webpack).to receive(:manifest_filename).and_return('my_manifest.json')
+ allow(Gitlab.config.webpack).to receive(:public_path).and_return('public_path')
+ allow(Gitlab.config.webpack).to receive(:output_dir).and_return('manifest_output')
+ end
+
+ context "with dev server enabled" do
+ before do
+ allow(Gitlab.config.webpack.dev_server).to receive(:enabled).and_return(true)
+
+ stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: manifest, status: 200)
+ end
+
+ describe ".asset_paths" do
+ it_behaves_like "a valid manifest"
+
+ it "errors if we can't find the manifest" do
+ allow(Gitlab.config.webpack).to receive(:manifest_filename).and_return('broken.json')
+ stub_request(:get, "http://hostname:2000/public_path/broken.json").to_raise(SocketError)
+
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.to raise_error(Gitlab::Webpack::Manifest::ManifestLoadError)
+ end
+
+ describe "webpack errors" do
+ context "when webpack has 'Module build failed' errors in its manifest" do
+ it "errors" do
+ error_manifest = Gitlab::Json.parse(manifest).merge("errors" => [
+ "somethingModule build failed something",
+ "I am an error"
+ ]).to_json
+ stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: error_manifest, status: 200)
+
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.to raise_error(Gitlab::Webpack::Manifest::WebpackError)
+ end
+ end
+
+ context "when webpack does not have 'Module build failed' errors in its manifest" do
+ it "does not error" do
+ error_manifest = Gitlab::Json.parse(manifest).merge("errors" => ["something went wrong"]).to_json
+ stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: error_manifest, status: 200)
+
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.not_to raise_error
+ end
+ end
+
+ it "does not error if errors is present but empty" do
+ error_manifest = Gitlab::Json.parse(manifest).merge("errors" => []).to_json
+ stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: error_manifest, status: 200)
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context "with dev server disabled" do
+ before do
+ allow(Gitlab.config.webpack.dev_server).to receive(:enabled).and_return(false)
+ allow(File).to receive(:read).with(::Rails.root.join("manifest_output/my_manifest.json")).and_return(manifest)
+ end
+
+ describe ".asset_paths" do
+ it_behaves_like "a valid manifest"
+
+ it "errors if we can't find the manifest" do
+ allow(Gitlab.config.webpack).to receive(:manifest_filename).and_return('broken.json')
+ allow(File).to receive(:read).with(::Rails.root.join("manifest_output/broken.json")).and_raise(Errno::ENOENT)
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.to raise_error(Gitlab::Webpack::Manifest::ManifestLoadError)
+ end
+ end
+ end
+end
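
The dev-server contexts above never talk to a real webpack process: WebMock's stub_request intercepts the manifest fetch per URL, so the manifest can be served, broken, or made unreachable at will. Outside of RSpec, where WebMock is usually wired in via webmock/rspec, roughly the same stubbing looks like this (hypothetical standalone script):

  require 'webmock'
  require 'net/http'

  include WebMock::API
  WebMock.enable!

  # Any GET to this exact URL now returns the canned body instead of hitting the network.
  stub_request(:get, 'http://hostname:2000/public_path/my_manifest.json')
    .to_return(body: '{"assetsByChunkName": {}}', status: 200)

  Net::HTTP.get(URI('http://hostname:2000/public_path/my_manifest.json'))
  # => '{"assetsByChunkName": {}}'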
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index e9733851590..9662ad13631 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -54,12 +54,44 @@ RSpec.describe Gitlab::Workhorse do
commit_id: metadata['CommitId'],
prefix: metadata['ArchivePrefix'],
format: Gitaly::GetArchiveRequest::Format::ZIP,
- path: path
+ path: path,
+ include_lfs_blobs: true
).to_proto
)
}.deep_stringify_keys)
end
+ context 'when include_lfs_blobs_in_archive is disabled' do
+ before do
+ stub_feature_flags(include_lfs_blobs_in_archive: false)
+ end
+
+ it 'sets include_lfs_blobs to false' do
+ key, command, params = decode_workhorse_header(subject)
+
+ expect(key).to eq('Gitlab-Workhorse-Send-Data')
+ expect(command).to eq('git-archive')
+ expect(params).to eq({
+ 'GitalyServer' => {
+ features: { 'gitaly-feature-foobar' => 'true' },
+ address: Gitlab::GitalyClient.address(project.repository_storage),
+ token: Gitlab::GitalyClient.token(project.repository_storage)
+ },
+ 'ArchivePath' => metadata['ArchivePath'],
+ 'GetArchiveRequest' => Base64.encode64(
+ Gitaly::GetArchiveRequest.new(
+ repository: repository.gitaly_repository,
+ commit_id: metadata['CommitId'],
+ prefix: metadata['ArchivePrefix'],
+ format: Gitaly::GetArchiveRequest::Format::ZIP,
+ path: path,
+ include_lfs_blobs: false
+ ).to_proto
+ )
+ }.deep_stringify_keys)
+ end
+ end
+
context 'when archive caching is disabled' do
let(:cache_disabled) { true }
diff --git a/spec/lib/gitlab_danger_spec.rb b/spec/lib/gitlab_danger_spec.rb
index b534823a888..e332647cf8a 100644
--- a/spec/lib/gitlab_danger_spec.rb
+++ b/spec/lib/gitlab_danger_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabDanger do
describe '.local_warning_message' do
it 'returns an informational message with rules that can run' do
- expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, documentation, frozen_string, duplicate_yarn_dependencies, prettier, eslint, karma, database, commit_messages, telemetry, utility_css, pajamas')
+ expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, documentation, frozen_string, duplicate_yarn_dependencies, prettier, eslint, karma, database, commit_messages, product_analytics, utility_css, pajamas')
end
end
diff --git a/spec/lib/google_api/auth_spec.rb b/spec/lib/google_api/auth_spec.rb
index eeb99bfbb6c..92cb9e494ac 100644
--- a/spec/lib/google_api/auth_spec.rb
+++ b/spec/lib/google_api/auth_spec.rb
@@ -12,12 +12,12 @@ RSpec.describe GoogleApi::Auth do
end
describe '#authorize_url' do
- subject { client.authorize_url }
+ subject { Addressable::URI.parse(client.authorize_url) }
it 'returns authorize_url' do
- is_expected.to start_with('https://accounts.google.com/o/oauth2')
- is_expected.to include(URI.encode(redirect_uri, URI::PATTERN::RESERVED))
- is_expected.to include(URI.encode(redirect_to, URI::PATTERN::RESERVED))
+ expect(subject.to_s).to start_with('https://accounts.google.com/o/oauth2')
+ expect(subject.query_values['state']).to eq(redirect_to)
+ expect(subject.query_values['redirect_uri']).to eq(redirect_uri)
end
end
diff --git a/spec/lib/grafana/time_window_spec.rb b/spec/lib/grafana/time_window_spec.rb
index 9ee65c6cf20..0657bed7b28 100644
--- a/spec/lib/grafana/time_window_spec.rb
+++ b/spec/lib/grafana/time_window_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Grafana::TimeWindow do
let(:to) { '1552828200000' }
around do |example|
- Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
describe '#formatted' do
@@ -37,7 +37,7 @@ RSpec.describe Grafana::RangeWithDefaults do
let(:to) { Grafana::Timestamp.from_ms_since_epoch('1552828200000') }
around do |example|
- Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
describe '#to_hash' do
@@ -82,7 +82,7 @@ RSpec.describe Grafana::Timestamp do
let(:timestamp) { Time.at(1552799400) }
around do |example|
- Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
describe '#formatted' do
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index a920f598c24..fa0cd214c7e 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -24,18 +24,6 @@ RSpec.describe 'Marginalia spec' do
end
end
- def stub_feature(value)
- allow(Gitlab::Marginalia).to receive(:cached_feature_enabled?).and_return(value)
- end
-
- def make_request(correlation_id)
- request_env = Rack::MockRequest.env_for('/')
-
- ::Labkit::Correlation::CorrelationId.use_id(correlation_id) do
- MarginaliaTestController.action(:first_user).call(request_env)
- end
- end
-
describe 'For rails web requests' do
let(:correlation_id) { SecureRandom.uuid }
let(:recorded) { ActiveRecord::QueryRecorder.new { make_request(correlation_id) } }
@@ -149,4 +137,17 @@ RSpec.describe 'Marginalia spec' do
end
end
end
+
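+ # Toggles the real marginalia feature flag and re-applies it via set_enabled_from_feature_flag (replaces the removed cached_feature_enabled? stub)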
+ def stub_feature(value)
+ stub_feature_flags(marginalia: value)
+ Gitlab::Marginalia.set_enabled_from_feature_flag
+ end
+
+ def make_request(correlation_id)
+ request_env = Rack::MockRequest.env_for('/')
+
+ ::Labkit::Correlation::CorrelationId.use_id(correlation_id) do
+ MarginaliaTestController.action(:first_user).call(request_env)
+ end
+ end
end
diff --git a/spec/lib/pager_duty/webhook_payload_parser_spec.rb b/spec/lib/pager_duty/webhook_payload_parser_spec.rb
index 0010165318d..54c61b9121c 100644
--- a/spec/lib/pager_duty/webhook_payload_parser_spec.rb
+++ b/spec/lib/pager_duty/webhook_payload_parser_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'json_schemer'
RSpec.describe PagerDuty::WebhookPayloadParser do
describe '.call' do
@@ -8,36 +9,36 @@ RSpec.describe PagerDuty::WebhookPayloadParser do
File.read(File.join(File.dirname(__FILE__), '../../fixtures/pager_duty/webhook_incident_trigger.json'))
end
+ let(:triggered_event) do
+ {
+ 'event' => 'incident.trigger',
+ 'incident' => {
+ 'url' => 'https://webdemo.pagerduty.com/incidents/PRORDTY',
+ 'incident_number' => 33,
+ 'title' => 'My new incident',
+ 'status' => 'triggered',
+ 'created_at' => '2017-09-26T15:14:36Z',
+ 'urgency' => 'high',
+ 'incident_key' => nil,
+ 'assignees' => [{
+ 'summary' => 'Laura Haley',
+ 'url' => 'https://webdemo.pagerduty.com/users/P553OPV'
+ }],
+ 'impacted_services' => [{
+ 'summary' => 'Production XDB Cluster',
+ 'url' => 'https://webdemo.pagerduty.com/services/PN49J75'
+ }]
+ }
+ }
+ end
+
subject(:parse) { described_class.call(payload) }
context 'when payload is a correct PagerDuty payload' do
let(:payload) { Gitlab::Json.parse(fixture_file) }
it 'returns parsed payload' do
- is_expected.to eq(
- [
- {
- 'event' => 'incident.trigger',
- 'incident' => {
- 'url' => 'https://webdemo.pagerduty.com/incidents/PRORDTY',
- 'incident_number' => 33,
- 'title' => 'My new incident',
- 'status' => 'triggered',
- 'created_at' => '2017-09-26T15:14:36Z',
- 'urgency' => 'high',
- 'incident_key' => nil,
- 'assignees' => [{
- 'summary' => 'Laura Haley',
- 'url' => 'https://webdemo.pagerduty.com/users/P553OPV'
- }],
- 'impacted_services' => [{
- 'summary' => 'Production XDB Cluster',
- 'url' => 'https://webdemo.pagerduty.com/services/PN49J75'
- }]
- }
- }
- ]
- )
+ is_expected.to eq([triggered_event])
end
context 'when assignments summary and html_url are blank' do
@@ -69,11 +70,42 @@ RSpec.describe PagerDuty::WebhookPayloadParser do
end
end
- context 'when payload has no incidents' do
+ context 'when payload schema is invalid' do
let(:payload) { { 'messages' => [{ 'event' => 'incident.trigger' }] } }
it 'returns payload with blank incidents' do
- is_expected.to eq([{ 'event' => 'incident.trigger', 'incident' => {} }])
+ is_expected.to eq([])
+ end
+ end
+
+ context 'when payload consists of two messages' do
+ context 'when one of the messages has no incident data' do
+ let(:payload) do
+ valid_payload = Gitlab::Json.parse(fixture_file)
+ event = { 'event' => 'incident.trigger' }
+ valid_payload['messages'] = valid_payload['messages'].append(event)
+ valid_payload
+ end
+
+ it 'returns parsed payload with valid events only' do
+ is_expected.to eq([triggered_event])
+ end
+ end
+
+ context 'when one of the messages has unknown event' do
+ let(:payload) do
+ valid_payload = Gitlab::Json.parse(fixture_file)
+ event = { 'event' => 'incident.unknown', 'incident' => valid_payload['messages'].first['incident'] }
+ valid_payload['messages'] = valid_payload['messages'].append(event)
+ valid_payload
+ end
+
+ it 'returns parsed payload' do
+ unknown_event = triggered_event.dup
+ unknown_event['event'] = 'incident.unknown'
+
+ is_expected.to contain_exactly(triggered_event, unknown_event)
+ end
end
end
end
diff --git a/spec/lib/safe_zip/extract_spec.rb b/spec/lib/safe_zip/extract_spec.rb
index 30b7e1cdd2c..443430b267d 100644
--- a/spec/lib/safe_zip/extract_spec.rb
+++ b/spec/lib/safe_zip/extract_spec.rb
@@ -15,11 +15,7 @@ RSpec.describe SafeZip::Extract do
describe '#extract' do
subject { object.extract(directories: directories, to: target_path) }
- shared_examples 'extracts archive' do |param|
- before do
- stub_feature_flags(safezip_use_rubyzip: param)
- end
-
+ shared_examples 'extracts archive' do
it 'does extract archive' do
subject
@@ -28,11 +24,7 @@ RSpec.describe SafeZip::Extract do
end
end
- shared_examples 'fails to extract archive' do |param|
- before do
- stub_feature_flags(safezip_use_rubyzip: param)
- end
-
+ shared_examples 'fails to extract archive' do
it 'does not extract archive' do
expect { subject }.to raise_error(SafeZip::Extract::Error)
end
@@ -42,13 +34,7 @@ RSpec.describe SafeZip::Extract do
context "when using #{name} archive" do
let(:archive_name) { name }
- context 'for RubyZip' do
- it_behaves_like 'extracts archive', true
- end
-
- context 'for UnZip' do
- it_behaves_like 'extracts archive', false
- end
+ it_behaves_like 'extracts archive'
end
end
@@ -56,13 +42,7 @@ RSpec.describe SafeZip::Extract do
context "when using #{name} archive" do
let(:archive_name) { name }
- context 'for RubyZip' do
- it_behaves_like 'fails to extract archive', true
- end
-
- context 'for UnZip (UNSAFE)' do
- it_behaves_like 'extracts archive', false
- end
+ it_behaves_like 'fails to extract archive'
end
end
@@ -70,13 +50,7 @@ RSpec.describe SafeZip::Extract do
let(:archive_name) { 'valid-simple.zip' }
let(:directories) { %w(non/existing) }
- context 'for RubyZip' do
- it_behaves_like 'fails to extract archive', true
- end
-
- context 'for UnZip' do
- it_behaves_like 'fails to extract archive', false
- end
+ it_behaves_like 'fails to extract archive'
end
end
end
diff --git a/spec/mailers/abuse_report_mailer_spec.rb b/spec/mailers/abuse_report_mailer_spec.rb
index 4eb616722ac..061f972fd35 100644
--- a/spec/mailers/abuse_report_mailer_spec.rb
+++ b/spec/mailers/abuse_report_mailer_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe AbuseReportMailer do
describe '.notify' do
before do
- stub_application_setting(admin_notification_email: 'admin@example.com')
+ stub_application_setting(abuse_notification_email: 'admin@example.com')
end
let(:report) { create(:abuse_report) }
@@ -17,8 +17,8 @@ RSpec.describe AbuseReportMailer do
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
- context 'with admin_notification_email set' do
- it 'sends to the admin_notification_email' do
+ context 'with abuse_notification_email set' do
+ it 'sends to the abuse_notification_email' do
is_expected.to deliver_to 'admin@example.com'
end
@@ -27,9 +27,9 @@ RSpec.describe AbuseReportMailer do
end
end
- context 'with no admin_notification_email set' do
+ context 'with no abuse_notification_email set' do
it 'returns early' do
- stub_application_setting(admin_notification_email: nil)
+ stub_application_setting(abuse_notification_email: nil)
expect { described_class.notify(spy).deliver_now }
.not_to change { ActionMailer::Base.deliveries.count }
diff --git a/spec/mailers/emails/merge_requests_spec.rb b/spec/mailers/emails/merge_requests_spec.rb
index 477fb16400a..9235a946394 100644
--- a/spec/mailers/emails/merge_requests_spec.rb
+++ b/spec/mailers/emails/merge_requests_spec.rb
@@ -33,4 +33,37 @@ RSpec.describe Emails::MergeRequests do
expect(subject).to have_content current_user.name
end
end
+
+ describe '#merge_requests_csv_email' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:merge_requests) { create_list(:merge_request, 10) }
+ let(:export_status) do
+ {
+ rows_expected: 10,
+ rows_written: 10,
+ truncated: false
+ }
+ end
+
+ let(:csv_data) { MergeRequests::ExportCsvService.new(MergeRequest.all, project).csv_data }
+
+ subject { Notify.merge_requests_csv_email(user, project, csv_data, export_status) }
+
+ it { expect(subject.subject).to eq("#{project.name} | Exported merge requests") }
+ it { expect(subject.to).to contain_exactly(user.notification_email_for(project.group)) }
+ it { expect(subject).to have_content('Your CSV export of 10 merge requests from project') }
+
+ context 'when truncated' do
+ let(:export_status) do
+ {
+ rows_expected: 10,
+ rows_written: 10,
+ truncated: true
+ }
+ end
+
+ it { expect(subject).to have_content('This attachment has been truncated to avoid exceeding the maximum allowed attachment size of 15MB.') }
+ end
+ end
end
diff --git a/spec/mailers/emails/projects_spec.rb b/spec/mailers/emails/projects_spec.rb
index 599f62a8113..aa5947bf68e 100644
--- a/spec/mailers/emails/projects_spec.rb
+++ b/spec/mailers/emails/projects_spec.rb
@@ -30,107 +30,118 @@ RSpec.describe Emails::Projects do
let_it_be(:user) { create(:user) }
describe '#prometheus_alert_fired_email' do
+ let(:default_title) { Gitlab::AlertManagement::Payload::Generic::DEFAULT_TITLE }
+ let(:payload) { { 'startsAt' => Time.now.rfc3339 } }
+ let(:alert_attributes) { build(:alert_management_alert, :from_payload, payload: payload, project: project).attributes }
+
subject do
- Notify.prometheus_alert_fired_email(project.id, user.id, alert_params)
+ Notify.prometheus_alert_fired_email(project.id, user.id, alert_attributes)
end
- let(:alert_params) do
- { 'startsAt' => Time.now.rfc3339 }
+ context 'with missing required attributes' do
+ let(:alert_attributes) { build(:alert_management_alert, :prometheus, :from_payload, payload: payload, project: project).attributes }
+
+ it_behaves_like 'no email'
end
- context 'with a gitlab alert' do
- before do
- alert_params['labels'] = { 'gitlab_alert_id' => alert.prometheus_metric_id.to_s }
- end
+ context 'with minimum required attributes' do
+ let(:payload) { {} }
- let(:title) do
- "#{alert.title} #{alert.computed_operator} #{alert.threshold}"
- end
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
- let(:metrics_url) do
- metrics_project_environment_url(project, environment)
+ it 'has expected subject' do
+ is_expected.to have_subject("#{project.name} | Alert: #{default_title}")
end
- let(:environment) { alert.environment }
+ it 'has expected content' do
+ is_expected.to have_body_text('An alert has been triggered')
+ is_expected.to have_body_text(project.full_path)
+ is_expected.not_to have_body_text('Description:')
+ is_expected.not_to have_body_text('Environment:')
+ is_expected.not_to have_body_text('Metric:')
+ end
+ end
- let!(:alert) { create(:prometheus_alert, project: project) }
+ context 'with description' do
+ let(:payload) { { 'description' => 'alert description' } }
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like 'a user cannot unsubscribe through footer link'
it 'has expected subject' do
- is_expected.to have_subject("#{project.name} | Alert: #{environment.name}: #{title} for 5 minutes")
+ is_expected.to have_subject("#{project.name} | Alert: #{default_title}")
end
it 'has expected content' do
is_expected.to have_body_text('An alert has been triggered')
is_expected.to have_body_text(project.full_path)
- is_expected.to have_body_text('Environment:')
- is_expected.to have_body_text(environment.name)
- is_expected.to have_body_text('Metric:')
- is_expected.to have_body_text(alert.full_query)
- is_expected.to have_body_text(metrics_url)
+ is_expected.to have_body_text('Description:')
+ is_expected.to have_body_text('alert description')
+ is_expected.not_to have_body_text('Environment:')
+ is_expected.not_to have_body_text('Metric:')
end
-
- it_behaves_like 'shows the incident issues url'
end
- context 'with no payload' do
- let(:alert_params) { {} }
+ context 'with environment' do
+ let_it_be(:environment) { create(:environment, project: project) }
+ let(:payload) { { 'gitlab_environment_name' => environment.name } }
+ let(:metrics_url) { metrics_project_environment_url(project, environment) }
- it_behaves_like 'no email'
- end
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
- context 'with an unknown alert' do
- before do
- alert_params['labels'] = { 'gitlab_alert_id' => 'unknown' }
+ it 'has expected subject' do
+ is_expected.to have_subject("#{project.name} | Alert: #{environment.name}: #{default_title}")
end
- it_behaves_like 'no email'
+ it 'has expected content' do
+ is_expected.to have_body_text('An alert has been triggered')
+ is_expected.to have_body_text(project.full_path)
+ is_expected.to have_body_text('Environment:')
+ is_expected.to have_body_text(environment.name)
+ is_expected.not_to have_body_text('Description:')
+ is_expected.not_to have_body_text('Metric:')
+ end
end
- context 'with an external alert' do
- let(:title) { 'alert title' }
+ context 'with gitlab alerting rule' do
+ let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
+ let_it_be(:environment) { prometheus_alert.environment }
- let(:metrics_url) do
- metrics_project_environments_url(project)
- end
+ let(:alert_attributes) { build(:alert_management_alert, :prometheus, :from_payload, payload: payload, project: project).attributes }
+ let(:title) { "#{prometheus_alert.title} #{prometheus_alert.computed_operator} #{prometheus_alert.threshold}" }
+ let(:metrics_url) { metrics_project_environment_url(project, environment) }
before do
- alert_params['annotations'] = { 'title' => title }
- alert_params['generatorURL'] = 'http://localhost:9090/graph?g0.expr=vector%281%29&g0.tab=1'
+ payload['labels'] = {
+ 'gitlab_alert_id' => prometheus_alert.prometheus_metric_id,
+ 'alertname' => prometheus_alert.title
+ }
end
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'shows the incident issues url'
it 'has expected subject' do
- is_expected.to have_subject("#{project.name} | Alert: #{title}")
+ is_expected.to have_subject("#{project.name} | Alert: #{environment.name}: #{title} for 5 minutes")
end
it 'has expected content' do
is_expected.to have_body_text('An alert has been triggered')
is_expected.to have_body_text(project.full_path)
+ is_expected.to have_body_text('Environment:')
+ is_expected.to have_body_text(environment.name)
+ is_expected.to have_body_text('Metric:')
+ is_expected.to have_body_text(prometheus_alert.full_query)
+ is_expected.to have_body_text(metrics_url)
is_expected.not_to have_body_text('Description:')
- is_expected.not_to have_body_text('Environment:')
end
-
- context 'with annotated description' do
- let(:description) { 'description' }
-
- before do
- alert_params['annotations']['description'] = description
- end
-
- it 'shows the description' do
- is_expected.to have_body_text('Description:')
- is_expected.to have_body_text(description)
- end
- end
-
- it_behaves_like 'shows the incident issues url'
end
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index b9f95a9eb00..8604939ead9 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1508,12 +1508,44 @@ RSpec.describe Notify do
)
end
- describe 'group invitation' do
+ describe 'invitations' do
let(:owner) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } }
let(:group_member) { invite_to_group(group, inviter: inviter) }
let(:inviter) { owner }
- subject { described_class.member_invited_email('group', group_member.id, group_member.invite_token) }
+ subject { described_class.member_invited_email('Group', group_member.id, group_member.invite_token) }
+
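+ # `experimental_group` is defined by the including contexts below and selects which property value is tracked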
+ shared_examples "tracks the 'sent' event for the invitation reminders experiment" do
+ before do
+ stub_experiment(invitation_reminders: true)
+ allow(Gitlab::Experimentation).to receive(:enabled_for_attribute?).with(:invitation_reminders, group_member.invite_email).and_return(experimental_group)
+ end
+
+ it "tracks the 'sent' event", :snowplow do
+ subject.deliver_now
+
+ expect_snowplow_event(
+ category: 'Growth::Acquisition::Experiment::InvitationReminders',
+ label: Digest::MD5.hexdigest(group_member.to_global_id.to_s),
+ property: experimental_group ? 'experimental_group' : 'control_group',
+ action: 'sent'
+ )
+ end
+ end
+
+ describe 'tracking for the invitation reminders experiment' do
+ context 'when invite email is in the experimental group' do
+ let(:experimental_group) { true }
+
+ it_behaves_like "tracks the 'sent' event for the invitation reminders experiment"
+ end
+
+ context 'when invite email is in the control group' do
+ let(:experimental_group) { false }
+
+ it_behaves_like "tracks the 'sent' event for the invitation reminders experiment"
+ end
+ end
context 'when invite_email_experiment is disabled' do
before do
@@ -1608,6 +1640,88 @@ RSpec.describe Notify do
end
end
+ describe 'group invitation reminders' do
+ let_it_be(:inviter) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } }
+
+ let(:group_member) { invite_to_group(group, inviter: inviter) }
+
+ subject { described_class.member_invited_reminder_email('Group', group_member.id, group_member.invite_token, reminder_index) }
+
+ describe 'not sending a reminder' do
+ let(:reminder_index) { 0 }
+
+ context 'member does not exist' do
+ let(:group_member) { double(id: nil, invite_token: nil) }
+
+ it_behaves_like 'no email is sent'
+ end
+
+ context 'member is not created by a user' do
+ before do
+ group_member.update(created_by: nil)
+ end
+
+ it_behaves_like 'no email is sent'
+ end
+
+ context 'member is a known user' do
+ before do
+ group_member.update(user: create(:user))
+ end
+
+ it_behaves_like 'no email is sent'
+ end
+ end
+
+ describe 'the first reminder' do
+ let(:reminder_index) { 0 }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'contains all the useful information' do
+ is_expected.to have_subject "#{inviter.name}'s invitation to GitLab is pending"
+ is_expected.to have_body_text group.human_name
+ is_expected.to have_body_text group_member.human_access.downcase
+ is_expected.to have_body_text invite_url(group_member.invite_token)
+ is_expected.to have_body_text decline_invite_url(group_member.invite_token)
+ end
+ end
+
+ describe 'the second reminder' do
+ let(:reminder_index) { 1 }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'contains all the useful information' do
+ is_expected.to have_subject "#{inviter.name} is waiting for you to join GitLab"
+ is_expected.to have_body_text group.human_name
+ is_expected.to have_body_text group_member.human_access.downcase
+ is_expected.to have_body_text invite_url(group_member.invite_token)
+ is_expected.to have_body_text decline_invite_url(group_member.invite_token)
+ end
+ end
+
+ describe 'the third reminder' do
+ let(:reminder_index) { 2 }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'contains all the useful information' do
+ is_expected.to have_subject "#{inviter.name} is still waiting for you to join GitLab"
+ is_expected.to have_body_text group.human_name
+ is_expected.to have_body_text group_member.human_access.downcase
+ is_expected.to have_body_text invite_url(group_member.invite_token)
+ is_expected.to have_body_text decline_invite_url(group_member.invite_token)
+ end
+ end
+ end
+
describe 'group invitation accepted' do
let(:invited_user) { create(:user, name: 'invited user') }
let(:owner) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } }
diff --git a/spec/migrations/20200929052138_create_initial_versions_for_pre_versioning_terraform_states_spec.rb b/spec/migrations/20200929052138_create_initial_versions_for_pre_versioning_terraform_states_spec.rb
new file mode 100644
index 00000000000..1a618712b32
--- /dev/null
+++ b/spec/migrations/20200929052138_create_initial_versions_for_pre_versioning_terraform_states_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200929052138_create_initial_versions_for_pre_versioning_terraform_states.rb')
+
+RSpec.describe CreateInitialVersionsForPreVersioningTerraformStates do
+ let(:namespace) { table(:namespaces).create!(name: 'terraform', path: 'terraform') }
+ let(:project) { table(:projects).create!(id: 1, namespace_id: namespace.id) }
+ let(:terraform_state_versions) { table(:terraform_state_versions) }
+
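+ # Inserts a terraform_states row directly through the migration table helper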
+ def create_state!(project, versioning_enabled:)
+ table(:terraform_states).create!(
+ project_id: project.id,
+ uuid: 'uuid',
+ file_store: 2,
+ file: 'state.tfstate',
+ versioning_enabled: versioning_enabled
+ )
+ end
+
+ describe '#up' do
+ context 'for a state that is already versioned' do
+ let!(:terraform_state) { create_state!(project, versioning_enabled: true) }
+
+ it 'does not insert a version record' do
+ expect { migrate! }.not_to change { terraform_state_versions.count }
+ end
+ end
+
+ context 'for a state that is not yet versioned' do
+ let!(:terraform_state) { create_state!(project, versioning_enabled: false) }
+
+ it 'creates a version using the current state data' do
+ expect { migrate! }.to change { terraform_state_versions.count }.by(1)
+
+ migrated_version = terraform_state_versions.last
+ expect(migrated_version.terraform_state_id).to eq(terraform_state.id)
+ expect(migrated_version.version).to be_zero
+ expect(migrated_version.file_store).to eq(terraform_state.file_store)
+ expect(migrated_version.file).to eq(terraform_state.file)
+ expect(migrated_version.created_at).to be_present
+ expect(migrated_version.updated_at).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20201014205300_drop_backfill_jira_tracker_deployment_type_jobs_spec.rb b/spec/migrations/20201014205300_drop_backfill_jira_tracker_deployment_type_jobs_spec.rb
new file mode 100644
index 00000000000..134bea6b666
--- /dev/null
+++ b/spec/migrations/20201014205300_drop_backfill_jira_tracker_deployment_type_jobs_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20201014205300_drop_backfill_jira_tracker_deployment_type_jobs.rb')
+
+RSpec.describe DropBackfillJiraTrackerDeploymentTypeJobs, :sidekiq, :redis, schema: 2020_10_14_205300 do
+ subject(:migration) { described_class.new }
+
+ describe '#up' do
+ let(:retry_set) { Sidekiq::RetrySet.new }
+ let(:scheduled_set) { Sidekiq::ScheduledSet.new }
+
+ context 'there are only affected jobs on the queue' do
+ let(:payload) { { 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1] } }
+ let(:queue_payload) { payload.merge('queue' => described_class::QUEUE) }
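+ # `payload` mirrors a scheduled BackgroundMigrationWorker job for the dropped class; `queue_payload` is the same job pushed directly to the queue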
+
+ it 'removes enqueued BackfillJiraTrackerDeploymentType background jobs' do
+ Sidekiq::Testing.disable! do # Sidekiq's API does not have a testing mode: https://github.com/mperham/sidekiq/wiki/testing#api
+ retry_set.schedule(1.hour.from_now, payload)
+ scheduled_set.schedule(1.hour.from_now, payload)
+ Sidekiq::Client.push(queue_payload)
+
+ expect { migration.up }.to change { Sidekiq::Queue.new(described_class::QUEUE).size }.from(1).to(0)
+ expect(retry_set.size).to eq(0)
+ expect(scheduled_set.size).to eq(0)
+ end
+ end
+ end
+
+ context 'there are no affected jobs on the queue' do
+ let(:payload) { { 'class' => ::BackgroundMigrationWorker, 'args' => ['SomeOtherClass', 1] } }
+ let(:queue_payload) { payload.merge('queue' => described_class::QUEUE) }
+
+ it 'skips other enqueued jobs' do
+ Sidekiq::Testing.disable! do
+ retry_set.schedule(1.hour.from_now, payload)
+ scheduled_set.schedule(1.hour.from_now, payload)
+ Sidekiq::Client.push(queue_payload)
+
+ expect { migration.up }.not_to change { Sidekiq::Queue.new(described_class::QUEUE).size }
+ expect(retry_set.size).to eq(1)
+ expect(scheduled_set.size).to eq(1)
+ end
+ end
+ end
+
+ context 'other queues' do
+ it 'does not modify them' do
+ Sidekiq::Testing.disable! do
+ Sidekiq::Client.push('queue' => 'other', 'class' => ::BackgroundMigrationWorker, 'args' => ['SomeOtherClass', 1])
+ Sidekiq::Client.push('queue' => 'other', 'class' => ::BackgroundMigrationWorker, 'args' => [described_class::DROPPED_JOB_CLASS, 1])
+
+ expect { migration.up }.not_to change { Sidekiq::Queue.new('other').size }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/add_partial_index_to_ci_builds_table_on_user_id_name_spec.rb b/spec/migrations/add_partial_index_to_ci_builds_table_on_user_id_name_spec.rb
new file mode 100644
index 00000000000..018d48bea66
--- /dev/null
+++ b/spec/migrations/add_partial_index_to_ci_builds_table_on_user_id_name_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200908064229_add_partial_index_to_ci_builds_table_on_user_id_name.rb')
+
+RSpec.describe AddPartialIndexToCiBuildsTableOnUserIdName do
+ let(:migration) { described_class.new }
+
+ describe '#up' do
+ it 'creates temporary partial index on type' do
+ expect { migration.up }.to change { migration.index_exists?(:ci_builds, [:user_id, :name], name: described_class::INDEX_NAME) }.from(false).to(true)
+ end
+ end
+
+ describe '#down' do
+ it 'removes temporary partial index on type' do
+ migration.up
+
+ expect { migration.down }.to change { migration.index_exists?(:ci_builds, [:user_id, :name], name: described_class::INDEX_NAME) }.from(true).to(false)
+ end
+ end
+end
diff --git a/spec/migrations/backfill_status_page_published_incidents_spec.rb b/spec/migrations/backfill_status_page_published_incidents_spec.rb
index 2b1ab891038..674484cdf0a 100644
--- a/spec/migrations/backfill_status_page_published_incidents_spec.rb
+++ b/spec/migrations/backfill_status_page_published_incidents_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe BackfillStatusPagePublishedIncidents, :migration do
end
it 'creates a StatusPage::PublishedIncident record for each published issue' do
- Timecop.freeze(current_time) do
+ travel_to(current_time) do
expect(incidents.all).to be_empty
migrate!
diff --git a/spec/migrations/cleanup_group_import_states_with_null_user_id_spec.rb b/spec/migrations/cleanup_group_import_states_with_null_user_id_spec.rb
new file mode 100644
index 00000000000..f9285c857de
--- /dev/null
+++ b/spec/migrations/cleanup_group_import_states_with_null_user_id_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+# To test the CleanupGroupImportStatesWithNullUserId migration, we first need
+# to create GroupImportState records with a NULL user_id
+# and then run the migration to check that the user_id is populated or the record is removed.
+#
+# The problem is that the CleanupGroupImportStatesWithNullUserId migration comes
+# after the NOT NULL constraint was added by a previous migration (AddNotNullConstraintToUserOnGroupImportStates).
+# That means that while testing the current class we cannot insert GroupImportState records with an
+# invalid user_id, because the constraint blocks the insert.
+#
+# To solve this problem, use SchemaVersionFinder to set the schema to the version prior to AddNotNullConstraintToUserOnGroupImportStates.
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200907092715_add_not_null_constraint_to_user_on_group_import_states.rb')
+require Rails.root.join('db', 'post_migrate', '20200909161624_cleanup_group_import_states_with_null_user_id.rb')
+
+RSpec.describe CleanupGroupImportStatesWithNullUserId, :migration,
+ schema: MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraintToUserOnGroupImportStates) do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:users_table) { table(:users) }
+ let(:group_import_states_table) { table(:group_import_states) }
+ let(:members_table) { table(:members) }
+
+ describe 'Group import states clean up' do
+ context 'when user_id is present' do
+ it 'does not update group_import_state record' do
+ user_1 = users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 1)
+ group_1 = namespaces_table.create!(name: 'group_1', path: 'group_1', type: 'Group')
+ create_member(user_id: user_1.id, type: 'GroupMember', source_type: 'Namespace', source_id: group_1.id, access_level: described_class::Group::OWNER)
+ group_import_state_1 = group_import_states_table.create!(group_id: group_1.id, user_id: user_1.id, status: 0)
+
+ expect(group_import_state_1.user_id).to eq(user_1.id)
+
+ disable_migrations_output { migrate! }
+
+ expect(group_import_state_1.reload.user_id).to eq(user_1.id)
+ end
+ end
+
+ context 'when user_id is missing' do
+ it 'updates user_id with group default owner id' do
+ user_2 = users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 1)
+ group_2 = namespaces_table.create!(name: 'group_2', path: 'group_2', type: 'Group')
+ create_member(user_id: user_2.id, type: 'GroupMember', source_type: 'Namespace', source_id: group_2.id, access_level: described_class::Group::OWNER)
+ group_import_state_2 = group_import_states_table.create!(group_id: group_2.id, user_id: nil, status: 0)
+
+ disable_migrations_output { migrate! }
+
+ expect(group_import_state_2.reload.user_id).to eq(user_2.id)
+ end
+ end
+
+ context 'when group does not contain any owners' do
+ it 'removes group_import_state record' do
+ group_3 = namespaces_table.create!(name: 'group_3', path: 'group_3', type: 'Group')
+ group_import_state_3 = group_import_states_table.create!(group_id: group_3.id, user_id: nil, status: 0)
+
+ disable_migrations_output { migrate! }
+
+ expect { group_import_state_3.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'when group has parent' do
+ it 'updates user_id with parent group default owner id' do
+ user = users_table.create!(name: 'user4', email: 'user4@example.com', projects_limit: 1)
+ group_1 = namespaces_table.create!(name: 'group_1', path: 'group_1', type: 'Group')
+ create_member(user_id: user.id, type: 'GroupMember', source_type: 'Namespace', source_id: group_1.id, access_level: described_class::Group::OWNER)
+ group_2 = namespaces_table.create!(name: 'group_2', path: 'group_2', type: 'Group', parent_id: group_1.id)
+ group_import_state = group_import_states_table.create!(group_id: group_2.id, user_id: nil, status: 0)
+
+ disable_migrations_output { migrate! }
+
+ expect(group_import_state.reload.user_id).to eq(user.id)
+ end
+ end
+
+ context 'when group has owner_id' do
+ it 'updates user_id with owner_id' do
+ user = users_table.create!(name: 'user', email: 'user@example.com', projects_limit: 1)
+ group = namespaces_table.create!(name: 'group', path: 'group', type: 'Group', owner_id: user.id)
+ group_import_state = group_import_states_table.create!(group_id: group.id, user_id: nil, status: 0)
+
+ disable_migrations_output { migrate! }
+
+ expect(group_import_state.reload.user_id).to eq(user.id)
+ end
+ end
+ end
+
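+ # Inserts a members row with required defaults; options supply the user, type, source, and access level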
+ def create_member(options)
+ members_table.create!(
+ {
+ notification_level: 0,
+ ldap: false,
+ override: false
+ }.merge(options)
+ )
+ end
+end
diff --git a/spec/migrations/ensure_filled_file_store_on_package_files_spec.rb b/spec/migrations/ensure_filled_file_store_on_package_files_spec.rb
new file mode 100644
index 00000000000..8a0f51ab27e
--- /dev/null
+++ b/spec/migrations/ensure_filled_file_store_on_package_files_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200915185707_ensure_filled_file_store_on_package_files.rb')
+
+RSpec.describe EnsureFilledFileStoreOnPackageFiles, schema: 20200910175553 do
+ let!(:packages_package_files) { table(:packages_package_files) }
+ let!(:packages_packages) { table(:packages_packages) }
+ let!(:namespaces) { table(:namespaces) }
+ let!(:projects) { table(:projects) }
+ let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let!(:package) { packages_packages.create!(project_id: project.id, name: 'bar', package_type: 1) }
+
+ before do
+ constraint_name = 'check_4c5e6bb0b3'
+
+ # Drop the NOT NULL check constraint so we can insert a row with a NULL file_store to backfill.
+ ActiveRecord::Base.connection.execute "ALTER TABLE packages_package_files DROP CONSTRAINT #{constraint_name}"
+
+ @file_store_1 = packages_package_files.create!(file_store: 1, file_name: 'foo_1', file: 'foo_1', package_id: package.id)
+ @file_store_2 = packages_package_files.create!(file_store: 2, file_name: 'foo_2', file: 'foo_2', package_id: package.id)
+ @file_store_nil = packages_package_files.create!(file_store: nil, file_name: 'foo_nil', file: 'foo_nil', package_id: package.id)
+
+ # Restore the check constraint
+ ActiveRecord::Base.connection.execute "ALTER TABLE packages_package_files ADD CONSTRAINT #{constraint_name} CHECK ((file_store IS NOT NULL)) NOT VALID"
+ end
+
+ it 'correctly migrates nil file_store to 1' do
+ migrate!
+
+ @file_store_1.reload
+ @file_store_2.reload
+ @file_store_nil.reload
+
+ expect(@file_store_1.file_store).to eq(1) # unchanged
+ expect(@file_store_2.file_store).to eq(2) # unchanged
+ expect(@file_store_nil.file_store).to eq(1) # nil => 1
+ end
+end
diff --git a/spec/migrations/migrate_compliance_framework_enum_to_database_framework_record_spec.rb b/spec/migrations/migrate_compliance_framework_enum_to_database_framework_record_spec.rb
new file mode 100644
index 00000000000..cd2ec81abb7
--- /dev/null
+++ b/spec/migrations/migrate_compliance_framework_enum_to_database_framework_record_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20201005094331_migrate_compliance_framework_enum_to_database_framework_record.rb')
+
+RSpec.describe MigrateComplianceFrameworkEnumToDatabaseFrameworkRecord, schema: 20201005092753 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:project_compliance_framework_settings) { table(:project_compliance_framework_settings) }
+ let(:compliance_management_frameworks) { table(:compliance_management_frameworks) }
+
+ let(:gdpr_framework) { 1 }
+ let(:sox_framework) { 5 }
+
+ let!(:root_group) { namespaces.create!(type: 'Group', name: 'a', path: 'a') }
+ let!(:sub_group) { namespaces.create!(type: 'Group', name: 'b', path: 'b', parent_id: root_group.id) }
+ let!(:sub_sub_group) { namespaces.create!(type: 'Group', name: 'c', path: 'c', parent_id: sub_group.id) }
+
+ let!(:namespace) { namespaces.create!(name: 'd', path: 'd') }
+
+ let!(:project_on_root_level) { projects.create!(namespace_id: root_group.id) }
+ let!(:project_on_sub_sub_level_1) { projects.create!(namespace_id: sub_sub_group.id) }
+ let!(:project_on_sub_sub_level_2) { projects.create!(namespace_id: sub_sub_group.id) }
+ let!(:project_on_namespace) { projects.create!(namespace_id: namespace.id) }
+
+ let!(:project_on_root_level_compliance_setting) { project_compliance_framework_settings.create!(project_id: project_on_root_level.id, framework: gdpr_framework) }
+ let!(:project_on_sub_sub_level_compliance_setting_1) { project_compliance_framework_settings.create!(project_id: project_on_sub_sub_level_1.id, framework: sox_framework) }
+ let!(:project_on_sub_sub_level_compliance_setting_2) { project_compliance_framework_settings.create!(project_id: project_on_sub_sub_level_2.id, framework: gdpr_framework) }
+ let!(:project_on_namespace_level_compliance_setting) { project_compliance_framework_settings.create!(project_id: project_on_namespace.id, framework: gdpr_framework) }
+
+ subject { described_class.new.up }
+
+ context 'when Gitlab.ee? is true' do
+ before do
+ expect(Gitlab).to receive(:ee?).and_return(true)
+ end
+
+ it 'updates the project settings' do
+ subject
+
+ gdpr_framework = compliance_management_frameworks.find_by(namespace_id: root_group.id, name: 'GDPR')
+ expect(project_on_root_level_compliance_setting.reload.framework_id).to eq(gdpr_framework.id)
+ expect(project_on_sub_sub_level_compliance_setting_2.reload.framework_id).to eq(gdpr_framework.id)
+
+ sox_framework = compliance_management_frameworks.find_by(namespace_id: root_group.id, name: 'SOX')
+ expect(project_on_sub_sub_level_compliance_setting_1.reload.framework_id).to eq(sox_framework.id)
+
+ gdpr_framework = compliance_management_frameworks.find_by(namespace_id: namespace.id, name: 'GDPR')
+ expect(project_on_namespace_level_compliance_setting.reload.framework_id).to eq(gdpr_framework.id)
+ end
+
+ it 'adds two framework records' do
+ subject
+
+ expect(compliance_management_frameworks.count).to eq(3)
+ end
+ end
+
+ context 'when Gitlab.ee? is false' do
+ before do
+ expect(Gitlab).to receive(:ee?).and_return(false)
+ end
+
+ it 'does nothing' do
+ subject
+
+ expect(compliance_management_frameworks.count).to eq(0)
+ end
+ end
+end
diff --git a/spec/migrations/schedule_blocked_by_links_replacement_spec.rb b/spec/migrations/schedule_blocked_by_links_replacement_spec.rb
new file mode 100644
index 00000000000..36610507921
--- /dev/null
+++ b/spec/migrations/schedule_blocked_by_links_replacement_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20201015073808_schedule_blocked_by_links_replacement')
+
+RSpec.describe ScheduleBlockedByLinksReplacement do
+ let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { table(:projects).create!(namespace_id: namespace.id, name: 'gitlab') }
+ let(:issue1) { table(:issues).create!(project_id: project.id, title: 'a') }
+ let(:issue2) { table(:issues).create!(project_id: project.id, title: 'b') }
+ let(:issue3) { table(:issues).create!(project_id: project.id, title: 'c') }
+ let!(:issue_links) do
+ [
+ table(:issue_links).create!(source_id: issue1.id, target_id: issue2.id, link_type: 1),
+ table(:issue_links).create!(source_id: issue2.id, target_id: issue1.id, link_type: 2),
+ table(:issue_links).create!(source_id: issue1.id, target_id: issue3.id, link_type: 2)
+ ]
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ end
+
+ it 'schedules jobs for blocked_by links' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
+ 2.minutes, issue_links[1].id, issue_links[1].id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
+ 4.minutes, issue_links[2].id, issue_links[2].id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/schedule_migrate_u2f_webauthn_spec.rb b/spec/migrations/schedule_migrate_u2f_webauthn_spec.rb
new file mode 100644
index 00000000000..5dc4d676063
--- /dev/null
+++ b/spec/migrations/schedule_migrate_u2f_webauthn_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200929114107_schedule_migrate_u2f_webauthn.rb')
+
+RSpec.describe ScheduleMigrateU2fWebauthn do
+ let(:migration_name) { described_class::MIGRATION }
+ let(:u2f_registrations) { table(:u2f_registrations) }
+ let(:webauthn_registrations) { table(:webauthn_registrations) }
+
+ let(:users) { table(:users) }
+
+ let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ end
+
+ context 'when there are u2f registrations' do
+ let!(:u2f_reg_1) { create_u2f_registration(1, 'reg1') }
+ let!(:u2f_reg_2) { create_u2f_registration(2, 'reg2') }
+
+ it 'schedules a background migration' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(migration_name).to be_scheduled_delayed_migration(2.minutes, 1, 1)
+ expect(migration_name).to be_scheduled_delayed_migration(4.minutes, 2, 2)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+
+ context 'when there are no u2f registrations' do
+ it 'does not schedule background migrations' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(0)
+ end
+ end
+ end
+ end
+
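+ # Persists a u2f_registrations row built from a fake U2F device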
+ def create_u2f_registration(id, name)
+ device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
+ u2f_registrations.create!({ id: id,
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: Base64.strict_encode64(device.origin_public_key_raw),
+ counter: 5,
+ name: name,
+ user_id: user.id })
+ end
+end
diff --git a/spec/migrations/set_job_waiter_ttl_spec.rb b/spec/migrations/set_job_waiter_ttl_spec.rb
new file mode 100644
index 00000000000..b9cf7c55798
--- /dev/null
+++ b/spec/migrations/set_job_waiter_ttl_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200930144340_set_job_waiter_ttl.rb')
+
+RSpec.describe SetJobWaiterTtl, :redis do
+ it 'sets TTLs where necessary' do
+ waiter_with_ttl = Gitlab::JobWaiter.new.key
+ waiter_without_ttl = Gitlab::JobWaiter.new.key
+ key_with_ttl = "foo:bar"
+ key_without_ttl = "foo:qux"
+
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(waiter_with_ttl, "zzz", ex: 2000)
+ redis.set(waiter_without_ttl, "zzz")
+ redis.set(key_with_ttl, "zzz", ex: 2000)
+ redis.set(key_without_ttl, "zzz")
+
+ described_class.new.up
+
+ # This is the point of the migration: we know it sets a TTL of 21_600 on waiter keys that lack one
+ expect(redis.ttl(waiter_without_ttl)).to be > 20_000
+
+ # Other TTLs should be untouched by the migration
+ expect(redis.ttl(waiter_with_ttl)).to be_between(1000, 2000)
+ expect(redis.ttl(key_with_ttl)).to be_between(1000, 2000)
+ expect(redis.ttl(key_without_ttl)).to eq(-1)
+ end
+ end
+end
diff --git a/spec/models/alert_management/alert_spec.rb b/spec/models/alert_management/alert_spec.rb
index eb9dcca842d..b57062b5fc1 100644
--- a/spec/models/alert_management/alert_spec.rb
+++ b/spec/models/alert_management/alert_spec.rb
@@ -133,7 +133,7 @@ RSpec.describe AlertManagement::Alert do
let(:new_alert) { build(:alert_management_alert, new_status, fingerprint: fingerprint, project: project) }
before do
- existing_alert.public_send(described_class::STATUS_EVENTS[existing_status])
+ existing_alert.change_status_to(existing_status)
end
if params[:valid]
@@ -170,6 +170,12 @@ RSpec.describe AlertManagement::Alert do
it { is_expected.to be_valid }
end
+
+ context 'nested array' do
+ let(:hosts) { ['111.111.111.111', ['111.111.111.111']] }
+
+ it { is_expected.not_to be_valid }
+ end
end
end
@@ -189,14 +195,14 @@ RSpec.describe AlertManagement::Alert do
end
describe '.for_status' do
- let(:status) { AlertManagement::Alert::STATUSES[:resolved] }
+ let(:status) { :resolved }
subject { AlertManagement::Alert.for_status(status) }
it { is_expected.to match_array(resolved_alert) }
context 'with multiple statuses' do
- let(:status) { AlertManagement::Alert::STATUSES.values_at(:resolved, :ignored) }
+ let(:status) { [:resolved, :ignored] }
it { is_expected.to match_array([resolved_alert, ignored_alert]) }
end
@@ -230,6 +236,35 @@ RSpec.describe AlertManagement::Alert do
it { is_expected.to match_array(env_alert) }
end
+ describe '.for_assignee_username' do
+ let_it_be(:alert) { triggered_alert }
+ let_it_be(:assignee) { create(:user) }
+
+ subject { AlertManagement::Alert.for_assignee_username(assignee_username) }
+
+ before_all do
+ alert.update!(assignees: [assignee])
+ end
+
+ context 'when matching assignee_username' do
+ let(:assignee_username) { assignee.username }
+
+ it { is_expected.to contain_exactly(alert) }
+ end
+
+ context 'when unknown assignee_username' do
+ let(:assignee_username) { 'unknown username' }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'with empty assignee_username' do
+ let(:assignee_username) { ' ' }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.order_severity_with_open_prometheus_alert' do
subject { described_class.where(project: alert_project).order_severity_with_open_prometheus_alert }
@@ -241,19 +276,6 @@ RSpec.describe AlertManagement::Alert do
it { is_expected.to eq([triggered_critical_alert, triggered_high_alert]) }
end
- describe '.counts_by_status' do
- subject { described_class.counts_by_status }
-
- it do
- is_expected.to eq(
- triggered_alert.status => 1,
- acknowledged_alert.status => 1,
- resolved_alert.status => 1,
- ignored_alert.status => 1
- )
- end
- end
-
describe '.counts_by_project_id' do
subject { described_class.counts_by_project_id }
@@ -278,6 +300,55 @@ RSpec.describe AlertManagement::Alert do
end
end
+ describe '.status_value' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :status_value) do
+ :triggered | 0
+ :acknowledged | 1
+ :resolved | 2
+ :ignored | 3
+ :unknown | nil
+ end
+
+ with_them do
+ it 'returns the status value for the given name' do
+ expect(described_class.status_value(status)).to eq(status_value)
+ end
+ end
+ end
+
+ describe '.status_name' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:raw_status, :status) do
+ 0 | :triggered
+ 1 | :acknowledged
+ 2 | :resolved
+ 3 | :ignored
+ -1 | nil
+ end
+
+ with_them do
+ it 'returns the status name for the given value' do
+ expect(described_class.status_name(raw_status)).to eq(status)
+ end
+ end
+ end
+
+ describe '.counts_by_status' do
+ subject { described_class.counts_by_status }
+
+ it do
+ is_expected.to eq(
+ triggered: 1,
+ acknowledged: 1,
+ resolved: 1,
+ ignored: 1
+ )
+ end
+ end
+
describe '.last_prometheus_alert_by_project_id' do
subject { described_class.last_prometheus_alert_by_project_id }
@@ -363,6 +434,24 @@ RSpec.describe AlertManagement::Alert do
end
end
+ describe '.open_status?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :is_open_status) do
+ :triggered | true
+ :acknowledged | true
+ :resolved | false
+ :ignored | false
+ nil | false
+ end
+
+ with_them do
+ it 'returns whether the status is an open status' do
+ expect(described_class.open_status?(status)).to eq(is_open_status)
+ end
+ end
+ end
+
describe '#to_reference' do
it { expect(triggered_alert.to_reference).to eq("^alert##{triggered_alert.iid}") }
end
@@ -453,4 +542,54 @@ RSpec.describe AlertManagement::Alert do
expect { subject }.to change { alert.events }.by(1)
end
end
+
+ describe '#status_event_for' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:for_status, :event) do
+ :triggered | :trigger
+ 'triggered' | :trigger
+ :acknowledged | :acknowledge
+ 'acknowledged' | :acknowledge
+ :resolved | :resolve
+ 'resolved' | :resolve
+ :ignored | :ignore
+ 'ignored' | :ignore
+ :unknown | nil
+ nil | nil
+ '' | nil
+ 1 | nil
+ end
+
+ with_them do
+ let(:alert) { build(:alert_management_alert, project: project) }
+
+ it 'returns the event for the given status name' do
+ expect(alert.status_event_for(for_status)).to eq(event)
+ end
+ end
+ end
+
+ describe '#change_status_to' do
+ let_it_be_with_reload(:alert) { create(:alert_management_alert, project: project) }
+
+ context 'with valid statuses' do
+ it 'changes the status to triggered' do
+ alert.acknowledge! # change to non-triggered status
+ expect { alert.change_status_to(:triggered) }.to change { alert.triggered? }.to(true)
+ end
+
+ %i(acknowledged resolved ignored).each do |status|
+ it "changes the status to #{status}" do
+ expect { alert.change_status_to(status) }.to change { alert.public_send(:"#{status}?") }.to(true)
+ end
+ end
+ end
+
+ context 'with invalid status' do
+ it 'does not change the current status' do
+ expect { alert.change_status_to(nil) }.not_to change { alert.status }
+ end
+ end
+ end
end
diff --git a/spec/models/alert_management/http_integration_spec.rb b/spec/models/alert_management/http_integration_spec.rb
new file mode 100644
index 00000000000..37d67dfe09a
--- /dev/null
+++ b/spec/models/alert_management/http_integration_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AlertManagement::HttpIntegration do
+ let_it_be(:project) { create(:project) }
+
+ subject(:integration) { build(:alert_management_http_integration) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_presence_of(:endpoint_identifier) }
+ it { is_expected.to validate_length_of(:endpoint_identifier).is_at_most(255) }
+
+ context 'when active' do
+ # Use `create` instead of `build` so the integration's `token` is set.
+ # Otherwise the uniqueness matcher saves the integration with `validate: false`.
+ subject { create(:alert_management_http_integration) }
+
+ it { is_expected.to validate_uniqueness_of(:endpoint_identifier).scoped_to(:project_id, :active) }
+ end
+
+ context 'when inactive' do
+ subject { create(:alert_management_http_integration, :inactive) }
+
+ it { is_expected.not_to validate_uniqueness_of(:endpoint_identifier).scoped_to(:project_id, :active) }
+ end
+ end
+
+ describe '#token' do
+ subject { integration.token }
+
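+ # Captures the token before reassignment so examples can compare the previous and new values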
+ shared_context 'assign token' do |token|
+ let!(:previous_token) { integration.token }
+
+ before do
+ integration.token = token
+ integration.valid?
+ end
+ end
+
+ shared_examples 'valid token' do
+ it { is_expected.to match(/\A\h{32}\z/) }
+ end
+
+ context 'when unsaved' do
+ context 'when unassigned' do
+ before do
+ integration.valid?
+ end
+
+ it_behaves_like 'valid token'
+ end
+
+ context 'when assigned' do
+ include_context 'assign token', 'random_token'
+
+ it_behaves_like 'valid token'
+ it { is_expected.not_to eq('random_token') }
+ end
+ end
+
+ context 'when persisted' do
+ before do
+ integration.save!
+ integration.reload
+ end
+
+ it_behaves_like 'valid token'
+
+ context 'when resetting' do
+ include_context 'assign token', ''
+
+ it_behaves_like 'valid token'
+ it { is_expected.not_to eq(previous_token) }
+ end
+
+ context 'when reassigning' do
+ include_context 'assign token', 'random_token'
+
+ it_behaves_like 'valid token'
+ it { is_expected.to eq(previous_token) }
+ end
+ end
+ end
+end
diff --git a/spec/models/analytics/instance_statistics/measurement_spec.rb b/spec/models/analytics/instance_statistics/measurement_spec.rb
index 4df847ea524..379272cfcb9 100644
--- a/spec/models/analytics/instance_statistics/measurement_spec.rb
+++ b/spec/models/analytics/instance_statistics/measurement_spec.rb
@@ -20,7 +20,11 @@ RSpec.describe Analytics::InstanceStatistics::Measurement, type: :model do
issues: 3,
merge_requests: 4,
groups: 5,
- pipelines: 6
+ pipelines: 6,
+ pipelines_succeeded: 7,
+ pipelines_failed: 8,
+ pipelines_canceled: 9,
+ pipelines_skipped: 10
}.with_indifferent_access)
end
end
@@ -42,4 +46,28 @@ RSpec.describe Analytics::InstanceStatistics::Measurement, type: :model do
it { is_expected.to match_array([measurement_1, measurement_2]) }
end
end
+
+ describe '#measurement_identifier_values' do
+ subject { described_class.measurement_identifier_values.count }
+
+ context 'when the `store_ci_pipeline_counts_by_status` feature flag is off' do
+ let(:expected_count) { Analytics::InstanceStatistics::Measurement.identifiers.size - Analytics::InstanceStatistics::Measurement::EXPERIMENTAL_IDENTIFIERS.size }
+
+ before do
+ stub_feature_flags(store_ci_pipeline_counts_by_status: false)
+ end
+
+ it { is_expected.to eq(expected_count) }
+ end
+
+ context 'when the `store_ci_pipeline_counts_by_status` feature flag is on' do
+ let(:expected_count) { Analytics::InstanceStatistics::Measurement.identifiers.size }
+
+ before do
+ stub_feature_flags(store_ci_pipeline_counts_by_status: true)
+ end
+
+ it { is_expected.to eq(expected_count) }
+ end
+ end
end
diff --git a/spec/models/application_record_spec.rb b/spec/models/application_record_spec.rb
index 5ea1907543a..d080b298e2f 100644
--- a/spec/models/application_record_spec.rb
+++ b/spec/models/application_record_spec.rb
@@ -90,4 +90,12 @@ RSpec.describe ApplicationRecord do
expect(User.at_most(2).count).to eq(2)
end
end
+
+ describe '.where_exists' do
+ it 'produces a WHERE EXISTS query' do
+ user = create(:user)
+
+ expect(User.where_exists(User.limit(1))).to eq([user])
+ end
+ end
end
diff --git a/spec/models/application_setting/term_spec.rb b/spec/models/application_setting/term_spec.rb
index 82347453437..51a6027698f 100644
--- a/spec/models/application_setting/term_spec.rb
+++ b/spec/models/application_setting/term_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe ApplicationSetting::Term do
describe '#accepted_by_user?' do
let(:user) { create(:user) }
+ let(:project_bot) { create(:user, :project_bot) }
let(:term) { create(:term) }
it 'is true when the user accepted the terms' do
@@ -25,6 +26,10 @@ RSpec.describe ApplicationSetting::Term do
expect(term.accepted_by_user?(user)).to be(true)
end
+ it 'is true when the user is a project bot' do
+ expect(term.accepted_by_user?(project_bot)).to be(true)
+ end
+
it 'is false when the user declined the terms' do
decline_terms(term, user)
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 9f76fb3330d..fb702d10a42 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -112,6 +112,35 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value(nil).for(:repository_storages_weighted_default) }
it { is_expected.not_to allow_value({ default: 100, shouldntexist: 50 }).for(:repository_storages_weighted) }
+ context 'help_page_documentation_base_url validations' do
+ it { is_expected.to allow_value(nil).for(:help_page_documentation_base_url) }
+ it { is_expected.to allow_value('https://docs.gitlab.com').for(:help_page_documentation_base_url) }
+ it { is_expected.to allow_value('http://127.0.0.1').for(:help_page_documentation_base_url) }
+ it { is_expected.not_to allow_value('docs.gitlab.com').for(:help_page_documentation_base_url) }
+
+ context 'when url length validation' do
+ let(:value) { 'http://'.ljust(length, 'A') }
+
+ context 'when value string length is 255 characters' do
+ let(:length) { 255 }
+
+ it 'allows the value' do
+ is_expected.to allow_value(value).for(:help_page_documentation_base_url)
+ end
+ end
+
+ context 'when value string length exceeds 255 characters' do
+ let(:length) { 256 }
+
+ it 'does not allow the value' do
+ is_expected.not_to allow_value(value)
+ .for(:help_page_documentation_base_url)
+ .with_message('is too long (maximum is 255 characters)')
+ end
+ end
+ end
+ end
+
context 'grafana_url validations' do
before do
subject.instance_variable_set(:@parsed_grafana_url, nil)
@@ -320,7 +349,7 @@ RSpec.describe ApplicationSetting do
end
end
- it_behaves_like 'an object with email-formated attributes', :admin_notification_email do
+ it_behaves_like 'an object with email-formated attributes', :abuse_notification_email do
subject { setting }
end
@@ -778,6 +807,23 @@ RSpec.describe ApplicationSetting do
end
end
+ describe '#instance_review_permitted?', :request_store do
+ subject { setting.instance_review_permitted? }
+
+ before do
+ RequestStore.store[:current_license] = nil
+ expect(Rails.cache).to receive(:fetch).and_return(
+ ::ApplicationSetting::INSTANCE_REVIEW_MIN_USERS + users_over_minimum
+ )
+ end
+
+ where(users_over_minimum: [-1, 0, 1])
+
+ with_them do
+ it { is_expected.to be(users_over_minimum >= 0) }
+ end
+ end
+
describe 'email_restrictions' do
context 'when email restrictions are enabled' do
before do
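
The `where(users_over_minimum: [-1, 0, 1])` / `with_them` table in the `#instance_review_permitted?` hunk above comes from the rspec-parameterized helpers GitLab loads in its spec setup. A hedged, standalone sketch of the same pattern; the threshold constant and `permitted?` helper below are made up for illustration.

    require 'rspec/autorun'
    require 'rspec-parameterized'

    MINIMUM_USERS = 50 # illustrative threshold, not GitLab's actual constant

    def permitted?(user_count)
      user_count >= MINIMUM_USERS
    end

    RSpec.describe 'instance review threshold (sketch)' do
      # Runs the example once per value in the array.
      where(users_over_minimum: [-1, 0, 1])

      with_them do
        it 'is permitted only at or above the minimum' do
          expect(permitted?(MINIMUM_USERS + users_over_minimum)).to eq(users_over_minimum >= 0)
        end
      end
    end
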
diff --git a/spec/models/audit_event_spec.rb b/spec/models/audit_event_spec.rb
index a1ed48c57f4..5c87c2e68db 100644
--- a/spec/models/audit_event_spec.rb
+++ b/spec/models/audit_event_spec.rb
@@ -6,6 +6,13 @@ RSpec.describe AuditEvent do
let_it_be(:audit_event) { create(:project_audit_event) }
subject { audit_event }
+ describe 'validations' do
+ include_examples 'validates IP address' do
+ let(:attribute) { :ip_address }
+ let(:object) { create(:audit_event) }
+ end
+ end
+
describe '#as_json' do
context 'ip_address' do
subject { build(:group_audit_event, ip_address: '192.168.1.1').as_json }
diff --git a/spec/models/authentication_event_spec.rb b/spec/models/authentication_event_spec.rb
index 56b0111f2c7..483d45c08be 100644
--- a/spec/models/authentication_event_spec.rb
+++ b/spec/models/authentication_event_spec.rb
@@ -11,5 +11,41 @@ RSpec.describe AuthenticationEvent do
it { is_expected.to validate_presence_of(:provider) }
it { is_expected.to validate_presence_of(:user_name) }
it { is_expected.to validate_presence_of(:result) }
+
+ include_examples 'validates IP address' do
+ let(:attribute) { :ip_address }
+ let(:object) { create(:authentication_event) }
+ end
+ end
+
+ describe 'scopes' do
+ let_it_be(:ldap_event) { create(:authentication_event, provider: :ldapmain, result: :failed) }
+ let_it_be(:google_oauth2) { create(:authentication_event, provider: :google_oauth2, result: :success) }
+
+ describe '.for_provider' do
+ it 'returns events only for the specified provider' do
+ expect(described_class.for_provider(:ldapmain)).to match_array ldap_event
+ end
+ end
+
+ describe '.ldap' do
+ it 'returns all events for an LDAP provider' do
+ expect(described_class.ldap).to match_array ldap_event
+ end
+ end
+ end
+
+ describe '.providers' do
+ before do
+ create(:authentication_event, provider: :ldapmain)
+ create(:authentication_event, provider: :google_oauth2)
+ create(:authentication_event, provider: :standard)
+ create(:authentication_event, provider: :standard)
+ create(:authentication_event, provider: :standard)
+ end
+
+ it 'returns an array of distinct providers' do
+ expect(described_class.providers).to match_array %w(ldapmain google_oauth2 standard)
+ end
end
end
diff --git a/spec/models/blob_viewer/markup_spec.rb b/spec/models/blob_viewer/markup_spec.rb
new file mode 100644
index 00000000000..13b040d62d0
--- /dev/null
+++ b/spec/models/blob_viewer/markup_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BlobViewer::Markup do
+ include FakeBlobHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:blob) { fake_blob(path: 'CHANGELOG.md') }
+
+ subject { described_class.new(blob) }
+
+ describe '#banzai_render_context' do
+ it 'returns context needed for banzai rendering' do
+ expect(subject.banzai_render_context.keys).to eq([:cache_key])
+ end
+
+ context 'when blob responds to rendered_markup' do
+ before do
+ allow(blob).to receive(:rendered_markup).and_return("some rendered markup")
+ end
+
+ it 'sets the rendered key' do
+ expect(subject.banzai_render_context.keys).to include(:rendered)
+ end
+ end
+
+ context 'when cached_markdown_blob feature flag is disabled' do
+ before do
+ stub_feature_flags(cached_markdown_blob: false)
+ end
+
+ it 'does not include the cache_key key' do
+ expect(subject.banzai_render_context.keys).not_to include(:cache_key)
+ end
+ end
+ end
+end
diff --git a/spec/models/bulk_import_spec.rb b/spec/models/bulk_import_spec.rb
new file mode 100644
index 00000000000..1a7e1ed8119
--- /dev/null
+++ b/spec/models/bulk_import_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImport, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:user).required }
+ it { is_expected.to have_one(:configuration) }
+ it { is_expected.to have_many(:entities) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:source_type) }
+ it { is_expected.to validate_presence_of(:status) }
+
+ it { is_expected.to define_enum_for(:source_type).with_values(%i[gitlab]) }
+ end
+end
diff --git a/spec/models/bulk_imports/configuration_spec.rb b/spec/models/bulk_imports/configuration_spec.rb
new file mode 100644
index 00000000000..1cbfef631ac
--- /dev/null
+++ b/spec/models/bulk_imports/configuration_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Configuration, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:bulk_import).required }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_length_of(:url).is_at_most(255) }
+ it { is_expected.to validate_length_of(:access_token).is_at_most(255) }
+
+ it { is_expected.to validate_presence_of(:url) }
+ it { is_expected.to validate_presence_of(:access_token) }
+ end
+end
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
new file mode 100644
index 00000000000..ad6e3ec6f30
--- /dev/null
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Entity, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:bulk_import).required }
+ it { is_expected.to belong_to(:parent) }
+ it { is_expected.to belong_to(:group) }
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:source_type) }
+ it { is_expected.to validate_presence_of(:source_full_path) }
+ it { is_expected.to validate_presence_of(:destination_name) }
+ it { is_expected.to validate_presence_of(:destination_namespace) }
+
+ it { is_expected.to define_enum_for(:source_type).with_values(%i[group_entity project_entity]) }
+
+ context 'when associated with a group and project' do
+ it 'is invalid' do
+ entity = build(:bulk_import_entity, group: build(:group), project: build(:project))
+
+ expect(entity).not_to be_valid
+ expect(entity.errors).to include(:project, :group)
+ end
+ end
+
+ context 'when not associated with a group or project' do
+ it 'is valid' do
+ entity = build(:bulk_import_entity, group: nil, project: nil)
+
+ expect(entity).to be_valid
+ end
+ end
+
+ context 'when associated with a group and no project' do
+ it 'is valid as a group_entity' do
+ entity = build(:bulk_import_entity, :group_entity, group: build(:group), project: nil)
+
+ expect(entity).to be_valid
+ end
+
+ it 'is invalid as a project_entity' do
+ entity = build(:bulk_import_entity, :project_entity, group: build(:group), project: nil)
+
+ expect(entity).not_to be_valid
+ expect(entity.errors).to include(:group)
+ end
+ end
+
+ context 'when associated with a project and no group' do
+ it 'is valid' do
+ entity = build(:bulk_import_entity, :project_entity, group: nil, project: build(:project))
+
+ expect(entity).to be_valid
+ end
+
+ it 'is invalid as a group_entity' do
+ entity = build(:bulk_import_entity, :group_entity, group: nil, project: build(:project))
+
+ expect(entity).not_to be_valid
+ expect(entity.errors).to include(:project)
+ end
+ end
+
+ context 'when the parent is a group import' do
+ it 'is valid' do
+ entity = build(:bulk_import_entity, parent: build(:bulk_import_entity, :group_entity))
+
+ expect(entity).to be_valid
+ end
+ end
+
+ context 'when the parent is a project import' do
+ it 'is invalid' do
+ entity = build(:bulk_import_entity, parent: build(:bulk_import_entity, :project_entity))
+
+ expect(entity).not_to be_valid
+ expect(entity.errors).to include(:parent)
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 850fc1ec6e6..c464e176c17 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -59,30 +59,20 @@ RSpec.describe Ci::Bridge do
describe 'state machine transitions' do
context 'when bridge points towards downstream' do
- it 'schedules downstream pipeline creation' do
- expect(bridge).to receive(:schedule_downstream_pipeline!)
+ %i[created manual].each do |status|
+ it "schedules downstream pipeline creation when the status is #{status}" do
+ bridge.status = status
- bridge.enqueue!
- end
- end
- end
-
- describe 'state machine transitions' do
- context 'when bridge points towards downstream' do
- it 'schedules downstream pipeline creation' do
- expect(bridge).to receive(:schedule_downstream_pipeline!)
+ expect(bridge).to receive(:schedule_downstream_pipeline!)
- bridge.enqueue!
+ bridge.enqueue!
+ end
end
- end
- end
- describe 'state machine transitions' do
- context 'when bridge points towards downstream' do
- it 'schedules downstream pipeline creation' do
- expect(bridge).to receive(:schedule_downstream_pipeline!)
+ it 'raises an error when the status is failed' do
+ bridge.status = :failed
- bridge.enqueue!
+ expect { bridge.enqueue! }.to raise_error(StateMachines::InvalidTransition)
end
end
end
@@ -304,4 +294,67 @@ RSpec.describe Ci::Bridge do
end
end
end
+
+ describe '#play' do
+ let(:downstream_project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:bridge) { create(:ci_bridge, :playable, pipeline: pipeline, downstream: downstream_project) }
+
+ subject { bridge.play(user) }
+
+ before do
+ project.add_maintainer(user)
+ downstream_project.add_maintainer(user)
+ end
+
+ it 'enqueues the bridge' do
+ subject
+
+ expect(bridge).to be_pending
+ end
+ end
+
+ describe '#playable?' do
+ context 'when bridge is a manual action' do
+ subject { build_stubbed(:ci_bridge, :manual).playable? }
+
+ it { is_expected.to be_truthy }
+
+ context 'when FF ci_manual_bridges is disabled' do
+ before do
+ stub_feature_flags(ci_manual_bridges: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when build is not a manual action' do
+ subject { build_stubbed(:ci_bridge, :created).playable? }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#action?' do
+ context 'when bridge is a manual action' do
+ subject { build_stubbed(:ci_bridge, :manual).action? }
+
+ it { is_expected.to be_truthy }
+
+ context 'when FF ci_manual_bridges is disabled' do
+ before do
+ stub_feature_flags(ci_manual_bridges: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when build is not a manual action' do
+ subject { build_stubbed(:ci_bridge, :created).action? }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/models/ci/build_pending_state_spec.rb b/spec/models/ci/build_pending_state_spec.rb
new file mode 100644
index 00000000000..a546d2aff65
--- /dev/null
+++ b/spec/models/ci/build_pending_state_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BuildPendingState do
+ describe '#crc32' do
+ context 'when checksum does not exist' do
+ let(:pending_state) do
+ build(:ci_build_pending_state, trace_checksum: nil)
+ end
+
+ it 'returns nil' do
+ expect(pending_state.crc32).to be_nil
+ end
+ end
+
+ context 'when checksum is in hexadecimal' do
+ let(:pending_state) do
+ build(:ci_build_pending_state, trace_checksum: 'crc32:75bcd15')
+ end
+
+ it 'returns decimal representation of the checksum' do
+ expect(pending_state.crc32).to eq 123456789
+ end
+ end
+ end
+end
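
The expectation above relies on the checksum being stored as 'crc32:<hex>' and exposed in decimal: 0x75bcd15 is 123456789. A hedged sketch of that conversion; the helper below is illustrative, not GitLab's implementation.

    # Parse a 'crc32:<hex>' trace checksum into its decimal value.
    def crc32_from(trace_checksum)
      return unless trace_checksum

      trace_checksum.delete_prefix('crc32:').to_i(16)
    end

    crc32_from('crc32:75bcd15') # => 123456789
    crc32_from(nil)             # => nil
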
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 1e551d9ee33..f1d51324bbf 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1109,7 +1109,8 @@ RSpec.describe Ci::Build do
let(:environment) { deployment.environment }
before do
- allow(Deployments::FinishedWorker).to receive(:perform_async)
+ allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
+ allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
end
it 'has deployments record with created status' do
@@ -1129,7 +1130,8 @@ RSpec.describe Ci::Build do
context 'when transits to success' do
before do
- allow(Deployments::SuccessWorker).to receive(:perform_async)
+ allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
+ allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
build.success!
end
@@ -2305,6 +2307,54 @@ RSpec.describe Ci::Build do
end
end
+ describe '#has_expired_locked_archive_artifacts?' do
+ subject { build.has_expired_locked_archive_artifacts? }
+
+ context 'when build does not have artifacts' do
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'when build has artifacts' do
+ before do
+ create(:ci_job_artifact, :archive, job: build)
+ end
+
+ context 'when artifacts are unlocked' do
+ before do
+ build.pipeline.unlocked!
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when artifacts are locked' do
+ before do
+ build.pipeline.artifacts_locked!
+ end
+
+ context 'when artifacts do not expire' do
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when artifacts expire in the future' do
+ before do
+ build.update!(artifacts_expire_at: 1.day.from_now)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when artifacts expired in the past' do
+ before do
+ build.update!(artifacts_expire_at: 1.day.ago)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+ end
+
describe '#has_expiring_archive_artifacts?' do
context 'when artifacts have expiration date set' do
before do
@@ -2504,94 +2554,6 @@ RSpec.describe Ci::Build do
end
end
- describe 'CHANGED_PAGES variables' do
- let(:route_map_yaml) do
- <<~ROUTEMAP
- - source: 'bar/branch-test.txt'
- public: '/bar/branches'
- - source: 'with space/README.md'
- public: '/README'
- ROUTEMAP
- end
-
- before do
- allow_any_instance_of(Project)
- .to receive(:route_map_for).with(/.+/)
- .and_return(Gitlab::RouteMap.new(route_map_yaml))
- end
-
- context 'with a deployment environment and a merge request' do
- let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
- let(:environment) { create(:environment, project: merge_request.project, name: "foo-#{project.default_branch}") }
- let(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) }
-
- let(:full_urls) do
- [
- File.join(environment.external_url, '/bar/branches'),
- File.join(environment.external_url, '/README')
- ]
- end
-
- it 'populates CI_MERGE_REQUEST_CHANGED_PAGES_* variables' do
- expect(subject).to include(
- {
- key: 'CI_MERGE_REQUEST_CHANGED_PAGE_PATHS',
- value: '/bar/branches,/README',
- public: true,
- masked: false
- },
- {
- key: 'CI_MERGE_REQUEST_CHANGED_PAGE_URLS',
- value: full_urls.join(','),
- public: true,
- masked: false
- }
- )
- end
-
- context 'with a deployment environment and no merge request' do
- let(:environment) { create(:environment, project: project, name: "foo-#{project.default_branch}") }
- let(:build) { create(:ci_build, pipeline: pipeline, environment: environment.name) }
-
- it 'does not append CHANGED_PAGES variables' do
- ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
-
- expect(ci_variables).to be_empty
- end
- end
-
- context 'with no deployment environment and a present merge request' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project, target_project: project) }
- let(:build) { create(:ci_build, pipeline: merge_request.all_pipelines.take) }
-
- it 'does not append CHANGED_PAGES variables' do
- ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
-
- expect(ci_variables).to be_empty
- end
- end
-
- context 'with no deployment environment and no merge request' do
- it 'does not append CHANGED_PAGES variables' do
- ci_variables = subject.select { |var| var[:key] =~ /MERGE_REQUEST_CHANGED_PAGES/ }
-
- expect(ci_variables).to be_empty
- end
- end
- end
-
- context 'with the :modified_path_ci_variables feature flag disabled' do
- before do
- stub_feature_flags(modified_path_ci_variables: false)
- end
-
- it 'does not set CI_MERGE_REQUEST_CHANGED_PAGES_* variables' do
- expect(subject.find { |var| var[:key] == 'CI_MERGE_REQUEST_CHANGED_PAGE_PATHS' }).to be_nil
- expect(subject.find { |var| var[:key] == 'CI_MERGE_REQUEST_CHANGED_PAGE_URLS' }).to be_nil
- end
- end
- end
-
context 'when build has user' do
let(:user_variables) do
[
@@ -4652,4 +4614,24 @@ RSpec.describe Ci::Build do
it { is_expected.to be_nil }
end
end
+
+ describe '#run_on_status_commit' do
+ it 'runs provided hook after status commit' do
+ action = spy('action')
+
+ build.run_on_status_commit { action.perform! }
+ build.success!
+
+ expect(action).to have_received(:perform!).once
+ end
+
+ it 'does not run hooks when status has not changed' do
+ action = spy('action')
+
+ build.run_on_status_commit { action.perform! }
+ build.save!
+
+ expect(action).not_to have_received(:perform!)
+ end
+ end
end
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index fefe5e3bfca..cdbdd2b1d20 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -502,22 +502,12 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
describe '#persist_data!' do
let(:build) { create(:ci_build, :running) }
- subject { build_trace_chunk.persist_data! }
-
- shared_examples_for 'Atomic operation' do
- context 'when the other process is persisting' do
- let(:lease_key) { "trace_write:#{build_trace_chunk.build.id}:chunks:#{build_trace_chunk.chunk_index}" }
-
- before do
- stub_exclusive_lease_taken(lease_key)
- end
-
- it 'raise an error' do
- expect { subject }.to raise_error('Failed to obtain a lock')
- end
- end
+ before do
+ build_trace_chunk.save!
end
+ subject { build_trace_chunk.persist_data! }
+
context 'when data_store is redis' do
let(:data_store) { :redis }
@@ -548,8 +538,6 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.reload.checksum).to eq '3398914352'
end
-
- it_behaves_like 'Atomic operation'
end
context 'when data size has not reached CHUNK_SIZE' do
@@ -575,6 +563,62 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.fog?).to be_truthy
end
end
+
+ context 'when the chunk has been modified by a different worker' do
+ it 'reloads the chunk before migration' do
+ described_class
+ .find(build_trace_chunk.id)
+ .update!(data_store: :fog)
+
+ build_trace_chunk.persist_data!
+ end
+
+ it 'verifies the operation using optimistic locking' do
+ allow(build_trace_chunk)
+ .to receive(:save!)
+ .and_raise(ActiveRecord::StaleObjectError)
+
+ expect { build_trace_chunk.persist_data! }
+ .to raise_error(described_class::FailedToPersistDataError)
+ end
+
+ it 'does not allow flushing unpersisted chunk' do
+ build_trace_chunk.checksum = '12345'
+
+ expect { build_trace_chunk.persist_data! }
+ .to raise_error(described_class::FailedToPersistDataError,
+ /Modifed build trace chunk detected/)
+ end
+ end
+
+ context 'when the chunk is being locked by a different worker' do
+ let(:metrics) { spy('metrics') }
+
+ it 'does not raise an exception' do
+ lock_chunk do
+ expect { build_trace_chunk.persist_data! }.not_to raise_error
+ end
+ end
+
+ it 'increments stalled chunk trace metric' do
+ allow(build_trace_chunk)
+ .to receive(:metrics)
+ .and_return(metrics)
+
+ lock_chunk { build_trace_chunk.persist_data! }
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :stalled)
+ .once
+ end
+
+ def lock_chunk(&block)
+ "trace_write:#{build.id}:chunks:#{chunk_index}".then do |key|
+ build_trace_chunk.in_lock(key, &block)
+ end
+ end
+ end
end
end
@@ -609,8 +653,6 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
-
- it_behaves_like 'Atomic operation'
end
context 'when data size has not reached CHUNK_SIZE' do
@@ -670,8 +712,6 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to eq(data)
end
-
- it_behaves_like 'Atomic operation'
end
context 'when data size has not reached CHUNK_SIZE' do
@@ -779,4 +819,62 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
it_behaves_like 'deletes all build_trace_chunk and data in redis'
end
end
+
+ describe 'comparable build trace chunks' do
+ describe '#<=>' do
+ context 'when chunks are associated with different builds' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, chunk_index: 1) }
+
+ it 'returns nil' do
+ expect(first <=> second).to be_nil
+ end
+ end
+
+ context 'when there are two chunks with different indexes' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, build: build, chunk_index: 0) }
+
+ it 'indicates that the first one is greater than the second' do
+ expect(first <=> second).to eq 1
+ end
+ end
+
+ context 'when there are two chunks with the same index within the same build' do
+ let(:chunk) { create(:ci_build_trace_chunk) }
+
+ it 'indicates that these are equal' do
+ expect(chunk <=> chunk).to be_zero # rubocop:disable Lint/UselessComparison
+ end
+ end
+ end
+
+ describe '#==' do
+ context 'when chunks have the same index' do
+ let(:chunk) { create(:ci_build_trace_chunk) }
+
+ it 'indicates that the chunks are equal' do
+ expect(chunk).to eq chunk
+ end
+ end
+
+ context 'when chunks have different indexes' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, build: build, chunk_index: 0) }
+
+ it 'indicates that the chunks are not equal' do
+ expect(first).not_to eq second
+ end
+ end
+
+ context 'when chunks are associated with different builds' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, chunk_index: 1) }
+
+ it 'indicates that the chunks are not equal' do
+ expect(first).not_to eq second
+ end
+ end
+ end
+ end
end
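
The '#<=>' and '#==' examples above describe a Comparable contract: chunks order by chunk_index within one build and are incomparable across builds. A standalone sketch of that contract; the Chunk class here is illustrative only, not Ci::BuildTraceChunk.

    class Chunk
      include Comparable

      attr_reader :build_id, :chunk_index

      def initialize(build_id, chunk_index)
        @build_id = build_id
        @chunk_index = chunk_index
      end

      # Incomparable across builds; ordered by index within a build.
      def <=>(other)
        return unless build_id == other.build_id

        chunk_index <=> other.chunk_index
      end
    end

    Chunk.new(1, 1) <=> Chunk.new(2, 1) # => nil (different builds)
    Chunk.new(1, 1) <=> Chunk.new(1, 0) # => 1
    Chunk.new(1, 0) == Chunk.new(1, 0)  # => true, via Comparable
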
diff --git a/spec/models/ci/deleted_object_spec.rb b/spec/models/ci/deleted_object_spec.rb
new file mode 100644
index 00000000000..cb8911d5027
--- /dev/null
+++ b/spec/models/ci/deleted_object_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DeletedObject, :aggregate_failures do
+ describe 'attributes' do
+ it { is_expected.to respond_to(:file) }
+ it { is_expected.to respond_to(:store_dir) }
+ it { is_expected.to respond_to(:file_store) }
+ it { is_expected.to respond_to(:pick_up_at) }
+ end
+
+ describe '.bulk_import' do
+ context 'with data' do
+ let!(:artifact) { create(:ci_job_artifact, :archive, :expired) }
+
+ it 'imports data' do
+ expect { described_class.bulk_import(Ci::JobArtifact.all) }.to change { described_class.count }.by(1)
+
+ deleted_artifact = described_class.first
+
+ expect(deleted_artifact.file_store).to eq(artifact.file_store)
+ expect(deleted_artifact.store_dir).to eq(artifact.file.store_dir.to_s)
+ expect(deleted_artifact.file_identifier).to eq(artifact.file_identifier)
+ expect(deleted_artifact.pick_up_at).to eq(artifact.expire_at)
+ end
+ end
+
+ context 'with invalid data' do
+ let!(:artifact) { create(:ci_job_artifact) }
+
+ it 'does not import anything' do
+ expect(artifact.file_identifier).to be_nil
+
+ expect { described_class.bulk_import([artifact]) }
+ .not_to change { described_class.count }
+ end
+ end
+
+ context 'with empty data' do
+ it 'returns successfully' do
+ expect { described_class.bulk_import([]) }
+ .not_to change { described_class.count }
+ end
+ end
+ end
+
+ context 'ActiveRecord scopes' do
+ let_it_be(:not_ready) { create(:ci_deleted_object, pick_up_at: 1.day.from_now) }
+ let_it_be(:ready) { create(:ci_deleted_object, pick_up_at: 1.day.ago) }
+
+ describe '.ready_for_destruction' do
+ it 'returns objects that are ready' do
+ result = described_class.ready_for_destruction(2)
+
+ expect(result).to contain_exactly(ready)
+ end
+ end
+
+ describe '.lock_for_destruction' do
+ subject(:result) { described_class.lock_for_destruction(10) }
+
+ it 'returns objects that are ready' do
+ expect(result).to contain_exactly(ready)
+ end
+
+ it 'selects only the id' do
+ expect(result.select_values).to contain_exactly(:id)
+ end
+
+ it 'orders by pick_up_at' do
+ expect(result.order_values.map(&:to_sql))
+ .to contain_exactly("\"ci_deleted_objects\".\"pick_up_at\" ASC")
+ end
+
+ it 'applies limit' do
+ expect(result.limit_value).to eq(10)
+ end
+
+ it 'uses select for update' do
+ expect(result.locked?).to eq('FOR UPDATE SKIP LOCKED')
+ end
+ end
+ end
+
+ describe '#delete_file_from_storage' do
+ let(:object) { build(:ci_deleted_object) }
+
+ it 'does not raise errors' do
+ expect(object.file).to receive(:remove!).and_raise(StandardError)
+
+ expect(object.delete_file_from_storage).to be_falsy
+ end
+ end
+end
diff --git a/spec/models/ci/freeze_period_status_spec.rb b/spec/models/ci/freeze_period_status_spec.rb
index 831895cb528..f51381f7a5f 100644
--- a/spec/models/ci/freeze_period_status_spec.rb
+++ b/spec/models/ci/freeze_period_status_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::FreezePeriodStatus do
shared_examples 'within freeze period' do |time|
it 'is frozen' do
- Timecop.freeze(time) do
+ travel_to(time) do
expect(subject).to be_truthy
end
end
@@ -19,7 +19,7 @@ RSpec.describe Ci::FreezePeriodStatus do
shared_examples 'outside freeze period' do |time|
it 'is not frozen' do
- Timecop.freeze(time) do
+ travel_to(time) do
expect(subject).to be_falsy
end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 779839df670..26851c93ac3 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Ci::JobArtifact do
it_behaves_like 'having unique enum values'
- it_behaves_like 'UpdateProjectStatistics' do
+ it_behaves_like 'UpdateProjectStatistics', :with_counter_attribute do
let_it_be(:job, reload: true) { create(:ci_build) }
subject { build(:ci_job_artifact, :archive, job: job, size: 107464) }
@@ -44,7 +44,7 @@ RSpec.describe Ci::JobArtifact do
let!(:metrics_report) { create(:ci_job_artifact, :junit) }
let!(:codequality_report) { create(:ci_job_artifact, :codequality) }
- it { is_expected.to eq([metrics_report, codequality_report]) }
+ it { is_expected.to match_array([metrics_report, codequality_report]) }
end
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 949d5f7bd04..cec3b544e50 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe Ci::PipelineSchedule do
subject { described_class.runnable_schedules }
let!(:pipeline_schedule) do
- Timecop.freeze(1.day.ago) do
+ travel_to(1.day.ago) do
create(:ci_pipeline_schedule, :hourly)
end
end
@@ -118,7 +118,7 @@ RSpec.describe Ci::PipelineSchedule do
let(:pipeline_schedule) { create(:ci_pipeline_schedule, :every_minute) }
it "updates next_run_at to the sidekiq worker's execution time" do
- Timecop.freeze(Time.zone.parse("2019-06-01 12:18:00+0000")) do
+ travel_to(Time.zone.parse("2019-06-01 12:18:00+0000")) do
expect(pipeline_schedule.next_run_at).to eq(cron_worker_next_run_at)
end
end
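
These hunks replace Timecop.freeze with Rails' built-in travel_to helper. Outside GitLab's spec setup the helper comes from ActiveSupport::Testing::TimeHelpers; a minimal sketch:

    require 'rspec/autorun'
    require 'active_support/all'
    require 'active_support/testing/time_helpers'

    RSpec.describe 'travel_to example' do
      include ActiveSupport::Testing::TimeHelpers

      it 'stubs the current time for the duration of the block' do
        travel_to(Time.utc(2019, 6, 1, 12, 18)) do
          expect(Time.now.to_i).to eq(Time.utc(2019, 6, 1, 12, 18).to_i)
        end
      end
    end
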
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 228a1e8f7a2..88d08f1ec45 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -2436,7 +2436,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '#retry_failed' do
- let(:latest_status) { pipeline.statuses.latest.pluck(:status) }
+ let(:latest_status) { pipeline.latest_statuses.pluck(:status) }
before do
stub_not_protect_default_branch
@@ -2988,6 +2988,57 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#builds_in_self_and_descendants' do
+ subject(:builds) { pipeline.builds_in_self_and_descendants }
+
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let!(:build) { create(:ci_build, pipeline: pipeline) }
+
+ context 'when pipeline is standalone' do
+ it 'returns the list of builds' do
+ expect(builds).to contain_exactly(build)
+ end
+ end
+
+ context 'when pipeline is parent of another pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let!(:child_build) { create(:ci_build, pipeline: child_pipeline) }
+
+ it 'returns the list of builds' do
+ expect(builds).to contain_exactly(build, child_build)
+ end
+ end
+
+ context 'when pipeline is parent of another parent pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let!(:child_build) { create(:ci_build, pipeline: child_pipeline) }
+ let(:child_of_child_pipeline) { create(:ci_pipeline, child_of: child_pipeline) }
+ let!(:child_of_child_build) { create(:ci_build, pipeline: child_of_child_pipeline) }
+
+ it 'returns the list of builds' do
+ expect(builds).to contain_exactly(build, child_build, child_of_child_build)
+ end
+ end
+ end
+
+ describe '#build_with_artifacts_in_self_and_descendants' do
+ let!(:build) { create(:ci_build, name: 'test', pipeline: pipeline) }
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let!(:child_build) { create(:ci_build, :artifacts, name: 'test', pipeline: child_pipeline) }
+
+ it 'returns the build with a given name, having artifacts' do
+ expect(pipeline.build_with_artifacts_in_self_and_descendants('test')).to eq(child_build)
+ end
+
+ context 'when same job name is present in both parent and child pipeline' do
+ let!(:build) { create(:ci_build, :artifacts, name: 'test', pipeline: pipeline) }
+
+ it 'returns the job in the parent pipeline' do
+ expect(pipeline.build_with_artifacts_in_self_and_descendants('test')).to eq(build)
+ end
+ end
+ end
+
describe '#find_job_with_archive_artifacts' do
let!(:old_job) { create(:ci_build, name: 'rspec', retried: true, pipeline: pipeline) }
let!(:job_without_artifacts) { create(:ci_build, name: 'rspec', pipeline: pipeline) }
@@ -3628,6 +3679,16 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(builds).to include(rspec, jest)
expect(builds).not_to include(karma)
end
+
+ it 'returns only latest builds' do
+ obsolete = create(:ci_build, name: "jest", coverage: 10.12, pipeline: pipeline, retried: true)
+ retried = create(:ci_build, name: "jest", coverage: 20.11, pipeline: pipeline)
+
+ builds = pipeline.builds_with_coverage
+
+ expect(builds).to include(retried)
+ expect(builds).not_to include(obsolete)
+ end
end
describe '#base_and_ancestors' do
diff --git a/spec/models/ci_platform_metric_spec.rb b/spec/models/ci_platform_metric_spec.rb
index 0b00875df43..f73db713791 100644
--- a/spec/models/ci_platform_metric_spec.rb
+++ b/spec/models/ci_platform_metric_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe CiPlatformMetric do
let(:tomorrow) { today + 1.day }
it 'inserts platform target counts for that day' do
- Timecop.freeze(today) do
+ travel_to(today) do
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'ECS')
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'ECS')
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'FARGATE')
@@ -53,7 +53,7 @@ RSpec.describe CiPlatformMetric do
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'FARGATE')
described_class.insert_auto_devops_platform_targets!
end
- Timecop.freeze(tomorrow) do
+ travel_to(tomorrow) do
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'FARGATE')
described_class.insert_auto_devops_platform_targets!
end
@@ -69,7 +69,7 @@ RSpec.describe CiPlatformMetric do
let(:today) { Time.zone.local(1982, 4, 24) }
it 'ignores those values' do
- Timecop.freeze(today) do
+ travel_to(today) do
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'ECS')
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'FOO')
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'BAR')
diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb
index 99de0d1ddf7..148bb3cf870 100644
--- a/spec/models/clusters/agent_spec.rb
+++ b/spec/models/clusters/agent_spec.rb
@@ -13,6 +13,20 @@ RSpec.describe Clusters::Agent do
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
describe 'scopes' do
+ describe '.ordered_by_name' do
+ let(:names) { %w(agent-d agent-b agent-a agent-c) }
+
+ subject { described_class.ordered_by_name }
+
+ before do
+ names.each do |name|
+ create(:cluster_agent, name: name)
+ end
+ end
+
+ it { expect(subject.map(&:name)).to eq(names.sort) }
+ end
+
describe '.with_name' do
let!(:matching_name) { create(:cluster_agent, name: 'matching-name') }
let!(:other_name) { create(:cluster_agent, name: 'other-name') }
diff --git a/spec/models/clusters/applications/fluentd_spec.rb b/spec/models/clusters/applications/fluentd_spec.rb
index be7b4a87947..3bda3e99ec1 100644
--- a/spec/models/clusters/applications/fluentd_spec.rb
+++ b/spec/models/clusters/applications/fluentd_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Clusters::Applications::Fluentd do
it 'is initialized with fluentd arguments' do
expect(subject.name).to eq('fluentd')
- expect(subject.chart).to eq('stable/fluentd')
+ expect(subject.chart).to eq('fluentd/fluentd')
expect(subject.version).to eq('2.4.0')
expect(subject).to be_rbac
end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index e029283326f..196d57aff7b 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -135,7 +135,7 @@ RSpec.describe Clusters::Applications::Ingress do
it 'is initialized with ingress arguments' do
expect(subject.name).to eq('ingress')
- expect(subject.chart).to eq('stable/nginx-ingress')
+ expect(subject.chart).to eq('ingress/nginx-ingress')
expect(subject.version).to eq('1.40.2')
expect(subject).to be_rbac
expect(subject.files).to eq(ingress.files)
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 82971596176..b450900bee6 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -152,7 +152,7 @@ RSpec.describe Clusters::Applications::Prometheus do
it 'is initialized with 3 arguments' do
expect(subject.name).to eq('prometheus')
- expect(subject.chart).to eq('stable/prometheus')
+ expect(subject.chart).to eq('prometheus/prometheus')
expect(subject.version).to eq('10.4.1')
expect(subject).to be_rbac
expect(subject.files).to eq(prometheus.files)
@@ -240,7 +240,7 @@ RSpec.describe Clusters::Applications::Prometheus do
it 'is initialized with 3 arguments' do
expect(patch_command.name).to eq('prometheus')
- expect(patch_command.chart).to eq('stable/prometheus')
+ expect(patch_command.chart).to eq('prometheus/prometheus')
expect(patch_command.version).to eq('10.4.1')
expect(patch_command.files).to eq(prometheus.files)
end
diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb
index fbabfd25b2f..ef916c73e0b 100644
--- a/spec/models/clusters/applications/runner_spec.rb
+++ b/spec/models/clusters/applications/runner_spec.rb
@@ -69,8 +69,8 @@ RSpec.describe Clusters::Applications::Runner do
expect(values).to include('privileged: true')
expect(values).to include('image: ubuntu:16.04')
expect(values).to include('resources')
- expect(values).to match(/runnerToken: '?#{Regexp.escape(ci_runner.token)}/)
- expect(values).to match(/gitlabUrl: '?#{Regexp.escape(Gitlab::Routing.url_helpers.root_url)}/)
+ expect(values).to match(/runnerToken: ['"]?#{Regexp.escape(ci_runner.token)}/)
+ expect(values).to match(/gitlabUrl: ['"]?#{Regexp.escape(Gitlab::Routing.url_helpers.root_url)}/)
end
context 'without a runner' do
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 024539e34ec..dd9b96f39ad 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -47,6 +47,7 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to delegate_method(:external_hostname).to(:application_ingress).with_prefix }
it { is_expected.to respond_to :project }
+ it { is_expected.to be_namespace_per_environment }
describe 'applications have inverse_of: :cluster option' do
let(:cluster) { create(:cluster) }
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index c6a2b67a008..e877ba2ac96 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -412,7 +412,7 @@ RSpec.describe Clusters::Platforms::Kubernetes do
end
let(:namespace) { "project-namespace" }
- let(:environment) { instance_double(Environment, deployment_namespace: namespace) }
+ let(:environment) { instance_double(Environment, deployment_namespace: namespace, project: service.cluster.project) }
subject { service.calculate_reactive_cache_for(environment) }
@@ -428,6 +428,7 @@ RSpec.describe Clusters::Platforms::Kubernetes do
before do
stub_kubeclient_pods(namespace)
stub_kubeclient_deployments(namespace)
+ stub_kubeclient_ingresses(namespace)
end
it { is_expected.to include(pods: [expected_pod_cached_data]) }
@@ -437,6 +438,7 @@ RSpec.describe Clusters::Platforms::Kubernetes do
before do
stub_kubeclient_pods(namespace, status: 500)
stub_kubeclient_deployments(namespace, status: 500)
+ stub_kubeclient_ingresses(namespace, status: 500)
end
it { expect { subject }.to raise_error(Kubeclient::HttpError) }
@@ -446,6 +448,7 @@ RSpec.describe Clusters::Platforms::Kubernetes do
before do
stub_kubeclient_pods(namespace, status: 404)
stub_kubeclient_deployments(namespace, status: 404)
+ stub_kubeclient_ingresses(namespace, status: 404)
end
it { is_expected.to include(pods: []) }
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 6e23f95af03..877188097fd 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -493,47 +493,104 @@ RSpec.describe CommitStatus do
end
end
- describe '#group_name' do
- let(:commit_status) do
- build(:commit_status, pipeline: pipeline, stage: 'test')
- end
-
- subject { commit_status.group_name }
+ context 'with the one_dimensional_matrix feature flag disabled' do
+ describe '#group_name' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: false)
+ end
- tests = {
- 'rspec:windows' => 'rspec:windows',
- 'rspec:windows 0' => 'rspec:windows 0',
- 'rspec:windows 0 test' => 'rspec:windows 0 test',
- 'rspec:windows 0 1' => 'rspec:windows',
- 'rspec:windows 0 1 name' => 'rspec:windows name',
- 'rspec:windows 0/1' => 'rspec:windows',
- 'rspec:windows 0/1 name' => 'rspec:windows name',
- 'rspec:windows 0:1' => 'rspec:windows',
- 'rspec:windows 0:1 name' => 'rspec:windows name',
- 'rspec:windows 10000 20000' => 'rspec:windows',
- 'rspec:windows 0 : / 1' => 'rspec:windows',
- 'rspec:windows 0 : / 1 name' => 'rspec:windows name',
- '0 1 name ruby' => 'name ruby',
- '0 :/ 1 name ruby' => 'name ruby',
- 'rspec: [aws]' => 'rspec: [aws]',
- 'rspec: [aws] 0/1' => 'rspec: [aws]',
- 'rspec: [aws, max memory]' => 'rspec',
- 'rspec:linux: [aws, max memory, data]' => 'rspec:linux',
- 'rspec: [inception: [something, other thing], value]' => 'rspec',
- 'rspec:windows 0/1: [name, other]' => 'rspec:windows',
- 'rspec:windows: [name, other] 0/1' => 'rspec:windows',
- 'rspec:windows: [name, 0/1] 0/1' => 'rspec:windows',
- 'rspec:windows: [0/1, name]' => 'rspec:windows',
- 'rspec:windows: [, ]' => 'rspec:windows',
- 'rspec:windows: [name]' => 'rspec:windows: [name]',
- 'rspec:windows: [name,other]' => 'rspec:windows: [name,other]'
- }
+ let(:commit_status) do
+ build(:commit_status, pipeline: pipeline, stage: 'test')
+ end
+
+ subject { commit_status.group_name }
+
+ tests = {
+ 'rspec:windows' => 'rspec:windows',
+ 'rspec:windows 0' => 'rspec:windows 0',
+ 'rspec:windows 0 test' => 'rspec:windows 0 test',
+ 'rspec:windows 0 1' => 'rspec:windows',
+ 'rspec:windows 0 1 name' => 'rspec:windows name',
+ 'rspec:windows 0/1' => 'rspec:windows',
+ 'rspec:windows 0/1 name' => 'rspec:windows name',
+ 'rspec:windows 0:1' => 'rspec:windows',
+ 'rspec:windows 0:1 name' => 'rspec:windows name',
+ 'rspec:windows 10000 20000' => 'rspec:windows',
+ 'rspec:windows 0 : / 1' => 'rspec:windows',
+ 'rspec:windows 0 : / 1 name' => 'rspec:windows name',
+ '0 1 name ruby' => 'name ruby',
+ '0 :/ 1 name ruby' => 'name ruby',
+ 'rspec: [aws]' => 'rspec: [aws]',
+ 'rspec: [aws] 0/1' => 'rspec: [aws]',
+ 'rspec: [aws, max memory]' => 'rspec',
+ 'rspec:linux: [aws, max memory, data]' => 'rspec:linux',
+ 'rspec: [inception: [something, other thing], value]' => 'rspec',
+ 'rspec:windows 0/1: [name, other]' => 'rspec:windows',
+ 'rspec:windows: [name, other] 0/1' => 'rspec:windows',
+ 'rspec:windows: [name, 0/1] 0/1' => 'rspec:windows',
+ 'rspec:windows: [0/1, name]' => 'rspec:windows',
+ 'rspec:windows: [, ]' => 'rspec:windows',
+ 'rspec:windows: [name]' => 'rspec:windows: [name]',
+ 'rspec:windows: [name,other]' => 'rspec:windows: [name,other]'
+ }
+
+ tests.each do |name, group_name|
+ it "'#{name}' puts in '#{group_name}'" do
+ commit_status.name = name
+
+ is_expected.to eq(group_name)
+ end
+ end
+ end
+ end
- tests.each do |name, group_name|
- it "'#{name}' puts in '#{group_name}'" do
- commit_status.name = name
+ context 'with one_dimensional_matrix feature flag enabled' do
+ describe '#group_name' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: true)
+ end
- is_expected.to eq(group_name)
+ let(:commit_status) do
+ build(:commit_status, pipeline: pipeline, stage: 'test')
+ end
+
+ subject { commit_status.group_name }
+
+ tests = {
+ 'rspec:windows' => 'rspec:windows',
+ 'rspec:windows 0' => 'rspec:windows 0',
+ 'rspec:windows 0 test' => 'rspec:windows 0 test',
+ 'rspec:windows 0 1' => 'rspec:windows',
+ 'rspec:windows 0 1 name' => 'rspec:windows name',
+ 'rspec:windows 0/1' => 'rspec:windows',
+ 'rspec:windows 0/1 name' => 'rspec:windows name',
+ 'rspec:windows 0:1' => 'rspec:windows',
+ 'rspec:windows 0:1 name' => 'rspec:windows name',
+ 'rspec:windows 10000 20000' => 'rspec:windows',
+ 'rspec:windows 0 : / 1' => 'rspec:windows',
+ 'rspec:windows 0 : / 1 name' => 'rspec:windows name',
+ '0 1 name ruby' => 'name ruby',
+ '0 :/ 1 name ruby' => 'name ruby',
+ 'rspec: [aws]' => 'rspec',
+ 'rspec: [aws] 0/1' => 'rspec',
+ 'rspec: [aws, max memory]' => 'rspec',
+ 'rspec:linux: [aws, max memory, data]' => 'rspec:linux',
+ 'rspec: [inception: [something, other thing], value]' => 'rspec',
+ 'rspec:windows 0/1: [name, other]' => 'rspec:windows',
+ 'rspec:windows: [name, other] 0/1' => 'rspec:windows',
+ 'rspec:windows: [name, 0/1] 0/1' => 'rspec:windows',
+ 'rspec:windows: [0/1, name]' => 'rspec:windows',
+ 'rspec:windows: [, ]' => 'rspec:windows',
+ 'rspec:windows: [name]' => 'rspec:windows',
+ 'rspec:windows: [name,other]' => 'rspec:windows'
+ }
+
+ tests.each do |name, group_name|
+ it "'#{name}' puts in '#{group_name}'" do
+ commit_status.name = name
+
+ is_expected.to eq(group_name)
+ end
end
end
end
diff --git a/spec/models/concerns/avatarable_spec.rb b/spec/models/concerns/avatarable_spec.rb
index 8a8eeea39dc..5bed2cb9a14 100644
--- a/spec/models/concerns/avatarable_spec.rb
+++ b/spec/models/concerns/avatarable_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Avatarable do
it 'validates the file size' do
expect(validator).to receive(:validate_each).and_call_original
- project.update(avatar: 'uploads/avatar.png')
+ project.update!(avatar: 'uploads/avatar.png')
end
end
@@ -29,7 +29,7 @@ RSpec.describe Avatarable do
it 'skips validation of file size' do
expect(validator).not_to receive(:validate_each)
- project.update(name: 'Hello world')
+ project.update!(name: 'Hello world')
end
end
end
diff --git a/spec/models/concerns/bulk_insertable_associations_spec.rb b/spec/models/concerns/bulk_insertable_associations_spec.rb
index 5a40639e493..25b13c8233d 100644
--- a/spec/models/concerns/bulk_insertable_associations_spec.rb
+++ b/spec/models/concerns/bulk_insertable_associations_spec.rb
@@ -187,7 +187,7 @@ RSpec.describe BulkInsertableAssociations do
it 'invalidates the parent and returns false' do
build_invalid_items(parent: parent)
- expect(save_with_bulk_inserts(parent, bangify: false)).to be false
+ expect(BulkInsertableAssociations.with_bulk_insert { parent.save }).to be false # rubocop:disable Rails/SaveBang
expect(parent.errors[:bulk_foos].size).to eq(1)
expect(BulkFoo.count).to eq(0)
@@ -211,8 +211,8 @@ RSpec.describe BulkInsertableAssociations do
private
- def save_with_bulk_inserts(entity, bangify: true)
- BulkInsertableAssociations.with_bulk_insert { bangify ? entity.save! : entity.save }
+ def save_with_bulk_inserts(entity)
+ BulkInsertableAssociations.with_bulk_insert { entity.save! }
end
def build_items(parent:, relation: :bulk_foos, count: 10)
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 440943171c3..37e2f5fb8d4 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -285,7 +285,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
it_behaves_like 'a class with cached markdown fields'
describe '#attribute_invalidated?' do
- let(:thing) { klass.create(description: markdown, description_html: html, cached_markdown_version: cache_version) }
+ let(:thing) { klass.create!(description: markdown, description_html: html, cached_markdown_version: cache_version) }
it 'returns true when cached_markdown_version is different' do
thing.cached_markdown_version += 1
@@ -318,7 +318,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
let(:thing) do
# This forces the record to have outdated HTML. We can't use `create` because the `before_create` hook
# would re-render the HTML to the latest version
- klass.create.tap do |thing|
+ klass.create!.tap do |thing|
thing.update_columns(description: markdown, description_html: old_html, cached_markdown_version: old_version)
end
end
@@ -326,7 +326,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
it 'correctly updates cached HTML even if refresh_markdown_cache is called before updating the attribute' do
thing.refresh_markdown_cache
- thing.update(description: updated_markdown)
+ thing.update!(description: updated_markdown)
expect(thing.description_html).to eq(updated_html)
end
diff --git a/spec/models/concerns/case_sensitivity_spec.rb b/spec/models/concerns/case_sensitivity_spec.rb
index 521b47c63fd..5fb7cdb4443 100644
--- a/spec/models/concerns/case_sensitivity_spec.rb
+++ b/spec/models/concerns/case_sensitivity_spec.rb
@@ -12,8 +12,8 @@ RSpec.describe CaseSensitivity do
end
end
- let!(:model_1) { model.create(path: 'mOdEl-1', name: 'mOdEl 1') }
- let!(:model_2) { model.create(path: 'mOdEl-2', name: 'mOdEl 2') }
+ let!(:model_1) { model.create!(path: 'mOdEl-1', name: 'mOdEl 1') }
+ let!(:model_2) { model.create!(path: 'mOdEl-2', name: 'mOdEl 2') }
it 'finds a single instance by a single attribute regardless of case' do
expect(model.iwhere(path: 'MODEL-1')).to contain_exactly(model_1)
diff --git a/spec/models/concerns/checksummable_spec.rb b/spec/models/concerns/checksummable_spec.rb
index b469b2e5c18..3a0387333e8 100644
--- a/spec/models/concerns/checksummable_spec.rb
+++ b/spec/models/concerns/checksummable_spec.rb
@@ -3,17 +3,21 @@
require 'spec_helper'
RSpec.describe Checksummable do
- describe ".hexdigest" do
- let(:fake_class) do
- Class.new do
- include Checksummable
- end
+ subject do
+ Class.new { include Checksummable }
+ end
+
+ describe ".crc32" do
+ it 'returns the CRC32 of data' do
+ expect(subject.crc32('abcd')).to eq 3984772369
end
+ end
+ describe ".hexdigest" do
it 'returns the SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest
- expect(fake_class.hexdigest(__FILE__)).to eq(expected)
+ expect(subject.hexdigest(__FILE__)).to eq(expected)
end
end
end
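
The expected values in this spec line up with Ruby's standard library: Zlib.crc32('abcd') is 3984772369 and Digest::SHA256 produces the file digest. A hedged sketch of a Checksummable-like module follows; GitLab's actual implementation is not part of this diff.

    require 'zlib'
    require 'digest'

    module SimpleChecksummable
      # CRC32 of an in-memory string.
      def crc32(data)
        Zlib.crc32(data)
      end

      # SHA256 of a file on disk, as a hex string.
      def hexdigest(path)
        Digest::SHA256.file(path).hexdigest
      end
    end

    klass = Class.new { extend SimpleChecksummable }
    klass.crc32('abcd')       # => 3984772369
    klass.hexdigest(__FILE__) # SHA256 of this file
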
diff --git a/spec/models/concerns/counter_attribute_spec.rb b/spec/models/concerns/counter_attribute_spec.rb
index f23865a5dbb..a19fbae3cfb 100644
--- a/spec/models/concerns/counter_attribute_spec.rb
+++ b/spec/models/concerns/counter_attribute_spec.rb
@@ -12,6 +12,36 @@ RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_
let(:model) { CounterAttributeModel.find(project_statistics.id) }
end
+ describe 'after_flush callbacks' do
+ let(:attribute) { model.class.counter_attributes.first }
+
+ subject { model.flush_increments_to_database!(attribute) }
+
+ it 'has registered callbacks' do # defined in :counter_attribute RSpec tag
+ expect(model.class.after_flush_callbacks.size).to eq(1)
+ end
+
+ context 'when there are increments to flush' do
+ before do
+ model.delayed_increment_counter(attribute, 10)
+ end
+
+ it 'executes the callbacks' do
+ subject
+
+ expect(model.flushed).to be_truthy
+ end
+ end
+
+ context 'when there are no increments to flush' do
+ it 'does not execute the callbacks' do
+ subject
+
+ expect(model.flushed).to be_nil
+ end
+ end
+ end
+
describe '.steal_increments' do
let(:increment_key) { 'counters:Model:123:attribute' }
let(:flushed_key) { 'counter:Model:123:attribute:flushed' }
diff --git a/spec/models/concerns/each_batch_spec.rb b/spec/models/concerns/each_batch_spec.rb
index 3c93c8a7a79..8b70753633c 100644
--- a/spec/models/concerns/each_batch_spec.rb
+++ b/spec/models/concerns/each_batch_spec.rb
@@ -18,13 +18,13 @@ RSpec.describe EachBatch do
shared_examples 'each_batch handling' do |kwargs|
it 'yields an ActiveRecord::Relation when a block is given' do
- model.each_batch(kwargs) do |relation|
+ model.each_batch(**kwargs) do |relation|
expect(relation).to be_a_kind_of(ActiveRecord::Relation)
end
end
it 'yields a batch index as the second argument' do
- model.each_batch(kwargs) do |_, index|
+ model.each_batch(**kwargs) do |_, index|
expect(index).to eq(1)
end
end
@@ -32,7 +32,7 @@ RSpec.describe EachBatch do
it 'accepts a custom batch size' do
amount = 0
- model.each_batch(kwargs.merge({ of: 1 })) { amount += 1 }
+ model.each_batch(**kwargs.merge({ of: 1 })) { amount += 1 }
expect(amount).to eq(5)
end
diff --git a/spec/models/concerns/featurable_spec.rb b/spec/models/concerns/featurable_spec.rb
index 31186b5fc77..99acc563950 100644
--- a/spec/models/concerns/featurable_spec.rb
+++ b/spec/models/concerns/featurable_spec.rb
@@ -180,6 +180,6 @@ RSpec.describe Featurable do
def update_all_project_features(project, features, value)
project_feature_attributes = features.map { |f| ["#{f}_access_level", value] }.to_h
- project.project_feature.update(project_feature_attributes)
+ project.project_feature.update!(project_feature_attributes)
end
end
diff --git a/spec/models/concerns/has_user_type_spec.rb b/spec/models/concerns/has_user_type_spec.rb
index 9496bb57b8b..c87bbf24c30 100644
--- a/spec/models/concerns/has_user_type_spec.rb
+++ b/spec/models/concerns/has_user_type_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe User do
specify 'types consistency checks', :aggregate_failures do
expect(described_class::USER_TYPES.keys)
- .to match_array(%w[human ghost alert_bot project_bot support_bot service_user visual_review_bot migration_bot])
+ .to match_array(%w[human ghost alert_bot project_bot support_bot service_user security_bot visual_review_bot migration_bot])
expect(described_class::USER_TYPES).to include(*described_class::BOT_USER_TYPES)
expect(described_class::USER_TYPES).to include(*described_class::NON_INTERNAL_USER_TYPES)
expect(described_class::USER_TYPES).to include(*described_class::INTERNAL_USER_TYPES)
@@ -31,6 +31,12 @@ RSpec.describe User do
end
end
+ describe '.without_bots' do
+ it 'includes everyone except bots' do
+ expect(described_class.without_bots).to match_array(everyone - bots)
+ end
+ end
+
describe '.bots_without_project_bot' do
it 'includes all bots except project_bot' do
expect(described_class.bots_without_project_bot).to match_array(bots - [project_bot])
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 44561e2e55a..ff5b270cf33 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Issuable do
it 'returns nil when author is nil' do
issue.author_id = nil
- issue.save(validate: false)
+ issue.save!(validate: false)
expect(issue.author_name).to eq nil
end
@@ -361,13 +361,13 @@ RSpec.describe Issuable do
end
it 'returns true when a subcription exists and subscribed is true' do
- issue.subscriptions.create(user: user, project: project, subscribed: true)
+ issue.subscriptions.create!(user: user, project: project, subscribed: true)
expect(issue.subscribed?(user, project)).to be_truthy
end
it 'returns false when a subcription exists and subscribed is false' do
- issue.subscriptions.create(user: user, project: project, subscribed: false)
+ issue.subscriptions.create!(user: user, project: project, subscribed: false)
expect(issue.subscribed?(user, project)).to be_falsey
end
@@ -383,13 +383,13 @@ RSpec.describe Issuable do
end
it 'returns true when a subscription exists and subscribed is true' do
- issue.subscriptions.create(user: user, project: project, subscribed: true)
+ issue.subscriptions.create!(user: user, project: project, subscribed: true)
expect(issue.subscribed?(user, project)).to be_truthy
end
it 'returns false when a subscription exists and subscribed is false' do
- issue.subscriptions.create(user: user, project: project, subscribed: false)
+ issue.subscriptions.create!(user: user, project: project, subscribed: false)
expect(issue.subscribed?(user, project)).to be_falsey
end
@@ -437,7 +437,7 @@ RSpec.describe Issuable do
let(:labels) { create_list(:label, 2) }
before do
- issue.update(labels: [labels[1]])
+ issue.update!(labels: [labels[1]])
expect(Gitlab::HookData::IssuableBuilder)
.to receive(:new).with(issue).and_return(builder)
end
@@ -456,7 +456,7 @@ RSpec.describe Issuable do
context 'total_time_spent is updated' do
before do
issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.current)
- issue.save
+ issue.save!
expect(Gitlab::HookData::IssuableBuilder)
.to receive(:new).with(issue).and_return(builder)
end
@@ -497,8 +497,8 @@ RSpec.describe Issuable do
let(:user2) { create(:user) }
before do
- merge_request.update(assignees: [user])
- merge_request.update(assignees: [user, user2])
+ merge_request.update!(assignees: [user])
+ merge_request.update!(assignees: [user, user2])
expect(Gitlab::HookData::IssuableBuilder)
.to receive(:new).with(merge_request).and_return(builder)
end
@@ -554,7 +554,7 @@ RSpec.describe Issuable do
before do
label_link = issue.label_links.find_by(label_id: second_label.id)
label_link.label_id = nil
- label_link.save(validate: false)
+ label_link.save!(validate: false)
end
it 'filters out bad labels' do
@@ -824,7 +824,7 @@ RSpec.describe Issuable do
where(:issuable_type, :supports_time_tracking) do
:issue | true
- :incident | false
+ :incident | true
:merge_request | true
end
@@ -926,58 +926,4 @@ RSpec.describe Issuable do
end
end
end
-
- describe '#update_severity' do
- let(:severity) { 'low' }
-
- subject(:update_severity) { issuable.update_severity(severity) }
-
- context 'when issuable not an incident' do
- %i(issue merge_request).each do |issuable_type|
- let(:issuable) { build_stubbed(issuable_type) }
-
- it { is_expected.to be_nil }
-
- it 'does not set severity' do
- expect { subject }.not_to change(IssuableSeverity, :count)
- end
- end
- end
-
- context 'when issuable is an incident' do
- let!(:issuable) { create(:incident) }
-
- context 'when issuable does not have issuable severity yet' do
- it 'creates new record' do
- expect { update_severity }.to change { IssuableSeverity.where(issue: issuable).count }.to(1)
- end
-
- it 'sets severity to specified value' do
- expect { update_severity }.to change { issuable.severity }.to('low')
- end
- end
-
- context 'when issuable has an issuable severity' do
- let!(:issuable_severity) { create(:issuable_severity, issue: issuable, severity: 'medium') }
-
- it 'does not create new record' do
- expect { update_severity }.not_to change(IssuableSeverity, :count)
- end
-
- it 'updates existing issuable severity' do
- expect { update_severity }.to change { issuable_severity.severity }.to(severity)
- end
- end
-
- context 'when severity value is unsupported' do
- let(:severity) { 'unsupported-severity' }
-
- it 'sets the severity to default value' do
- update_severity
-
- expect(issuable.issuable_severity.severity).to eq(IssuableSeverity::DEFAULT)
- end
- end
- end
- end
end
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index 758b5aa2ce4..516c0fd75bc 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -177,7 +177,7 @@ RSpec.describe Issue, "Mentionable" do
expect(SystemNoteService).not_to receive(:cross_reference)
- issue.update(description: 'New description')
+ issue.update!(description: 'New description')
issue.create_new_cross_references!
end
@@ -186,7 +186,7 @@ RSpec.describe Issue, "Mentionable" do
expect(SystemNoteService).to receive(:cross_reference).with(issues[1], any_args)
- issue.update(description: issues[1].to_reference)
+ issue.update!(description: issues[1].to_reference)
issue.create_new_cross_references!
end
@@ -196,7 +196,7 @@ RSpec.describe Issue, "Mentionable" do
expect(SystemNoteService).to receive(:cross_reference).with(issues[1], any_args)
- note.update(note: issues[1].to_reference)
+ note.update!(note: issues[1].to_reference)
note.create_new_cross_references!
end
end
diff --git a/spec/models/concerns/milestoneable_spec.rb b/spec/models/concerns/milestoneable_spec.rb
index f5b82e42ad4..c37582cb65d 100644
--- a/spec/models/concerns/milestoneable_spec.rb
+++ b/spec/models/concerns/milestoneable_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Milestoneable do
it 'returns true with a milestone from the parent of the issue project group' do
parent = create(:group)
- group.update(parent: parent)
+ group.update!(parent: parent)
milestone = create(:milestone, group: parent)
expect(build_milestoneable(milestone.id).milestone_available?).to be_truthy
diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb
index 58cd054efd5..3b8fc465421 100644
--- a/spec/models/concerns/milestoneish_spec.rb
+++ b/spec/models/concerns/milestoneish_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Milestone, 'Milestoneish' do
with_them do
before do
- project.update(visibility_level: project_visibility_levels[visibility])
+ project.update!(visibility_level: project_visibility_levels[visibility])
end
it 'returns the proper participants' do
@@ -139,7 +139,7 @@ RSpec.describe Milestone, 'Milestoneish' do
with_them do
before do
- project.update(visibility_level: project_visibility_levels[visibility])
+ project.update!(visibility_level: project_visibility_levels[visibility])
end
it 'returns the proper participants' do
@@ -171,7 +171,7 @@ RSpec.describe Milestone, 'Milestoneish' do
context 'when project is private' do
before do
- project.update(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
it 'does not return any merge request for a non member' do
@@ -195,7 +195,7 @@ RSpec.describe Milestone, 'Milestoneish' do
context 'when merge requests are available to project members' do
before do
- project.project_feature.update(merge_requests_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE)
end
it 'does not return any merge request for a non member' do
diff --git a/spec/models/concerns/reactive_caching_spec.rb b/spec/models/concerns/reactive_caching_spec.rb
index b12ad82920f..7e031bdd263 100644
--- a/spec/models/concerns/reactive_caching_spec.rb
+++ b/spec/models/concerns/reactive_caching_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe ReactiveCaching, :use_clean_rails_memory_store_caching do
self.reactive_cache_lifetime = 5.minutes
self.reactive_cache_refresh_interval = 15.seconds
+ self.reactive_cache_work_type = :no_dependency
attr_reader :id
@@ -372,4 +373,14 @@ RSpec.describe ReactiveCaching, :use_clean_rails_memory_store_caching do
it { expect(subject.reactive_cache_hard_limit).to be_nil }
it { expect(subject.reactive_cache_worker_finder).to respond_to(:call) }
end
+
+ describe 'classes including this concern' do
+ it 'sets reactive_cache_work_type' do
+ classes = ObjectSpace.each_object(Class).select do |klass|
+ klass < described_class && klass.name
+ end
+
+ expect(classes).to all(have_attributes(reactive_cache_work_type: be_in(described_class::WORK_TYPE.keys)))
+ end
+ end
end
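
The new example enumerates every named class in ObjectSpace that includes the concern and asserts each one declares a valid reactive_cache_work_type. The same pattern works for auditing any module-wide invariant; a standalone sketch with a hypothetical module:

# Hypothetical module and classes, used only to show the ObjectSpace audit pattern.
module Trackable; end

class Invoice; include Trackable; end
class Receipt; include Trackable; end

includers = ObjectSpace.each_object(Class).select do |klass|
  klass.include?(Trackable) && klass.name # klass.name skips anonymous classes
end

puts includers.map(&:name).sort.inspect # => ["Invoice", "Receipt"]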
diff --git a/spec/models/concerns/resolvable_discussion_spec.rb b/spec/models/concerns/resolvable_discussion_spec.rb
index c91ddfee944..c0e5ddc23b1 100644
--- a/spec/models/concerns/resolvable_discussion_spec.rb
+++ b/spec/models/concerns/resolvable_discussion_spec.rb
@@ -553,13 +553,13 @@ RSpec.describe Discussion, ResolvableDiscussion do
let(:time) { Time.current.utc }
before do
- Timecop.freeze(time - 1.second) do
+ travel_to(time - 1.second) do
first_note.resolve!(current_user)
end
- Timecop.freeze(time) do
+ travel_to(time) do
third_note.resolve!(current_user)
end
- Timecop.freeze(time + 1.second) do
+ travel_to(time + 1.second) do
second_note.resolve!(current_user)
end
end
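
Timecop.freeze gives way to travel_to from ActiveSupport::Testing::TimeHelpers, which GitLab's spec setup already includes; inside the block Time.now, Date.today and Time.zone.now (when a zone is set) report the stubbed time, and the real clock is restored when the block returns. A minimal sketch outside RSpec:

require 'active_support/time'
require 'active_support/testing/time_helpers'

include ActiveSupport::Testing::TimeHelpers

travel_to(Time.utc(2020, 10, 21)) do
  puts Time.now.utc # => 2020-10-21 00:00:00 UTC while inside the block
end
# back on the real clock here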
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 15d754861b2..e4cf68663ef 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Group, 'Routable' do
end
it 'updates route record on path change' do
- group.update(path: 'wow', name: 'much')
+ group.update!(path: 'wow', name: 'much')
expect(group.route.path).to eq('wow')
expect(group.route.name).to eq('much')
diff --git a/spec/models/concerns/schedulable_spec.rb b/spec/models/concerns/schedulable_spec.rb
index 875c2d80e55..62acd12e267 100644
--- a/spec/models/concerns/schedulable_spec.rb
+++ b/spec/models/concerns/schedulable_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Schedulable do
context 'for a pipeline_schedule' do
# let! is used to reset the next_run_at value before each spec
let(:object) do
- Timecop.freeze(1.day.ago) do
+ travel_to(1.day.ago) do
create(:ci_pipeline_schedule, :hourly)
end
end
diff --git a/spec/models/concerns/subscribable_spec.rb b/spec/models/concerns/subscribable_spec.rb
index 2a43e748e58..3e52ca5cf63 100644
--- a/spec/models/concerns/subscribable_spec.rb
+++ b/spec/models/concerns/subscribable_spec.rb
@@ -20,13 +20,13 @@ RSpec.describe Subscribable, 'Subscribable' do
end
it 'returns true when a subscription exists and subscribed is true' do
- resource.subscriptions.create(user: user_1, subscribed: true)
+ resource.subscriptions.create!(user: user_1, subscribed: true)
expect(resource.subscribed?(user_1)).to be_truthy
end
it 'returns false when a subscription exists and subscribed is false' do
- resource.subscriptions.create(user: user_1, subscribed: false)
+ resource.subscriptions.create!(user: user_1, subscribed: false)
expect(resource.subscribed?(user_1)).to be_falsey
end
@@ -38,13 +38,13 @@ RSpec.describe Subscribable, 'Subscribable' do
end
it 'returns true when a subscription exists and subscribed is true' do
- resource.subscriptions.create(user: user_1, project: project, subscribed: true)
+ resource.subscriptions.create!(user: user_1, project: project, subscribed: true)
expect(resource.subscribed?(user_1, project)).to be_truthy
end
it 'returns false when a subscription exists and subscribed is false' do
- resource.subscriptions.create(user: user_1, project: project, subscribed: false)
+ resource.subscriptions.create!(user: user_1, project: project, subscribed: false)
expect(resource.subscribed?(user_1, project)).to be_falsey
end
@@ -58,9 +58,9 @@ RSpec.describe Subscribable, 'Subscribable' do
it 'returns the subscribed users' do
user_2 = create(:user)
- resource.subscriptions.create(user: user_1, subscribed: true)
- resource.subscriptions.create(user: user_2, project: project, subscribed: true)
- resource.subscriptions.create(user: create(:user), project: project, subscribed: false)
+ resource.subscriptions.create!(user: user_1, subscribed: true)
+ resource.subscriptions.create!(user: user_2, project: project, subscribed: true)
+ resource.subscriptions.create!(user: create(:user), project: project, subscribed: false)
expect(resource.subscribers(project)).to contain_exactly(user_1, user_2)
end
@@ -113,7 +113,7 @@ RSpec.describe Subscribable, 'Subscribable' do
describe '#unsubscribe' do
context 'without project' do
it 'unsubscribes the given current user' do
- resource.subscriptions.create(user: user_1, subscribed: true)
+ resource.subscriptions.create!(user: user_1, subscribed: true)
expect(resource.subscribed?(user_1)).to be_truthy
resource.unsubscribe(user_1)
@@ -124,7 +124,7 @@ RSpec.describe Subscribable, 'Subscribable' do
context 'with project' do
it 'unsubscribes the given current user' do
- resource.subscriptions.create(user: user_1, project: project, subscribed: true)
+ resource.subscriptions.create!(user: user_1, project: project, subscribed: true)
expect(resource.subscribed?(user_1, project)).to be_truthy
resource.unsubscribe(user_1, project)
@@ -139,7 +139,7 @@ RSpec.describe Subscribable, 'Subscribable' do
context 'when desired_state is set to true' do
context 'when a user is subscribed to the resource' do
it 'keeps the user subscribed' do
- resource.subscriptions.create(user: user_1, subscribed: true, project: resource_project)
+ resource.subscriptions.create!(user: user_1, subscribed: true, project: resource_project)
resource.set_subscription(user_1, true, resource_project)
@@ -159,7 +159,7 @@ RSpec.describe Subscribable, 'Subscribable' do
context 'when desired_state is set to false' do
context 'when a user is subscribed to the resource' do
it 'unsubscribes the user from the resource' do
- resource.subscriptions.create(user: user_1, subscribed: true, project: resource_project)
+ resource.subscriptions.create!(user: user_1, subscribed: true, project: resource_project)
expect { resource.set_subscription(user_1, false, resource_project) }
.to change { resource.subscribed?(user_1, resource_project) }
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index e0e764fc63c..90e94b5dca9 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -137,7 +137,7 @@ RSpec.describe PersonalAccessToken, 'TokenAuthenticatable' do
subject { PersonalAccessToken.find_by_token(token_value) }
it 'finds the token' do
- personal_access_token.save
+ personal_access_token.save!
expect(subject).to eq(personal_access_token)
end
diff --git a/spec/models/container_expiration_policy_spec.rb b/spec/models/container_expiration_policy_spec.rb
index 588685b04bf..1d9dbe8a867 100644
--- a/spec/models/container_expiration_policy_spec.rb
+++ b/spec/models/container_expiration_policy_spec.rb
@@ -104,6 +104,18 @@ RSpec.describe ContainerExpirationPolicy, type: :model do
end
end
+ describe '.executable' do
+ subject { described_class.executable }
+
+ let_it_be(:policy1) { create(:container_expiration_policy, :runnable) }
+ let_it_be(:container_repository1) { create(:container_repository, project: policy1.project) }
+ let_it_be(:policy2) { create(:container_expiration_policy, :runnable) }
+ let_it_be(:container_repository2) { create(:container_repository, project: policy2.project) }
+ let_it_be(:policy3) { create(:container_expiration_policy, :runnable) }
+
+ it { is_expected.to contain_exactly(policy1, policy2) }
+ end
+
describe '#disable!' do
let_it_be(:container_expiration_policy) { create(:container_expiration_policy) }
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 953f92d103b..2a7aaed5204 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -184,6 +184,33 @@ RSpec.describe ContainerRepository do
end
end
+ describe '#start_expiration_policy!' do
+ subject { repository.start_expiration_policy! }
+
+ it 'sets the expiration policy started at to now' do
+ Timecop.freeze do
+ expect { subject }
+ .to change { repository.expiration_policy_started_at }.from(nil).to(Time.zone.now)
+ end
+ end
+ end
+
+ describe '#reset_expiration_policy_started_at!' do
+ subject { repository.reset_expiration_policy_started_at! }
+
+ before do
+ repository.start_expiration_policy!
+ end
+
+ it 'resets the expiration policy started at' do
+ started_at = repository.expiration_policy_started_at
+
+ expect(started_at).not_to be_nil
+ expect { subject }
+ .to change { repository.expiration_policy_started_at }.from(started_at).to(nil)
+ end
+ end
+
describe '.build_from_path' do
let(:registry_path) do
ContainerRegistry::Path.new(project.full_path + '/some/image')
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index 9fd3751be13..60a3e3fc0e2 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -353,4 +353,29 @@ RSpec.describe DeployToken do
end
end
end
+
+ describe '#accessible_projects' do
+ subject { deploy_token.accessible_projects }
+
+ context 'when a deploy token is associated to a project' do
+ let_it_be(:deploy_token) { create(:deploy_token, :project) }
+
+ it 'returns only projects directly associated with the token' do
+ expect(deploy_token).to receive(:projects)
+
+ subject
+ end
+ end
+
+ context 'when a deploy token is associated to a group' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:deploy_token) { create(:deploy_token, :group, groups: [group]) }
+
+ it 'returns all projects from the group' do
+ expect(group).to receive(:all_projects)
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 1c7b11257ce..3e855584c38 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -98,16 +98,36 @@ RSpec.describe Deployment do
context 'when deployment runs' do
let(:deployment) { create(:deployment) }
- before do
- deployment.run!
- end
-
it 'starts running' do
freeze_time do
+ deployment.run!
+
expect(deployment).to be_running
expect(deployment.finished_at).to be_nil
end
end
+
+ it 'executes Deployments::ExecuteHooksWorker asynchronously' do
+ expect(Deployments::ExecuteHooksWorker)
+ .to receive(:perform_async).with(deployment.id)
+
+ deployment.run!
+ end
+
+ it 'does not execute Deployments::ExecuteHooksWorker when feature is disabled' do
+ stub_feature_flags(ci_send_deployment_hook_when_start: false)
+ expect(Deployments::ExecuteHooksWorker)
+ .not_to receive(:perform_async).with(deployment.id)
+
+ deployment.run!
+ end
+
+ it 'executes Deployments::DropOlderDeploymentsWorker asynchronously' do
+ expect(Deployments::DropOlderDeploymentsWorker)
+ .to receive(:perform_async).once.with(deployment.id)
+
+ deployment.run!
+ end
end
context 'when deployment succeeded' do
@@ -122,15 +142,15 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::SuccessWorker asynchronously' do
- expect(Deployments::SuccessWorker)
+ it 'executes Deployments::UpdateEnvironmentWorker asynchronously' do
+ expect(Deployments::UpdateEnvironmentWorker)
.to receive(:perform_async).with(deployment.id)
deployment.succeed!
end
- it 'executes Deployments::FinishedWorker asynchronously' do
- expect(Deployments::FinishedWorker)
+ it 'executes Deployments::ExecuteHooksWorker asynchronously' do
+ expect(Deployments::ExecuteHooksWorker)
.to receive(:perform_async).with(deployment.id)
deployment.succeed!
@@ -149,12 +169,19 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::FinishedWorker asynchronously' do
- expect(Deployments::FinishedWorker)
+ it 'executes Deployments::LinkMergeRequestWorker asynchronously' do
+ expect(Deployments::LinkMergeRequestWorker)
.to receive(:perform_async).with(deployment.id)
deployment.drop!
end
+
+ it 'executes Deployments::ExecuteHooksWorker asynchronously' do
+ expect(Deployments::ExecuteHooksWorker)
+ .to receive(:perform_async).with(deployment.id)
+
+ deployment.drop!
+ end
end
context 'when deployment was canceled' do
@@ -169,12 +196,19 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::FinishedWorker asynchronously' do
- expect(Deployments::FinishedWorker)
+ it 'executes Deployments::LinkMergeRequestWorker asynchronously' do
+ expect(Deployments::LinkMergeRequestWorker)
.to receive(:perform_async).with(deployment.id)
deployment.cancel!
end
+
+ it 'executes Deployments::ExecuteHooksWorker asynchronously' do
+ expect(Deployments::ExecuteHooksWorker)
+ .to receive(:perform_async).with(deployment.id)
+
+ deployment.cancel!
+ end
end
end
@@ -580,9 +614,10 @@ RSpec.describe Deployment do
expect(deploy).to be_success
end
- it 'schedules SuccessWorker and FinishedWorker when finishing a deploy' do
- expect(Deployments::SuccessWorker).to receive(:perform_async)
- expect(Deployments::FinishedWorker).to receive(:perform_async)
+ it 'schedules workers when finishing a deploy' do
+ expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
+ expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
+ expect(Deployments::ExecuteHooksWorker).to receive(:perform_async)
deploy.update_status('success')
end
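
The new ci_send_deployment_hook_when_start example relies on GitLab's stub_feature_flags spec helper, which takes a hash of flag names mapped to true/false (or to specific actors, to enable the flag only for them). A hedged sketch of the shape such an example takes in these specs:

# Sketch only; mirrors the hook example above and assumes GitLab's spec_helper.
require 'spec_helper'

RSpec.describe Deployment do
  it 'does not enqueue the hooks worker when the flag is off' do
    stub_feature_flags(ci_send_deployment_hook_when_start: false)

    deployment = create(:deployment)

    expect(Deployments::ExecuteHooksWorker).not_to receive(:perform_async)

    deployment.run!
  end
end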
diff --git a/spec/models/design_management/design_at_version_spec.rb b/spec/models/design_management/design_at_version_spec.rb
index 3c1ff45c53f..220de80a52a 100644
--- a/spec/models/design_management/design_at_version_spec.rb
+++ b/spec/models/design_management/design_at_version_spec.rb
@@ -274,29 +274,6 @@ RSpec.describe DesignManagement::DesignAtVersion do
build(:design_at_version, design: design, version: version).id
end
- describe '.instantiate' do
- context 'when attrs are valid' do
- subject do
- described_class.instantiate(design: design, version: version)
- end
-
- it { is_expected.to be_a(described_class).and(be_valid) }
- end
-
- context 'when attrs are invalid' do
- subject do
- described_class.instantiate(
- design: create(:design),
- version: create(:design_version)
- )
- end
-
- it 'raises a validation error' do
- expect { subject }.to raise_error(ActiveModel::ValidationError)
- end
- end
- end
-
describe '.lazy_find' do
let!(:version_a) do
create(:design_version, designs: create_list(:design, 3, issue: issue))
diff --git a/spec/models/design_management/design_collection_spec.rb b/spec/models/design_management/design_collection_spec.rb
index 8575cc80b5b..bc8330c7dd3 100644
--- a/spec/models/design_management/design_collection_spec.rb
+++ b/spec/models/design_management/design_collection_spec.rb
@@ -101,6 +101,18 @@ RSpec.describe DesignManagement::DesignCollection do
end
end
+ describe "#empty?" do
+ it "is true when the design collection has no designs" do
+ expect(collection).to be_empty
+ end
+
+ it "is false when the design collection has designs" do
+ create(:design, issue: issue)
+
+ expect(collection).not_to be_empty
+ end
+ end
+
describe "#versions" do
it "includes versions for all designs" do
version_1 = create(:design_version)
diff --git a/spec/models/design_management/design_spec.rb b/spec/models/design_management/design_spec.rb
index d4adc0d42d0..2ce9f00a056 100644
--- a/spec/models/design_management/design_spec.rb
+++ b/spec/models/design_management/design_spec.rb
@@ -206,6 +206,15 @@ RSpec.describe DesignManagement::Design do
end
end
+ describe ".build_full_path" do
+ it "builds the full path for a design" do
+ design = build(:design, issue: issue, filename: "hello.jpg")
+ expected_path = "#{DesignManagement.designs_directory}/issue-#{design.issue.iid}/hello.jpg"
+
+ expect(described_class.build_full_path(issue, design)).to eq(expected_path)
+ end
+ end
+
describe '#visible_in?' do
let_it_be(:issue) { create(:issue, project: issue.project) }
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 433ede97b82..06d3e9da286 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -312,18 +312,25 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
describe '#update_merge_request_metrics?' do
{
+ 'gprd' => false,
+ 'prod' => true,
+ 'prod-test' => false,
+ 'PROD' => true,
'production' => true,
+ 'production-test' => false,
+ 'PRODUCTION' => true,
'production/eu' => true,
+ 'PRODUCTION/EU' => true,
'production/www.gitlab.com' => true,
'productioneu' => false,
- 'Production' => false,
- 'Production/eu' => false,
+ 'Production' => true,
+ 'Production/eu' => true,
'test-production' => false
}.each do |name, expected_value|
it "returns #{expected_value} for #{name}" do
env = create(:environment, name: name)
- expect(env.update_merge_request_metrics?).to eq(expected_value)
+ expect(env.update_merge_request_metrics?).to eq(expected_value), "Expected the name '#{name}' to result in #{expected_value}, but it didn't."
end
end
end
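
The reworked table makes update_merge_request_metrics? case-insensitive while still requiring the name to be exactly production/prod or a production/... prefix: 'PROD' and 'Production/eu' now count, while 'prod-test' and 'production-test' still do not. A predicate consistent with those expectations (the real implementation may be written differently):

# Hedged sketch: one regex that reproduces the truth table above.
def production_like?(name)
  name.match?(%r{\A(production|prod)(/|\z)}i)
end

production_like?('PROD')            # => true
production_like?('Production/eu')   # => true
production_like?('prod-test')       # => false
production_like?('test-production') # => false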
diff --git a/spec/models/environment_status_spec.rb b/spec/models/environment_status_spec.rb
index a6954fb5d56..09a73a4cdcb 100644
--- a/spec/models/environment_status_spec.rb
+++ b/spec/models/environment_status_spec.rb
@@ -66,18 +66,6 @@ RSpec.describe EnvironmentStatus do
end
end
- describe '#changed_paths' do
- subject { environment_status.changed_urls }
-
- it { is_expected.to contain_exactly("#{environment.external_url}/ruby-style-guide.html", "#{environment.external_url}/html/page.html") }
- end
-
- describe '#changed_urls' do
- subject { environment_status.changed_paths }
-
- it { is_expected.to contain_exactly('ruby-style-guide.html', 'html/page.html') }
- end
-
describe '.for_merge_request' do
let(:admin) { create(:admin) }
let!(:pipeline) { create(:ci_pipeline, sha: sha, merge_requests_as_head_pipeline: [merge_request]) }
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index bafcb7a3741..47492715c11 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -918,6 +918,56 @@ RSpec.describe Event do
expect(destroyed).to eq('deleted')
expect(archived).to eq('archived')
end
+
+ it 'handles correct push_action' do
+ project = create(:project)
+ user = create(:user)
+ project.add_developer(user)
+ push_event = create_push_event(project, user)
+
+ expect(push_event.push_action?).to be true
+ expect(push_event.action_name).to eq('pushed to')
+ end
+
+ context 'handles correct base actions' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:trait, :action_name) do
+ :created | 'created'
+ :updated | 'opened'
+ :closed | 'closed'
+ :reopened | 'opened'
+ :commented | 'commented on'
+ :merged | 'accepted'
+ :joined | 'joined'
+ :left | 'left'
+ :destroyed | 'destroyed'
+ :expired | 'removed due to membership expiration from'
+ :approved | 'approved'
+ end
+
+ with_them do
+ it 'with correct name and method' do
+ event = build(:event, trait)
+
+ expect(event.action_name).to eq(action_name)
+ end
+ end
+ end
+
+ context 'for created_project_action?' do
+ it 'returns created for created event' do
+ action = build(:project_created_event)
+
+ expect(action.action_name).to eq('created')
+ end
+
+ it 'returns imported for imported event' do
+ action = build(:project_imported_event)
+
+ expect(action.action_name).to eq('imported')
+ end
+ end
end
def create_push_event(project, user)
diff --git a/spec/models/group_import_state_spec.rb b/spec/models/group_import_state_spec.rb
index 4404ef64966..469b5c96ac9 100644
--- a/spec/models/group_import_state_spec.rb
+++ b/spec/models/group_import_state_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe GroupImportState do
describe 'validations' do
let_it_be(:group) { create(:group) }
+ it { is_expected.to belong_to(:user).required }
it { is_expected.to validate_presence_of(:group) }
it { is_expected.to validate_presence_of(:status) }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 15972f66fd6..cc29e20710a 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -222,6 +222,50 @@ RSpec.describe Group do
end
end
end
+
+ describe '#two_factor_authentication_allowed' do
+ let_it_be(:group) { create(:group) }
+
+ context 'for a parent group' do
+ it 'is valid' do
+ group.require_two_factor_authentication = true
+
+ expect(group).to be_valid
+ end
+ end
+
+ context 'for a child group' do
+ let(:sub_group) { create(:group, parent: group) }
+
+ it 'is valid when parent group allows' do
+ sub_group.require_two_factor_authentication = true
+
+ expect(sub_group).to be_valid
+ end
+
+ it 'is invalid when parent group blocks' do
+ group.namespace_settings.update!(allow_mfa_for_subgroups: false)
+ sub_group.require_two_factor_authentication = true
+
+ expect(sub_group).to be_invalid
+ expect(sub_group.errors[:require_two_factor_authentication]).to include('is forbidden by a top-level group')
+ end
+ end
+ end
+ end
+
+ describe '.without_integration' do
+ let(:another_group) { create(:group) }
+ let(:instance_integration) { build(:jira_service, :instance) }
+
+ before do
+ create(:jira_service, group: group, project: nil)
+ create(:slack_service, group: another_group, project: nil)
+ end
+
+ it 'returns groups without integration' do
+ expect(Group.without_integration(instance_integration)).to contain_exactly(another_group)
+ end
end
describe '.public_or_visible_to_user' do
@@ -1330,229 +1374,156 @@ RSpec.describe Group do
end
end
- describe '#shared_runners_allowed?' do
- using RSpec::Parameterized::TableSyntax
-
- where(:shared_runners_enabled, :allow_descendants_override, :expected_shared_runners_allowed) do
- true | false | true
- true | true | true
- false | false | false
- false | true | true
- end
-
- with_them do
- let!(:group) { create(:group, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override) }
-
- it 'returns the expected result' do
- expect(group.shared_runners_allowed?).to eq(expected_shared_runners_allowed)
- end
- end
+ def subject_and_reload(*models)
+ subject
+ models.map(&:reload)
end
- describe '#parent_allows_shared_runners?' do
- context 'when parent group is present' do
- using RSpec::Parameterized::TableSyntax
-
- where(:shared_runners_enabled, :allow_descendants_override, :expected_shared_runners_allowed) do
- true | false | true
- true | true | true
- false | false | false
- false | true | true
+ describe '#update_shared_runners_setting!' do
+ context 'enabled' do
+ subject { group.update_shared_runners_setting!('enabled') }
+
+ context 'group whose ancestors have shared runners disabled' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
+
+ it 'raises error and does not enable shared Runners' do
+ expect { subject_and_reload(parent, group, project) }
+ .to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Shared runners enabled cannot be enabled because parent group has shared Runners disabled')
+ .and not_change { parent.shared_runners_enabled }
+ .and not_change { group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
+ end
end
- with_them do
- let!(:parent_group) { create(:group, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override) }
- let!(:group) { create(:group, parent: parent_group) }
+ context 'root group with shared runners disabled' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
+ let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
- it 'returns the expected result' do
- expect(group.parent_allows_shared_runners?).to eq(expected_shared_runners_allowed)
+ it 'enables shared Runners only for itself' do
+ expect { subject_and_reload(group, sub_group, project) }
+ .to change { group.shared_runners_enabled }.from(false).to(true)
+ .and not_change { sub_group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
end
end
end
- context 'when parent group is missing' do
- let!(:group) { create(:group) }
-
- it 'returns true' do
- expect(group.parent_allows_shared_runners?).to be_truthy
+ context 'disabled_and_unoverridable' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
+ let_it_be(:sub_group_2) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
+ let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
+
+ subject { group.update_shared_runners_setting!('disabled_and_unoverridable') }
+
+ it 'disables shared Runners for all descendant groups and projects' do
+ expect { subject_and_reload(group, sub_group, sub_group_2, project, project_2) }
+ .to change { group.shared_runners_enabled }.from(true).to(false)
+ .and not_change { group.allow_descendants_override_disabled_shared_runners }
+ .and not_change { sub_group.shared_runners_enabled }
+ .and change { sub_group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
+ .and change { sub_group_2.shared_runners_enabled }.from(true).to(false)
+ .and not_change { sub_group_2.allow_descendants_override_disabled_shared_runners }
+ .and change { project.shared_runners_enabled }.from(true).to(false)
+ .and change { project_2.shared_runners_enabled }.from(true).to(false)
+ end
+
+ context 'with override on self' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+
+ it 'disables it' do
+ expect { subject_and_reload(group) }
+ .to not_change { group.shared_runners_enabled }
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
+ end
end
end
- end
- describe '#parent_enabled_shared_runners?' do
- subject { group.parent_enabled_shared_runners? }
+ context 'disabled_with_override' do
+ subject { group.update_shared_runners_setting!('disabled_with_override') }
- context 'when parent group is present' do
- context 'When shared Runners are disabled' do
- let!(:parent_group) { create(:group, :shared_runners_disabled) }
- let!(:group) { create(:group, parent: parent_group) }
+ context 'top level group' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
+ let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
- it { is_expected.to be_falsy }
+ it 'enables allow descendants to override only for itself' do
+ expect { subject_and_reload(group, sub_group, project) }
+ .to change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and not_change { group.shared_runners_enabled }
+ .and not_change { sub_group.allow_descendants_override_disabled_shared_runners }
+ .and not_change { sub_group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
+ end
end
- context 'When shared Runners are enabled' do
- let!(:parent_group) { create(:group) }
- let!(:group) { create(:group, parent: parent_group) }
+ context 'group whose ancestors have shared Runners disabled but allow override' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
- it { is_expected.to be_truthy }
+ it 'enables allow descendants to override' do
+ expect { subject_and_reload(parent, group, project) }
+ .to not_change { parent.allow_descendants_override_disabled_shared_runners }
+ .and not_change { parent.shared_runners_enabled }
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and not_change { group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
+ end
end
- end
-
- context 'when parent group is missing' do
- let!(:group) { create(:group) }
-
- it { is_expected.to be_truthy }
- end
- end
- describe '#enable_shared_runners!' do
- subject { group.enable_shared_runners! }
+ context 'when parent does not allow' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
- context 'group that its ancestors have shared runners disabled' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
-
- it 'raises error and does not enable shared Runners' do
- expect { subject }
- .to raise_error(described_class::UpdateSharedRunnersError, 'Shared Runners disabled for the parent group')
- .and not_change { parent.reload.shared_runners_enabled }
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ it 'raises error and does not allow descendants to override' do
+ expect { subject_and_reload(parent, group) }
+ .to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
+ .and not_change { parent.allow_descendants_override_disabled_shared_runners }
+ .and not_change { parent.shared_runners_enabled }
+ .and not_change { group.allow_descendants_override_disabled_shared_runners }
+ .and not_change { group.shared_runners_enabled }
+ end
end
- end
- context 'root group with shared runners disabled' do
- let_it_be(:group) { create(:group, :shared_runners_disabled) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
+ context 'top level group that has shared Runners enabled' do
+ let_it_be(:group) { create(:group, shared_runners_enabled: true) }
+ let_it_be(:sub_group) { create(:group, shared_runners_enabled: true, parent: group) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: true, group: sub_group) }
- it 'enables shared Runners only for itself' do
- expect { subject }
- .to change { group.reload.shared_runners_enabled }.from(false).to(true)
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ it 'enables allow descendants to override & disables shared runners everywhere' do
+ expect { subject_and_reload(group, sub_group, project) }
+ .to change { group.shared_runners_enabled }.from(true).to(false)
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and change { sub_group.shared_runners_enabled }.from(true).to(false)
+ .and change { project.shared_runners_enabled }.from(true).to(false)
+ end
end
end
end
- describe '#disable_shared_runners!' do
- let_it_be(:group) { create(:group) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
- let_it_be(:sub_group_2) { create(:group, parent: group) }
- let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
- let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
-
- subject { group.disable_shared_runners! }
-
- it 'disables shared Runners for all descendant groups and projects' do
- expect { subject }
- .to change { group.reload.shared_runners_enabled }.from(true).to(false)
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and change { sub_group_2.reload.shared_runners_enabled }.from(true).to(false)
- .and not_change { sub_group_2.reload.allow_descendants_override_disabled_shared_runners }
- .and change { project.reload.shared_runners_enabled }.from(true).to(false)
- .and change { project_2.reload.shared_runners_enabled }.from(true).to(false)
- end
- end
-
- describe '#allow_descendants_override_disabled_shared_runners!' do
- subject { group.allow_descendants_override_disabled_shared_runners! }
-
- context 'top level group' do
- let_it_be(:group) { create(:group, :shared_runners_disabled) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
-
- it 'enables allow descendants to override only for itself' do
- expect { subject }
- .to change { group.reload.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
- end
- end
-
- context 'group that its ancestors have shared Runners disabled but allows to override' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
-
- it 'enables allow descendants to override' do
- expect { subject }
- .to not_change { parent.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { parent.reload.shared_runners_enabled }
- .and change { group.reload.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
- end
- end
-
- context 'when parent does not allow' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
-
- it 'raises error and does not allow descendants to override' do
- expect { subject }
- .to raise_error(described_class::UpdateSharedRunnersError, 'Group level shared Runners not allowed')
- .and not_change { parent.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { parent.reload.shared_runners_enabled }
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { group.reload.shared_runners_enabled }
- end
- end
-
- context 'top level group that has shared Runners enabled' do
- let_it_be(:group) { create(:group, shared_runners_enabled: true) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
-
- it 'raises error and does not change config' do
- expect { subject }
- .to raise_error(described_class::UpdateSharedRunnersError, 'Shared Runners enabled')
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ describe "#default_branch_name" do
+ context "group.namespace_settings does not have a default branch name" do
+ it "returns nil" do
+ expect(group.default_branch_name).to be_nil
end
end
- end
- describe '#disallow_descendants_override_disabled_shared_runners!' do
- subject { group.disallow_descendants_override_disabled_shared_runners! }
+ context "group.namespace_settings has a default branch name" do
+ let(:example_branch_name) { "example_branch_name" }
- context 'top level group' do
- let_it_be(:group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners ) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: true, group: sub_group) }
-
- it 'disables allow project to override for descendants and disables project shared Runners' do
- expect { subject }
- .to not_change { group.reload.shared_runners_enabled }
- .and change { group.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and change { sub_group.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
- .and change { project.reload.shared_runners_enabled }.from(true).to(false)
+ before do
+ expect(group.namespace_settings)
+ .to receive(:default_branch_name)
+ .and_return(example_branch_name)
end
- end
-
- context 'top level group that has shared Runners enabled' do
- let_it_be(:group) { create(:group, shared_runners_enabled: true) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
- it 'results error and does not change config' do
- expect { subject }
- .to raise_error(described_class::UpdateSharedRunnersError, 'Shared Runners enabled')
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ it "returns the default branch name" do
+ expect(group.default_branch_name).to eq(example_branch_name)
end
end
end
@@ -1600,4 +1571,24 @@ RSpec.describe Group do
end
end
end
+
+ describe '#parent_allows_two_factor_authentication?' do
+ it 'returns true for top-level group' do
+ expect(group.parent_allows_two_factor_authentication?).to eq(true)
+ end
+
+ context 'for subgroup' do
+ let(:subgroup) { create(:group, parent: group) }
+
+ it 'returns true if parent group allows two factor authentication for its descendants' do
+ expect(subgroup.parent_allows_two_factor_authentication?).to eq(true)
+ end
+
+ it 'returns false if parent group disallows two factor authentication for its descendants' do
+ group.namespace_settings.update!(allow_mfa_for_subgroups: false)
+
+ expect(subgroup.parent_allows_two_factor_authentication?).to eq(false)
+ end
+ end
+ end
end
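
The shared-runners examples above now drive a single update_shared_runners_setting! method with one of the string settings 'enabled', 'disabled_with_override' or 'disabled_and_unoverridable', and invalid transitions surface as ActiveRecord::RecordInvalid rather than a custom UpdateSharedRunnersError. A hedged sketch of a caller, with the group lookup as a placeholder:

# Sketch of the consolidated API exercised in the specs above.
group = Group.find_by_full_path('my-group') # placeholder lookup

begin
  group.update_shared_runners_setting!('disabled_and_unoverridable')
rescue ActiveRecord::RecordInvalid => e
  # raised when, for example, the parent group already has shared runners disabled
  puts e.message
end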
diff --git a/spec/models/import_failure_spec.rb b/spec/models/import_failure_spec.rb
index cdef125e890..9fee1b0ae7b 100644
--- a/spec/models/import_failure_spec.rb
+++ b/spec/models/import_failure_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe ImportFailure do
it 'orders hard failures by newest first' do
older_failure = hard_failure.dup
- Timecop.freeze(1.day.before(hard_failure.created_at)) do
+ travel_to(1.day.before(hard_failure.created_at)) do
older_failure.save!
expect(ImportFailure.hard_failures_by_correlation_id(correlation_id)).to eq([hard_failure, older_failure])
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 87ba0f3f7e6..d89b323f525 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -3,26 +3,26 @@
require 'spec_helper'
RSpec.describe Integration do
- let!(:project_1) { create(:project) }
- let!(:project_2) { create(:project) }
- let!(:project_3) { create(:project) }
- let(:instance_integration) { create(:jira_service, :instance) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+ let_it_be(:instance_integration) { create(:jira_service, :instance) }
before do
create(:jira_service, project: project_1, inherit_from_id: instance_integration.id)
create(:jira_service, project: project_2, inherit_from_id: nil)
- create(:slack_service, project: project_1, inherit_from_id: nil)
+ create(:slack_service, project: project_3, inherit_from_id: nil)
end
- describe '#with_custom_integration_for' do
+ describe '.with_custom_integration_for' do
it 'returns projects with custom integrations' do
expect(Project.with_custom_integration_for(instance_integration)).to contain_exactly(project_2)
end
end
- describe '#ids_without_integration' do
- it 'returns projects ids without an integration' do
- expect(Project.ids_without_integration(instance_integration, 100)).to contain_exactly(project_3.id)
+ describe '.without_integration' do
+ it 'returns projects without integration' do
+ expect(Project.without_integration(instance_integration)).to contain_exactly(project_3)
end
end
end
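
The let! → let_it_be switch in integration_spec.rb moves record creation from once per example to once per example group; let_it_be comes from the test-prof gem, which GitLab's spec suite wires up, and is the usual way to cut factory cost when several examples reuse the same records. A hedged sketch of the difference:

# Sketch only; assumes GitLab's spec_helper, which makes let_it_be and FactoryBot available.
require 'spec_helper'

RSpec.describe Integration do
  let!(:per_example_project) { create(:project) }  # created again for every example
  let_it_be(:shared_project) { create(:project) }  # created once for the whole group

  it 'persists the per-example record' do
    expect(per_example_project).to be_persisted
  end

  it 'reuses the shared record across examples' do
    expect(shared_project).to be_persisted
  end
end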
diff --git a/spec/models/issue/metrics_spec.rb b/spec/models/issue/metrics_spec.rb
index 966e4321378..1d3c09a48b7 100644
--- a/spec/models/issue/metrics_spec.rb
+++ b/spec/models/issue/metrics_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Issue::Metrics do
context "milestones" do
it "records the first time an issue is associated with a milestone" do
time = Time.current
- Timecop.freeze(time) { subject.update(milestone: create(:milestone, project: project)) }
+ travel_to(time) { subject.update(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -47,9 +47,9 @@ RSpec.describe Issue::Metrics do
it "does not record the second time an issue is associated with a milestone" do
time = Time.current
- Timecop.freeze(time) { subject.update(milestone: create(:milestone, project: project)) }
- Timecop.freeze(time + 2.hours) { subject.update(milestone: nil) }
- Timecop.freeze(time + 6.hours) { subject.update(milestone: create(:milestone, project: project)) }
+ travel_to(time) { subject.update(milestone: create(:milestone, project: project)) }
+ travel_to(time + 2.hours) { subject.update(milestone: nil) }
+ travel_to(time + 6.hours) { subject.update(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -61,7 +61,7 @@ RSpec.describe Issue::Metrics do
it "records the first time an issue is associated with a list label" do
list_label = create(:list).label
time = Time.current
- Timecop.freeze(time) { subject.update(label_ids: [list_label.id]) }
+ travel_to(time) { subject.update(label_ids: [list_label.id]) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -71,9 +71,9 @@ RSpec.describe Issue::Metrics do
it "does not record the second time an issue is associated with a list label" do
time = Time.current
first_list_label = create(:list).label
- Timecop.freeze(time) { subject.update(label_ids: [first_list_label.id]) }
+ travel_to(time) { subject.update(label_ids: [first_list_label.id]) }
second_list_label = create(:list).label
- Timecop.freeze(time + 5.hours) { subject.update(label_ids: [second_list_label.id]) }
+ travel_to(time + 5.hours) { subject.update(label_ids: [second_list_label.id]) }
metrics = subject.metrics
expect(metrics).to be_present
diff --git a/spec/models/issue_email_participant_spec.rb b/spec/models/issue_email_participant_spec.rb
new file mode 100644
index 00000000000..f19e65e31f3
--- /dev/null
+++ b/spec/models/issue_email_participant_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IssueEmailParticipant do
+ describe "Associations" do
+ it { is_expected.to belong_to(:issue) }
+ end
+
+ describe 'Validations' do
+ subject { build(:issue_email_participant) }
+
+ it { is_expected.to validate_presence_of(:issue) }
+ it { is_expected.to validate_presence_of(:email) }
+ it { is_expected.to validate_uniqueness_of(:email).scoped_to([:issue_id]) }
+
+ it_behaves_like 'an object with RFC3696 compliant email-formated attributes', :email
+ end
+end
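
The new issue_email_participant_spec.rb leans entirely on shoulda-matchers (belong_to, validate_presence_of, validate_uniqueness_of(...).scoped_to), which infer the record under test from the subject. A model shape consistent with those matchers, offered only as a hedged sketch rather than the real class body:

# Hedged sketch of a model the matchers above would pass against; not the actual class.
class IssueEmailParticipant < ApplicationRecord
  belongs_to :issue

  validates :issue, presence: true
  validates :email, presence: true, uniqueness: { scope: :issue_id }
end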
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 283d945157b..16ea2989eda 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -28,10 +28,11 @@ RSpec.describe Issue do
it { is_expected.to have_and_belong_to_many(:prometheus_alert_events) }
it { is_expected.to have_and_belong_to_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:prometheus_alerts) }
+ it { is_expected.to have_many(:issue_email_participants) }
describe 'versions.most_recent' do
it 'returns the most recent version' do
- issue = create(:issue)
+ issue = create(:issue, project: reusable_project)
create_list(:design_version, 2, issue: issue)
last_version = create(:design_version, issue: issue)
@@ -79,19 +80,19 @@ RSpec.describe Issue do
end
end
- subject { create(:issue) }
+ subject { create(:issue, project: reusable_project) }
describe 'callbacks' do
describe '#ensure_metrics' do
it 'creates metrics after saving' do
- issue = create(:issue)
+ issue = create(:issue, project: reusable_project)
expect(issue.metrics).to be_persisted
expect(Issue::Metrics.count).to eq(1)
end
it 'does not create duplicate metrics for an issue' do
- issue = create(:issue)
+ issue = create(:issue, project: reusable_project)
issue.close!
@@ -102,6 +103,14 @@ RSpec.describe Issue do
it 'records current metrics' do
expect_any_instance_of(Issue::Metrics).to receive(:record!)
+ create(:issue, project: reusable_project)
+ end
+ end
+
+ describe '#record_create_action' do
+ it 'records the creation action after saving' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_created_action)
+
create(:issue)
end
end
@@ -111,8 +120,8 @@ RSpec.describe Issue do
subject { described_class.with_alert_management_alerts }
it 'gets only issues with alerts' do
- alert = create(:alert_management_alert, issue: create(:issue))
- issue = create(:issue)
+ alert = create(:alert_management_alert, project: reusable_project, issue: create(:issue, project: reusable_project))
+ issue = create(:issue, project: reusable_project)
expect(subject).to contain_exactly(alert.issue)
expect(subject).not_to include(issue)
@@ -130,10 +139,9 @@ RSpec.describe Issue do
end
describe '.with_issue_type' do
- let_it_be(:project) { create(:project) }
- let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:incident) { create(:incident, project: project) }
- let_it_be(:test_case) { create(:quality_test_case, project: project) }
+ let_it_be(:issue) { create(:issue, project: reusable_project) }
+ let_it_be(:incident) { create(:incident, project: reusable_project) }
+ let_it_be(:test_case) { create(:quality_test_case, project: reusable_project) }
it 'gives issues with the given issue type' do
expect(described_class.with_issue_type('issue'))
@@ -146,6 +154,24 @@ RSpec.describe Issue do
end
end
+ describe '.order_severity' do
+ let_it_be(:issue_high_severity) { create(:issuable_severity, severity: :high).issue }
+ let_it_be(:issue_low_severity) { create(:issuable_severity, severity: :low).issue }
+ let_it_be(:issue_no_severity) { create(:incident) }
+
+ context 'sorting ascending' do
+ subject { described_class.order_severity_asc }
+
+ it { is_expected.to eq([issue_no_severity, issue_low_severity, issue_high_severity]) }
+ end
+
+ context 'sorting descending' do
+ subject { described_class.order_severity_desc }
+
+ it { is_expected.to eq([issue_high_severity, issue_low_severity, issue_no_severity]) }
+ end
+ end
+
describe '#order_by_position_and_priority' do
let(:project) { reusable_project }
let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
@@ -195,7 +221,7 @@ RSpec.describe Issue do
end
describe '#close' do
- subject(:issue) { create(:issue, state: 'opened') }
+ subject(:issue) { create(:issue, project: reusable_project, state: 'opened') }
it 'sets closed_at to Time.current when an issue is closed' do
expect { issue.close }.to change { issue.closed_at }.from(nil)
@@ -210,7 +236,7 @@ RSpec.describe Issue do
end
describe '#reopen' do
- let(:issue) { create(:issue, state: 'closed', closed_at: Time.current, closed_by: user) }
+ let(:issue) { create(:issue, project: reusable_project, state: 'closed', closed_at: Time.current, closed_by: user) }
it 'sets closed_at to nil when an issue is reopened' do
expect { issue.reopen }.to change { issue.closed_at }.to(nil)
@@ -293,7 +319,7 @@ RSpec.describe Issue do
end
describe '#assignee_or_author?' do
- let(:issue) { create(:issue) }
+ let(:issue) { create(:issue, project: reusable_project) }
it 'returns true for a user that is assigned to an issue' do
issue.assignees << user
@@ -313,22 +339,21 @@ RSpec.describe Issue do
end
describe '#related_issues' do
- let(:user) { create(:user) }
- let(:authorized_project) { create(:project) }
- let(:authorized_project2) { create(:project) }
- let(:unauthorized_project) { create(:project) }
+ let_it_be(:authorized_project) { create(:project) }
+ let_it_be(:authorized_project2) { create(:project) }
+ let_it_be(:unauthorized_project) { create(:project) }
- let(:authorized_issue_a) { create(:issue, project: authorized_project) }
- let(:authorized_issue_b) { create(:issue, project: authorized_project) }
- let(:authorized_issue_c) { create(:issue, project: authorized_project2) }
+ let_it_be(:authorized_issue_a) { create(:issue, project: authorized_project) }
+ let_it_be(:authorized_issue_b) { create(:issue, project: authorized_project) }
+ let_it_be(:authorized_issue_c) { create(:issue, project: authorized_project2) }
- let(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
+ let_it_be(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
- let!(:issue_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_b) }
- let!(:issue_link_b) { create(:issue_link, source: authorized_issue_a, target: unauthorized_issue) }
- let!(:issue_link_c) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_c) }
+ let_it_be(:issue_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_b) }
+ let_it_be(:issue_link_b) { create(:issue_link, source: authorized_issue_a, target: unauthorized_issue) }
+ let_it_be(:issue_link_c) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_c) }
- before do
+ before_all do
authorized_project.add_developer(user)
authorized_project2.add_developer(user)
end
@@ -366,17 +391,16 @@ RSpec.describe Issue do
end
context 'user is reporter in project issue belongs to' do
- let(:project) { create(:project) }
- let(:issue) { create(:issue, project: project) }
+ let(:issue) { create(:issue, project: reusable_project) }
- before do
- project.add_reporter(user)
+ before_all do
+ reusable_project.add_reporter(user)
end
it { is_expected.to eq true }
context 'issue not persisted' do
- let(:issue) { build(:issue, project: project) }
+ let(:issue) { build(:issue, project: reusable_project) }
it { is_expected.to eq false }
end
@@ -384,7 +408,7 @@ RSpec.describe Issue do
context 'checking destination project also' do
subject { issue.can_move?(user, to_project) }
- let(:to_project) { create(:project) }
+ let_it_be(:to_project) { create(:project) }
context 'destination project allowed' do
before do
@@ -420,7 +444,7 @@ RSpec.describe Issue do
end
describe '#duplicated?' do
- let(:issue) { create(:issue) }
+ let(:issue) { create(:issue, project: reusable_project) }
subject { issue.duplicated? }
@@ -429,7 +453,7 @@ RSpec.describe Issue do
end
context 'issue already duplicated' do
- let(:duplicated_to_issue) { create(:issue) }
+ let(:duplicated_to_issue) { create(:issue, project: reusable_project) }
let(:issue) { create(:issue, duplicated_to: duplicated_to_issue) }
it { is_expected.to eq true }
@@ -440,13 +464,13 @@ RSpec.describe Issue do
subject { issue.from_service_desk? }
context 'when issue author is support bot' do
- let(:issue) { create(:issue, author: ::User.support_bot) }
+ let(:issue) { create(:issue, project: reusable_project, author: ::User.support_bot) }
it { is_expected.to be_truthy }
end
context 'when issue author is not support bot' do
- let(:issue) { create(:issue) }
+ let(:issue) { create(:issue, project: reusable_project) }
it { is_expected.to be_falsey }
end
@@ -495,7 +519,7 @@ RSpec.describe Issue do
end
describe '#has_related_branch?' do
- let(:issue) { create(:issue, title: "Blue Bell Knoll") }
+ let(:issue) { create(:issue, project: reusable_project, title: "Blue Bell Knoll") }
subject { issue.has_related_branch? }
@@ -528,7 +552,7 @@ RSpec.describe Issue do
end
describe "#to_branch_name" do
- let(:issue) { create(:issue, title: 'testing-issue') }
+ let_it_be(:issue) { create(:issue, project: reusable_project, title: 'testing-issue') }
it 'starts with the issue iid' do
expect(issue.to_branch_name).to match(/\A#{issue.iid}-[A-Za-z\-]+\z/)
@@ -539,12 +563,12 @@ RSpec.describe Issue do
end
it "does not contain the issue title if confidential" do
- issue = create(:issue, title: 'testing-issue', confidential: true)
+ issue = create(:issue, project: reusable_project, title: 'testing-issue', confidential: true)
expect(issue.to_branch_name).to match(/confidential-issue\z/)
end
context 'issue title longer than 100 characters' do
- let(:issue) { create(:issue, iid: 999, title: 'Lorem ipsum dolor sit amet consectetur adipiscing elit Mauris sit amet ipsum id lacus custom fringilla convallis') }
+ let_it_be(:issue) { create(:issue, project: reusable_project, iid: 999, title: 'Lorem ipsum dolor sit amet consectetur adipiscing elit Mauris sit amet ipsum id lacus custom fringilla convallis') }
it "truncates branch name to at most 100 characters" do
expect(issue.to_branch_name.length).to be <= 100
@@ -581,15 +605,14 @@ RSpec.describe Issue do
describe '#participants' do
context 'using a public project' do
- let(:project) { create(:project, :public) }
- let(:issue) { create(:issue, project: project) }
+ let_it_be(:issue) { create(:issue, project: reusable_project) }
let!(:note1) do
- create(:note_on_issue, noteable: issue, project: project, note: 'a')
+ create(:note_on_issue, noteable: issue, project: reusable_project, note: 'a')
end
let!(:note2) do
- create(:note_on_issue, noteable: issue, project: project, note: 'b')
+ create(:note_on_issue, noteable: issue, project: reusable_project, note: 'b')
end
it 'includes the issue author' do
@@ -604,8 +627,8 @@ RSpec.describe Issue do
context 'using a private project' do
it 'does not include mentioned users that do not have access to the project' do
project = create(:project)
- user = create(:user)
issue = create(:issue, project: project)
+ user = create(:user)
create(:note_on_issue,
noteable: issue,
@@ -621,10 +644,9 @@ RSpec.describe Issue do
it 'updates when assignees change' do
user1 = create(:user)
user2 = create(:user)
- project = create(:project)
- issue = create(:issue, assignees: [user1], project: project)
- project.add_developer(user1)
- project.add_developer(user2)
+ issue = create(:issue, assignees: [user1], project: reusable_project)
+ reusable_project.add_developer(user1)
+ reusable_project.add_developer(user2)
expect(user1.assigned_open_issues_count).to eq(1)
expect(user2.assigned_open_issues_count).to eq(0)
@@ -638,9 +660,8 @@ RSpec.describe Issue do
end
describe '#visible_to_user?' do
- let(:project) { build(:project) }
+ let(:project) { reusable_project }
let(:issue) { build(:issue, project: project) }
- let(:user) { create(:user) }
subject { issue.visible_to_user?(user) }
@@ -661,6 +682,10 @@ RSpec.describe Issue do
context 'without a user' do
let(:user) { nil }
+ before do
+ project.project_feature.update_attribute(:issues_access_level, ProjectFeature::PUBLIC)
+ end
+
it 'returns true when the issue is publicly visible' do
expect(issue).to receive(:publicly_visible?).and_return(true)
@@ -995,7 +1020,8 @@ RSpec.describe Issue do
with_them do
it 'checks for spam on issues that can be seen anonymously' do
- project = create(:project, visibility_level: visibility_level)
+ project = reusable_project
+ project.update(visibility_level: visibility_level)
issue = create(:issue, project: project, confidential: confidential, description: 'original description')
issue.assign_attributes(new_attributes)
@@ -1016,8 +1042,8 @@ RSpec.describe Issue do
describe '.public_only' do
it 'only returns public issues' do
- public_issue = create(:issue)
- create(:issue, confidential: true)
+ public_issue = create(:issue, project: reusable_project)
+ create(:issue, project: reusable_project, confidential: true)
expect(described_class.public_only).to eq([public_issue])
end
@@ -1025,15 +1051,15 @@ RSpec.describe Issue do
describe '.confidential_only' do
it 'only returns confidential_only issues' do
- create(:issue)
- confidential_issue = create(:issue, confidential: true)
+ create(:issue, project: reusable_project)
+ confidential_issue = create(:issue, project: reusable_project, confidential: true)
expect(described_class.confidential_only).to eq([confidential_issue])
end
end
describe '.by_project_id_and_iid' do
- let_it_be(:issue_a) { create(:issue) }
+ let_it_be(:issue_a) { create(:issue, project: reusable_project) }
let_it_be(:issue_b) { create(:issue, iid: issue_a.iid) }
let_it_be(:issue_c) { create(:issue, project: issue_a.project) }
let_it_be(:issue_d) { create(:issue, project: issue_a.project) }
@@ -1050,8 +1076,8 @@ RSpec.describe Issue do
describe '.service_desk' do
it 'returns the service desk issue' do
- service_desk_issue = create(:issue, author: ::User.support_bot)
- regular_issue = create(:issue)
+ service_desk_issue = create(:issue, project: reusable_project, author: ::User.support_bot)
+ regular_issue = create(:issue, project: reusable_project)
expect(described_class.service_desk).to include(service_desk_issue)
expect(described_class.service_desk).not_to include(regular_issue)
@@ -1064,7 +1090,7 @@ RSpec.describe Issue do
describe "#labels_hook_attrs" do
let(:label) { create(:label) }
- let(:issue) { create(:labeled_issue, labels: [label]) }
+ let(:issue) { create(:labeled_issue, project: reusable_project, labels: [label]) }
it "returns a list of label hook attributes" do
expect(issue.labels_hook_attrs).to eq([label.hook_attrs])
@@ -1073,7 +1099,7 @@ RSpec.describe Issue do
context "relative positioning" do
it_behaves_like "a class that supports relative positioning" do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { reusable_project }
let(:factory) { :issue }
let(:default_params) { { project: project } }
end
@@ -1083,7 +1109,7 @@ RSpec.describe Issue do
describe "#previous_updated_at" do
let_it_be(:updated_at) { Time.zone.local(2012, 01, 06) }
- let_it_be(:issue) { create(:issue, updated_at: updated_at) }
+ let_it_be(:issue) { create(:issue, project: reusable_project, updated_at: updated_at) }
it 'returns updated_at value if updated_at did not change at all' do
allow(issue).to receive(:previous_changes).and_return({})
@@ -1121,7 +1147,7 @@ RSpec.describe Issue do
end
describe 'current designs' do
- let(:issue) { create(:issue) }
+ let(:issue) { create(:issue, project: reusable_project) }
subject { issue.designs.current }
@@ -1213,4 +1239,12 @@ RSpec.describe Issue do
expect(issue.allows_reviewers?).to be(false)
end
end
+
+ describe '#issue_type_supports?' do
+ let_it_be(:issue) { create(:issue) }
+
+ it 'raises error when feature is invalid' do
+ expect { issue.issue_type_supports?(:unknown_feature) }.to raise_error(ArgumentError)
+ end
+ end
end
diff --git a/spec/models/iteration_spec.rb b/spec/models/iteration_spec.rb
index 19a1625aad3..e7ec5de0ef1 100644
--- a/spec/models/iteration_spec.rb
+++ b/spec/models/iteration_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe Iteration do
let(:start_date) { 5.days.from_now }
let(:due_date) { 6.days.from_now }
- shared_examples_for 'overlapping dates' do
+ shared_examples_for 'overlapping dates' do |skip_constraint_test: false|
context 'when start_date is in range' do
let(:start_date) { 5.days.from_now }
let(:due_date) { 3.weeks.from_now }
@@ -129,9 +129,11 @@ RSpec.describe Iteration do
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
- it 'is not valid even if forced' do
- subject.validate # to generate iid/etc
- expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ unless skip_constraint_test
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
end
@@ -144,9 +146,11 @@ RSpec.describe Iteration do
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
- it 'is not valid even if forced' do
- subject.validate # to generate iid/etc
- expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ unless skip_constraint_test
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
end
@@ -156,9 +160,11 @@ RSpec.describe Iteration do
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
- it 'is not valid even if forced' do
- subject.validate # to generate iid/etc
- expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ unless skip_constraint_test
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
end
end
@@ -177,6 +183,14 @@ RSpec.describe Iteration do
expect { subject.save! }.not_to raise_exception
end
end
+
+ context 'sub-group' do
+ let(:subgroup) { create(:group, parent: group) }
+
+ subject { build(:iteration, group: subgroup, start_date: start_date, due_date: due_date) }
+
+ it_behaves_like 'overlapping dates', skip_constraint_test: true
+ end
end
context 'project' do
@@ -210,6 +224,17 @@ RSpec.describe Iteration do
end
end
end
+
+ context 'project in a group' do
+ let_it_be(:project) { create(:project, group: create(:group)) }
+ let_it_be(:existing_iteration) { create(:iteration, :skip_project_validation, project: project, start_date: 4.days.from_now, due_date: 1.week.from_now) }
+
+ subject { build(:iteration, :skip_project_validation, project: project, start_date: start_date, due_date: due_date) }
+
+ it_behaves_like 'overlapping dates' do
+ let(:constraint_name) { 'iteration_start_and_due_daterange_project_id_constraint' }
+ end
+ end
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 90950d93db4..118b1492cd6 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -212,6 +212,16 @@ RSpec.describe Member do
it { expect(described_class.non_request).to include @accepted_request_member }
end
+ describe '.not_accepted_invitations' do
+ let_it_be(:not_accepted_invitation) { create(:project_member, :invited) }
+ let_it_be(:accepted_invitation) { create(:project_member, :invited, invite_accepted_at: Date.today) }
+
+ subject { described_class.not_accepted_invitations }
+
+ it { is_expected.to include(not_accepted_invitation) }
+ it { is_expected.not_to include(accepted_invitation) }
+ end
+
describe '.not_accepted_invitations_by_user' do
let(:invited_by_user) { create(:project_member, :invited, project: project, created_by: @owner_user) }
@@ -225,6 +235,33 @@ RSpec.describe Member do
it { is_expected.to contain_exactly(invited_by_user) }
end
+ describe '.not_expired' do
+ let_it_be(:expiring_yesterday) { create(:group_member, expires_at: 1.day.from_now) }
+ let_it_be(:expiring_today) { create(:group_member, expires_at: 2.days.from_now) }
+ let_it_be(:expiring_tomorrow) { create(:group_member, expires_at: 3.days.from_now) }
+ let_it_be(:not_expiring) { create(:group_member) }
+
+ subject { described_class.not_expired }
+
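+ # expiry dates above are evaluated at spec-definition time; travelling two days
+ # forward makes the first two members read as expired while the rest stay active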
+ around do |example|
+ travel_to(2.days.from_now) { example.run }
+ end
+
+ it { is_expected.not_to include(expiring_yesterday, expiring_today) }
+ it { is_expected.to include(expiring_tomorrow, not_expiring) }
+ end
+
+ describe '.last_ten_days_excluding_today' do
+ let_it_be(:created_today) { create(:group_member, created_at: Date.today.beginning_of_day) }
+ let_it_be(:created_yesterday) { create(:group_member, created_at: 1.day.ago) }
+ let_it_be(:created_eleven_days_ago) { create(:group_member, created_at: 11.days.ago) }
+
+ subject { described_class.last_ten_days_excluding_today }
+
+ it { is_expected.to include(created_yesterday) }
+ it { is_expected.not_to include(created_today, created_eleven_days_ago) }
+ end
+
describe '.search_invite_email' do
it 'returns only members with the matching e-mail' do
create(:group_member, :invited)
@@ -683,6 +720,45 @@ RSpec.describe Member do
end
end
+ describe '#send_invitation_reminder' do
+ subject { member.send_invitation_reminder(0) }
+
+ context 'an invited group member' do
+ let!(:member) { create(:group_member, :invited) }
+
+ it 'sends a reminder' do
+ expect_any_instance_of(NotificationService).to receive(:invite_member_reminder).with(member, member.raw_invite_token, 0)
+
+ subject
+ end
+ end
+
+ context 'an invited member without a raw invite token set' do
+ let!(:member) { create(:group_member, :invited) }
+
+ before do
+ member.instance_variable_set(:@raw_invite_token, nil)
+ allow_any_instance_of(NotificationService).to receive(:invite_member_reminder)
+ end
+
+ it 'generates a new token' do
+ expect(member).to receive(:generate_invite_token!)
+
+ subject
+ end
+ end
+
+ context 'an uninvited member' do
+ let!(:member) { create(:group_member) }
+
+ it 'does not send a reminder' do
+ expect_any_instance_of(NotificationService).not_to receive(:invite_member_reminder)
+
+ subject
+ end
+ end
+ end
+
describe "#invite_to_unknown_user?" do
subject { member.invite_to_unknown_user? }
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 2c64201e84d..6706083fd91 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -180,6 +180,17 @@ RSpec.describe MergeRequestDiff do
expect(diff.external_diff_store).to eq(file_store)
end
+ it 'migrates a nil diff file' do
+ expect(diff).not_to be_stored_externally
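+ # force the stored diff content to NULL to simulate a record with no diff data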
+ MergeRequestDiffFile.where(merge_request_diff_id: diff.id).update_all(diff: nil)
+
+ stub_external_diffs_setting(enabled: true)
+
+ diff.migrate_files_to_external_storage!
+
+ expect(diff).to be_stored_externally
+ end
+
it 'safely handles a transaction error when migrating to external storage' do
expect(diff).not_to be_stored_externally
expect(diff.external_diff).not_to be_exists
@@ -646,13 +657,32 @@ RSpec.describe MergeRequestDiff do
expect(diff_with_commits.commit_shas).to all(match(/\h{40}/))
end
- context 'with limit attribute' do
+ shared_examples 'limited number of shas' do
it 'returns limited number of shas' do
expect(diff_with_commits.commit_shas(limit: 2).size).to eq(2)
expect(diff_with_commits.commit_shas(limit: 100).size).to eq(29)
expect(diff_with_commits.commit_shas.size).to eq(29)
end
end
+
+ context 'with limit attribute' do
+ it_behaves_like 'limited number of shas'
+ end
+
+ context 'with preloaded diff commits' do
+ before do
+ # preloads the merge_request_diff_commits association
+ diff_with_commits.merge_request_diff_commits.to_a
+ end
+
+ it_behaves_like 'limited number of shas'
+
+ it 'does not trigger any query' do
+ count = ActiveRecord::QueryRecorder.new { diff_with_commits.commit_shas(limit: 2) }.count
+
+ expect(count).to eq(0)
+ end
+ end
end
describe '#compare_with' do
@@ -865,4 +895,25 @@ RSpec.describe MergeRequestDiff do
expect(subject.lines_count).to eq 189
end
end
+
+ describe '.latest_diff_for_merge_requests' do
+ let_it_be(:merge_request_1) { create(:merge_request_without_merge_request_diff) }
+ let_it_be(:merge_request_1_diff_1) { create(:merge_request_diff, merge_request: merge_request_1, created_at: 3.days.ago) }
+ let_it_be(:merge_request_1_diff_2) { create(:merge_request_diff, merge_request: merge_request_1, created_at: 1.day.ago) }
+
+ let_it_be(:merge_request_2) { create(:merge_request_without_merge_request_diff) }
+ let_it_be(:merge_request_2_diff_1) { create(:merge_request_diff, merge_request: merge_request_2, created_at: 3.days.ago) }
+
+ let_it_be(:merge_request_3) { create(:merge_request_without_merge_request_diff) }
+
+ subject { described_class.latest_diff_for_merge_requests([merge_request_1, merge_request_2]) }
+
+ it 'loads the latest merge_request_diff record for the given merge requests' do
+ expect(subject).to match_array([merge_request_1_diff_2, merge_request_2_diff_1])
+ end
+
+ it 'loads nothing if the merge request has no diff record' do
+ expect(described_class.latest_diff_for_merge_requests(merge_request_3)).to be_empty
+ end
+ end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 98f709a0610..ddb3ffdda2f 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2358,48 +2358,43 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- context 'when state event tracking is disabled' do
+ context 'when no metrics or merge event exists' do
+ let(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request, :merged) }
+
before do
- stub_feature_flags(track_resource_state_change_events: false)
+ merge_request.metrics.destroy!
end
- context 'when merging note is persisted, but no metrics or merge event exists' do
- let(:user) { create(:user) }
- let(:merge_request) { create(:merge_request, :merged) }
-
+ context 'when resource event for the merge exists' do
before do
- merge_request.metrics.destroy!
-
SystemNoteService.change_status(merge_request,
merge_request.target_project,
user,
merge_request.state, nil)
end
- it 'returns merging note creation date' do
+ it 'returns the resource event creation date' do
expect(merge_request.reload.metrics).to be_nil
expect(merge_request.merge_event).to be_nil
- expect(merge_request.notes.count).to eq(1)
- expect(merge_request.merged_at).to eq(merge_request.notes.first.created_at)
+ expect(merge_request.resource_state_events.count).to eq(1)
+ expect(merge_request.merged_at).to eq(merge_request.resource_state_events.first.created_at)
end
end
- end
-
- context 'when state event tracking is enabled' do
- let(:user) { create(:user) }
- let(:merge_request) { create(:merge_request, :merged) }
-
- before do
- merge_request.metrics.destroy!
- SystemNoteService.change_status(merge_request,
- merge_request.target_project,
- user,
- merge_request.state, nil)
- end
+ context 'when system note for the merge exists' do
+ before do
+ # We do not create these system notes anymore but we need this to work for existing MRs
+ # that used system notes instead of resource state events
+ create(:note, :system, noteable: merge_request, note: 'merged')
+ end
- it 'does not create a system note' do
- expect(merge_request.notes).to be_empty
+ it 'returns the merging note creation date' do
+ expect(merge_request.reload.metrics).to be_nil
+ expect(merge_request.merge_event).to be_nil
+ expect(merge_request.notes.count).to eq(1)
+ expect(merge_request.merged_at).to eq(merge_request.notes.first.created_at)
+ end
end
end
end
@@ -3525,6 +3520,25 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#merge_base_pipeline' do
+ let(:merge_request) do
+ create(:merge_request, :with_merge_request_pipeline)
+ end
+
+ let(:merge_base_pipeline) do
+ create(:ci_pipeline, ref: merge_request.target_branch, sha: merge_request.target_branch_sha)
+ end
+
+ before do
+ merge_base_pipeline
+ merge_request.update_head_pipeline
+ end
+
+ it 'returns a pipeline pointing to a commit on the target ref' do
+ expect(merge_request.merge_base_pipeline).to eq(merge_base_pipeline)
+ end
+ end
+
describe '#has_commits?' do
it 'returns true when merge request diff has commits' do
allow(subject.merge_request_diff).to receive(:commits_count)
@@ -4214,14 +4228,26 @@ RSpec.describe MergeRequest, factory_default: :keep do
it 'returns true' do
expect(subject.diffable_merge_ref?).to eq(true)
end
- end
- end
- context 'merge request cannot be merged' do
- it 'returns false' do
- subject.mark_as_unchecked!
+ context 'merge request cannot be merged' do
+ before do
+ subject.mark_as_unchecked!
+ end
+
+ it 'returns true' do
+ expect(subject.diffable_merge_ref?).to eq(true)
+ end
+
+ context 'display_merge_conflicts_in_diff is disabled' do
+ before do
+ stub_feature_flags(display_merge_conflicts_in_diff: false)
+ end
- expect(subject.diffable_merge_ref?).to eq(false)
+ it 'returns false' do
+ expect(subject.diffable_merge_ref?).to eq(false)
+ end
+ end
+ end
end
end
end
@@ -4261,24 +4287,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe '#allows_reviewers?' do
- it 'returns false without merge_request_reviewers feature' do
- stub_feature_flags(merge_request_reviewers: false)
-
- merge_request = build_stubbed(:merge_request)
-
- expect(merge_request.allows_reviewers?).to be(false)
- end
-
- it 'returns true with merge_request_reviewers feature' do
- stub_feature_flags(merge_request_reviewers: true)
-
- merge_request = build_stubbed(:merge_request)
-
- expect(merge_request.allows_reviewers?).to be(true)
- end
- end
-
describe '#merge_ref_head' do
let(:merge_request) { create(:merge_request) }
@@ -4304,4 +4312,36 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
end
+
+ describe '#allows_reviewers?' do
+ it 'returns false without merge_request_reviewers feature' do
+ stub_feature_flags(merge_request_reviewers: false)
+
+ merge_request = build_stubbed(:merge_request)
+
+ expect(merge_request.allows_reviewers?).to be(false)
+ end
+
+ it 'returns true with merge_request_reviewers feature' do
+ stub_feature_flags(merge_request_reviewers: true)
+
+ merge_request = build_stubbed(:merge_request)
+
+ expect(merge_request.allows_reviewers?).to be(true)
+ end
+ end
+
+ describe '#update_and_mark_in_progress_merge_commit_sha' do
+ let(:ref) { subject.target_project.repository.commit.id }
+
+ before do
+ expect(subject.target_project).to receive(:mark_primary_write_location)
+ end
+
+ it 'updates commit ID' do
+ expect { subject.update_and_mark_in_progress_merge_commit_sha(ref) }
+ .to change { subject.in_progress_merge_commit_sha }
+ .from(nil).to(ref)
+ end
+ end
end
diff --git a/spec/models/milestone_release_spec.rb b/spec/models/milestone_release_spec.rb
index 3c781545d8a..b2a174f1d90 100644
--- a/spec/models/milestone_release_spec.rb
+++ b/spec/models/milestone_release_spec.rb
@@ -10,8 +10,8 @@ RSpec.describe MilestoneRelease do
subject { build(:milestone_release, release: release, milestone: milestone) }
describe 'associations' do
- it { is_expected.to belong_to(:milestone) }
it { is_expected.to belong_to(:release) }
+ it { is_expected.to belong_to(:milestone) }
end
context 'when trying to create the same record in milestone_releases twice' do
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index 257d78dfa2c..c6e8d5b129c 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -3,5 +3,71 @@
require 'spec_helper'
RSpec.describe NamespaceSetting, type: :model do
- it { is_expected.to belong_to(:namespace) }
+ # Relationships
+ #
+ describe "Associations" do
+ it { is_expected.to belong_to(:namespace) }
+ end
+
+ describe "validations" do
+ describe "#default_branch_name_content" do
+ let_it_be(:group) { create(:group) }
+
+ let(:namespace_settings) { group.namespace_settings }
+
+ shared_examples "doesn't return an error" do
+ it "doesn't return an error" do
+ expect(namespace_settings.valid?).to be_truthy
+ expect(namespace_settings.errors.full_messages).to be_empty
+ end
+ end
+
+ context "when not set" do
+ it_behaves_like "doesn't return an error"
+ end
+
+ context "when set" do
+ before do
+ namespace_settings.default_branch_name = "example_branch_name"
+ end
+
+ it_behaves_like "doesn't return an error"
+ end
+
+ context "when an empty string" do
+ before do
+ namespace_settings.default_branch_name = ''
+ end
+
+ it "returns an error" do
+ expect(namespace_settings.valid?).to be_falsey
+ expect(namespace_settings.errors.full_messages).not_to be_empty
+ end
+ end
+ end
+
+ describe '#allow_mfa_for_group' do
+ let(:settings) { group.namespace_settings }
+
+ context 'group is top-level group' do
+ let(:group) { create(:group) }
+
+ it 'is valid' do
+ settings.allow_mfa_for_subgroups = false
+
+ expect(settings).to be_valid
+ end
+ end
+
+ context 'group is a subgroup' do
+ let(:group) { create(:group, parent: create(:group)) }
+
+ it 'is invalid' do
+ settings.allow_mfa_for_subgroups = false
+
+ expect(settings).to be_invalid
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index ca1f06370d4..91b18f346c6 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -855,13 +855,49 @@ RSpec.describe Namespace do
end
describe '#all_projects' do
- let(:group) { create(:group) }
- let(:child) { create(:group, parent: group) }
- let!(:project1) { create(:project_empty_repo, namespace: group) }
- let!(:project2) { create(:project_empty_repo, namespace: child) }
+ shared_examples 'all projects for a namespace' do
+ let(:namespace) { create(:namespace) }
+ let(:child) { create(:group, parent: namespace) }
+ let!(:project1) { create(:project_empty_repo, namespace: namespace) }
+ let!(:project2) { create(:project_empty_repo, namespace: child) }
+
+ it { expect(namespace.all_projects.to_a).to match_array([project2, project1]) }
+ it { expect(child.all_projects.to_a).to match_array([project2]) }
+ end
+
+ shared_examples 'all project examples' do
+ include_examples 'all projects for a namespace'
+
+ context 'when namespace is a group' do
+ let_it_be(:namespace) { create(:group) }
+
+ include_examples 'all projects for a namespace'
+ end
- it { expect(group.all_projects.to_a).to match_array([project2, project1]) }
- it { expect(child.all_projects.to_a).to match_array([project2]) }
+ context 'when namespace is a user namespace' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user_namespace) { create(:namespace, owner: user) }
+ let_it_be(:project) { create(:project, namespace: user_namespace) }
+
+ it { expect(user_namespace.all_projects.to_a).to match_array([project]) }
+ end
+ end
+
+ context 'with recursive approach' do
+ before do
+ stub_feature_flags(recursive_approach_for_all_projects: true)
+ end
+
+ include_examples 'all project examples'
+ end
+
+ context 'with route path wildcard approach' do
+ before do
+ stub_feature_flags(recursive_approach_for_all_projects: false)
+ end
+
+ include_examples 'all project examples'
+ end
end
describe '#all_pipelines' do
@@ -1320,4 +1356,140 @@ RSpec.describe Namespace do
end
end
end
+
+ describe '#shared_runners_setting' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:shared_runners_enabled, :allow_descendants_override_disabled_shared_runners, :shared_runners_setting) do
+ true | true | 'enabled'
+ true | false | 'enabled'
+ false | true | 'disabled_with_override'
+ false | false | 'disabled_and_unoverridable'
+ end
+
+ with_them do
+ let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners)}
+
+ it 'returns the result' do
+ expect(namespace.shared_runners_setting).to eq(shared_runners_setting)
+ end
+ end
+ end
+
+ describe '#shared_runners_setting_higher_than?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:shared_runners_enabled, :allow_descendants_override_disabled_shared_runners, :other_setting, :result) do
+ true | true | 'enabled' | false
+ true | true | 'disabled_with_override' | true
+ true | true | 'disabled_and_unoverridable' | true
+ false | true | 'enabled' | false
+ false | true | 'disabled_with_override' | false
+ false | true | 'disabled_and_unoverridable' | true
+ false | false | 'enabled' | false
+ false | false | 'disabled_with_override' | false
+ false | false | 'disabled_and_unoverridable' | false
+ end
+
+ with_them do
+ let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners)}
+
+ it 'returns the result' do
+ expect(namespace.shared_runners_setting_higher_than?(other_setting)).to eq(result)
+ end
+ end
+ end
+
+ describe 'validation #changing_shared_runners_enabled_is_allowed' do
+ context 'without a parent' do
+ let(:namespace) { build(:namespace, shared_runners_enabled: true) }
+
+ it 'is valid' do
+ expect(namespace).to be_valid
+ end
+ end
+
+ context 'with a parent' do
+ context 'when parent has shared runners disabled' do
+ let(:parent) { create(:namespace, :shared_runners_disabled) }
+ let(:sub_namespace) { build(:namespace, shared_runners_enabled: true, parent_id: parent.id) }
+
+ it 'is invalid' do
+ expect(sub_namespace).to be_invalid
+ expect(sub_namespace.errors[:shared_runners_enabled]).to include('cannot be enabled because parent group has shared Runners disabled')
+ end
+ end
+
+ context 'when parent has shared runners disabled but allows override' do
+ let(:parent) { create(:namespace, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+ let(:sub_namespace) { build(:namespace, shared_runners_enabled: true, parent_id: parent.id) }
+
+ it 'is valid' do
+ expect(sub_namespace).to be_valid
+ end
+ end
+
+ context 'when parent has shared runners enabled' do
+ let(:parent) { create(:namespace, shared_runners_enabled: true) }
+ let(:sub_namespace) { build(:namespace, shared_runners_enabled: true, parent_id: parent.id) }
+
+ it 'is valid' do
+ expect(sub_namespace).to be_valid
+ end
+ end
+ end
+ end
+
+ describe 'validation #changing_allow_descendants_override_disabled_shared_runners_is_allowed' do
+ context 'without a parent' do
+ context 'with shared runners disabled' do
+ let(:namespace) { build(:namespace, :allow_descendants_override_disabled_shared_runners, :shared_runners_disabled) }
+
+ it 'is valid' do
+ expect(namespace).to be_valid
+ end
+ end
+
+ context 'with shared runners enabled' do
+ let(:namespace) { create(:namespace) }
+
+ it 'is invalid' do
+ namespace.allow_descendants_override_disabled_shared_runners = true
+
+ expect(namespace).to be_invalid
+ expect(namespace.errors[:allow_descendants_override_disabled_shared_runners]).to include('cannot be changed if shared runners are enabled')
+ end
+ end
+ end
+
+ context 'with a parent' do
+ context 'when parent does not allow shared runners' do
+ let(:parent) { create(:namespace, :shared_runners_disabled) }
+ let(:sub_namespace) { build(:namespace, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent_id: parent.id) }
+
+ it 'is invalid' do
+ expect(sub_namespace).to be_invalid
+ expect(sub_namespace.errors[:allow_descendants_override_disabled_shared_runners]).to include('cannot be enabled because parent group does not allow it')
+ end
+ end
+
+ context 'when parent allows shared runners and setting to true' do
+ let(:parent) { create(:namespace, shared_runners_enabled: true) }
+ let(:sub_namespace) { build(:namespace, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent_id: parent.id) }
+
+ it 'is valid' do
+ expect(sub_namespace).to be_valid
+ end
+ end
+
+ context 'when parent allows shared runners and setting to false' do
+ let(:parent) { create(:namespace, shared_runners_enabled: true) }
+ let(:sub_namespace) { build(:namespace, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent_id: parent.id) }
+
+ it 'is valid' do
+ expect(sub_namespace).to be_valid
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index 0f765d6b09b..bc50e2af373 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -175,6 +175,7 @@ RSpec.describe NotificationSetting do
:reopen_merge_request,
:close_merge_request,
:reassign_merge_request,
+ :change_reviewer_merge_request,
:merge_merge_request,
:failed_pipeline,
:success_pipeline,
diff --git a/spec/models/operations/feature_flag_spec.rb b/spec/models/operations/feature_flag_spec.rb
index db432e73355..b4e941f2856 100644
--- a/spec/models/operations/feature_flag_spec.rb
+++ b/spec/models/operations/feature_flag_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Operations::FeatureFlag do
context 'a version 1 feature flag' do
it 'is valid if associated with Operations::FeatureFlagScope models' do
project = create(:project)
- feature_flag = described_class.create({ name: 'test', project: project, version: 1,
+ feature_flag = described_class.create!({ name: 'test', project: project, version: 1,
scopes_attributes: [{ environment_scope: '*', active: false }] })
expect(feature_flag).to be_valid
@@ -33,9 +33,10 @@ RSpec.describe Operations::FeatureFlag do
it 'is invalid if associated with Operations::FeatureFlags::Strategy models' do
project = create(:project)
- feature_flag = described_class.create({ name: 'test', project: project, version: 1,
+ feature_flag = described_class.new({ name: 'test', project: project, version: 1,
strategies_attributes: [{ name: 'default', parameters: {} }] })
+ expect(feature_flag.valid?).to eq(false)
expect(feature_flag.errors.messages).to eq({
version_associations: ["version 1 feature flags may not have strategies"]
})
@@ -45,9 +46,10 @@ RSpec.describe Operations::FeatureFlag do
context 'a version 2 feature flag' do
it 'is invalid if associated with Operations::FeatureFlagScope models' do
project = create(:project)
- feature_flag = described_class.create({ name: 'test', project: project, version: 2,
+ feature_flag = described_class.new({ name: 'test', project: project, version: 2,
scopes_attributes: [{ environment_scope: '*', active: false }] })
+ expect(feature_flag.valid?).to eq(false)
expect(feature_flag.errors.messages).to eq({
version_associations: ["version 2 feature flags may not have scopes"]
})
@@ -55,7 +57,7 @@ RSpec.describe Operations::FeatureFlag do
it 'is valid if associated with Operations::FeatureFlags::Strategy models' do
project = create(:project)
- feature_flag = described_class.create({ name: 'test', project: project, version: 2,
+ feature_flag = described_class.create!({ name: 'test', project: project, version: 2,
strategies_attributes: [{ name: 'default', parameters: {} }] })
expect(feature_flag).to be_valid
@@ -75,7 +77,7 @@ RSpec.describe Operations::FeatureFlag do
it 'defaults to 1 if unspecified' do
project = create(:project)
- feature_flag = described_class.create(name: 'my_flag', project: project, active: true)
+ feature_flag = described_class.create!(name: 'my_flag', project: project, active: true)
expect(feature_flag).to be_valid
expect(feature_flag.version_before_type_cast).to eq(1)
@@ -113,14 +115,14 @@ RSpec.describe Operations::FeatureFlag do
context 'with a version 1 feature flag' do
it 'creates a default scope' do
- feature_flag = described_class.create({ name: 'test', project: project, scopes_attributes: [], version: 1 })
+ feature_flag = described_class.create!({ name: 'test', project: project, scopes_attributes: [], version: 1 })
expect(feature_flag.scopes.count).to eq(1)
expect(feature_flag.scopes.first.environment_scope).to eq('*')
end
it 'allows specifying the default scope in the parameters' do
- feature_flag = described_class.create({ name: 'test', project: project,
+ feature_flag = described_class.create!({ name: 'test', project: project,
scopes_attributes: [{ environment_scope: '*', active: false },
{ environment_scope: 'review/*', active: true }], version: 1 })
@@ -131,7 +133,7 @@ RSpec.describe Operations::FeatureFlag do
context 'with a version 2 feature flag' do
it 'does not create a default scope' do
- feature_flag = described_class.create({ name: 'test', project: project, scopes_attributes: [], version: 2 })
+ feature_flag = described_class.create!({ name: 'test', project: project, scopes_attributes: [], version: 2 })
expect(feature_flag.scopes).to eq([])
end
diff --git a/spec/models/operations/feature_flags/strategy_spec.rb b/spec/models/operations/feature_flags/strategy_spec.rb
index 04e3ef26e9d..0ecb49e75f3 100644
--- a/spec/models/operations/feature_flags/strategy_spec.rb
+++ b/spec/models/operations/feature_flags/strategy_spec.rb
@@ -4,11 +4,12 @@ require 'spec_helper'
RSpec.describe Operations::FeatureFlags::Strategy do
let_it_be(:project) { create(:project) }
+ let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
describe 'validations' do
it do
is_expected.to validate_inclusion_of(:name)
- .in_array(%w[default gradualRolloutUserId userWithId gitlabUserList])
+ .in_array(%w[default gradualRolloutUserId flexibleRollout userWithId gitlabUserList])
.with_message('strategy name is invalid')
end
@@ -19,7 +20,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'skips parameters validation' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: invalid_name, parameters: { bad: 'params' })
@@ -36,7 +36,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must have valid parameters for the strategy' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId', parameters: invalid_parameters)
@@ -45,7 +44,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'allows the parameters in any order' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { percentage: '10', groupId: 'mygroup' })
@@ -55,13 +53,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
describe 'percentage' do
where(:invalid_value) do
- [50, 40.0, { key: "value" }, "garbage", "00", "01", "101", "-1", "-10", "0100",
- "1000", "10.0", "5%", "25%", "100hi", "e100", "30m", " ", "\r\n", "\n", "\t",
- "\n10", "20\n", "\n100", "100\n", "\n ", nil]
+ [50, 40.0, { key: "value" }, "garbage", "101", "-1", "-10", "1000", "10.0", "5%", "25%",
+ "100hi", "e100", "30m", " ", "\r\n", "\n", "\t", "\n10", "20\n", "\n100", "100\n",
+ "\n ", nil]
end
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: 'mygroup', percentage: invalid_value })
@@ -75,7 +72,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: 'mygroup', percentage: valid_value })
@@ -92,7 +88,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value of up to 32 lowercase characters' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: invalid_value, percentage: '40' })
@@ -106,7 +101,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value of up to 32 lowercase characters' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: valid_value, percentage: '40' })
@@ -117,13 +111,132 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
end
+ context 'when the strategy name is flexibleRollout' do
+ valid_parameters = { rollout: '40', groupId: 'mygroup', stickiness: 'DEFAULT' }
+ where(invalid_parameters: [
+ nil,
+ {},
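+ # every hash containing only one or two of the three required keys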
+ *valid_parameters.to_a.combination(1).to_a.map { |p| p.to_h },
+ *valid_parameters.to_a.combination(2).to_a.map { |p| p.to_h },
+ { **valid_parameters, userIds: '4' },
+ { **valid_parameters, extra: nil }
+ ])
+ with_them do
+ it 'must have valid parameters for the strategy' do
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: invalid_parameters)
+
+ expect(strategy.errors[:parameters]).to eq(['parameters are invalid'])
+ end
+ end
+
+ [
+ [:rollout, '10'],
+ [:stickiness, 'DEFAULT'],
+ [:groupId, 'mygroup']
+ ].permutation(3).each do |parameters|
+ it "allows the parameters in the order #{parameters.map { |p| p.first }.join(', ')}" do
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: Hash[parameters])
+
+ expect(strategy.errors[:parameters]).to be_empty
+ end
+ end
+
+ describe 'rollout' do
+ where(invalid_value: [50, 40.0, { key: "value" }, "garbage", "101", "-1", " ", "-10",
+ "1000", "10.0", "5%", "25%", "100hi", "e100", "30m", "\r\n",
+ "\n", "\t", "\n10", "20\n", "\n100", "100\n", "\n ", nil])
+ with_them do
+ it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
+ parameters = { stickiness: 'DEFAULT', groupId: 'mygroup', rollout: invalid_value }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([
+ 'rollout must be a string between 0 and 100 inclusive'
+ ])
+ end
+ end
+
+ where(valid_value: %w[0 1 10 38 100 93])
+ with_them do
+ it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
+ parameters = { stickiness: 'DEFAULT', groupId: 'mygroup', rollout: valid_value }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([])
+ end
+ end
+ end
+
+ describe 'groupId' do
+ where(invalid_value: [nil, 4, 50.0, {}, 'spaces bad', 'bad$', '%bad', '<bad', 'bad>',
+ '!bad', '.bad', 'Bad', 'bad1', "", " ", "b" * 33, "ba_d", "ba\nd"])
+ with_them do
+ it 'must be a string value of up to 32 lowercase characters' do
+ parameters = { stickiness: 'DEFAULT', groupId: invalid_value, rollout: '40' }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq(['groupId parameter is invalid'])
+ end
+ end
+
+ where(valid_value: ["somegroup", "anothergroup", "okay", "g", "a" * 32])
+ with_them do
+ it 'must be a string value of up to 32 lowercase characters' do
+ parameters = { stickiness: 'DEFAULT', groupId: valid_value, rollout: '40' }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([])
+ end
+ end
+ end
+
+ describe 'stickiness' do
+ where(invalid_value: [nil, " ", "default", "DEFAULT\n", "UserId", "USER", "USERID "])
+ with_them do
+ it 'must be a string representing a supported stickiness setting' do
+ parameters = { stickiness: invalid_value, groupId: 'mygroup', rollout: '40' }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([
+ 'stickiness parameter must be DEFAULT, USERID, SESSIONID, or RANDOM'
+ ])
+ end
+ end
+
+ where(valid_value: %w[DEFAULT USERID SESSIONID RANDOM])
+ with_them do
+ it 'must be a string representing a supported stickiness setting' do
+ parameters = { stickiness: valid_value, groupId: 'mygroup', rollout: '40' }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([])
+ end
+ end
+ end
+ end
+
context 'when the strategy name is userWithId' do
where(:invalid_parameters) do
[nil, { userIds: 'sam', percentage: '40' }, { userIds: 'sam', some: 'param' }, { percentage: '40' }, {}]
end
with_them do
it 'must have valid parameters for the strategy' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId', parameters: invalid_parameters)
@@ -140,7 +253,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'is valid with a string of comma separated values' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId', parameters: { userIds: valid_value })
@@ -155,7 +267,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'is invalid' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId', parameters: { userIds: invalid_value })
@@ -173,7 +284,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be empty' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'default',
parameters: invalid_value)
@@ -183,7 +293,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'must be empty' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'default',
parameters: {})
@@ -198,7 +307,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be empty' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
parameters: invalid_value)
@@ -208,7 +316,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'must be empty' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
parameters: {})
@@ -221,7 +328,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
describe 'associations' do
context 'when name is gitlabUserList' do
it 'is valid when associated with a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
@@ -232,7 +338,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is invalid without a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
parameters: {})
@@ -242,7 +347,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
it 'is invalid when associated with a user list from another project' do
other_project = create(:project)
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: other_project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
@@ -255,7 +359,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
context 'when name is default' do
it 'is invalid when associated with a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'default',
@@ -266,7 +369,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is valid without a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'default',
parameters: {})
@@ -277,7 +379,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
context 'when name is userWithId' do
it 'is invalid when associated with a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId',
@@ -288,7 +389,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is valid without a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId',
parameters: { userIds: 'user1' })
@@ -299,7 +399,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
context 'when name is gradualRolloutUserId' do
it 'is invalid when associated with a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
@@ -310,7 +409,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is valid without a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: 'default', percentage: '10' })
@@ -318,6 +416,30 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy.errors[:user_list]).to be_empty
end
end
+
+ context 'when name is flexibleRollout' do
+ it 'is invalid when associated with a user list' do
+ user_list = create(:operations_feature_flag_user_list, project: project)
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ user_list: user_list,
+ parameters: { groupId: 'default',
+ rollout: '10',
+ stickiness: 'DEFAULT' })
+
+ expect(strategy.errors[:user_list]).to eq(['must be blank'])
+ end
+
+ it 'is valid without a user list' do
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default',
+ rollout: '10',
+ stickiness: 'DEFAULT' })
+
+ expect(strategy.errors[:user_list]).to be_empty
+ end
+ end
end
end
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index ea1f75d04e7..ca408303524 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -108,6 +108,20 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.not_to allow_value('.foobar').for(:name) }
it { is_expected.not_to allow_value('%foo%bar').for(:name) }
end
+
+ context 'generic package' do
+ subject { build_stubbed(:generic_package) }
+
+ it { is_expected.to allow_value('123').for(:name) }
+ it { is_expected.to allow_value('foo').for(:name) }
+ it { is_expected.to allow_value('foo.bar.baz-2.0-20190901.47283-1').for(:name) }
+ it { is_expected.not_to allow_value('../../foo').for(:name) }
+ it { is_expected.not_to allow_value('..\..\foo').for(:name) }
+ it { is_expected.not_to allow_value('%2f%2e%2e%2f%2essh%2fauthorized_keys').for(:name) }
+ it { is_expected.not_to allow_value('$foo/bar').for(:name) }
+ it { is_expected.not_to allow_value('my file name').for(:name) }
+ it { is_expected.not_to allow_value('!!()()').for(:name) }
+ end
end
describe '#version' do
@@ -257,7 +271,12 @@ RSpec.describe Packages::Package, type: :model do
end
it_behaves_like 'validating version to be SemVer compliant for', :npm_package
- it_behaves_like 'validating version to be SemVer compliant for', :nuget_package
+
+ context 'nuget package' do
+ it_behaves_like 'validating version to be SemVer compliant for', :nuget_package
+
+ it { is_expected.to allow_value('1.2.3.4').for(:version) }
+ end
end
describe '#package_already_taken' do
@@ -497,6 +516,14 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.to match_array([package1, package2]) }
end
+
+ describe '.with_normalized_pypi_name' do
+ let_it_be(:pypi_package) { create(:pypi_package, name: 'Foo.bAr---BAZ_buz') }
+
+ subject { described_class.with_normalized_pypi_name('foo-bar-baz-buz') }
+
+ it { is_expected.to match_array([pypi_package]) }
+ end
end
describe '.select_distinct_name' do
diff --git a/spec/models/pages_deployment_spec.rb b/spec/models/pages_deployment_spec.rb
index eafff1ed59a..5d26ade740e 100644
--- a/spec/models/pages_deployment_spec.rb
+++ b/spec/models/pages_deployment_spec.rb
@@ -18,4 +18,21 @@ RSpec.describe PagesDeployment do
expect(create(:pages_deployment)).to be_valid
end
end
+
+ describe 'default for file_store' do
+ it 'uses local store when object storage is not enabled' do
+ expect(build(:pages_deployment).file_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+
+ it 'uses remote store when object storage is enabled' do
+ stub_pages_object_storage(::Pages::DeploymentUploader)
+
+ expect(build(:pages_deployment).file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ it 'saves size along with the file' do
+ deployment = create(:pages_deployment)
+ expect(deployment.size).to eq(deployment.file.size)
+ end
end
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index bc6398de9a4..67fb11f34e0 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -199,6 +199,7 @@ RSpec.describe PlanLimits do
ci_max_artifact_size_secret_detection
ci_max_artifact_size_requirements
ci_max_artifact_size_coverage_fuzzing
+ ci_max_artifact_size_api_fuzzing
]
end
diff --git a/spec/models/preloaders/merge_request_diff_preloader_spec.rb b/spec/models/preloaders/merge_request_diff_preloader_spec.rb
new file mode 100644
index 00000000000..9a76d42e73f
--- /dev/null
+++ b/spec/models/preloaders/merge_request_diff_preloader_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Preloaders::MergeRequestDiffPreloader do
+ let_it_be(:merge_request_1) { create(:merge_request) }
+ let_it_be(:merge_request_2) { create(:merge_request) }
+ let_it_be(:merge_request_3) { create(:merge_request_without_merge_request_diff) }
+
+ let(:merge_requests) { [merge_request_1, merge_request_2, merge_request_3] }
+
+ def trigger(merge_requests)
+ Array(merge_requests).each(&:merge_request_diff)
+ end
+
+ def merge_requests_with_preloaded_diff
+ described_class.new(MergeRequest.where(id: merge_requests.map(&:id)).to_a).preload_all
+ end
+
+ it 'does not trigger N+1 queries' do
+ # warmup
+ trigger(merge_requests_with_preloaded_diff)
+
+ first_merge_request = merge_requests_with_preloaded_diff.first
+ clean_merge_requests = merge_requests_with_preloaded_diff
+
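+ # Loading diffs for the whole preloaded collection should cost no more
+ # queries than loading the diff for a single merge request.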
+ expect { trigger(clean_merge_requests) }.to issue_same_number_of_queries_as { trigger(first_merge_request) }
+ end
+end
diff --git a/spec/models/project_feature_usage_spec.rb b/spec/models/project_feature_usage_spec.rb
index 908b98ee9c2..d55d41fab85 100644
--- a/spec/models/project_feature_usage_spec.rb
+++ b/spec/models/project_feature_usage_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe ProjectFeatureUsage, type: :model do
feature_usage.log_jira_dvcs_integration_usage
first_logged_at = feature_usage.jira_dvcs_cloud_last_sync_at
- Timecop.freeze(1.hour.from_now) do
+ travel_to(1.hour.from_now) do
ProjectFeatureUsage.new(project_id: project.id).log_jira_dvcs_integration_usage
end
diff --git a/spec/models/project_repository_spec.rb b/spec/models/project_repository_spec.rb
index 6852ca0097d..eba908b0fdb 100644
--- a/spec/models/project_repository_spec.rb
+++ b/spec/models/project_repository_spec.rb
@@ -8,6 +8,11 @@ RSpec.describe ProjectRepository do
it { is_expected.to belong_to(:project) }
end
+ it_behaves_like 'shardable scopes' do
+ let_it_be(:record_1) { create(:project_repository) }
+ let_it_be(:record_2, reload: true) { create(:project_repository) }
+ end
+
describe '.find_project' do
it 'finds project by disk path' do
project = create(:project)
diff --git a/spec/models/project_repository_storage_move_spec.rb b/spec/models/project_repository_storage_move_spec.rb
index 3e679c8af4d..d32867efb39 100644
--- a/spec/models/project_repository_storage_move_spec.rb
+++ b/spec/models/project_repository_storage_move_spec.rb
@@ -43,6 +43,18 @@ RSpec.describe ProjectRepositoryStorageMove, type: :model do
end
end
+ describe 'defaults' do
+ context 'destination_storage_name' do
+ subject { build(:project_repository_storage_move) }
+
+ it 'picks storage from ApplicationSetting' do
+ expect(Gitlab::CurrentSettings).to receive(:pick_repository_storage).and_return('picked').at_least(:once)
+
+ expect(subject.destination_storage_name).to eq('picked')
+ end
+ end
+ end
+
describe 'state transitions' do
let(:project) { create(:project) }
diff --git a/spec/models/project_services/chat_message/deployment_message_spec.rb b/spec/models/project_services/chat_message/deployment_message_spec.rb
index 9c361f90ae0..6bdf2120b36 100644
--- a/spec/models/project_services/chat_message/deployment_message_spec.rb
+++ b/spec/models/project_services/chat_message/deployment_message_spec.rb
@@ -70,6 +70,17 @@ RSpec.describe ChatMessage::DeploymentMessage do
expect(message.pretext).to eq('Deploy to staging unknown')
end
+
+ it 'returns a message for a running deployment' do
+ data = {
+ status: 'running',
+ environment: 'production'
+ }
+
+ message = described_class.new(data)
+
+ expect(message.pretext).to eq('Starting deploy to production')
+ end
end
describe '#attachments' do
diff --git a/spec/models/project_services/chat_message/issue_message_spec.rb b/spec/models/project_services/chat_message/issue_message_spec.rb
index 051f4780ba4..4701ef3e49e 100644
--- a/spec/models/project_services/chat_message/issue_message_spec.rb
+++ b/spec/models/project_services/chat_message/issue_message_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe ChatMessage::IssueMessage do
context 'open' do
it 'returns a message regarding opening of issues' do
expect(subject.pretext).to eq(
- '[<http://somewhere.com|project_name>] Issue opened by Test User (test.user)')
+ '[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> opened by Test User (test.user)')
expect(subject.attachments).to eq([
{
title: "#100 Issue title",
@@ -91,7 +91,7 @@ RSpec.describe ChatMessage::IssueMessage do
context 'open' do
it 'returns a message regarding opening of issues' do
expect(subject.pretext).to eq(
- '[[project_name](http://somewhere.com)] Issue opened by Test User (test.user)')
+ '[[project_name](http://somewhere.com)] Issue [#100 Issue title](http://url.com) opened by Test User (test.user)')
expect(subject.attachments).to eq('issue description')
expect(subject.activity).to eq({
title: 'Issue opened by Test User (test.user)',
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index 16837e2b93a..76fc5a826c9 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching do
+RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowplow do
include PrometheusHelpers
include ReactiveCachingHelpers
@@ -421,18 +421,16 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching do
context "enabling manual_configuration" do
it "tracks enable event" do
service.update!(manual_configuration: false)
-
- expect(Gitlab::Tracking).to receive(:event).with('cluster:services:prometheus', 'enabled_manual_prometheus')
-
service.update!(manual_configuration: true)
+
+ expect_snowplow_event(category: 'cluster:services:prometheus', action: 'enabled_manual_prometheus')
end
it "tracks disable event" do
service.update!(manual_configuration: true)
-
- expect(Gitlab::Tracking).to receive(:event).with('cluster:services:prometheus', 'disabled_manual_prometheus')
-
service.update!(manual_configuration: false)
+
+ expect_snowplow_event(category: 'cluster:services:prometheus', action: 'disabled_manual_prometheus')
end
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index fe971832695..53a213891e9 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -72,6 +72,7 @@ RSpec.describe Project do
it { is_expected.to have_one(:last_event).class_name('Event') }
it { is_expected.to have_one(:forked_from_project).through(:fork_network_member) }
it { is_expected.to have_one(:auto_devops).class_name('ProjectAutoDevops') }
+ it { is_expected.to have_one(:tracing_setting).class_name('ProjectTracingSetting') }
it { is_expected.to have_one(:error_tracking_setting).class_name('ErrorTracking::ProjectErrorTrackingSetting') }
it { is_expected.to have_one(:project_setting) }
it { is_expected.to have_one(:alerting_setting).class_name('Alerting::ProjectAlertingSetting') }
@@ -116,6 +117,7 @@ RSpec.describe Project do
it { is_expected.to have_many(:prometheus_alert_events) }
it { is_expected.to have_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:alert_management_alerts) }
+ it { is_expected.to have_many(:alert_management_http_integrations) }
it { is_expected.to have_many(:jira_imports) }
it { is_expected.to have_many(:metrics_users_starred_dashboards).inverse_of(:project) }
it { is_expected.to have_many(:repository_storage_moves) }
@@ -123,6 +125,7 @@ RSpec.describe Project do
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') }
it { is_expected.to have_many(:pipeline_artifacts) }
+ it { is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -133,6 +136,7 @@ RSpec.describe Project do
let_it_be(:container) { create(:project, :repository, path: 'somewhere') }
let(:stubbed_container) { build_stubbed(:project) }
let(:expected_full_path) { "#{container.namespace.full_path}/somewhere" }
+ let(:expected_lfs_enabled) { true }
end
it_behaves_like 'model with wiki' do
@@ -4329,7 +4333,7 @@ RSpec.describe Project do
end
it 'schedules HashedStorage::ProjectMigrateWorker with delayed start when the wiki repo is in use' do
- Gitlab::ReferenceCounter.new(Gitlab::GlRepository::WIKI.identifier_for_container(project)).increase
+ Gitlab::ReferenceCounter.new(Gitlab::GlRepository::WIKI.identifier_for_container(project.wiki)).increase
expect(HashedStorage::ProjectMigrateWorker).to receive(:perform_in)
@@ -4975,15 +4979,21 @@ RSpec.describe Project do
context "with an empty repository" do
let_it_be(:project) { create(:project_empty_repo) }
- context "Gitlab::CurrentSettings.default_branch_name is unavailable" do
+ context "group.default_branch_name is available" do
+ let(:project_group) { create(:group) }
+ let(:project) { create(:project, path: 'avatar', namespace: project_group) }
+
before do
expect(Gitlab::CurrentSettings)
+ .not_to receive(:default_branch_name)
+
+ expect(project.group)
.to receive(:default_branch_name)
- .and_return(nil)
+ .and_return('example_branch')
end
- it "returns that value" do
- expect(project.default_branch).to be_nil
+ it "returns the group default value" do
+ expect(project.default_branch).to eq("example_branch")
end
end
@@ -4991,11 +5001,23 @@ RSpec.describe Project do
before do
expect(Gitlab::CurrentSettings)
.to receive(:default_branch_name)
- .and_return('example_branch')
+ .and_return(example_branch_name)
end
- it "returns that value" do
- expect(project.default_branch).to eq("example_branch")
+ context "is missing or nil" do
+ let(:example_branch_name) { nil }
+
+ it "returns nil" do
+ expect(project.default_branch).to be_nil
+ end
+ end
+
+ context "is present" do
+ let(:example_branch_name) { "example_branch_name" }
+
+ it "returns the expected branch name" do
+ expect(project.default_branch).to eq(example_branch_name)
+ end
end
end
end
@@ -5487,12 +5509,13 @@ RSpec.describe Project do
describe '#find_or_initialize_services' do
it 'returns only enabled services' do
allow(Service).to receive(:available_services_names).and_return(%w[prometheus pushover teamcity])
+ allow(Service).to receive(:project_specific_services_names).and_return(%w[asana])
allow(subject).to receive(:disabled_services).and_return(%w[prometheus])
services = subject.find_or_initialize_services
- expect(services.count).to eq(2)
- expect(services.map(&:title)).to eq(['JetBrains TeamCity CI', 'Pushover'])
+ expect(services.count).to eq(3)
+ expect(services.map(&:title)).to eq(['Asana', 'JetBrains TeamCity CI', 'Pushover'])
end
end
@@ -5563,32 +5586,6 @@ RSpec.describe Project do
end
end
- describe '.for_repository_storage' do
- it 'returns the projects for a given repository storage' do
- stub_storage_settings('test_second_storage' => {
- 'path' => TestEnv::SECOND_STORAGE_PATH,
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address
- })
- expected_project = create(:project, repository_storage: 'default')
- create(:project, repository_storage: 'test_second_storage')
-
- expect(described_class.for_repository_storage('default')).to eq([expected_project])
- end
- end
-
- describe '.excluding_repository_storage' do
- it 'returns the projects excluding the given repository storage' do
- stub_storage_settings('test_second_storage' => {
- 'path' => TestEnv::SECOND_STORAGE_PATH,
- 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address
- })
- expected_project = create(:project, repository_storage: 'test_second_storage')
- create(:project, repository_storage: 'default')
-
- expect(described_class.excluding_repository_storage('default')).to eq([expected_project])
- end
- end
-
describe '.deployments' do
subject { project.deployments }
@@ -5812,6 +5809,38 @@ RSpec.describe Project do
end
end
+ describe 'validation #changing_shared_runners_enabled_is_allowed' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:shared_runners_setting, :project_shared_runners_enabled, :valid_record) do
+ 'enabled' | true | true
+ 'enabled' | false | true
+ 'disabled_with_override' | true | true
+ 'disabled_with_override' | false | true
+ 'disabled_and_unoverridable' | true | false
+ 'disabled_and_unoverridable' | false | true
+ end
+
+ with_them do
+ let(:group) { create(:group) }
+ let(:project) { build(:project, namespace: group, shared_runners_enabled: project_shared_runners_enabled) }
+
+ before do
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
+ end
+ end
+
+ it 'validates the configuration' do
+ expect(project.valid?).to eq(valid_record)
+
+ unless valid_record
+ expect(project.errors[:shared_runners_enabled]).to contain_exactly('cannot be enabled because parent group does not allow it')
+ end
+ end
+ end
+ end
+
describe '#mark_pages_as_deployed' do
let(:project) { create(:project) }
let(:artifacts_archive) { create(:ci_job_artifact, project: project) }
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 383fabcfffb..9f40dbb3401 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -201,6 +201,23 @@ RSpec.describe ProjectStatistics do
statistics.refresh!(only: [:commit_count])
end
end
+
+ context 'when the database is read-only' do
+ it 'does nothing' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect(statistics).not_to receive(:update_commit_count)
+ expect(statistics).not_to receive(:update_repository_size)
+ expect(statistics).not_to receive(:update_wiki_size)
+ expect(statistics).not_to receive(:update_lfs_objects_size)
+ expect(statistics).not_to receive(:update_snippets_size)
+ expect(statistics).not_to receive(:save!)
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+
+ statistics.refresh!
+ end
+ end
end
describe '#update_commit_count' do
@@ -324,22 +341,51 @@ RSpec.describe ProjectStatistics do
describe '.increment_statistic' do
shared_examples 'a statistic that increases storage_size' do
it 'increases the statistic by that amount' do
- expect { described_class.increment_statistic(project.id, stat, 13) }
+ expect { described_class.increment_statistic(project, stat, 13) }
.to change { statistics.reload.send(stat) || 0 }
.by(13)
end
it 'increases also storage size by that amount' do
- expect { described_class.increment_statistic(project.id, stat, 20) }
+ expect { described_class.increment_statistic(project, stat, 20) }
.to change { statistics.reload.storage_size }
.by(20)
end
end
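+ # Asynchronous increments are buffered in Redis shared state and flushed
+ # to the database later by FlushCounterIncrementsWorker.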
+ shared_examples 'a statistic that increases storage_size asynchronously' do
+ it 'stores the increment temporarily in Redis', :clean_gitlab_redis_shared_state do
+ described_class.increment_statistic(project, stat, 13)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ increment = redis.get(statistics.counter_key(stat))
+ expect(increment.to_i).to eq(13)
+ end
+ end
+
+ it 'schedules a worker to update the statistic and storage_size async' do
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, described_class.name, statistics.id, stat)
+
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, described_class.name, statistics.id, :storage_size)
+
+ described_class.increment_statistic(project, stat, 20)
+ end
+ end
+
context 'when adjusting :build_artifacts_size' do
let(:stat) { :build_artifacts_size }
- it_behaves_like 'a statistic that increases storage_size'
+ it_behaves_like 'a statistic that increases storage_size asynchronously'
+
+ it_behaves_like 'a statistic that increases storage_size' do
+ before do
+ stub_feature_flags(efficient_counter_attribute: false)
+ end
+ end
end
context 'when adjusting :pipeline_artifacts_size' do
diff --git a/spec/models/project_tracing_setting_spec.rb b/spec/models/project_tracing_setting_spec.rb
new file mode 100644
index 00000000000..a7e4e557b25
--- /dev/null
+++ b/spec/models/project_tracing_setting_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectTracingSetting do
+ describe '#external_url' do
+ let_it_be(:project) { create(:project) }
+
+ let(:tracing_setting) { project.build_tracing_setting }
+
+ describe 'Validations' do
+ describe 'external_url' do
+ it 'accepts a valid url' do
+ tracing_setting.external_url = 'https://gitlab.com'
+
+ expect(tracing_setting).to be_valid
+ end
+
+ it 'fails with an invalid url' do
+ tracing_setting.external_url = 'gitlab.com'
+
+ expect(tracing_setting).to be_invalid
+ end
+
+ it 'fails with a blank string' do
+ tracing_setting.external_url = nil
+
+ expect(tracing_setting).to be_invalid
+ end
+
+ it 'sanitizes the url' do
+ tracing_setting.external_url = %{https://replaceme.com/'><script>alert(document.cookie)</script>}
+
+ expect(tracing_setting).to be_valid
+ expect(tracing_setting.external_url).to eq(%{https://replaceme.com/'&gt;})
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 29c3d0e1a73..2e82fcf5511 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ProjectWiki do
it_behaves_like 'wiki model' do
let(:wiki_container) { create(:project, :wiki_repo, namespace: user.namespace) }
let(:wiki_container_without_repo) { create(:project, namespace: user.namespace) }
+ let(:wiki_lfs_enabled) { true }
it { is_expected.to delegate_method(:storage).to(:container) }
it { is_expected.to delegate_method(:repository_storage).to(:container) }
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index a3042d619eb..31211f8ff2c 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2688,7 +2688,7 @@ RSpec.describe Repository do
expect(subject).to be_a(Gitlab::Git::Repository)
expect(subject.relative_path).to eq(project.disk_path + '.wiki.git')
expect(subject.gl_repository).to eq("wiki-#{project.id}")
- expect(subject.gl_project_path).to eq(project.full_path)
+ expect(subject.gl_project_path).to eq(project.wiki.full_path)
end
end
end
@@ -2941,12 +2941,19 @@ RSpec.describe Repository do
expect(snippet.repository.project).to be_nil
end
+ it 'returns the project for a project wiki' do
+ wiki = create(:project_wiki)
+
+ expect(wiki.project).to be(wiki.repository.project)
+ end
+
it 'returns the container if it is a project' do
expect(repository.project).to be(project)
end
it 'returns nil if the container is not a project' do
- expect(repository).to receive(:container).and_return(Group.new)
+ repository.container = Group.new
+
expect(repository.project).to be_nil
end
end
@@ -2981,17 +2988,11 @@ RSpec.describe Repository do
context 'for a project wiki repository' do
let(:repository) { project.wiki.repository }
- it 'returns true when LFS is enabled' do
- stub_lfs_setting(enabled: true)
+ it 'delegates to the project' do
+ expect(project).to receive(:lfs_enabled?).and_return(true)
is_expected.to be_truthy
end
-
- it 'returns false when LFS is disabled' do
- stub_lfs_setting(enabled: false)
-
- is_expected.to be_falsy
- end
end
context 'for a project snippet repository' do
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index 960db31d488..da1fe70c891 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -50,26 +50,36 @@ RSpec.describe ResourceLabelEvent, type: :model do
end
end
- describe '#expire_etag_cache' do
- def expect_expiration(issue)
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
- expect(instance).to receive(:touch)
- .with("/#{issue.project.namespace.to_param}/#{issue.project.to_param}/noteable/issue/#{issue.id}/notes")
+ context 'callbacks' do
+ describe '#usage_metrics' do
+ it 'tracks changed labels' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_label_changed_action)
+
+ subject.save!
end
end
- it 'expires resource note etag cache on event save' do
- expect_expiration(subject.issuable)
+ describe '#expire_etag_cache' do
+ def expect_expiration(issue)
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
+ expect(instance).to receive(:touch)
+ .with("/#{issue.project.namespace.to_param}/#{issue.project.to_param}/noteable/issue/#{issue.id}/notes")
+ end
+ end
- subject.save!
- end
+ it 'expires resource note etag cache on event save' do
+ expect_expiration(subject.issuable)
- it 'expires resource note etag cache on event destroy' do
- subject.save!
+ subject.save!
+ end
+
+ it 'expires resource note etag cache on event destroy' do
+ subject.save!
- expect_expiration(subject.issuable)
+ expect_expiration(subject.issuable)
- subject.destroy!
+ subject.destroy!
+ end
end
end
diff --git a/spec/models/resource_milestone_event_spec.rb b/spec/models/resource_milestone_event_spec.rb
index 0a5292b2d16..c1761e5b2e8 100644
--- a/spec/models/resource_milestone_event_spec.rb
+++ b/spec/models/resource_milestone_event_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe ResourceMilestoneEvent, type: :model do
it_behaves_like 'timebox resource event validations'
it_behaves_like 'timebox resource event states'
it_behaves_like 'timebox resource event actions'
+ it_behaves_like 'timebox resource tracks issue metrics', :milestone
describe 'associations' do
it { is_expected.to belong_to(:milestone) }
diff --git a/spec/models/resource_state_event_spec.rb b/spec/models/resource_state_event_spec.rb
index fc6575b2db8..b8a93bdbe3b 100644
--- a/spec/models/resource_state_event_spec.rb
+++ b/spec/models/resource_state_event_spec.rb
@@ -39,4 +39,20 @@ RSpec.describe ResourceStateEvent, type: :model do
end
end
end
+
+ context 'callbacks' do
+ describe '#usage_metrics' do
+ it 'tracks closed issues' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_closed_action)
+
+ create(described_class.name.underscore.to_sym, issue: issue, state: described_class.states[:closed])
+ end
+
+ it 'tracks reopened issues' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_reopened_action)
+
+ create(described_class.name.underscore.to_sym, issue: issue, state: described_class.states[:reopened])
+ end
+ end
+ end
end
diff --git a/spec/models/resource_weight_event_spec.rb b/spec/models/resource_weight_event_spec.rb
deleted file mode 100644
index 8a37883d933..00000000000
--- a/spec/models/resource_weight_event_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ResourceWeightEvent, type: :model do
- it_behaves_like 'a resource event'
- it_behaves_like 'a resource event for issues'
-
- let_it_be(:user1) { create(:user) }
- let_it_be(:user2) { create(:user) }
-
- let_it_be(:issue1) { create(:issue, author: user1) }
- let_it_be(:issue2) { create(:issue, author: user1) }
- let_it_be(:issue3) { create(:issue, author: user2) }
-
- describe 'validations' do
- it { is_expected.not_to allow_value(nil).for(:issue) }
- it { is_expected.to allow_value(nil).for(:weight) }
- end
-
- describe 'associations' do
- it { is_expected.to belong_to(:issue) }
- end
-
- describe '.by_issue' do
- let_it_be(:event1) { create(:resource_weight_event, issue: issue1) }
- let_it_be(:event2) { create(:resource_weight_event, issue: issue2) }
- let_it_be(:event3) { create(:resource_weight_event, issue: issue1) }
-
- it 'returns the expected records for an issue with events' do
- events = ResourceWeightEvent.by_issue(issue1)
-
- expect(events).to contain_exactly(event1, event3)
- end
-
- it 'returns the expected records for an issue with no events' do
- events = ResourceWeightEvent.by_issue(issue3)
-
- expect(events).to be_empty
- end
- end
-
- describe '.created_after' do
- let!(:created_at1) { 1.day.ago }
- let!(:created_at2) { 2.days.ago }
- let!(:created_at3) { 3.days.ago }
-
- let!(:event1) { create(:resource_weight_event, issue: issue1, created_at: created_at1) }
- let!(:event2) { create(:resource_weight_event, issue: issue2, created_at: created_at2) }
- let!(:event3) { create(:resource_weight_event, issue: issue2, created_at: created_at3) }
-
- it 'returns the expected events' do
- events = ResourceWeightEvent.created_after(created_at3)
-
- expect(events).to contain_exactly(event1, event2)
- end
-
- it 'returns no events if time is after last record time' do
- events = ResourceWeightEvent.created_after(1.minute.ago)
-
- expect(events).to be_empty
- end
- end
-
- describe '#discussion_id' do
- let_it_be(:event) { create(:resource_weight_event, issue: issue1, created_at: Time.utc(2019, 12, 30)) }
-
- it 'returns the expected id' do
- allow(Digest::SHA1).to receive(:hexdigest)
- .with("ResourceWeightEvent-#{event.id}-#{user1.id}")
- .and_return('73d167c478')
-
- expect(event.discussion_id).to eq('73d167c478')
- end
- end
-end
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 32e2012e284..db3cf19a03f 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -320,18 +320,28 @@ RSpec.describe Service do
end
it 'sets service to inactive' do
- service = described_class.build_from_integration(project.id, integration)
+ service = described_class.build_from_integration(integration, project_id: project.id)
expect(service).to be_valid
expect(service.active).to be false
end
end
- context 'when integration is an instance' do
+ context 'when integration is an instance-level integration' do
let(:integration) { create(:jira_service, :instance) }
it 'sets inherit_from_id from integration' do
- service = described_class.build_from_integration(project.id, integration)
+ service = described_class.build_from_integration(integration, project_id: project.id)
+
+ expect(service.inherit_from_id).to eq(integration.id)
+ end
+ end
+
+ context 'when integration is a group-level integration' do
+ let(:integration) { create(:jira_service, group: group, project: nil) }
+
+ it 'sets inherit_from_id from integration' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
expect(service.inherit_from_id).to eq(integration.id)
end
@@ -350,8 +360,8 @@ RSpec.describe Service do
end
shared_examples 'service creation from an integration' do
- it 'creates a correct service' do
- service = described_class.build_from_integration(project.id, integration)
+ it 'creates a correct service for a project integration' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
expect(service).to be_active
expect(service.url).to eq(url)
@@ -360,6 +370,22 @@ RSpec.describe Service do
expect(service.password).to eq(password)
expect(service.template).to eq(false)
expect(service.instance).to eq(false)
+ expect(service.project).to eq(project)
+ expect(service.group).to eq(nil)
+ end
+
+ it 'creates a correct service for a group integration' do
+ service = described_class.build_from_integration(integration, group_id: group.id)
+
+ expect(service).to be_active
+ expect(service.url).to eq(url)
+ expect(service.api_url).to eq(api_url)
+ expect(service.username).to eq(username)
+ expect(service.password).to eq(password)
+ expect(service.template).to eq(false)
+ expect(service.instance).to eq(false)
+ expect(service.project).to eq(nil)
+ expect(service.group).to eq(group)
end
end
@@ -455,13 +481,119 @@ RSpec.describe Service do
expect(described_class.default_integration('JiraService', subgroup)).to eq(group_service)
end
- context 'having a service' do
+ context 'having a service with custom settings' do
let!(:subgroup_service) { create(:jira_service, group_id: subgroup.id, project_id: nil) }
it 'returns the closest group service for a project' do
expect(described_class.default_integration('JiraService', project)).to eq(subgroup_service)
end
end
+
+ context 'having a service inheriting settings' do
+ let!(:subgroup_service) { create(:jira_service, group_id: subgroup.id, project_id: nil, inherit_from_id: group_service.id) }
+
+ it 'returns the closest group service which does not inherit from its parent for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(group_service)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '.create_from_active_default_integrations' do
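+ # The closest active default wins: subgroup over group, group over
+ # instance-level, instance-level over a service template. A default that
+ # only inherits its parent's settings is skipped in favour of the parent.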
+ context 'with an active service template' do
+ let_it_be(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
+
+ it 'creates a service from the template' do
+ described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+
+ expect(project.reload.services.size).to eq(1)
+ expect(project.reload.services.first.api_url).to eq(template_integration.api_url)
+ expect(project.reload.services.first.inherit_from_id).to be_nil
+ end
+
+ context 'with an active instance-level integration' do
+ let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+
+ it 'creates a service from the instance-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+
+ expect(project.reload.services.size).to eq(1)
+ expect(project.reload.services.first.api_url).to eq(instance_integration.api_url)
+ expect(project.reload.services.first.inherit_from_id).to eq(instance_integration.id)
+ end
+
+ context 'passing a group' do
+ it 'creates a service from the instance-level integration' do
+ described_class.create_from_active_default_integrations(group, :group_id)
+
+ expect(group.reload.services.size).to eq(1)
+ expect(group.reload.services.first.api_url).to eq(instance_integration.api_url)
+ expect(group.reload.services.first.inherit_from_id).to eq(instance_integration.id)
+ end
+ end
+
+ context 'with an active group-level integration' do
+ let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+
+ it 'creates a service from the group-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+
+ expect(project.reload.services.size).to eq(1)
+ expect(project.reload.services.first.api_url).to eq(group_integration.api_url)
+ expect(project.reload.services.first.inherit_from_id).to eq(group_integration.id)
+ end
+
+ context 'passing a group' do
+ let!(:subgroup) { create(:group, parent: group) }
+
+ it 'creates a service from the group-level integration' do
+ described_class.create_from_active_default_integrations(subgroup, :group_id)
+
+ expect(subgroup.reload.services.size).to eq(1)
+ expect(subgroup.reload.services.first.api_url).to eq(group_integration.api_url)
+ expect(subgroup.reload.services.first.inherit_from_id).to eq(group_integration.id)
+ end
+ end
+
+ context 'with an active subgroup' do
+ let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup) { create(:group, parent: group) }
+ let(:project) { create(:project, group: subgroup) }
+
+ it 'creates a service from the subgroup-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+
+ expect(project.reload.services.size).to eq(1)
+ expect(project.reload.services.first.api_url).to eq(subgroup_integration.api_url)
+ expect(project.reload.services.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
+
+ context 'passing a group' do
+ let!(:sub_subgroup) { create(:group, parent: subgroup) }
+
+ it 'creates a service from the subgroup-level integration' do
+ described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
+
+ expect(sub_subgroup.reload.services.size).to eq(1)
+ expect(sub_subgroup.reload.services.first.api_url).to eq(subgroup_integration.api_url)
+ expect(sub_subgroup.reload.services.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
+
+ context 'having a service inheriting settings' do
+ let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, inherit_from_id: group_integration.id, api_url: 'https://prometheus.subgroup.com/') }
+
+ it 'creates a service from the group-level integration' do
+ described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
+
+ expect(sub_subgroup.reload.services.size).to eq(1)
+ expect(sub_subgroup.reload.services.first.api_url).to eq(group_integration.api_url)
+ expect(sub_subgroup.reload.services.first.inherit_from_id).to eq(group_integration.id)
+ end
+ end
+ end
+ end
end
end
end
diff --git a/spec/models/snippet_input_action_spec.rb b/spec/models/snippet_input_action_spec.rb
index 43dc70bea98..0a9ab47f2f0 100644
--- a/spec/models/snippet_input_action_spec.rb
+++ b/spec/models/snippet_input_action_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe SnippetInputAction do
let(:options) { { action: action, file_path: file_path, content: content, previous_path: previous_path } }
let(:expected_options) { options.merge(action: action.to_sym) }
- subject { described_class.new(options).to_commit_action }
+ subject { described_class.new(**options).to_commit_action }
it 'transforms attributes to commit action' do
expect(subject).to eq(expected_options)
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 95602a4de0e..cdbc1feefce 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -13,6 +13,11 @@ RSpec.describe SnippetRepository do
it { is_expected.to belong_to(:snippet) }
end
+ it_behaves_like 'shardable scopes' do
+ let_it_be(:record_1) { create(:snippet_repository) }
+ let_it_be(:record_2, reload: true) { create(:snippet_repository) }
+ end
+
describe '.find_snippet' do
it 'finds snippet by disk path' do
snippet = create(:snippet, author: user)
@@ -35,7 +40,7 @@ RSpec.describe SnippetRepository do
it 'returns nil when files argument is empty' do
expect(snippet.repository).not_to receive(:multi_action)
- operation = snippet_repository.multi_files_action(user, [], commit_opts)
+ operation = snippet_repository.multi_files_action(user, [], **commit_opts)
expect(operation).to be_nil
end
@@ -43,7 +48,7 @@ RSpec.describe SnippetRepository do
it 'returns nil when files argument is nil' do
expect(snippet.repository).not_to receive(:multi_action)
- operation = snippet_repository.multi_files_action(user, nil, commit_opts)
+ operation = snippet_repository.multi_files_action(user, nil, **commit_opts)
expect(operation).to be_nil
end
@@ -60,7 +65,7 @@ RSpec.describe SnippetRepository do
end
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.not_to raise_error
aggregate_failures do
@@ -77,13 +82,13 @@ RSpec.describe SnippetRepository do
it 'tries to obtain an exclusive lease' do
expect(Gitlab::ExclusiveLease).to receive(:new).with("multi_files_action:#{snippet.id}", anything).and_call_original
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end
it 'cancels the lease when the method has finished' do
expect(Gitlab::ExclusiveLease).to receive(:cancel).with("multi_files_action:#{snippet.id}", anything).and_call_original
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end
it 'raises an error if the lease cannot be obtained' do
@@ -92,7 +97,7 @@ RSpec.describe SnippetRepository do
end
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.to raise_error(described_class::CommitError)
end
@@ -114,7 +119,7 @@ RSpec.describe SnippetRepository do
it 'infers the commit action based on the parameters if not present' do
expect(repo).to receive(:multi_action).with(user, hash_including(actions: result))
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end
context 'when commit actions are present' do
@@ -128,7 +133,7 @@ RSpec.describe SnippetRepository do
user,
hash_including(actions: array_including(hash_including(action: expected_action)))))
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end
end
@@ -149,7 +154,7 @@ RSpec.describe SnippetRepository do
specify do
existing_content = blob_at(snippet, previous_path).data
- snippet_repository.multi_files_action(user, [move_action], commit_opts)
+ snippet_repository.multi_files_action(user, [move_action], **commit_opts)
blob = blob_at(snippet, new_path)
expect(blob).not_to be_nil
@@ -177,7 +182,7 @@ RSpec.describe SnippetRepository do
specify do
last_commit_id = snippet.repository.head_commit.id
- snippet_repository.multi_files_action(user, [update_action], commit_opts)
+ snippet_repository.multi_files_action(user, [update_action], **commit_opts)
expect(snippet.repository.head_commit.id).to eq last_commit_id
end
@@ -214,13 +219,13 @@ RSpec.describe SnippetRepository do
before do
expect(blob_at(snippet, default_name)).to be_nil
- snippet_repository.multi_files_action(user, [new_file], commit_opts)
+ snippet_repository.multi_files_action(user, [new_file], **commit_opts)
expect(blob_at(snippet, default_name)).to be
end
it 'reuses the existing file name' do
- snippet_repository.multi_files_action(user, [existing_file], commit_opts)
+ snippet_repository.multi_files_action(user, [existing_file], **commit_opts)
blob = blob_at(snippet, default_name)
expect(blob.data).to eq existing_file[:content]
@@ -234,7 +239,7 @@ RSpec.describe SnippetRepository do
it 'assigns a new name to the file' do
expect(blob_at(snippet, default_name)).to be_nil
- snippet_repository.multi_files_action(user, [new_file], commit_opts)
+ snippet_repository.multi_files_action(user, [new_file], **commit_opts)
blob = blob_at(snippet, default_name)
expect(blob.data).to eq new_file[:content]
@@ -246,7 +251,7 @@ RSpec.describe SnippetRepository do
before do
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.not_to raise_error
end
@@ -259,10 +264,10 @@ RSpec.describe SnippetRepository do
before do
# Pre-populate repository with 9 unnamed snippets.
- snippet_repository.multi_files_action(user, pre_populate_data, commit_opts)
+ snippet_repository.multi_files_action(user, pre_populate_data, **commit_opts)
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.not_to raise_error
end
@@ -274,7 +279,7 @@ RSpec.describe SnippetRepository do
it 'raises a path specific error' do
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.to raise_error(error)
end
end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index ab614a6d45c..d74f5faab7f 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -666,16 +666,13 @@ RSpec.describe Snippet do
let(:checker) { subject.repository_size_checker }
let(:current_size) { 60 }
+ let(:namespace) { nil }
before do
allow(subject.repository).to receive(:size).and_return(current_size)
end
- it 'sets up size checker', :aggregate_failures do
- expect(checker.current_size).to eq(current_size.megabytes)
- expect(checker.limit).to eq(Gitlab::CurrentSettings.snippet_size_limit)
- expect(checker.enabled?).to be_truthy
- end
+ include_examples 'size checker for snippet'
end
describe '#can_cache_field?' do
@@ -717,19 +714,11 @@ RSpec.describe Snippet do
end
describe '.max_file_limit' do
- subject { described_class.max_file_limit(nil) }
+ subject { described_class.max_file_limit }
it "returns #{Snippet::MAX_FILE_COUNT}" do
expect(subject).to eq Snippet::MAX_FILE_COUNT
end
-
- context 'when feature flag :snippet_multiple_files is disabled' do
- it "returns #{described_class::MAX_SINGLE_FILE_COUNT}" do
- stub_feature_flags(snippet_multiple_files: false)
-
- expect(subject).to eq described_class::MAX_SINGLE_FILE_COUNT
- end
- end
end
describe '#list_files' do
diff --git a/spec/models/snippet_statistics_spec.rb b/spec/models/snippet_statistics_spec.rb
index 8def6a0bbd4..1fb4ed47169 100644
--- a/spec/models/snippet_statistics_spec.rb
+++ b/spec/models/snippet_statistics_spec.rb
@@ -75,15 +75,28 @@ RSpec.describe SnippetStatistics do
end
describe '#refresh!' do
- subject { statistics.refresh! }
-
it 'retrieves and saves statistic data from repository' do
expect(statistics).to receive(:update_commit_count)
expect(statistics).to receive(:update_file_count)
expect(statistics).to receive(:update_repository_size)
expect(statistics).to receive(:save!)
- subject
+ statistics.refresh!
+ end
+
+ context 'when the database is read-only' do
+ it 'does nothing' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect(statistics).not_to receive(:update_commit_count)
+ expect(statistics).not_to receive(:update_file_count)
+ expect(statistics).not_to receive(:update_repository_size)
+ expect(statistics).not_to receive(:save!)
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+
+ statistics.refresh!
+ end
end
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index 01ae80a61d1..608c5bdf03a 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -15,7 +15,24 @@ RSpec.describe Terraform::State do
it { is_expected.to validate_presence_of(:project_id) }
before do
- stub_terraform_state_object_storage(Terraform::StateUploader)
+ stub_terraform_state_object_storage
+ end
+
+ describe 'scopes' do
+ describe '.ordered_by_name' do
+ let_it_be(:project) { create(:project) }
+ let(:names) { %w(state_d state_b state_a state_c) }
+
+ subject { described_class.ordered_by_name }
+
+ before do
+ names.each do |name|
+ create(:terraform_state, project: project, name: name)
+ end
+ end
+
+ it { expect(subject.map(&:name)).to eq(names.sort) }
+ end
end
describe '#file' do
@@ -43,7 +60,7 @@ RSpec.describe Terraform::State do
context 'when file is stored locally' do
before do
- stub_terraform_state_object_storage(Terraform::StateUploader, enabled: false)
+ stub_terraform_state_object_storage(enabled: false)
end
it_behaves_like 'mounted file in local store'
@@ -70,11 +87,17 @@ RSpec.describe Terraform::State do
let(:terraform_state) { create(:terraform_state, :with_file) }
it { is_expected.to eq terraform_state.file }
+
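+ # Once any version record exists (mid-migration), reads are served from
+ # the versioned file rather than the legacy state file.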
+ context 'and a version exists (migration to versioned in progress)' do
+ let!(:migrated_version) { create(:terraform_state_version, terraform_state: terraform_state) }
+
+ it { is_expected.to eq terraform_state.latest_version.file }
+ end
end
end
describe '#update_file!' do
- let(:version) { 2 }
+ let(:version) { 3 }
let(:data) { Hash[terraform_version: '0.12.21'].to_json }
subject { terraform_state.update_file!(CarrierWaveStringFile.new(data), version: version) }
@@ -98,6 +121,33 @@ RSpec.describe Terraform::State do
expect(terraform_state.latest_file.read).to eq(data)
end
+
+ context 'and a version exists (migration to versioned in progress)' do
+ let!(:migrated_version) { create(:terraform_state_version, terraform_state: terraform_state, version: 0) }
+
+ it 'creates a new version, corrects the migrated version number, and marks the state as versioned' do
+ expect { subject }.to change { Terraform::StateVersion.count }
+
+ expect(migrated_version.reload.version).to eq(1)
+ expect(migrated_version.file.read).to eq(terraform_state_file)
+
+ expect(terraform_state.reload.latest_version.version).to eq(version)
+ expect(terraform_state.latest_version.file.read).to eq(data)
+ expect(terraform_state).to be_versioning_enabled
+ end
+
+ context 'the current version cannot be determined' do
+ before do
+ migrated_version.update!(file: CarrierWaveStringFile.new('invalid-json'))
+ end
+
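+ # With an unreadable state file the previous version cannot be determined,
+ # so the migrated record falls back to the new version minus one (3 - 1 = 2).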
+ it 'uses version - 1 to correct the migrated version number' do
+ expect { subject }.to change { Terraform::StateVersion.count }
+
+ expect(migrated_version.reload.version).to eq(2)
+ end
+ end
+ end
end
end
end
diff --git a/spec/models/terraform/state_version_spec.rb b/spec/models/terraform/state_version_spec.rb
index 72dd29e1571..cc5ea87159d 100644
--- a/spec/models/terraform/state_version_spec.rb
+++ b/spec/models/terraform/state_version_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Terraform::StateVersion do
subject { create(:terraform_state_version) }
before do
- stub_terraform_state_object_storage(Terraform::StateUploader)
+ stub_terraform_state_object_storage
end
describe '#file' do
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index 44e81455a67..a9c4c6680cd 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -200,26 +200,42 @@ RSpec.describe Todo do
describe '#self_assigned?' do
let(:user_1) { build(:user) }
- before do
- subject.user = user_1
- subject.author = user_1
- subject.action = Todo::ASSIGNED
- end
+ context 'when self_added' do
+ before do
+ subject.user = user_1
+ subject.author = user_1
+ end
- it 'is true when todo is ASSIGNED and self_added' do
- expect(subject).to be_self_assigned
- end
+ it 'returns true for ASSIGNED' do
+ subject.action = Todo::ASSIGNED
+
+ expect(subject).to be_self_assigned
+ end
- it 'is false when the todo is not ASSIGNED' do
- subject.action = Todo::MENTIONED
+ it 'returns true for REVIEW_REQUESTED' do
+ subject.action = Todo::REVIEW_REQUESTED
- expect(subject).not_to be_self_assigned
+ expect(subject).to be_self_assigned
+ end
+
+ it 'returns false for other action' do
+ subject.action = Todo::MENTIONED
+
+ expect(subject).not_to be_self_assigned
+ end
end
- it 'is false when todo is not self_added' do
- subject.author = build(:user)
+ context 'when todo is not self_added' do
+ before do
+ subject.user = user_1
+ subject.author = build(:user)
+ end
- expect(subject).not_to be_self_assigned
+ it 'returns false' do
+ subject.action = Todo::ASSIGNED
+
+ expect(subject).not_to be_self_assigned
+ end
end
end
@@ -427,7 +443,7 @@ RSpec.describe Todo do
it 'updates updated_at' do
create(:todo, :pending)
- Timecop.freeze(1.day.from_now) do
+ travel_to(1.day.from_now) do
expected_update_date = Time.current.utc
ids = described_class.batch_update(state: :done)
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 1841288cd4b..64bff5d00aa 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -705,6 +705,34 @@ RSpec.describe User do
end
describe "scopes" do
+ context 'blocked users' do
+ let_it_be(:active_user) { create(:user) }
+ let_it_be(:blocked_user) { create(:user, :blocked) }
+ let_it_be(:ldap_blocked_user) { create(:omniauth_user, :ldap_blocked) }
+ let_it_be(:blocked_pending_approval_user) { create(:user, :blocked_pending_approval) }
+
+ describe '.blocked' do
+ subject { described_class.blocked }
+
+ it 'returns only blocked users' do
+ expect(subject).to include(
+ blocked_user,
+ ldap_blocked_user
+ )
+
+ expect(subject).not_to include(active_user, blocked_pending_approval_user)
+ end
+ end
+
+ describe '.blocked_pending_approval' do
+ subject { described_class.blocked_pending_approval }
+
+ it 'returns only pending approval users' do
+ expect(subject).to contain_exactly(blocked_pending_approval_user)
+ end
+ end
+ end
+
describe ".with_two_factor" do
it "returns users with 2fa enabled via OTP" do
user_with_2fa = create(:user, :two_factor_via_otp)
@@ -1694,6 +1722,24 @@ RSpec.describe User do
end
end
+ describe 'blocking a user pending approval' do
+ let(:user) { create(:user) }
+
+ before do
+ user.block_pending_approval
+ end
+
+ context 'an active user' do
+ it 'can be blocked pending approval' do
+ expect(user.blocked_pending_approval?).to eq(true)
+ end
+
+ it 'behaves like a blocked user' do
+ expect(user.blocked?).to eq(true)
+ end
+ end
+ end
+
describe '.filter_items' do
let(:user) { double }
@@ -1715,6 +1761,12 @@ RSpec.describe User do
expect(described_class.filter_items('blocked')).to include user
end
+ it 'filters by blocked pending approval' do
+ expect(described_class).to receive(:blocked_pending_approval).and_return([user])
+
+ expect(described_class.filter_items('blocked_pending_approval')).to include user
+ end
+
it 'filters by deactivated' do
expect(described_class).to receive(:deactivated).and_return([user])
@@ -2744,6 +2796,14 @@ RSpec.describe User do
it_behaves_like 'eligible for deactivation'
end
+
+ context 'a user who is internal' do
+ it 'returns false' do
+ internal_user = create(:user, :bot)
+
+ expect(internal_user.can_be_deactivated?).to be_falsey
+ end
+ end
end
describe "#contributed_projects" do
@@ -3902,7 +3962,7 @@ RSpec.describe User do
it 'changes the namespace (just to compare to when username is not changed)' do
expect do
- Timecop.freeze(1.second.from_now) do
+ travel_to(1.second.from_now) do
user.update!(username: new_username)
end
end.to change { user.namespace.updated_at }
@@ -4330,28 +4390,32 @@ RSpec.describe User do
describe '#required_terms_not_accepted?' do
let(:user) { build(:user) }
+ let(:project_bot) { create(:user, :project_bot) }
subject { user.required_terms_not_accepted? }
context "when terms are not enforced" do
- it { is_expected.to be_falsy }
+ it { is_expected.to be_falsey }
end
- context "when terms are enforced and accepted by the user" do
+ context "when terms are enforced" do
before do
enforce_terms
- accept_terms(user)
end
- it { is_expected.to be_falsy }
- end
+ it "is not accepted by the user" do
+ expect(subject).to be_truthy
+ end
- context "when terms are enforced but the user has not accepted" do
- before do
- enforce_terms
+ it "is accepted by the user" do
+ accept_terms(user)
+
+ expect(subject).to be_falsey
end
- it { is_expected.to be_truthy }
+ it "auto accepts the term for project bots" do
+ expect(project_bot.required_terms_not_accepted?).to be_falsey
+ end
end
end
@@ -4818,7 +4882,8 @@ RSpec.describe User do
{ state: 'blocked' },
{ user_type: :ghost },
{ user_type: :alert_bot },
- { user_type: :support_bot }
+ { user_type: :support_bot },
+ { user_type: :security_bot }
]
end
@@ -4873,6 +4938,7 @@ RSpec.describe User do
'human' | true
'alert_bot' | false
'support_bot' | false
+ 'security_bot' | false
end
with_them do
@@ -4895,7 +4961,7 @@ RSpec.describe User do
user.block
end
- it { is_expected.to eq User::BLOCKED_MESSAGE }
+ it { is_expected.to eq :blocked }
end
context 'when user is an internal user' do
@@ -4903,7 +4969,7 @@ RSpec.describe User do
user.update(user_type: :ghost)
end
- it { is_expected.to be User::LOGIN_FORBIDDEN }
+ it { is_expected.to be :forbidden }
end
context 'when user is locked' do
@@ -4913,6 +4979,14 @@ RSpec.describe User do
it { is_expected.to be :locked }
end
+
+ context 'when user is blocked pending approval' do
+ before do
+ user.block_pending_approval!
+ end
+
+ it { is_expected.to be :blocked_pending_approval }
+ end
end
describe '#password_required?' do
@@ -4976,9 +5050,11 @@ RSpec.describe User do
it_behaves_like 'bot users', :alert_bot
it_behaves_like 'bot users', :support_bot
it_behaves_like 'bot users', :migration_bot
+ it_behaves_like 'bot users', :security_bot
it_behaves_like 'bot users', :ghost
it_behaves_like 'bot user avatars', :alert_bot, 'alert-bot.png'
it_behaves_like 'bot user avatars', :support_bot, 'support-bot.png'
+ it_behaves_like 'bot user avatars', :security_bot, 'security-bot.png'
end
end
diff --git a/spec/models/wiki_directory_spec.rb b/spec/models/wiki_directory_spec.rb
index 4cac90786eb..9b6cec99ddb 100644
--- a/spec/models/wiki_directory_spec.rb
+++ b/spec/models/wiki_directory_spec.rb
@@ -3,43 +3,97 @@
require 'spec_helper'
RSpec.describe WikiDirectory do
- describe 'validations' do
- subject { build(:wiki_directory) }
+ subject(:directory) { build(:wiki_directory) }
+ describe 'validations' do
it { is_expected.to validate_presence_of(:slug) }
end
+ describe '.group_pages' do
+ let_it_be(:toplevel1) { build(:wiki_page, title: 'aaa-toplevel1') }
+ let_it_be(:toplevel2) { build(:wiki_page, title: 'zzz-toplevel2') }
+ let_it_be(:toplevel3) { build(:wiki_page, title: 'zzz-toplevel3') }
+ let_it_be(:child1) { build(:wiki_page, title: 'parent1/child1') }
+ let_it_be(:child2) { build(:wiki_page, title: 'parent1/child2') }
+ let_it_be(:child3) { build(:wiki_page, title: 'parent2/child3') }
+ let_it_be(:grandchild1) { build(:wiki_page, title: 'parent1/subparent/grandchild1') }
+ let_it_be(:grandchild2) { build(:wiki_page, title: 'parent1/subparent/grandchild2') }
+
+ it 'returns a nested array of entries' do
+ entries = described_class.group_pages(
+ [toplevel1, toplevel2, toplevel3, child1, child2, child3, grandchild1, grandchild2].sort_by(&:title)
+ )
+
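+ # Pages sharing a path prefix are wrapped in WikiDirectory entries,
+ # nested to mirror the directory structure of their slugs.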
+ expect(entries).to match([
+ toplevel1,
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent1', entries: [
+ child1,
+ child2,
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent1/subparent',
+ entries: [grandchild1, grandchild2]
+ )
+ )
+ ]
+ )
+ ),
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent2',
+ entries: [child3]
+ )
+ ),
+ toplevel2,
+ toplevel3
+ ])
+ end
+ end
+
describe '#initialize' do
- context 'when there are pages' do
- let(:pages) { [build(:wiki_page)] }
- let(:directory) { described_class.new('/path_up_to/dir', pages) }
+ context 'when there are entries' do
+ let(:entries) { [build(:wiki_page)] }
+ let(:directory) { described_class.new('/path_up_to/dir', entries) }
it 'sets the slug attribute' do
expect(directory.slug).to eq('/path_up_to/dir')
end
- it 'sets the pages attribute' do
- expect(directory.pages).to eq(pages)
+ it 'sets the entries attribute' do
+ expect(directory.entries).to eq(entries)
end
end
- context 'when there are no pages' do
+ context 'when there are no entries' do
let(:directory) { described_class.new('/path_up_to/dir') }
it 'sets the slug attribute' do
expect(directory.slug).to eq('/path_up_to/dir')
end
- it 'sets the pages attribute to an empty array' do
- expect(directory.pages).to eq([])
+ it 'sets the entries attribute to an empty array' do
+ expect(directory.entries).to eq([])
end
end
end
+ describe '#title' do
+ it 'returns the basename of the directory, with hyphens replaced by spaces' do
+ directory.slug = 'parent'
+ expect(directory.title).to eq('parent')
+
+ directory.slug = 'parent/child'
+ expect(directory.title).to eq('child')
+
+ directory.slug = 'parent/child-foo'
+ expect(directory.title).to eq('child foo')
+ end
+ end
+
describe '#to_partial_path' do
it 'returns the relative path to the partial to be used' do
- directory = build(:wiki_directory)
-
expect(directory.to_partial_path).to eq('../shared/wikis/wiki_directory')
end
end
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index aa8b9ce58b9..be94eca550c 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -4,16 +4,25 @@ require "spec_helper"
RSpec.describe WikiPage do
let_it_be(:user) { create(:user) }
- let(:container) { create(:project, :wiki_repo) }
- let(:wiki) { Wiki.for_container(container, user) }
- let(:new_page) { build(:wiki_page, wiki: wiki, title: 'test page', content: 'test content') }
+ let_it_be(:container) { create(:project) }
- let(:existing_page) do
- create(:wiki_page, wiki: wiki, title: 'test page', content: 'test content', message: 'test commit')
- wiki.find_page('test page')
+ def create_wiki_page(attrs = {})
+ page = build_wiki_page(attrs)
+
+ page.create(message: (attrs[:message] || 'test commit'))
+
+ container.wiki.find_page(page.slug)
end
- subject { new_page }
+ def build_wiki_page(attrs = {})
+ wiki_page_attrs = { container: container, content: 'test content' }.merge(attrs)
+
+ build(:wiki_page, wiki_page_attrs)
+ end
+
+ def wiki
+ container.wiki
+ end
def disable_front_matter
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
@@ -23,92 +32,16 @@ RSpec.describe WikiPage do
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => thing)
end
- describe '.group_by_directory' do
- context 'when there are no pages' do
- it 'returns an empty array' do
- expect(described_class.group_by_directory(nil)).to eq([])
- expect(described_class.group_by_directory([])).to eq([])
- end
- end
-
- context 'when there are pages' do
- before do
- wiki.create_page('dir_1/dir_1_1/page_3', 'content')
- wiki.create_page('page_1', 'content')
- wiki.create_page('dir_1/page_2', 'content')
- wiki.create_page('dir_2', 'page with dir name')
- wiki.create_page('dir_2/page_5', 'content')
- wiki.create_page('page_6', 'content')
- wiki.create_page('dir_2/page_4', 'content')
- end
-
- let(:page_1) { wiki.find_page('page_1') }
- let(:page_6) { wiki.find_page('page_6') }
- let(:page_dir_2) { wiki.find_page('dir_2') }
-
- let(:dir_1) do
- WikiDirectory.new('dir_1', [wiki.find_page('dir_1/page_2')])
- end
-
- let(:dir_1_1) do
- WikiDirectory.new('dir_1/dir_1_1', [wiki.find_page('dir_1/dir_1_1/page_3')])
- end
-
- let(:dir_2) do
- pages = [wiki.find_page('dir_2/page_5'),
- wiki.find_page('dir_2/page_4')]
- WikiDirectory.new('dir_2', pages)
- end
+ # Use for groups of tests that do not modify their `subject`.
+ #
+ # include_context 'subject is persisted page', title: 'my title'
+ shared_context 'subject is persisted page' do |attrs = {}|
+ let_it_be(:persisted_page) { create_wiki_page(attrs) }
- describe "#list_pages" do
- context 'sort by title' do
- let(:grouped_entries) { described_class.group_by_directory(wiki.list_pages) }
- let(:expected_grouped_entries) { [dir_1_1, dir_1, page_dir_2, dir_2, page_1, page_6] }
-
- it 'returns an array with pages and directories' do
- grouped_entries.each_with_index do |page_or_dir, i|
- expected_page_or_dir = expected_grouped_entries[i]
- expected_slugs = get_slugs(expected_page_or_dir)
- slugs = get_slugs(page_or_dir)
-
- expect(slugs).to match_array(expected_slugs)
- end
- end
- end
-
- context 'sort by created_at' do
- let(:grouped_entries) { described_class.group_by_directory(wiki.list_pages(sort: 'created_at')) }
- let(:expected_grouped_entries) { [dir_1_1, page_1, dir_1, page_dir_2, dir_2, page_6] }
-
- it 'returns an array with pages and directories' do
- grouped_entries.each_with_index do |page_or_dir, i|
- expected_page_or_dir = expected_grouped_entries[i]
- expected_slugs = get_slugs(expected_page_or_dir)
- slugs = get_slugs(page_or_dir)
-
- expect(slugs).to match_array(expected_slugs)
- end
- end
- end
-
- it 'returns an array with retained order with directories at the top' do
- expected_order = ['dir_1/dir_1_1/page_3', 'dir_1/page_2', 'dir_2', 'dir_2/page_4', 'dir_2/page_5', 'page_1', 'page_6']
-
- grouped_entries = described_class.group_by_directory(wiki.list_pages)
-
- actual_order =
- grouped_entries.flat_map do |page_or_dir|
- get_slugs(page_or_dir)
- end
- expect(actual_order).to eq(expected_order)
- end
- end
- end
+ subject { persisted_page }
end
describe '#front_matter' do
- let_it_be(:project) { create(:project) }
- let(:container) { project }
let(:wiki_page) { create(:wiki_page, container: container, content: content) }
shared_examples 'a page without front-matter' do
@@ -230,14 +163,14 @@ RSpec.describe WikiPage do
describe "#initialize" do
context "when initialized with an existing page" do
- subject { existing_page }
+ include_context 'subject is persisted page', title: 'test initialization'
it "sets the slug attribute" do
- expect(subject.slug).to eq("test-page")
+ expect(subject.slug).to eq("test-initialization")
end
it "sets the title attribute" do
- expect(subject.title).to eq("test page")
+ expect(subject.title).to eq("test initialization")
end
it "sets the formatted content attribute" do
@@ -259,6 +192,8 @@ RSpec.describe WikiPage do
end
describe "validations" do
+ subject { build_wiki_page }
+
it "validates presence of title" do
subject.attributes.delete(:title)
@@ -305,7 +240,7 @@ RSpec.describe WikiPage do
end
context 'with an existing page exceeding the limit' do
- subject { existing_page }
+ include_context 'subject is persisted page'
before do
subject
@@ -414,18 +349,22 @@ RSpec.describe WikiPage do
describe "#create" do
let(:attributes) do
{
- title: "Index",
+ title: SecureRandom.hex,
content: "Home Page",
format: "markdown",
message: 'Custom Commit Message'
}
end
+ let(:title) { attributes[:title] }
+
+ subject { build_wiki_page }
+
context "with valid attributes" do
it "saves the wiki page" do
subject.create(attributes)
- expect(wiki.find_page("Index")).not_to be_nil
+ expect(wiki.find_page(title)).not_to be_nil
end
it "returns true" do
@@ -435,7 +374,7 @@ RSpec.describe WikiPage do
it 'saves the wiki page with message' do
subject.create(attributes)
- expect(wiki.find_page("Index").message).to eq 'Custom Commit Message'
+ expect(wiki.find_page(title).message).to eq 'Custom Commit Message'
end
it 'removes a leading / from the title' do
@@ -447,9 +386,7 @@ RSpec.describe WikiPage do
context "with invalid attributes" do
it 'does not create the page' do
- subject.create(title: '')
-
- expect(wiki.find_page('New Page')).to be_nil
+ expect { subject.create(title: '') }.not_to change { wiki.list_pages.length }
end
end
end
@@ -458,46 +395,40 @@ RSpec.describe WikiPage do
let(:title) { 'Index v1.2.3' }
describe "#create" do
- let(:attributes) { { title: title, content: "Home Page", format: "markdown" } }
-
- context "with valid attributes" do
- it "saves the wiki page" do
- subject.create(attributes)
+ subject { build_wiki_page }
- expect(wiki.find_page(title)).not_to be_nil
- end
+ it "saves the wiki page and returns true", :aggregate_failures do
+ attributes = { title: title, content: "Home Page", format: "markdown" }
- it "returns true" do
- expect(subject.create(attributes)).to eq(true)
- end
+ expect(subject.create(attributes)).to eq(true)
+ expect(wiki.find_page(title)).not_to be_nil
end
end
describe '#update' do
- subject { create(:wiki_page, wiki: wiki, title: title) }
+ subject { create_wiki_page(title: title) }
+
+ it 'updates the content of the page and returns true', :aggregate_failures do
+ expect(subject.update(content: 'new content')).to be_truthy
- it 'updates the content of the page' do
- subject.update(content: 'new content')
page = wiki.find_page(title)
expect([subject.content, page.content]).to all(eq('new content'))
end
-
- it "returns true" do
- expect(subject.update(content: "more content")).to be_truthy
- end
end
end
describe "#update" do
- subject { existing_page }
+ let!(:original_title) { subject.title }
+
+ subject { create_wiki_page }
context "with valid attributes" do
it "updates the content of the page" do
new_content = "new content"
subject.update(content: new_content)
- page = wiki.find_page('test page')
+ page = wiki.find_page(original_title)
expect([subject.content, page.content]).to all(eq("new content"))
end
@@ -514,10 +445,9 @@ RSpec.describe WikiPage do
describe 'updating front_matter' do
shared_examples 'able to update front-matter' do
it 'updates the wiki-page front-matter' do
- title = subject.title
content = subject.content
subject.update(front_matter: { slugs: ['x'] })
- page = wiki.find_page(title)
+ page = wiki.find_page(original_title)
expect([subject, page]).to all(
have_attributes(
@@ -566,10 +496,9 @@ RSpec.describe WikiPage do
end
it 'updates the wiki-page front-matter and content together' do
- title = subject.title
content = 'totally new content'
subject.update(content: content, front_matter: { slugs: ['x'] })
- page = wiki.find_page(title)
+ page = wiki.find_page(original_title)
expect([subject, page]).to all(
have_attributes(
@@ -598,11 +527,11 @@ RSpec.describe WikiPage do
context 'when renaming a page' do
it 'raises an error if the page already exists' do
- wiki.create_page('Existing Page', 'content')
+ existing_page = create_wiki_page
- expect { subject.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(subject.title).to eq 'test page'
- expect(subject.content).to eq 'new_content'
+ expect { subject.update(title: existing_page.title, content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(subject.title).to eq original_title
+ expect(subject.content).to eq 'new_content' # We don't revert the content
end
it 'updates the content and renames the file' do
@@ -623,7 +552,7 @@ RSpec.describe WikiPage do
wiki.create_page('foo/Existing Page', 'content')
expect { subject.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(subject.title).to eq 'test page'
+ expect(subject.title).to eq original_title
expect(subject.content).to eq 'new_content'
end
@@ -639,20 +568,22 @@ RSpec.describe WikiPage do
expect(page.content).to eq new_content
end
- context 'in subdir' do
- subject { create(:wiki_page, wiki: wiki, title: 'foo/Existing Page') }
-
+ describe 'in subdir' do
it 'moves the page to the root folder if the title is preceded by /' do
- expect(subject.slug).to eq 'foo/Existing-Page'
- expect(subject.update(title: '/Existing Page', content: 'new_content')).to be_truthy
- expect(subject.slug).to eq 'Existing-Page'
+ page = create_wiki_page(title: 'foo/Existing Page')
+
+ expect(page.slug).to eq 'foo/Existing-Page'
+ expect(page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
+ expect(page.slug).to eq 'Existing-Page'
end
it 'does nothing if it has the same title' do
- original_path = subject.slug
+ page = create_wiki_page(title: 'foo/Another Existing Page')
- expect(subject.update(title: 'Existing Page', content: 'new_content')).to be_truthy
- expect(subject.slug).to eq original_path
+ original_path = page.slug
+
+ expect(page.update(title: 'Another Existing Page', content: 'new_content')).to be_truthy
+ expect(page.slug).to eq original_path
end
end
@@ -660,7 +591,7 @@ RSpec.describe WikiPage do
it 'does nothing if the title is preceded by /' do
original_path = subject.slug
- expect(subject.update(title: '/test page', content: 'new_content')).to be_truthy
+ expect(subject.update(title: "/#{subject.title}", content: 'new_content')).to be_truthy
expect(subject.slug).to eq original_path
end
end
@@ -671,7 +602,7 @@ RSpec.describe WikiPage do
expect(subject.update(title: '', content: 'new_content')).to be_falsey
expect(subject.content).to eq 'new_content'
- page = wiki.find_page('test page')
+ page = wiki.find_page(original_title)
expect(page.content).to eq 'test content'
end
@@ -679,21 +610,17 @@ RSpec.describe WikiPage do
end
describe "#delete" do
- subject { existing_page }
-
- it "deletes the page" do
- subject.delete
-
- expect(wiki.list_pages).to be_empty
- end
+ it "deletes the page and returns true", :aggregate_failures do
+ page = create_wiki_page
- it "returns true" do
- expect(subject.delete).to eq(true)
+ expect do
+ expect(page.delete).to eq(true)
+ end.to change { wiki.list_pages.length }.by(-1)
end
end
describe "#versions" do
- subject { existing_page }
+ include_context 'subject is persisted page'
it "returns an array of all commits for the page" do
3.times { |i| subject.update(content: "content #{i}") }
@@ -709,19 +636,21 @@ RSpec.describe WikiPage do
describe '#title_changed?' do
using RSpec::Parameterized::TableSyntax
+ let_it_be(:unsaved_page) { build_wiki_page(title: 'test page') }
+ let_it_be(:existing_page) { create_wiki_page(title: 'test page') }
+ let_it_be(:directory_page) { create_wiki_page(title: 'parent directory/child page') }
+ let_it_be(:page_with_special_characters) { create_wiki_page(title: 'test+page') }
let(:untitled_page) { described_class.new(wiki) }
- let(:directory_page) { create(:wiki_page, title: 'parent directory/child page') }
- let(:page_with_special_characters) { create(:wiki_page, title: 'test+page') }
where(:page, :title, :changed) do
:untitled_page | nil | false
:untitled_page | 'new title' | true
- :new_page | nil | true
- :new_page | 'test page' | true
- :new_page | 'test-page' | true
- :new_page | 'test+page' | true
- :new_page | 'new title' | true
+ :unsaved_page | nil | true
+ :unsaved_page | 'test page' | true
+ :unsaved_page | 'test-page' | true
+ :unsaved_page | 'test+page' | true
+ :unsaved_page | 'new title' | true
:existing_page | nil | false
:existing_page | 'test page' | false
@@ -764,7 +693,7 @@ RSpec.describe WikiPage do
describe '#content_changed?' do
context 'with a new page' do
- subject { new_page }
+ subject { build_wiki_page }
it 'returns true if content is set' do
subject.attributes[:content] = 'new'
@@ -780,7 +709,7 @@ RSpec.describe WikiPage do
end
context 'with an existing page' do
- subject { existing_page }
+ include_context 'subject is persisted page'
it 'returns false' do
expect(subject.content_changed?).to be(false)
@@ -816,17 +745,21 @@ RSpec.describe WikiPage do
describe '#path' do
it 'returns the path when persisted' do
- expect(existing_page.path).to eq('test-page.md')
+ existing_page = create_wiki_page(title: 'path test')
+
+ expect(existing_page.path).to eq('path-test.md')
end
it 'returns nil when not persisted' do
- expect(new_page.path).to be_nil
+ unsaved_page = build_wiki_page(title: 'path test')
+
+ expect(unsaved_page.path).to be_nil
end
end
describe '#directory' do
context 'when the page is at the root directory' do
- subject { existing_page }
+ include_context 'subject is persisted page', title: 'directory test'
it 'returns an empty string' do
expect(subject.directory).to eq('')
@@ -834,7 +767,7 @@ RSpec.describe WikiPage do
end
context 'when the page is inside an actual directory' do
- subject { create(:wiki_page, title: 'dir_1/dir_1_1/file') }
+ include_context 'subject is persisted page', title: 'dir_1/dir_1_1/directory test'
it 'returns the full directory hierarchy' do
expect(subject.directory).to eq('dir_1/dir_1_1')
@@ -843,7 +776,7 @@ RSpec.describe WikiPage do
end
describe '#historical?' do
- subject { existing_page }
+ include_context 'subject is persisted page'
let(:old_version) { subject.versions.last.id }
let(:old_page) { wiki.find_page(subject.title, old_version) }
@@ -883,22 +816,22 @@ RSpec.describe WikiPage do
describe '#persisted?' do
it 'returns true for a persisted page' do
- expect(existing_page).to be_persisted
+ expect(create_wiki_page).to be_persisted
end
it 'returns false for an unpersisted page' do
- expect(new_page).not_to be_persisted
+ expect(build_wiki_page).not_to be_persisted
end
end
describe '#to_partial_path' do
it 'returns the relative path to the partial to be used' do
- expect(subject.to_partial_path).to eq('../shared/wikis/wiki_page')
+ expect(build_wiki_page.to_partial_path).to eq('../shared/wikis/wiki_page')
end
end
describe '#==' do
- subject { existing_page }
+ include_context 'subject is persisted page'
it 'returns true for identical wiki page' do
expect(subject).to eq(subject)
@@ -906,7 +839,7 @@ RSpec.describe WikiPage do
it 'returns true for updated wiki page' do
subject.update(content: "Updated content")
- updated_page = wiki.find_page(existing_page.slug)
+ updated_page = wiki.find_page(subject.slug)
expect(updated_page).not_to be_nil
expect(updated_page).to eq(subject)
@@ -921,7 +854,7 @@ RSpec.describe WikiPage do
end
it 'returns false for page with different slug on same container' do
- other_page = create(:wiki_page, container: subject.container)
+ other_page = create_wiki_page
expect(subject.slug).not_to eq(other_page.slug)
expect(subject.container).to eq(other_page.container)
@@ -929,7 +862,7 @@ RSpec.describe WikiPage do
end
it 'returns false for page with the same slug on a different container' do
- other_page = create(:wiki_page, title: existing_page.slug)
+ other_page = create(:wiki_page, title: subject.slug)
expect(subject.slug).to eq(other_page.slug)
expect(subject.container).not_to eq(other_page.container)
@@ -938,7 +871,7 @@ RSpec.describe WikiPage do
end
describe '#last_commit_sha' do
- subject { existing_page }
+ include_context 'subject is persisted page'
it 'returns commit sha' do
expect(subject.last_commit_sha).to eq subject.last_version.sha
@@ -948,13 +881,15 @@ RSpec.describe WikiPage do
last_commit_sha_before_update = subject.last_commit_sha
subject.update(content: "new content")
- page = wiki.find_page('test page')
+ page = wiki.find_page(subject.title)
expect(page.last_commit_sha).not_to eq last_commit_sha_before_update
end
end
describe '#hook_attrs' do
+ subject { build_wiki_page }
+
it 'adds absolute urls for images in the content' do
subject.attributes[:content] = 'test![WikiPage_Image](/uploads/abc/WikiPage_Image.png)'
@@ -965,19 +900,21 @@ RSpec.describe WikiPage do
describe '#version_commit_timestamp' do
context 'for a new page' do
it 'returns nil' do
- expect(new_page.version_commit_timestamp).to be_nil
+ expect(build_wiki_page.version_commit_timestamp).to be_nil
end
end
context 'for page that exists' do
it 'returns the timestamp of the commit' do
+ existing_page = create_wiki_page
+
expect(existing_page.version_commit_timestamp).to eq(existing_page.version.commit.committed_date)
end
end
end
describe '#diffs' do
- subject { existing_page }
+ include_context 'subject is persisted page'
it 'returns a diff instance' do
diffs = subject.diffs(foo: 'bar')
@@ -993,14 +930,4 @@ RSpec.describe WikiPage do
)
end
end
-
- private
-
- def get_slugs(page_or_dir)
- if page_or_dir.is_a? WikiPage
- [page_or_dir.slug]
- else
- page_or_dir.pages.present? ? page_or_dir.pages.map(&:slug) : []
- end
- end
end
diff --git a/spec/models/wiki_spec.rb b/spec/models/wiki_spec.rb
deleted file mode 100644
index 8dd510a0b98..00000000000
--- a/spec/models/wiki_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-RSpec.describe Wiki do
- describe '.new' do
- it 'verifies that the user is a User' do
- expect { described_class.new(double, 1) }.to raise_error(ArgumentError)
- expect { described_class.new(double, build(:group)) }.to raise_error(ArgumentError)
- expect { described_class.new(double, build(:user)) }.not_to raise_error
- expect { described_class.new(double, nil) }.not_to raise_error
- end
- end
-end
diff --git a/spec/policies/ci/bridge_policy_spec.rb b/spec/policies/ci/bridge_policy_spec.rb
new file mode 100644
index 00000000000..e598e2f7626
--- /dev/null
+++ b/spec/policies/ci/bridge_policy_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BridgePolicy do
+ let_it_be(:user, reload: true) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:downstream_project, reload: true) { create(:project, :repository) }
+ let_it_be(:pipeline, reload: true) { create(:ci_empty_pipeline, project: project) }
+ let_it_be(:bridge, reload: true) { create(:ci_bridge, pipeline: pipeline, downstream: downstream_project) }
+
+ let(:policy) do
+ described_class.new(user, bridge)
+ end
+
+ describe '#play_job' do
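+ # Stub Gitlab::UserAccess so the policy outcome depends only on whether
+ # the user can update the downstream project's 'master' branch.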
+ before do
+ fake_access = double('Gitlab::UserAccess')
+ expect(fake_access).to receive(:can_update_branch?).with('master').and_return(can_update_branch)
+ expect(Gitlab::UserAccess).to receive(:new).with(user, container: downstream_project).and_return(fake_access)
+ end
+
+ context 'when user can update the downstream branch' do
+ let(:can_update_branch) { true }
+
+ it 'allows' do
+ expect(policy).to be_allowed :play_job
+ end
+ end
+
+ context 'when user can not update the downstream branch' do
+ let(:can_update_branch) { false }
+
+ it 'does not allow' do
+ expect(policy).not_to be_allowed :play_job
+ end
+ end
+ end
+end
diff --git a/spec/policies/design_management/design_policy_spec.rb b/spec/policies/design_management/design_policy_spec.rb
index 5a74d979ef3..117279d1638 100644
--- a/spec/policies/design_management/design_policy_spec.rb
+++ b/spec/policies/design_management/design_policy_spec.rb
@@ -71,6 +71,11 @@ RSpec.describe DesignManagement::DesignPolicy do
end
end
+ shared_examples_for "read-only design abilities" do
+ it { is_expected.to be_allowed(*guest_design_abilities) }
+ it { is_expected.to be_disallowed(*developer_design_abilities) }
+ end
+
shared_examples_for "design abilities available for members" do
context "for owners" do
let(:current_user) { owner }
@@ -86,8 +91,7 @@ RSpec.describe DesignManagement::DesignPolicy do
end
context "when admin mode disabled" do
- it { is_expected.to be_allowed(*guest_design_abilities) }
- it { is_expected.to be_disallowed(*developer_design_abilities) }
+ it_behaves_like "read-only design abilities"
end
end
@@ -106,16 +110,10 @@ RSpec.describe DesignManagement::DesignPolicy do
context "for reporters" do
let(:current_user) { reporter }
- it { is_expected.to be_allowed(*guest_design_abilities) }
- it { is_expected.to be_disallowed(*developer_design_abilities) }
+ it_behaves_like "read-only design abilities"
end
end
- shared_examples_for "read-only design abilities" do
- it { is_expected.to be_allowed(:read_design) }
- it { is_expected.to be_disallowed(:create_design, :destroy_design) }
- end
-
context "when DesignManagement is not enabled" do
before do
enable_design_management(false)
@@ -135,15 +133,13 @@ RSpec.describe DesignManagement::DesignPolicy do
let_it_be(:project) { create(:project, :private) }
let(:current_user) { guest }
- it { is_expected.to be_allowed(*guest_design_abilities) }
- it { is_expected.to be_disallowed(*developer_design_abilities) }
+ it_behaves_like "read-only design abilities"
end
context "for anonymous users in public projects" do
let(:current_user) { nil }
- it { is_expected.to be_allowed(*guest_design_abilities) }
- it { is_expected.to be_disallowed(*developer_design_abilities) }
+ it_behaves_like "read-only design abilities"
end
context "when the issue is confidential" do
@@ -164,20 +160,6 @@ RSpec.describe DesignManagement::DesignPolicy do
end
end
- context "when the issue is locked" do
- let_it_be(:issue) { create(:issue, :locked, project: project) }
- let(:current_user) { owner }
-
- it_behaves_like "read-only design abilities"
- end
-
- context "when the issue has moved" do
- let_it_be(:issue) { create(:issue, project: project, moved_to: create(:issue)) }
- let(:current_user) { owner }
-
- it_behaves_like "read-only design abilities"
- end
-
context "when the project is archived" do
let_it_be(:project) { create(:project, :public, :archived) }
let_it_be(:issue) { create(:issue, project: project) }
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 6cd1c201c62..2f9376f9b0a 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -130,6 +130,24 @@ RSpec.describe GlobalPolicy do
end
end
+ describe 'approving users' do
+ context 'regular user' do
+ it { is_expected.not_to be_allowed(:approve_user) }
+ end
+
+ context 'admin' do
+ let(:current_user) { create(:admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:approve_user) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.to be_disallowed(:approve_user) }
+ end
+ end
+ end
+
describe 'using project statistics filters' do
context 'regular user' do
it { is_expected.not_to be_allowed(:use_project_statistics_filters) }
@@ -187,6 +205,14 @@ RSpec.describe GlobalPolicy do
it { is_expected.not_to be_allowed(:access_api) }
end
+ context 'user blocked pending approval' do
+ before do
+ current_user.block_pending_approval
+ end
+
+ it { is_expected.not_to be_allowed(:access_api) }
+ end
+
context 'when terms are enforced' do
before do
enforce_terms
@@ -229,12 +255,6 @@ RSpec.describe GlobalPolicy do
it { is_expected.not_to be_allowed(:access_api) }
end
-
- it 'when `inactive_policy_condition` feature flag is turned off' do
- stub_feature_flags(inactive_policy_condition: false)
-
- is_expected.to be_allowed(:access_api)
- end
end
end
@@ -282,6 +302,14 @@ RSpec.describe GlobalPolicy do
it { is_expected.not_to be_allowed(:receive_notifications) }
end
+
+ context 'user blocked pending approval' do
+ before do
+ current_user.block_pending_approval
+ end
+
+ it { is_expected.not_to be_allowed(:receive_notifications) }
+ end
end
describe 'git access' do
@@ -321,12 +349,6 @@ RSpec.describe GlobalPolicy do
end
it { is_expected.not_to be_allowed(:access_git) }
-
- it 'when `inactive_policy_condition` feature flag is turned off' do
- stub_feature_flags(inactive_policy_condition: false)
-
- is_expected.to be_allowed(:access_git)
- end
end
context 'when terms are enforced' do
@@ -356,6 +378,14 @@ RSpec.describe GlobalPolicy do
it { is_expected.to be_allowed(:access_git) }
end
+
+ context 'user blocked pending approval' do
+ before do
+ current_user.block_pending_approval
+ end
+
+ it { is_expected.not_to be_allowed(:access_git) }
+ end
end
describe 'read instance metadata' do
@@ -403,12 +433,6 @@ RSpec.describe GlobalPolicy do
end
it { is_expected.not_to be_allowed(:use_slash_commands) }
-
- it 'when `inactive_policy_condition` feature flag is turned off' do
- stub_feature_flags(inactive_policy_condition: false)
-
- is_expected.to be_allowed(:use_slash_commands)
- end
end
context 'when access locked' do
@@ -430,6 +454,14 @@ RSpec.describe GlobalPolicy do
it { is_expected.not_to be_allowed(:use_slash_commands) }
end
+
+ context 'user blocked pending approval' do
+ before do
+ current_user.block_pending_approval
+ end
+
+ it { is_expected.not_to be_allowed(:use_slash_commands) }
+ end
end
describe 'create_snippet' do
@@ -462,5 +494,13 @@ RSpec.describe GlobalPolicy do
it { is_expected.not_to be_allowed(:log_in) }
end
+
+ context 'user blocked pending approval' do
+ before do
+ current_user.block_pending_approval
+ end
+
+ it { is_expected.not_to be_allowed(:log_in) }
+ end
end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index dbe444acb58..fecf5f3e4f8 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -812,4 +812,74 @@ RSpec.describe GroupPolicy do
it { is_expected.to be_disallowed(:create_jira_connect_subscription) }
end
end
+
+ describe 'read_package' do
+ context 'admin' do
+ let(:current_user) { admin }
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+
+ context 'with maintainer' do
+ let(:current_user) { maintainer }
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+
+ context 'with reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+
+ context 'with guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:read_package) }
+ end
+
+ context 'with non member' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_disallowed(:read_package) }
+ end
+
+ context 'with anonymous' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_disallowed(:read_package) }
+ end
+ end
+
+ context 'deploy token access' do
+ let!(:group_deploy_token) do
+ create(:group_deploy_token, group: group, deploy_token: deploy_token)
+ end
+
+ subject { described_class.new(deploy_token, group) }
+
+ context 'a deploy token with read_package_registry scope' do
+ let(:deploy_token) { create(:deploy_token, :group, read_package_registry: true) }
+
+ it { is_expected.to be_allowed(:read_package) }
+ it { is_expected.to be_allowed(:read_group) }
+ it { is_expected.to be_disallowed(:create_package) }
+ end
+
+ context 'a deploy token with write_package_registry scope' do
+ let(:deploy_token) { create(:deploy_token, :group, write_package_registry: true) }
+
+ it { is_expected.to be_allowed(:create_package) }
+ it { is_expected.to be_allowed(:read_group) }
+ it { is_expected.to be_disallowed(:destroy_package) }
+ end
+ end
+
+ it_behaves_like 'Self-managed Core resource access tokens'
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 0c457148b4d..d66ef81efca 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -941,4 +941,6 @@ RSpec.describe ProjectPolicy do
end
end
end
+
+ it_behaves_like 'Self-managed Core resource access tokens'
end
diff --git a/spec/policies/terraform/state_policy_spec.rb b/spec/policies/terraform/state_policy_spec.rb
new file mode 100644
index 00000000000..82152920997
--- /dev/null
+++ b/spec/policies/terraform/state_policy_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Terraform::StatePolicy do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:terraform_state) { create(:terraform_state, project: project) }
+
+ subject { described_class.new(user, terraform_state) }
+
+ describe 'rules' do
+ context 'no access' do
+ let(:user) { create(:user) }
+
+ it { is_expected.to be_disallowed(:read_terraform_state) }
+ it { is_expected.to be_disallowed(:admin_terraform_state) }
+ end
+
+ context 'developer' do
+ let(:user) { create(:user, developer_projects: [project]) }
+
+ it { is_expected.to be_allowed(:read_terraform_state) }
+ it { is_expected.to be_disallowed(:admin_terraform_state) }
+ end
+
+ context 'maintainer' do
+ let(:user) { create(:user, maintainer_projects: [project]) }
+
+ it { is_expected.to be_allowed(:read_terraform_state) }
+ it { is_expected.to be_allowed(:admin_terraform_state) }
+ end
+ end
+end
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index 18f79bc930c..5cb9d340e06 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -5,17 +5,20 @@ require 'spec_helper'
RSpec.describe Ci::PipelinePresenter do
include Gitlab::Routing
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :test_repo) }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
let(:current_user) { user }
- let(:project) { create(:project, :test_repo) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
subject(:presenter) do
described_class.new(pipeline)
end
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
allow(presenter).to receive(:current_user) { current_user }
end
@@ -184,8 +187,8 @@ RSpec.describe Ci::PipelinePresenter do
describe '#all_related_merge_request_text' do
subject { presenter.all_related_merge_request_text }
- let(:mr_1) { create(:merge_request) }
- let(:mr_2) { create(:merge_request) }
+ let_it_be(:mr_1) { create(:merge_request) }
+ let_it_be(:mr_2) { create(:merge_request) }
context 'with zero related merge requests (branch pipeline)' do
it { is_expected.to eq('No related merge requests found.') }
@@ -242,7 +245,7 @@ RSpec.describe Ci::PipelinePresenter do
end
context 'permissions' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project) }
+ let_it_be_with_refind(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project) }
let(:pipeline) { merge_request.all_pipelines.take }
shared_examples 'private merge requests' do
diff --git a/spec/presenters/event_presenter_spec.rb b/spec/presenters/event_presenter_spec.rb
index 6798be21d28..5a67fd92c9d 100644
--- a/spec/presenters/event_presenter_spec.rb
+++ b/spec/presenters/event_presenter_spec.rb
@@ -38,4 +38,34 @@ RSpec.describe EventPresenter do
it { is_expected.to eq([project, target]) }
end
end
+
+ describe '#target_type_name' do
+ it 'returns design for a design event' do
+ expect(build(:design_event).present).to have_attributes(target_type_name: 'design')
+ end
+
+ it 'returns project for a project event' do
+ expect(build(:project_created_event).present).to have_attributes(target_type_name: 'project')
+ end
+
+ it 'returns milestone for a milestone event' do
+ expect(group_event.present).to have_attributes(target_type_name: 'milestone')
+ end
+ end
+
+ describe '#note_target_type_name' do
+ it 'returns design for an event on a comment on a design' do
+ expect(build(:event, :commented, :for_design).present)
+ .to have_attributes(note_target_type_name: 'design')
+ end
+
+ it 'returns nil for an event without a target' do
+ expect(build(:event).present).to have_attributes(note_target_type_name: be_nil)
+ end
+
+ it 'returns issue for an issue comment event' do
+ expect(build(:event, :commented, target: build(:note_on_issue)).present)
+ .to have_attributes(note_target_type_name: 'issue')
+ end
+ end
end
diff --git a/spec/presenters/label_presenter_spec.rb b/spec/presenters/label_presenter_spec.rb
index cb6e991bd8e..44c68a6102f 100644
--- a/spec/presenters/label_presenter_spec.rb
+++ b/spec/presenters/label_presenter_spec.rb
@@ -91,4 +91,18 @@ RSpec.describe LabelPresenter do
it { is_expected.to eq(label.project.name) }
end
end
+
+ describe '#subject_full_name' do
+ context 'with group label' do
+ subject { group_label.subject_full_name }
+
+ it { is_expected.to eq(group_label.group.full_name) }
+ end
+
+ context 'with project label' do
+ subject { label.subject_full_name }
+
+ it { is_expected.to eq(label.project.full_name) }
+ end
+ end
end
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index f1e581efd44..76b77ee0de2 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe MergeRequestPresenter do
- let(:resource) { create(:merge_request, source_project: project) }
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:resource) { create(:merge_request, source_project: project) }
+ let_it_be(:user) { create(:user) }
describe '#ci_status' do
subject { described_class.new(resource).ci_status }
@@ -73,8 +73,6 @@ RSpec.describe MergeRequestPresenter do
end
describe '#conflict_resolution_path' do
- let(:project) { create :project }
- let(:user) { create :user }
let(:presenter) { described_class.new(resource, current_user: user) }
let(:path) { presenter.conflict_resolution_path }
@@ -107,18 +105,21 @@ RSpec.describe MergeRequestPresenter do
end
context 'issues links' do
- let(:project) { create(:project, :private, :repository, creator: user, namespace: user.namespace) }
- let(:issue_a) { create(:issue, project: project) }
- let(:issue_b) { create(:issue, project: project) }
+ let_it_be(:project) { create(:project, :private, :repository, creator: user, namespace: user.namespace) }
+ let_it_be(:issue_a) { create(:issue, project: project) }
+ let_it_be(:issue_b) { create(:issue, project: project) }
- let(:resource) do
+ let_it_be(:resource) do
create(:merge_request,
source_project: project, target_project: project,
description: "Fixes #{issue_a.to_reference} Related #{issue_b.to_reference}")
end
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
allow(resource.project).to receive(:default_branch)
.and_return(resource.target_branch)
resource.cache_merge_request_closes_issues!
diff --git a/spec/presenters/packages/detail/package_presenter_spec.rb b/spec/presenters/packages/detail/package_presenter_spec.rb
index 3a13aca6c7a..8ece27e9b5f 100644
--- a/spec/presenters/packages/detail/package_presenter_spec.rb
+++ b/spec/presenters/packages/detail/package_presenter_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
context 'with conan metadata' do
let(:package) { create(:conan_package, project: project) }
- let(:expected_package_details) { super().merge(conan_metadatum: package.conan_metadatum) }
+ let(:expected_package_details) { super().merge(conan_metadatum: package.conan_metadatum, conan_package_name: package.name, name: package.conan_recipe) }
it 'returns conan_metadatum' do
expect(presenter.detail_view).to eq expected_package_details
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 4b4d8ee85db..b7fee5253f8 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -38,14 +38,33 @@ RSpec.describe ProjectPresenter do
context 'when repository is empty' do
let_it_be(:project) { create(:project_empty_repo, :public) }
- it 'returns activity if user has repository access' do
+ it 'returns wiki if user has repository access and can read wiki, which exists' do
+ allow(project).to receive(:wiki_repository_exists?).and_return(true)
allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(true)
+ allow(presenter).to receive(:can?).with(nil, :read_wiki, project).and_return(true)
+ allow(presenter).to receive(:can?).with(nil, :read_issue, project).and_return(false)
+
+ expect(presenter.default_view).to eq('wiki')
+ end
+
+ it 'returns activity if user has repository access and can read wiki, which does not exist' do
+ allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(true)
+ allow(presenter).to receive(:can?).with(nil, :read_wiki, project).and_return(true)
+ allow(presenter).to receive(:can?).with(nil, :read_issue, project).and_return(false)
expect(presenter.default_view).to eq('activity')
end
- it 'returns activity if user does not have repository access' do
- allow(project).to receive(:can?).with(nil, :download_code, project).and_return(false)
+ it 'returns issues if user does not have repository access, but can read issues' do
+ allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(false)
+ allow(presenter).to receive(:can?).with(nil, :read_issue, project).and_call_original
+
+ expect(presenter.default_view).to eq('projects/issues/issues')
+ end
+
+ it 'returns activity if user can read neither wiki nor issues' do
+ allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(false)
+ allow(presenter).to receive(:can?).with(nil, :read_issue, project).and_return(false)
expect(presenter.default_view).to eq('activity')
end
@@ -61,8 +80,18 @@ RSpec.describe ProjectPresenter do
expect(presenter.default_view).to eq('files')
end
- it 'returns activity if user does not have repository access' do
+ it 'returns wiki if user does not have repository access and can read wiki, which exists' do
+ allow(project).to receive(:wiki_repository_exists?).and_return(true)
allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(false)
+ allow(presenter).to receive(:can?).with(nil, :read_wiki, project).and_return(true)
+
+ expect(presenter.default_view).to eq('wiki')
+ end
+
+ it 'returns activity if user does not have repository or wiki access' do
+ allow(presenter).to receive(:can?).with(nil, :download_code, project).and_return(false)
+ allow(presenter).to receive(:can?).with(nil, :read_issue, project).and_return(false)
+ allow(presenter).to receive(:can?).with(nil, :read_wiki, project).and_return(false)
expect(presenter.default_view).to eq('activity')
end
@@ -96,22 +125,25 @@ RSpec.describe ProjectPresenter do
allow(presenter).to receive(:can?).with(user, :download_code, project).and_return(false)
end
- it 'returns wiki if the user has the right policy' do
+ it 'returns wiki if the user has the right policy and the wiki exists' do
+ allow(project).to receive(:wiki_repository_exists?).and_return(true)
allow(presenter).to receive(:can?).with(user, :read_wiki, project).and_return(true)
expect(presenter.default_view).to eq('wiki')
end
- it 'returns customize_workflow if the user does not have the right policy' do
+ it 'returns activity if the user does not have the right policy' do
allow(presenter).to receive(:can?).with(user, :read_wiki, project).and_return(false)
+ allow(presenter).to receive(:can?).with(user, :read_issue, project).and_return(false)
- expect(presenter.default_view).to eq('customize_workflow')
+ expect(presenter.default_view).to eq('activity')
end
end
context 'with issues as a feature available' do
it 'return issues' do
allow(presenter).to receive(:can?).with(user, :download_code, project).and_return(false)
+ allow(presenter).to receive(:can?).with(user, :read_issue, project).and_return(true)
allow(presenter).to receive(:can?).with(user, :read_wiki, project).and_return(false)
expect(presenter.default_view).to eq('projects/issues/issues')
@@ -119,12 +151,13 @@ RSpec.describe ProjectPresenter do
end
context 'with no activity, no wikis and no issues' do
- it 'returns customize_workflow as default' do
+ it 'returns activity as default' do
project.project_feature.update_attribute(:issues_access_level, 0)
allow(presenter).to receive(:can?).with(user, :download_code, project).and_return(false)
allow(presenter).to receive(:can?).with(user, :read_wiki, project).and_return(false)
+ allow(presenter).to receive(:can?).with(user, :read_issue, project).and_return(false)
- expect(presenter.default_view).to eq('customize_workflow')
+ expect(presenter.default_view).to eq('activity')
end
end
end
diff --git a/spec/presenters/projects/prometheus/alert_presenter_spec.rb b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
deleted file mode 100644
index 98dba28829e..00000000000
--- a/spec/presenters/projects/prometheus/alert_presenter_spec.rb
+++ /dev/null
@@ -1,346 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Prometheus::AlertPresenter do
- include Gitlab::Routing.url_helpers
-
- let_it_be(:project, reload: true) { create(:project) }
-
- let(:presenter) { described_class.new(alert) }
- let(:payload) { {} }
- let(:alert) { create(:alerting_alert, project: project, payload: payload) }
-
- shared_context 'gitlab alert' do
- let(:gitlab_alert) { create(:prometheus_alert, project: project) }
- let(:metric_id) { gitlab_alert.prometheus_metric_id }
-
- let(:alert) do
- create(:alerting_alert, project: project, metric_id: metric_id, payload: payload)
- end
- end
-
- describe '#project_full_path' do
- subject { presenter.project_full_path }
-
- it { is_expected.to eq(project.full_path) }
- end
-
- describe '#start_time' do
- subject { presenter.start_time }
-
- let(:starts_at) { '2020-10-31T14:02:04Z' }
-
- before do
- payload['startsAt'] = starts_at
- end
-
- context 'with valid utc datetime' do
- it { is_expected.to eq('31 October 2020, 2:02PM (UTC)') }
-
- context 'with admin time zone not UTC' do
- before do
- allow(Time).to receive(:zone).and_return(ActiveSupport::TimeZone.new('Perth'))
- end
-
- it { is_expected.to eq('31 October 2020, 2:02PM (UTC)') }
- end
- end
-
- context 'with invalid datetime' do
- let(:starts_at) { 'invalid' }
-
- it { is_expected.to be_nil }
- end
- end
-
- describe '#issue_summary_markdown' do
- let(:markdown_line_break) { ' ' }
-
- subject { presenter.issue_summary_markdown }
-
- context 'without default payload' do
- it do
- is_expected.to eq(
- <<~MARKDOWN.chomp
- **Start time:** #{presenter.start_time}
-
- MARKDOWN
- )
- end
- end
-
- context 'with optional attributes' do
- before do
- payload['annotations'] = {
- 'title' => 'Alert Title',
- 'foo' => 'value1',
- 'bar' => 'value2',
- 'description' => 'Alert Description',
- 'monitoring_tool' => 'monitoring_tool_name',
- 'service' => 'service_name',
- 'hosts' => ['http://localhost:3000', 'http://localhost:3001']
- }
- payload['generatorURL'] = 'http://host?g0.expr=query'
- end
-
- it do
- is_expected.to eq(
- <<~MARKDOWN.chomp
- **Start time:** #{presenter.start_time}#{markdown_line_break}
- **full_query:** `query`#{markdown_line_break}
- **Service:** service_name#{markdown_line_break}
- **Monitoring tool:** monitoring_tool_name#{markdown_line_break}
- **Hosts:** http://localhost:3000 http://localhost:3001
-
- MARKDOWN
- )
- end
- end
-
- context 'when hosts is a string' do
- before do
- payload['annotations'] = { 'hosts' => 'http://localhost:3000' }
- end
-
- it do
- is_expected.to eq(
- <<~MARKDOWN.chomp
- **Start time:** #{presenter.start_time}#{markdown_line_break}
- **Hosts:** http://localhost:3000
-
- MARKDOWN
- )
- end
- end
-
- context 'with embedded metrics' do
- let(:starts_at) { '2018-03-12T09:06:00Z' }
-
- shared_examples_for 'markdown with metrics embed' do
- let(:embed_regex) { /\n\[\]\(#{Regexp.quote(presenter.metrics_dashboard_url)}\)\z/ }
-
- context 'without a starting time available' do
- around do |example|
- Timecop.freeze(starts_at) { example.run }
- end
-
- before do
- payload.delete('startsAt')
- end
-
- it { is_expected.to match(embed_regex) }
- end
-
- context 'with a starting time available' do
- it { is_expected.to match(embed_regex) }
- end
- end
-
- context 'for gitlab-managed prometheus alerts' do
- include_context 'gitlab-managed prometheus alert attributes'
-
- let(:alert) do
- create(:alerting_alert, project: project, metric_id: prometheus_metric_id, payload: payload)
- end
-
- it_behaves_like 'markdown with metrics embed'
- end
-
- context 'for alerts from a self-managed prometheus' do
- include_context 'self-managed prometheus alert attributes'
-
- it_behaves_like 'markdown with metrics embed'
-
- context 'without y_label' do
- let(:y_label) { title }
-
- before do
- payload['annotations'].delete('gitlab_y_label')
- end
-
- it_behaves_like 'markdown with metrics embed'
- end
-
- context 'when not enough information is present for an embed' do
- shared_examples_for 'does not include an embed' do
- it { is_expected.not_to match(/\[\]\(.+\)/) }
- end
-
- context 'without title' do
- before do
- payload['annotations'].delete('title')
- end
-
- it_behaves_like 'does not include an embed'
- end
-
- context 'without environment' do
- before do
- payload['labels'].delete('gitlab_environment_name')
- end
-
- it_behaves_like 'does not include an embed'
- end
-
- context 'without full_query' do
- before do
- payload.delete('generatorURL')
- end
-
- it_behaves_like 'does not include an embed'
- end
- end
- end
- end
- end
-
- describe '#show_performance_dashboard_link?' do
- subject { presenter.show_performance_dashboard_link? }
-
- it { is_expected.to be_falsey }
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- it { is_expected.to eq(true) }
- end
- end
-
- describe '#show_incident_issues_link?' do
- subject { presenter.show_incident_issues_link? }
-
- it { is_expected.to be_falsey }
-
- context 'create issue setting enabled' do
- before do
- create(:project_incident_management_setting, project: project, create_issue: true)
- end
-
- it { is_expected.to eq(true) }
- end
- end
-
- describe '#details_url' do
- subject { presenter.details_url }
-
- it { is_expected.to eq(nil) }
-
- context 'alert management alert present' do
- let_it_be(:am_alert) { create(:alert_management_alert, project: project) }
- let(:alert) { create(:alerting_alert, project: project, payload: payload, am_alert: am_alert) }
-
- it { is_expected.to eq("http://localhost/#{project.full_path}/-/alert_management/#{am_alert.iid}/details") }
- end
- end
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- describe '#full_title' do
- let(:query_title) do
- "#{gitlab_alert.title} #{gitlab_alert.computed_operator} #{gitlab_alert.threshold} for 5 minutes"
- end
-
- let(:expected_subject) do
- "#{alert.environment.name}: #{query_title}"
- end
-
- subject { presenter.full_title }
-
- it { is_expected.to eq(expected_subject) }
- end
-
- describe '#metric_query' do
- subject { presenter.metric_query }
-
- it { is_expected.to eq(gitlab_alert.full_query) }
- end
-
- describe '#environment_name' do
- subject { presenter.environment_name }
-
- it { is_expected.to eq(alert.environment.name) }
- end
-
- describe '#performance_dashboard_link' do
- let(:expected_link) { metrics_project_environment_url(project, alert.environment) }
-
- subject { presenter.performance_dashboard_link }
-
- it { is_expected.to eq(expected_link) }
- end
-
- describe '#incident_issues_link' do
- let(:expected_link) { project_issues_url(project, label_name: described_class::INCIDENT_LABEL_NAME) }
-
- subject { presenter.incident_issues_link }
-
- it { is_expected.to eq(expected_link) }
- end
- end
-
- context 'without gitlab alert' do
- describe '#full_title' do
- subject { presenter.full_title }
-
- context 'with title' do
- let(:title) { 'some title' }
-
- before do
- expect(alert).to receive(:title).and_return(title)
- end
-
- it { is_expected.to eq(title) }
- end
-
- context 'without title' do
- it { is_expected.to eq('') }
- end
- end
-
- describe '#metric_query' do
- subject { presenter.metric_query }
-
- it { is_expected.to be_nil }
- end
-
- describe '#environment_name' do
- subject { presenter.environment_name }
-
- it { is_expected.to be_nil }
- end
-
- describe '#performance_dashboard_link' do
- let(:expected_link) { metrics_project_environments_url(project) }
-
- subject { presenter.performance_dashboard_link }
-
- it { is_expected.to eq(expected_link) }
- end
- end
-
- describe '#metrics_dashboard_url' do
- subject { presenter.metrics_dashboard_url }
-
- context 'for a non-prometheus alert' do
- it { is_expected.to be_nil }
- end
-
- context 'for a self-managed prometheus alert' do
- include_context 'self-managed prometheus alert attributes'
-
- let(:prometheus_payload) { payload }
-
- it { is_expected.to eq(dashboard_url_for_alert) }
- end
-
- context 'for a gitlab-managed prometheus alert' do
- include_context 'gitlab-managed prometheus alert attributes'
-
- let(:prometheus_payload) { payload }
-
- it { is_expected.to eq(dashboard_url_for_alert) }
- end
- end
-end
diff --git a/spec/presenters/release_presenter_spec.rb b/spec/presenters/release_presenter_spec.rb
index 5577b3ad2e8..eb4d755205b 100644
--- a/spec/presenters/release_presenter_spec.rb
+++ b/spec/presenters/release_presenter_spec.rb
@@ -57,14 +57,6 @@ RSpec.describe ReleasePresenter do
it 'returns its own url' do
is_expected.to match /#{project_release_url(project, release)}/
end
-
- context 'when release_show_page feature flag is disabled' do
- before do
- stub_feature_flags(release_show_page: false)
- end
-
- it { is_expected.to be_nil }
- end
end
describe '#merge_requests_url' do
diff --git a/spec/presenters/sentry_error_presenter_spec.rb b/spec/presenters/sentry_error_presenter_spec.rb
index af9e7c8a2b2..86e43be1fa7 100644
--- a/spec/presenters/sentry_error_presenter_spec.rb
+++ b/spec/presenters/sentry_error_presenter_spec.rb
@@ -26,4 +26,12 @@ RSpec.describe SentryErrorPresenter do
expect(count).to eq error.frequency.first[1]
end
end
+
+ describe '#project_id' do
+ subject { presenter.project_id }
+
+ it 'returns a global ID of the correct type' do
+ expect(subject).to eq(Gitlab::GlobalId.build(model_name: 'SentryProject', id: error.project_id).to_s)
+ end
+ end
end
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index 915f43fe572..83fe37effc0 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -3,70 +3,75 @@
require 'spec_helper'
RSpec.describe SnippetBlobPresenter do
+ let_it_be(:snippet) { create(:personal_snippet, :repository) }
+
+ let(:branch) { snippet.default_branch }
+ let(:blob) { snippet.blobs.first }
+
describe '#rich_data' do
+ let(:data_endpoint_url) { "/-/snippets/#{snippet.id}/raw/#{branch}/#{file}" }
+
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:current_user).and_return(nil)
end
+
+ blob.name = File.basename(file)
+ blob.path = file
end
- subject { described_class.new(snippet.blob).rich_data }
+ subject { described_class.new(blob).rich_data }
context 'with PersonalSnippet' do
- let(:snippet) { create(:personal_snippet, :repository) }
-
context 'when blob is binary' do
- it 'returns the HTML associated with the binary' do
- allow(snippet).to receive(:blob).and_return(snippet.repository.blob_at('master', 'files/images/logo-black.png'))
+ let(:file) { 'files/images/logo-black.png' }
+ let(:blob) { blob_at(file) }
+ it 'returns the HTML associated with the binary' do
expect(subject).to include('file-content image_file')
end
end
context 'with markdown format' do
- let(:snippet) { create(:personal_snippet, file_name: 'test.md', content: '*foo*') }
+ let(:file) { 'README.md' }
+ let(:blob) { blob_at(file) }
it 'returns rich markdown content' do
- expected = <<~HTML
- <div class="file-content md">
- <p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>
- </div>
- HTML
-
- expect(subject).to eq(expected)
+ expect(subject).to include('file-content md')
end
end
context 'with notebook format' do
- let(:snippet) { create(:personal_snippet, file_name: 'test.ipynb') }
+ let(:file) { 'test.ipynb' }
it 'returns rich notebook content' do
- expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="/-/snippets/#{snippet.id}/raw" id="js-notebook-viewer"></div>)
+ expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="#{data_endpoint_url}" id="js-notebook-viewer"></div>)
end
end
context 'with openapi format' do
- let(:snippet) { create(:personal_snippet, file_name: 'openapi.yml') }
+ let(:file) { 'openapi.yml' }
it 'returns rich openapi content' do
- expect(subject).to eq %Q(<div class="file-content" data-endpoint="/-/snippets/#{snippet.id}/raw" id="js-openapi-viewer"></div>\n)
+ expect(subject).to eq %Q(<div class="file-content" data-endpoint="#{data_endpoint_url}" id="js-openapi-viewer"></div>\n)
end
end
context 'with svg format' do
- let(:snippet) { create(:personal_snippet, file_name: 'test.svg') }
+ let(:file) { 'files/images/wm.svg' }
+ let(:blob) { blob_at(file) }
it 'returns rich svg content' do
result = Nokogiri::HTML::DocumentFragment.parse(subject)
image_tag = result.search('img').first
- expect(image_tag.attr('src')).to include("data:#{snippet.blob.mime_type};base64")
- expect(image_tag.attr('alt')).to eq('test.svg')
+ expect(image_tag.attr('src')).to include("data:#{blob.mime_type};base64")
+ expect(image_tag.attr('alt')).to eq(File.basename(file))
end
end
context 'with other format' do
- let(:snippet) { create(:personal_snippet, file_name: 'test') }
+ let(:file) { 'test' }
it 'does not return rich content' do
expect(subject).to be_nil
@@ -76,36 +81,41 @@ RSpec.describe SnippetBlobPresenter do
end
describe '#plain_data' do
- let(:snippet) { build(:personal_snippet) }
+ let(:blob) { blob_at(file) }
- subject { described_class.new(snippet.blob).plain_data }
+ subject { described_class.new(blob).plain_data }
- it 'returns nil when the snippet blob is binary' do
- allow(snippet.blob).to receive(:binary?).and_return(true)
+ context 'when blob is binary' do
+ let(:file) { 'files/images/logo-black.png' }
- expect(subject).to be_nil
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
end
- it 'returns plain content when snippet file is markup' do
- snippet.file_name = 'test.md'
- snippet.content = '*foo*'
+ context 'when blob is markup' do
+ let(:file) { 'README.md' }
- expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
+ it 'returns plain content' do
+ expect(subject).to include('<span id="LC1" class="line" lang="markdown">')
+ end
end
- it 'returns highlighted syntax content' do
- snippet.file_name = 'test.rb'
- snippet.content = 'class Foo;end'
+ context 'when blob has syntax' do
+ let(:file) { 'files/ruby/regex.rb' }
- expect(subject)
- .to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
+ it 'returns highlighted syntax content' do
+ expect(subject)
+ .to include '<span id="LC1" class="line" lang="ruby"><span class="k">module</span> <span class="nn">Gitlab</span>'
+ end
end
- it 'returns plain text highlighted content' do
- snippet.file_name = 'test'
- snippet.content = 'foo'
+ context 'when blob has plain data' do
+ let(:file) { 'LICENSE' }
- expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
+ it 'returns plain text highlighted content' do
+ expect(subject).to include('<span id="LC1" class="line" lang="plaintext">The MIT License (MIT)</span>')
+ end
end
end
@@ -115,40 +125,42 @@ RSpec.describe SnippetBlobPresenter do
let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
let_it_be(:project_snippet) { create(:project_snippet, :repository, project: project, author: user) }
+ let(:blob) { snippet.blobs.first }
+
before do
project.add_developer(user)
end
describe '#raw_path' do
- subject { described_class.new(snippet.blobs.first, current_user: user).raw_path }
+ subject { described_class.new(blob, current_user: user).raw_path }
it_behaves_like 'snippet blob raw path'
- context 'with snippet_multiple_files feature disabled' do
- before do
- stub_feature_flags(snippet_multiple_files: false)
- end
+ context 'with a snippet without a repository' do
+ let(:personal_snippet) { build(:personal_snippet, author: user, id: 1) }
+ let(:project_snippet) { build(:project_snippet, project: project, author: user, id: 1) }
+ let(:blob) { snippet.blob }
context 'with ProjectSnippet' do
let(:snippet) { project_snippet }
- it 'returns the raw path' do
- expect(subject).to eq "/#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw"
+ it 'returns the raw project snippet path' do
+ expect(subject).to eq("/#{project_snippet.project.full_path}/-/snippets/#{project_snippet.id}/raw")
end
end
context 'with PersonalSnippet' do
let(:snippet) { personal_snippet }
- it 'returns the raw path' do
- expect(subject).to eq "/-/snippets/#{snippet.id}/raw"
+ it 'returns the raw personal snippet path' do
+ expect(subject).to eq("/-/snippets/#{personal_snippet.id}/raw")
end
end
end
end
describe '#raw_url' do
- subject { described_class.new(snippet.blobs.first, current_user: user).raw_url }
+ subject { described_class.new(blob, current_user: user).raw_url }
before do
stub_default_url_options(host: 'test.host')
@@ -156,10 +168,10 @@ RSpec.describe SnippetBlobPresenter do
it_behaves_like 'snippet blob raw url'
- context 'with snippet_multiple_files feature disabled' do
- before do
- stub_feature_flags(snippet_multiple_files: false)
- end
+ context 'with a snippet without a repository' do
+ let(:personal_snippet) { build(:personal_snippet, author: user, id: 1) }
+ let(:project_snippet) { build(:project_snippet, project: project, author: user, id: 1) }
+ let(:blob) { snippet.blob }
context 'with ProjectSnippet' do
let(:snippet) { project_snippet }
@@ -179,4 +191,8 @@ RSpec.describe SnippetBlobPresenter do
end
end
end
+
+ def blob_at(path)
+ snippet.repository.blob_at(branch, path)
+ end
end
diff --git a/spec/presenters/snippet_presenter_spec.rb b/spec/presenters/snippet_presenter_spec.rb
index 681564ed2b0..66c6ba8fa0e 100644
--- a/spec/presenters/snippet_presenter_spec.rb
+++ b/spec/presenters/snippet_presenter_spec.rb
@@ -163,25 +163,4 @@ RSpec.describe SnippetPresenter do
end
end
end
-
- describe '#blobs' do
- let(:snippet) { personal_snippet }
-
- subject { presenter.blobs }
-
- context 'when snippet does not have a repository' do
- it 'returns an array with one SnippetBlob' do
- expect(subject.size).to eq(1)
- expect(subject.first).to eq(snippet.blob)
- end
- end
-
- context 'when snippet has a repository' do
- let(:snippet) { create(:snippet, :repository, author: user) }
-
- it 'returns an array with all repository blobs' do
- expect(subject).to match_array(snippet.blobs)
- end
- end
- end
end
diff --git a/spec/requests/api/admin/instance_clusters_spec.rb b/spec/requests/api/admin/instance_clusters_spec.rb
index b68541b5d92..9d0661089a9 100644
--- a/spec/requests/api/admin/instance_clusters_spec.rb
+++ b/spec/requests/api/admin/instance_clusters_spec.rb
@@ -162,6 +162,7 @@ RSpec.describe ::API::Admin::InstanceClusters do
name: 'test-instance-cluster',
domain: 'domain.example.com',
managed: false,
+ namespace_per_environment: false,
platform_kubernetes_attributes: platform_kubernetes_attributes,
clusterable: clusterable
}
@@ -206,6 +207,7 @@ RSpec.describe ::API::Admin::InstanceClusters do
expect(cluster_result.enabled).to eq(true)
expect(platform_kubernetes.authorization_type).to eq('rbac')
expect(cluster_result.managed).to be_falsy
+ expect(cluster_result.namespace_per_environment).to eq(false)
expect(platform_kubernetes.api_url).to eq("https://example.com")
expect(platform_kubernetes.token).to eq('sample-token')
end
@@ -235,6 +237,22 @@ RSpec.describe ::API::Admin::InstanceClusters do
end
end
+ context 'when namespace_per_environment is not set' do
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ domain: 'domain.example.com',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ it 'defaults to true' do
+ cluster_result = Clusters::Cluster.find(json_response['id'])
+
+ expect(cluster_result).to be_namespace_per_environment
+ end
+ end
+
context 'when an instance cluster already exists' do
it 'allows user to add multiple clusters' do
post api('/admin/clusters/add', admin_user), params: multiple_cluster_params
diff --git a/spec/requests/api/api_guard/response_coercer_middleware_spec.rb b/spec/requests/api/api_guard/response_coercer_middleware_spec.rb
new file mode 100644
index 00000000000..6f3f97fe846
--- /dev/null
+++ b/spec/requests/api/api_guard/response_coercer_middleware_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::APIGuard::ResponseCoercerMiddleware do
+ using RSpec::Parameterized::TableSyntax
+
+ it 'is loaded' do
+ expect(API::API.middleware).to include([:use, described_class])
+ end
+
+ describe '#call' do
+ let(:app) do
+ Class.new(API::API)
+ end
+
+ [
+ nil, 201, 10.5, "test"
+ ].each do |val|
+ it 'returns a String body' do
+ app.get 'bodytest' do
+ status 200
+ env['api.format'] = :binary
+ body val
+ end
+
+ unless val.is_a?(String)
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(instance_of(ArgumentError))
+ end
+
+ get api('/bodytest')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(val.to_s)
+ end
+ end
+
+ [100, 204, 304].each do |status|
+ it 'allows nil body' do
+ app.get 'statustest' do
+ status status
+ env['api.format'] = :binary
+ body nil
+ end
+
+ expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
+
+ get api('/statustest')
+
+ expect(response.status).to eq(status)
+ expect(response.body).to eq('')
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
index 97110b63ff6..71be0c30f5a 100644
--- a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -227,10 +227,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
context 'authorize uploading of an lsif artifact' do
- before do
- stub_feature_flags(code_navigation: job.project)
- end
-
it 'adds ProcessLsif header' do
authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
@@ -249,32 +245,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
.to change { Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(tracking_params) }
.by(1)
end
-
- context 'code_navigation feature flag is disabled' do
- before do
- stub_feature_flags(code_navigation: false)
- end
-
- it 'responds with a forbidden error' do
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['ProcessLsif']).to be_falsy
- end
- end
-
- it 'does not track code_intelligence usage ping' do
- tracking_params = {
- event_names: 'i_source_code_code_intelligence',
- start_date: Date.yesterday,
- end_date: Date.today
- }
-
- expect { authorize_artifacts_with_token_in_headers(artifact_type: :lsif) }
- .not_to change { Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(tracking_params) }
- end
- end
end
def authorize_artifacts(params = {}, request_headers = headers)
diff --git a/spec/requests/api/ci/runner/jobs_put_spec.rb b/spec/requests/api/ci/runner/jobs_put_spec.rb
index 183a3b26e00..cbefaa2c321 100644
--- a/spec/requests/api/ci/runner/jobs_put_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_put_spec.rb
@@ -46,64 +46,59 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
context 'when status is given' do
- it 'mark job as succeeded' do
+ it 'marks job as succeeded' do
update_job(state: 'success')
- job.reload
- expect(job).to be_success
+ expect(job.reload).to be_success
+ expect(response.header).not_to have_key('X-GitLab-Trace-Update-Interval')
end
- it 'mark job as failed' do
+ it 'marks job as failed' do
update_job(state: 'failed')
- job.reload
- expect(job).to be_failed
+ expect(job.reload).to be_failed
expect(job).to be_unknown_failure
+ expect(response.header).not_to have_key('X-GitLab-Trace-Update-Interval')
end
context 'when failure_reason is script_failure' do
before do
update_job(state: 'failed', failure_reason: 'script_failure')
- job.reload
end
- it { expect(job).to be_script_failure }
+ it { expect(job.reload).to be_script_failure }
end
context 'when failure_reason is runner_system_failure' do
before do
update_job(state: 'failed', failure_reason: 'runner_system_failure')
- job.reload
end
- it { expect(job).to be_runner_system_failure }
+ it { expect(job.reload).to be_runner_system_failure }
end
context 'when failure_reason is unrecognized value' do
before do
update_job(state: 'failed', failure_reason: 'what_is_this')
- job.reload
end
- it { expect(job).to be_unknown_failure }
+ it { expect(job.reload).to be_unknown_failure }
end
context 'when failure_reason is job_execution_timeout' do
before do
update_job(state: 'failed', failure_reason: 'job_execution_timeout')
- job.reload
end
- it { expect(job).to be_job_execution_timeout }
+ it { expect(job.reload).to be_job_execution_timeout }
end
context 'when failure_reason is unmet_prerequisites' do
before do
update_job(state: 'failed', failure_reason: 'unmet_prerequisites')
- job.reload
end
- it { expect(job).to be_unmet_prerequisites }
+ it { expect(job.reload).to be_unmet_prerequisites }
end
context 'when unmigrated live trace chunks exist' do
@@ -119,24 +114,21 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(job.pending_state).to be_present
expect(response).to have_gitlab_http_status(:accepted)
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to be > 0
end
end
context 'when runner retries request after receiving 202' do
it 'responds with 202 and then with 200', :sidekiq_inline do
- perform_enqueued_jobs do
- update_job(state: 'success', checksum: 'crc32:12345678')
- end
+ update_job(state: 'success', checksum: 'crc32:12345678')
- expect(job.reload.pending_state).to be_present
expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.pending_state).to be_present
- perform_enqueued_jobs do
- update_job(state: 'success', checksum: 'crc32:12345678')
- end
+ update_job(state: 'success', checksum: 'crc32:12345678')
- expect(job.reload.pending_state).not_to be_present
expect(response).to have_gitlab_http_status(:ok)
+ expect(job.reload.pending_state).not_to be_present
end
end
@@ -149,8 +141,9 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
update_job(state: 'success', checksum: 'crc:12345678')
expect(job.reload).to be_success
- expect(job.pending_state).not_to be_present
+ expect(job.pending_state).to be_present
expect(response).to have_gitlab_http_status(:ok)
+ expect(response.header).not_to have_key('X-GitLab-Trace-Update-Interval')
end
end
end
@@ -248,7 +241,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
def update_job_after_time(update_interval = 20.minutes, state = 'running')
- Timecop.travel(job.updated_at + update_interval) do
+ travel_to(job.updated_at + update_interval) do
update_job(job.token, state: state)
end
end
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 4fa95f8ebb2..2dc92417892 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -194,7 +194,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
[{ 'key' => 'cache_key',
'untracked' => false,
'paths' => ['vendor/*'],
- 'policy' => 'pull-push' }]
+ 'policy' => 'pull-push',
+ 'when' => 'on_success' }]
end
let(:expected_features) { { 'trace_sections' => true } }
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index d34244771ad..d455ed9c194 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -36,13 +36,9 @@ RSpec.describe API::Commits do
end
it 'include correct pagination headers' do
- commit_count = project.repository.count_commits(ref: 'master').to_s
-
get api(route, current_user)
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
- expect(response.headers['X-Page']).to eql('1')
+ expect(response).to include_limited_pagination_headers
end
end
@@ -79,12 +75,10 @@ RSpec.describe API::Commits do
it 'include correct pagination headers' do
commits = project.repository.commits("master", limit: 2)
after = commits.second.created_at
- commit_count = project.repository.count_commits(ref: 'master', after: after).to_s
get api("/projects/#{project_id}/repository/commits?since=#{after.utc.iso8601}", user)
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
+ expect(response).to include_limited_pagination_headers
expect(response.headers['X-Page']).to eql('1')
end
end
@@ -109,12 +103,10 @@ RSpec.describe API::Commits do
it 'include correct pagination headers' do
commits = project.repository.commits("master", limit: 2)
before = commits.second.created_at
- commit_count = project.repository.count_commits(ref: 'master', before: before).to_s
get api("/projects/#{project_id}/repository/commits?until=#{before.utc.iso8601}", user)
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
+ expect(response).to include_limited_pagination_headers
expect(response.headers['X-Page']).to eql('1')
end
end
@@ -137,49 +129,49 @@ RSpec.describe API::Commits do
context "path optional parameter" do
it "returns project commits matching provided path parameter" do
path = 'files/ruby/popen.rb'
- commit_count = project.repository.count_commits(ref: 'master', path: path).to_s
get api("/projects/#{project_id}/repository/commits?path=#{path}", user)
expect(json_response.size).to eq(3)
expect(json_response.first["id"]).to eq("570e7b2abdd848b95f2f578043fc23bd6f6fd24d")
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
+ expect(response).to include_limited_pagination_headers
end
it 'include correct pagination headers' do
path = 'files/ruby/popen.rb'
- commit_count = project.repository.count_commits(ref: 'master', path: path).to_s
get api("/projects/#{project_id}/repository/commits?path=#{path}", user)
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
+ expect(response).to include_limited_pagination_headers
expect(response.headers['X-Page']).to eql('1')
end
end
context 'all optional parameter' do
it 'returns all project commits' do
- commit_count = project.repository.count_commits(all: true)
+ expected_commit_ids = project.repository.commits(nil, all: true, limit: 50).map(&:id)
+
+ get api("/projects/#{project_id}/repository/commits?all=true&per_page=50", user)
- get api("/projects/#{project_id}/repository/commits?all=true", user)
+ commit_ids = json_response.map { |c| c['id'] }
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count.to_s)
+ expect(response).to include_limited_pagination_headers
+ expect(commit_ids).to eq(expected_commit_ids)
expect(response.headers['X-Page']).to eql('1')
end
end
context 'first_parent optional parameter' do
it 'returns all first_parent commits' do
- commit_count = project.repository.count_commits(ref: SeedRepo::Commit::ID, first_parent: true)
+ expected_commit_ids = project.repository.commits(SeedRepo::Commit::ID, limit: 50, first_parent: true).map(&:id)
- get api("/projects/#{project_id}/repository/commits", user), params: { ref_name: SeedRepo::Commit::ID, first_parent: 'true' }
+ get api("/projects/#{project_id}/repository/commits?per_page=50", user), params: { ref_name: SeedRepo::Commit::ID, first_parent: 'true' }
- expect(response).to include_pagination_headers
- expect(commit_count).to eq(12)
- expect(response.headers['X-Total']).to eq(commit_count.to_s)
+ commit_ids = json_response.map { |c| c['id'] }
+
+ expect(response).to include_limited_pagination_headers
+ expect(expected_commit_ids.size).to eq(12)
+ expect(commit_ids).to eq(expected_commit_ids)
end
end
@@ -209,11 +201,7 @@ RSpec.describe API::Commits do
end
it 'returns correct headers' do
- commit_count = project.repository.count_commits(ref: ref_name).to_s
-
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
- expect(response.headers['X-Page']).to eq('1')
+ expect(response).to include_limited_pagination_headers
expect(response.headers['Link']).to match(/page=1&per_page=5/)
expect(response.headers['Link']).to match(/page=2&per_page=5/)
end
@@ -972,7 +960,7 @@ RSpec.describe API::Commits do
refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]})
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to include_limited_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
end
@@ -1262,7 +1250,7 @@ RSpec.describe API::Commits do
get api(route, current_user)
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to include_limited_pagination_headers
expect(json_response.size).to be >= 1
expect(json_response.first.keys).to include 'diff'
end
@@ -1276,7 +1264,7 @@ RSpec.describe API::Commits do
get api(route, current_user)
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to include_limited_pagination_headers
expect(json_response.size).to be <= 1
end
end
@@ -1914,7 +1902,7 @@ RSpec.describe API::Commits do
get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user)
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to include_limited_pagination_headers
expect(json_response.length).to eq(1)
expect(json_response[0]['id']).to eq(merged_mr.id)
end
diff --git a/spec/requests/api/composer_packages_spec.rb b/spec/requests/api/composer_packages_spec.rb
index f5279af0483..ef4682466d5 100644
--- a/spec/requests/api/composer_packages_spec.rb
+++ b/spec/requests/api/composer_packages_spec.rb
@@ -289,6 +289,34 @@ RSpec.describe API::ComposerPackages do
it_behaves_like 'process Composer api request', :developer, :not_found
end
end
+
+ context 'with invalid composer.json' do
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
+ let(:params) { { tag: 'v1.2.99' } }
+ let(:project) { create(:project, :custom_repo, files: files, group: group) }
+
+ before do
+ project.repository.add_tag(user, 'v1.2.99', 'master')
+ end
+
+ context 'with a missing composer.json file' do
+ let(:files) { { 'some_other_file' => '' } }
+
+ it_behaves_like 'process Composer api request', :developer, :unprocessable_entity
+ end
+
+ context 'with an empty composer.json file' do
+ let(:files) { { 'composer.json' => '' } }
+
+ it_behaves_like 'process Composer api request', :developer, :unprocessable_entity
+ end
+
+ context 'with a malformed composer.json file' do
+ let(:files) { { 'composer.json' => 'not_valid_JSON' } }
+
+ it_behaves_like 'process Composer api request', :developer, :unprocessable_entity
+ end
+ end
end
describe 'GET /api/v4/projects/:id/packages/composer/archives/*package_name?sha=:sha' do
diff --git a/spec/requests/api/debian_group_packages_spec.rb b/spec/requests/api/debian_group_packages_spec.rb
new file mode 100644
index 00000000000..8a05d20fb33
--- /dev/null
+++ b/spec/requests/api/debian_group_packages_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe API::DebianGroupPackages do
+ include HttpBasicAuthHelpers
+ include WorkhorseHelpers
+
+ include_context 'Debian repository shared context', :group do
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release.gpg' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/Release.gpg" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :not_found, nil
+ end
+
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/Release" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO Release'
+ end
+
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/InRelease' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/InRelease" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :not_found, nil
+ end
+
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO Packages'
+ end
+
+ describe 'GET groups/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO File'
+ end
+ end
+end
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
new file mode 100644
index 00000000000..d2f208d0079
--- /dev/null
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe API::DebianProjectPackages do
+ include HttpBasicAuthHelpers
+ include WorkhorseHelpers
+
+ include_context 'Debian repository shared context', :project do
+ describe 'GET projects/:id/-/packages/debian/dists/*distribution/Release.gpg' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/Release.gpg" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :not_found, nil
+ end
+
+ describe 'GET projects/:id/-/packages/debian/dists/*distribution/Release' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/Release" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO Release'
+ end
+
+ describe 'GET projects/:id/-/packages/debian/dists/*distribution/InRelease' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/InRelease" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :not_found, nil
+ end
+
+ describe 'GET projects/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO Packages'
+ end
+
+ describe 'GET projects/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO File'
+ end
+
+ describe 'PUT projects/:id/-/packages/debian/incoming/:file_name' do
+ let(:method) { :put }
+ let(:url) { "/projects/#{project.id}/-/packages/debian/incoming/#{file_name}" }
+
+ it_behaves_like 'Debian project repository PUT endpoint', :created, nil
+ end
+ end
+end
diff --git a/spec/requests/api/doorkeeper_access_spec.rb b/spec/requests/api/doorkeeper_access_spec.rb
index f16cd58bb34..77f1dadff46 100644
--- a/spec/requests/api/doorkeeper_access_spec.rb
+++ b/spec/requests/api/doorkeeper_access_spec.rb
@@ -71,4 +71,12 @@ RSpec.describe 'doorkeeper access' do
it_behaves_like 'forbidden request'
end
+
+ context 'when user is blocked pending approval' do
+ before do
+ user.block_pending_approval
+ end
+
+ it_behaves_like 'forbidden request'
+ end
end
diff --git a/spec/requests/api/feature_flag_scopes_spec.rb b/spec/requests/api/feature_flag_scopes_spec.rb
new file mode 100644
index 00000000000..da5b2cbb7ae
--- /dev/null
+++ b/spec/requests/api/feature_flag_scopes_spec.rb
@@ -0,0 +1,319 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe API::FeatureFlagScopes do
+ include FeatureFlagHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:developer) { create(:user) }
+ let(:reporter) { create(:user) }
+ let(:user) { developer }
+
+ before do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ shared_examples_for 'check user permission' do
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'forbids the request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ shared_examples_for 'not found' do
+ it 'returns Not Found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ describe 'GET /projects/:id/feature_flag_scopes' do
+ subject do
+ get api("/projects/#{project.id}/feature_flag_scopes", user),
+ params: params
+ end
+
+ let(:feature_flag_1) { create_flag(project, 'flag_1', true) }
+ let(:feature_flag_2) { create_flag(project, 'flag_2', true) }
+
+ before do
+ create_scope(feature_flag_1, 'staging', false)
+ create_scope(feature_flag_1, 'production', true)
+ create_scope(feature_flag_2, 'review/*', false)
+ end
+
+ context 'when environment is production' do
+ let(:params) { { environment: 'production' } }
+
+ it_behaves_like 'check user permission'
+
+ it 'returns all effective feature flags under the environment' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag_detailed_scopes')
+ expect(json_response.second).to include({ 'name' => 'flag_1', 'active' => true })
+ expect(json_response.first).to include({ 'name' => 'flag_2', 'active' => true })
+ end
+ end
+
+ context 'when environment is staging' do
+ let(:params) { { environment: 'staging' } }
+
+ it 'returns all effective feature flags under the environment' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.second).to include({ 'name' => 'flag_1', 'active' => false })
+ expect(json_response.first).to include({ 'name' => 'flag_2', 'active' => true })
+ end
+ end
+
+ context 'when environment is review/feature X' do
+ let(:params) { { environment: 'review/feature X' } }
+
+ it 'returns all effective feature flags under the environment' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.second).to include({ 'name' => 'flag_1', 'active' => true })
+ expect(json_response.first).to include({ 'name' => 'flag_2', 'active' => false })
+ end
+ end
+ end
+
+ describe 'GET /projects/:id/feature_flags/:name/scopes' do
+ subject do
+ get api("/projects/#{project.id}/feature_flags/#{feature_flag.name}/scopes", user)
+ end
+
+ context 'when there are two scopes' do
+ let(:feature_flag) { create_flag(project, 'test') }
+ let!(:additional_scope) { create_scope(feature_flag, 'production', false) }
+
+ it_behaves_like 'check user permission'
+
+ it 'returns scopes of the feature flag' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag_scopes')
+ expect(json_response.count).to eq(2)
+ expect(json_response.first['environment_scope']).to eq(feature_flag.scopes[0].environment_scope)
+ expect(json_response.second['environment_scope']).to eq(feature_flag.scopes[1].environment_scope)
+ end
+ end
+
+ context 'when there are no feature flags' do
+ let(:feature_flag) { double(:feature_flag, name: 'test') }
+
+ it_behaves_like 'not found'
+ end
+ end
+
+ describe 'POST /projects/:id/feature_flags/:name/scopes' do
+ subject do
+ post api("/projects/#{project.id}/feature_flags/#{feature_flag.name}/scopes", user),
+ params: params
+ end
+
+ let(:params) do
+ {
+ environment_scope: 'staging',
+ active: true,
+ strategies: [{ name: 'userWithId', parameters: { 'userIds': 'a,b,c' } }].to_json
+ }
+ end
+
+ context 'when there is a corresponding feature flag' do
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+
+ it_behaves_like 'check user permission'
+
+ it 'creates a new scope' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag_scope')
+ expect(json_response['environment_scope']).to eq(params[:environment_scope])
+ expect(json_response['active']).to eq(params[:active])
+ expect(json_response['strategies']).to eq(Gitlab::Json.parse(params[:strategies]))
+ end
+
+ context 'when the scope already exists' do
+ before do
+ create_scope(feature_flag, params[:environment_scope])
+ end
+
+ it 'returns error' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('Scopes environment scope (staging) has already been taken')
+ end
+ end
+ end
+
+ context 'when feature flag is not found' do
+ let(:feature_flag) { double(:feature_flag, name: 'test') }
+
+ it_behaves_like 'not found'
+ end
+ end
+
+ describe 'GET /projects/:id/feature_flags/:name/scopes/:environment_scope' do
+ subject do
+ get api("/projects/#{project.id}/feature_flags/#{feature_flag.name}/scopes/#{environment_scope}",
+ user)
+ end
+
+ let(:environment_scope) { scope.environment_scope }
+
+ shared_examples_for 'successful response' do
+ it 'returns a scope' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag_scope')
+ expect(json_response['id']).to eq(scope.id)
+ expect(json_response['active']).to eq(scope.active)
+ expect(json_response['environment_scope']).to eq(scope.environment_scope)
+ end
+ end
+
+ context 'when there is a feature flag' do
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let(:scope) { feature_flag.default_scope }
+
+ it_behaves_like 'check user permission'
+ it_behaves_like 'successful response'
+
+ context 'when environment scope includes slash' do
+ let!(:scope) { create_scope(feature_flag, 'review/*', false) }
+
+ it_behaves_like 'not found'
+
+ context 'when URL-encoding the environment scope parameter' do
+ let(:environment_scope) { CGI.escape(scope.environment_scope) }
+
+ it_behaves_like 'successful response'
+ end
+ end
+ end
+
+ context 'when there are no feature flags' do
+ let(:feature_flag) { double(:feature_flag, name: 'test') }
+ let(:scope) { double(:feature_flag_scope, environment_scope: 'prd') }
+
+ it_behaves_like 'not found'
+ end
+ end
+
+ describe 'PUT /projects/:id/feature_flags/:name/scopes/:environment_scope' do
+ subject do
+ put api("/projects/#{project.id}/feature_flags/#{feature_flag.name}/scopes/#{environment_scope}",
+ user), params: params
+ end
+
+ let(:environment_scope) { scope.environment_scope }
+
+ let(:params) do
+ {
+ active: true,
+ strategies: [{ name: 'userWithId', parameters: { 'userIds': 'a,b,c' } }].to_json
+ }
+ end
+
+ context 'when there is a corresponding feature flag' do
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let(:scope) { create_scope(feature_flag, 'staging', false, [{ name: "default", parameters: {} }]) }
+
+ it_behaves_like 'check user permission'
+
+ it 'returns the updated scope' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag_scope')
+ expect(json_response['id']).to eq(scope.id)
+ expect(json_response['active']).to eq(params[:active])
+ expect(json_response['strategies']).to eq(Gitlab::Json.parse(params[:strategies]))
+ end
+
+ context 'when there are no corresponding feature flag scopes' do
+ let(:scope) { double(:feature_flag_scope, environment_scope: 'prd') }
+
+ it_behaves_like 'not found'
+ end
+ end
+
+ context 'when there are no corresponding feature flags' do
+ let(:feature_flag) { double(:feature_flag, name: 'test') }
+ let(:scope) { double(:feature_flag_scope, environment_scope: 'prd') }
+
+ it_behaves_like 'not found'
+ end
+ end
+
+ describe 'DELETE /projects/:id/feature_flags/:name/scopes/:environment_scope' do
+ subject do
+ delete api("/projects/#{project.id}/feature_flags/#{feature_flag.name}/scopes/#{environment_scope}",
+ user)
+ end
+
+ let(:environment_scope) { scope.environment_scope }
+
+ shared_examples_for 'successful response' do
+ it 'destroys the scope' do
+ expect { subject }
+ .to change { Operations::FeatureFlagScope.exists?(environment_scope: scope.environment_scope) }
+ .from(true).to(false)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when there is a feature flag' do
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+
+ context 'when there is a targeted scope' do
+ let!(:scope) { create_scope(feature_flag, 'production', false) }
+
+ it_behaves_like 'check user permission'
+ it_behaves_like 'successful response'
+
+ context 'when environment scope includes slash' do
+ let!(:scope) { create_scope(feature_flag, 'review/*', false) }
+
+ it_behaves_like 'not found'
+
+ context 'when URL-encoding the environment scope parameter' do
+ let(:environment_scope) { CGI.escape(scope.environment_scope) }
+
+ it_behaves_like 'successful response'
+ end
+ end
+ end
+
+ context 'when there are no targeted scopes' do
+ let!(:scope) { double(:feature_flag_scope, environment_scope: 'production') }
+
+ it_behaves_like 'not found'
+ end
+ end
+
+ context 'when there are no feature flags' do
+ let(:feature_flag) { double(:feature_flag, name: 'test') }
+ let(:scope) { double(:feature_flag_scope, environment_scope: 'prd') }
+
+ it_behaves_like 'not found'
+ end
+ end
+end
diff --git a/spec/requests/api/feature_flags_spec.rb b/spec/requests/api/feature_flags_spec.rb
new file mode 100644
index 00000000000..90d4a7b8b21
--- /dev/null
+++ b/spec/requests/api/feature_flags_spec.rb
@@ -0,0 +1,1130 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe API::FeatureFlags do
+ include FeatureFlagHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:non_project_member) { create(:user) }
+ let(:user) { developer }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ shared_examples_for 'check user permission' do
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'forbids the request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ shared_examples_for 'not found' do
+ it 'returns Not Found' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ describe 'GET /projects/:id/feature_flags' do
+ subject { get api("/projects/#{project.id}/feature_flags", user) }
+
+ context 'when there are two feature flags' do
+ let!(:feature_flag_1) do
+ create(:operations_feature_flag, project: project)
+ end
+
+ let!(:feature_flag_2) do
+ create(:operations_feature_flag, project: project)
+ end
+
+ it 'returns feature flags ordered by name' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response.count).to eq(2)
+ expect(json_response.first['name']).to eq(feature_flag_1.name)
+ expect(json_response.second['name']).to eq(feature_flag_2.name)
+ end
+
+ it 'returns the legacy flag version' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response.map { |f| f['version'] }).to eq(%w[legacy_flag legacy_flag])
+ end
+
+ it 'does not return the legacy flag version when the feature flag is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response.select { |f| f.key?('version') }).to eq([])
+ end
+
+ it 'does not return strategies if the new flag is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response.select { |f| f.key?('strategies') }).to eq([])
+ end
+
+ it 'does not have N+1 problem' do
+ control_count = ActiveRecord::QueryRecorder.new { subject }
+
+ create_list(:operations_feature_flag, 3, project: project)
+
+ expect { get api("/projects/#{project.id}/feature_flags", user) }
+ .not_to exceed_query_limit(control_count)
+ end
+
+ it_behaves_like 'check user permission'
+ end
+
+ context 'with version 2 feature flags' do
+ let!(:feature_flag) do
+ create(:operations_feature_flag, :new_version_flag, project: project, name: 'feature1')
+ end
+
+ let!(:strategy) do
+ create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ end
+
+ let!(:scope) do
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+ end
+
+ it 'returns the feature flags' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response).to eq([{
+ 'name' => 'feature1',
+ 'description' => nil,
+ 'active' => true,
+ 'version' => 'new_version_flag',
+ 'updated_at' => feature_flag.updated_at.as_json,
+ 'created_at' => feature_flag.created_at.as_json,
+ 'scopes' => [],
+ 'strategies' => [{
+ 'id' => strategy.id,
+ 'name' => 'default',
+ 'parameters' => {},
+ 'scopes' => [{
+ 'id' => scope.id,
+ 'environment_scope' => 'production'
+ }]
+ }]
+ }])
+ end
+
+ it 'does not return a version 2 flag when the feature flag is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response).to eq([])
+ end
+ end
+
+ context 'with version 1 and 2 feature flags' do
+ it 'returns both versions of flags ordered by name' do
+ create(:operations_feature_flag, project: project, name: 'legacy_flag')
+ feature_flag = create(:operations_feature_flag, :new_version_flag, project: project, name: 'new_version_flag')
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response.map { |f| f['name'] }).to eq(%w[legacy_flag new_version_flag])
+ end
+
+ it 'returns only version 1 flags when the feature flag is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+ create(:operations_feature_flag, project: project, name: 'legacy_flag')
+ feature_flag = create(:operations_feature_flag, :new_version_flag, project: project, name: 'new_version_flag')
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flags')
+ expect(json_response.map { |f| f['name'] }).to eq(['legacy_flag'])
+ end
+ end
+ end
+
+ describe 'GET /projects/:id/feature_flags/:name' do
+ subject { get api("/projects/#{project.id}/feature_flags/#{feature_flag.name}", user) }
+
+ context 'when there is a feature flag' do
+ let!(:feature_flag) { create_flag(project, 'awesome-feature') }
+
+ it 'returns a feature flag entry' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response['name']).to eq(feature_flag.name)
+ expect(json_response['description']).to eq(feature_flag.description)
+ expect(json_response['version']).to eq('legacy_flag')
+ end
+
+ it_behaves_like 'check user permission'
+ end
+
+ context 'with a version 2 feature_flag' do
+ it 'returns the feature flag' do
+ feature_flag = create(:operations_feature_flag, :new_version_flag, project: project, name: 'feature1')
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ scope = create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api("/projects/#{project.id}/feature_flags/feature1", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response).to eq({
+ 'name' => 'feature1',
+ 'description' => nil,
+ 'active' => true,
+ 'version' => 'new_version_flag',
+ 'updated_at' => feature_flag.updated_at.as_json,
+ 'created_at' => feature_flag.created_at.as_json,
+ 'scopes' => [],
+ 'strategies' => [{
+ 'id' => strategy.id,
+ 'name' => 'default',
+ 'parameters' => {},
+ 'scopes' => [{
+ 'id' => scope.id,
+ 'environment_scope' => 'production'
+ }]
+ }]
+ })
+ end
+
+ it 'returns a 404 when the feature is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+ feature_flag = create(:operations_feature_flag, :new_version_flag, project: project, name: 'feature1')
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api("/projects/#{project.id}/feature_flags/feature1", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response).to eq({ 'message' => '404 Not found' })
+ end
+ end
+ end
+
+ describe 'POST /projects/:id/feature_flags' do
+ def scope_default
+ {
+ environment_scope: '*',
+ active: false,
+ strategies: [{ name: 'default', parameters: {} }].to_json
+ }
+ end
+
+ subject do
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+ end
+
+ let(:params) do
+ {
+ name: 'awesome-feature',
+ scopes: [scope_default]
+ }
+ end
+
+ it 'creates a new feature flag' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.name).to eq(params[:name])
+ expect(feature_flag.description).to eq(params[:description])
+ end
+
+ it 'defaults to a version 1 (legacy) feature flag' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.version).to eq('legacy_flag')
+ end
+
+ it_behaves_like 'check user permission'
+
+ it 'returns version' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response['version']).to eq('legacy_flag')
+ end
+
+ it 'does not return version when new version flags are disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response.key?('version')).to eq(false)
+ end
+
+ context 'with active set to false in the params for a legacy flag' do
+ let(:params) do
+ {
+ name: 'awesome-feature',
+ version: 'legacy_flag',
+ active: 'false',
+ scopes: [scope_default]
+ }
+ end
+
+ it 'creates an inactive feature flag' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response['active']).to eq(false)
+ end
+ end
+
+ context 'when no scopes passed in parameters' do
+ let(:params) { { name: 'awesome-feature' } }
+
+ it 'creates a new feature flag with active default scope' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.default_scope).to be_active
+ end
+ end
+
+ context 'when there is a feature flag with the same name already' do
+ before do
+ create_flag(project, 'awesome-feature')
+ end
+
+ it 'fails to create a new feature flag' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when creating a feature flag with two scopes' do
+ let(:params) do
+ {
+ name: 'awesome-feature',
+ description: 'this is awesome',
+ scopes: [
+ scope_default,
+ scope_with_user_with_id
+ ]
+ }
+ end
+
+ let(:scope_with_user_with_id) do
+ {
+ environment_scope: 'production',
+ active: true,
+ strategies: [{
+ name: 'userWithId',
+ parameters: { userIds: 'user:1' }
+ }].to_json
+ }
+ end
+
+ it 'creates a new feature flag with two scopes' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+
+ feature_flag = project.operations_feature_flags.last
+ feature_flag.scopes.ordered.each_with_index do |scope, index|
+ expect(scope.environment_scope).to eq(params[:scopes][index][:environment_scope])
+ expect(scope.active).to eq(params[:scopes][index][:active])
+ expect(scope.strategies).to eq(Gitlab::Json.parse(params[:scopes][index][:strategies]))
+ end
+ end
+ end
+
+ context 'when creating a version 2 feature flag' do
+ it 'creates a new feature flag' do
+ params = {
+ name: 'new-feature',
+ version: 'new_version_flag'
+ }
+
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response).to match(hash_including({
+ 'name' => 'new-feature',
+ 'description' => nil,
+ 'active' => true,
+ 'version' => 'new_version_flag',
+ 'scopes' => [],
+ 'strategies' => []
+ }))
+
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.name).to eq(params[:name])
+ expect(feature_flag.version).to eq('new_version_flag')
+ end
+
+ it 'creates a new feature flag that is inactive' do
+ params = {
+ name: 'new-feature',
+ version: 'new_version_flag',
+ active: false
+ }
+
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response['active']).to eq(false)
+
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.active).to eq(false)
+ end
+
+ it 'creates a new feature flag with strategies' do
+ params = {
+ name: 'new-feature',
+ version: 'new_version_flag',
+ strategies: [{
+ name: 'userWithId',
+ parameters: { 'userIds': 'user1' }
+ }]
+ }
+
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.name).to eq(params[:name])
+ expect(feature_flag.version).to eq('new_version_flag')
+ expect(feature_flag.strategies.map { |s| s.slice(:name, :parameters).deep_symbolize_keys }).to eq([{
+ name: 'userWithId',
+ parameters: { userIds: 'user1' }
+ }])
+ end
+
+ it 'creates a new feature flag with gradual rollout strategy with scopes' do
+ params = {
+ name: 'new-feature',
+ version: 'new_version_flag',
+ strategies: [{
+ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '50' },
+ scopes: [{
+ environment_scope: 'staging'
+ }]
+ }]
+ }
+
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.name).to eq(params[:name])
+ expect(feature_flag.version).to eq('new_version_flag')
+ expect(feature_flag.strategies.map { |s| s.slice(:name, :parameters).deep_symbolize_keys }).to eq([{
+ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '50' }
+ }])
+ expect(feature_flag.strategies.first.scopes.map { |s| s.slice(:environment_scope).deep_symbolize_keys }).to eq([{
+ environment_scope: 'staging'
+ }])
+ end
+
+ it 'creates a new feature flag with flexible rollout strategy with scopes' do
+ params = {
+ name: 'new-feature',
+ version: 'new_version_flag',
+ strategies: [{
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default', rollout: '50', stickiness: 'DEFAULT' },
+ scopes: [{
+ environment_scope: 'staging'
+ }]
+ }]
+ }
+
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+
+ feature_flag = project.operations_feature_flags.last
+ expect(feature_flag.name).to eq(params[:name])
+ expect(feature_flag.version).to eq('new_version_flag')
+ expect(feature_flag.strategies.map { |s| s.slice(:name, :parameters).deep_symbolize_keys }).to eq([{
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default', rollout: '50', stickiness: 'DEFAULT' }
+ }])
+ expect(feature_flag.strategies.first.scopes.map { |s| s.slice(:environment_scope).deep_symbolize_keys }).to eq([{
+ environment_scope: 'staging'
+ }])
+ end
+
+ it 'returns a 422 when the feature flag is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+ params = {
+ name: 'new-feature',
+ version: 'new_version_flag'
+ }
+
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response).to eq({ 'message' => 'Version 2 flags are not enabled for this project' })
+ expect(project.operations_feature_flags.count).to eq(0)
+ end
+ end
+
+ context 'when given invalid parameters' do
+ it 'responds with a 400 when given an invalid version' do
+ params = { name: 'new-feature', version: 'bad_value' }
+
+ post api("/projects/#{project.id}/feature_flags", user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'message' => 'Version is invalid' })
+ end
+ end
+ end
+
+ describe 'POST /projects/:id/feature_flags/:name/enable' do
+ subject do
+ post api("/projects/#{project.id}/feature_flags/#{params[:name]}/enable", user),
+ params: params
+ end
+
+ let(:params) do
+ {
+ name: 'awesome-feature',
+ environment_scope: 'production',
+ strategy: { name: 'userWithId', parameters: { userIds: 'Project:1' } }.to_json
+ }
+ end
+
+ context 'when feature flag does not exist yet' do
+ it 'creates a new feature flag with the specified scope and strategy' do
+ subject
+
+ feature_flag = project.operations_feature_flags.last
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(feature_flag.name).to eq(params[:name])
+ expect(scope.strategies).to eq([Gitlab::Json.parse(params[:strategy])])
+ expect(feature_flag.version).to eq('legacy_flag')
+ end
+
+ it 'returns the flag version and strategies in the json response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response.slice('version', 'strategies')).to eq({
+ 'version' => 'legacy_flag',
+ 'strategies' => []
+ })
+ end
+
+ it_behaves_like 'check user permission'
+ end
+
+ context 'when feature flag exists already' do
+ let!(:feature_flag) { create_flag(project, params[:name]) }
+
+ context 'when feature flag scope does not exist yet' do
+ it 'creates a new scope with the specified strategy' do
+ subject
+
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(scope.strategies).to eq([Gitlab::Json.parse(params[:strategy])])
+ end
+
+ it_behaves_like 'check user permission'
+ end
+
+ context 'when feature flag scope exists already' do
+ let(:defined_strategy) { { name: 'userWithId', parameters: { userIds: 'Project:2' } } }
+
+ before do
+ create_scope(feature_flag, params[:environment_scope], true, [defined_strategy])
+ end
+
+ it 'adds an additional strategy to the scope' do
+ subject
+
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(scope.strategies).to eq([defined_strategy.deep_stringify_keys, Gitlab::Json.parse(params[:strategy])])
+ end
+
+ context 'when the specified strategy exists already' do
+ let(:defined_strategy) { Gitlab::Json.parse(params[:strategy]) }
+
+ it 'does not add a duplicate strategy' do
+ subject
+
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ strategy_count = scope.strategies.count { |strategy| strategy['name'] == 'userWithId' }
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(strategy_count).to eq(1)
+ end
+ end
+ end
+ end
+
+ context 'with a version 2 flag' do
+ let!(:feature_flag) { create(:operations_feature_flag, :new_version_flag, project: project, name: params[:name]) }
+
+ it 'does not change the flag and returns an unprocessable_entity response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response).to eq({ 'message' => 'Version 2 flags not supported' })
+ feature_flag.reload
+ expect(feature_flag.scopes).to eq([])
+ expect(feature_flag.strategies).to eq([])
+ end
+ end
+ end
+
+ describe 'POST /projects/:id/feature_flags/:name/disable' do
+ subject do
+ post api("/projects/#{project.id}/feature_flags/#{params[:name]}/disable", user),
+ params: params
+ end
+
+ let(:params) do
+ {
+ name: 'awesome-feature',
+ environment_scope: 'production',
+ strategy: { name: 'userWithId', parameters: { userIds: 'Project:1' } }.to_json
+ }
+ end
+
+ context 'when feature flag does not exist yet' do
+ it_behaves_like 'not found'
+ end
+
+ context 'when feature flag exists already' do
+ let!(:feature_flag) { create_flag(project, params[:name]) }
+
+ context 'when feature flag scope does not exist yet' do
+ it_behaves_like 'not found'
+ end
+
+ context 'when feature flag scope exists already and has the specified strategy' do
+ let(:defined_strategies) do
+ [
+ { name: 'userWithId', parameters: { userIds: 'Project:1' } },
+ { name: 'userWithId', parameters: { userIds: 'Project:2' } }
+ ]
+ end
+
+ before do
+ create_scope(feature_flag, params[:environment_scope], true, defined_strategies)
+ end
+
+ it 'removes the strategy from the scope' do
+ subject
+
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(scope.strategies)
+ .to eq([{ name: 'userWithId', parameters: { userIds: 'Project:2' } }.deep_stringify_keys])
+ end
+
+ it 'returns the flag version and strategies in the json response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response.slice('version', 'strategies')).to eq({
+ 'version' => 'legacy_flag',
+ 'strategies' => []
+ })
+ end
+
+ it_behaves_like 'check user permission'
+
+ context 'when strategies become an empty array after the removal' do
+ let(:defined_strategies) do
+ [{ name: 'userWithId', parameters: { userIds: 'Project:1' } }]
+ end
+
+ it 'destroys the scope' do
+ subject
+
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(scope).to be_nil
+ end
+
+ it_behaves_like 'check user permission'
+ end
+ end
+
+ context 'when the scope exists already but the corresponding strategy cannot be found' do
+ let(:defined_strategy) { { name: 'userWithId', parameters: { userIds: 'Project:2' } } }
+
+ before do
+ create_scope(feature_flag, params[:environment_scope], true, [defined_strategy])
+ end
+
+ it_behaves_like 'not found'
+ end
+ end
+
+ context 'with a version 2 feature flag' do
+ let!(:feature_flag) { create(:operations_feature_flag, :new_version_flag, project: project, name: params[:name]) }
+
+ it 'does not change the flag and returns an unprocessable_entity response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response).to eq({ 'message' => 'Version 2 flags not supported' })
+ feature_flag.reload
+ expect(feature_flag.scopes).to eq([])
+ expect(feature_flag.strategies).to eq([])
+ end
+ end
+ end
+
+ describe 'PUT /projects/:id/feature_flags/:name' do
+ context 'with a legacy feature flag' do
+ let!(:feature_flag) do
+ create(:operations_feature_flag, :legacy_flag, project: project,
+ name: 'feature1', description: 'old description')
+ end
+
+ it 'returns a 404 if the feature is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+ params = { description: 'new description' }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(feature_flag.reload.description).to eq('old description')
+ end
+
+ it 'returns a 422' do
+ params = { description: 'new description' }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response).to eq({ 'message' => 'PUT operations are not supported for legacy feature flags' })
+ expect(feature_flag.reload.description).to eq('old description')
+ end
+ end
+
+ context 'with a version 2 feature flag' do
+ let!(:feature_flag) do
+ create(:operations_feature_flag, :new_version_flag, project: project, active: true,
+ name: 'feature1', description: 'old description')
+ end
+
+ it 'returns a 404 if the feature is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+ params = { description: 'new description' }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(feature_flag.reload.description).to eq('old description')
+ end
+
+ it 'returns a 404 if the feature flag does not exist' do
+ params = { description: 'new description' }
+
+ put api("/projects/#{project.id}/feature_flags/other_flag_name", user), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(feature_flag.reload.description).to eq('old description')
+ end
+
+ it 'forbids a request for a reporter' do
+ params = { description: 'new description' }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", reporter), params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(feature_flag.reload.description).to eq('old description')
+ end
+
+ it 'returns an error for an invalid update of gradual rollout' do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ params = {
+ strategies: [{
+ id: strategy.id,
+ name: 'gradualRolloutUserId',
+ parameters: { bad: 'params' }
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).not_to be_nil
+ result = feature_flag.reload.strategies.map { |s| s.slice(:id, :name, :parameters).deep_symbolize_keys }
+ expect(result).to eq([{
+ id: strategy.id,
+ name: 'default',
+ parameters: {}
+ }])
+ end
+
+ it 'returns an error for an invalid update of flexible rollout' do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ params = {
+ strategies: [{
+ id: strategy.id,
+ name: 'flexibleRollout',
+ parameters: { bad: 'params' }
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).not_to be_nil
+ result = feature_flag.reload.strategies.map { |s| s.slice(:id, :name, :parameters).deep_symbolize_keys }
+ expect(result).to eq([{
+ id: strategy.id,
+ name: 'default',
+ parameters: {}
+ }])
+ end
+
+ it 'updates the feature flag' do
+ params = { description: 'new description' }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(feature_flag.reload.description).to eq('new description')
+ end
+
+ it 'updates the flag active value' do
+ params = { active: false }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response['active']).to eq(false)
+ expect(feature_flag.reload.active).to eq(false)
+ end
+
+ it 'updates the feature flag name' do
+ params = { name: 'new-name' }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(json_response['name']).to eq('new-name')
+ expect(feature_flag.reload.name).to eq('new-name')
+ end
+
+ it 'ignores a provided version parameter' do
+ params = { description: 'other description', version: 'bad_value' }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(feature_flag.reload.description).to eq('other description')
+ end
+
+ it 'returns the feature flag json' do
+ params = { description: 'new description' }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ feature_flag.reload
+ expect(json_response).to eq({
+ 'name' => 'feature1',
+ 'description' => 'new description',
+ 'active' => true,
+ 'created_at' => feature_flag.created_at.as_json,
+ 'updated_at' => feature_flag.updated_at.as_json,
+ 'scopes' => [],
+ 'strategies' => [],
+ 'version' => 'new_version_flag'
+ })
+ end
+
+ it 'updates an existing feature flag strategy to be gradual rollout strategy' do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ params = {
+ strategies: [{
+ id: strategy.id,
+ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '10' }
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ result = feature_flag.reload.strategies.map { |s| s.slice(:id, :name, :parameters).deep_symbolize_keys }
+ expect(result).to eq([{
+ id: strategy.id,
+ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '10' }
+ }])
+ end
+
+ it 'updates an existing feature flag strategy to be flexible rollout strategy' do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ params = {
+ strategies: [{
+ id: strategy.id,
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default', rollout: '10', stickiness: 'DEFAULT' }
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ result = feature_flag.reload.strategies.map { |s| s.slice(:id, :name, :parameters).deep_symbolize_keys }
+ expect(result).to eq([{
+ id: strategy.id,
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default', rollout: '10', stickiness: 'DEFAULT' }
+ }])
+ end
+
+ it 'adds a new gradual rollout strategy to a feature flag' do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ params = {
+ strategies: [{
+ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '10' }
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ result = feature_flag.reload.strategies
+ .map { |s| s.slice(:id, :name, :parameters).deep_symbolize_keys }
+ .sort_by { |s| s[:name] }
+ expect(result.first[:id]).to eq(strategy.id)
+ expect(result.map { |s| s.slice(:name, :parameters) }).to eq([{
+ name: 'default',
+ parameters: {}
+ }, {
+ name: 'gradualRolloutUserId',
+ parameters: { groupId: 'default', percentage: '10' }
+ }])
+ end
+
+ it 'adds a new flexible rollout strategy to a feature flag' do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ params = {
+ strategies: [{
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default', rollout: '10', stickiness: 'DEFAULT' }
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ result = feature_flag.reload.strategies
+ .map { |s| s.slice(:id, :name, :parameters).deep_symbolize_keys }
+ .sort_by { |s| s[:name] }
+ expect(result.first[:id]).to eq(strategy.id)
+ expect(result.map { |s| s.slice(:name, :parameters) }).to eq([{
+ name: 'default',
+ parameters: {}
+ }, {
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default', rollout: '10', stickiness: 'DEFAULT' }
+ }])
+ end
+
+ it 'deletes a feature flag strategy' do
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'userA,userB' })
+ params = {
+ strategies: [{
+ id: strategy_a.id,
+ name: 'default',
+ parameters: {},
+ _destroy: true
+ }, {
+ id: strategy_b.id,
+ name: 'userWithId',
+ parameters: { userIds: 'userB' }
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ result = feature_flag.reload.strategies
+ .map { |s| s.slice(:id, :name, :parameters).deep_symbolize_keys }
+ .sort_by { |s| s[:name] }
+ expect(result).to eq([{
+ id: strategy_b.id,
+ name: 'userWithId',
+ parameters: { userIds: 'userB' }
+ }])
+ end
+
+ it 'updates an existing feature flag scope' do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ scope = create(:operations_scope, strategy: strategy, environment_scope: '*')
+ params = {
+ strategies: [{
+ id: strategy.id,
+ scopes: [{
+ id: scope.id,
+ environment_scope: 'production'
+ }]
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ result = feature_flag.reload.strategies.first.scopes.map { |s| s.slice(:id, :environment_scope).deep_symbolize_keys }
+ expect(result).to eq([{
+ id: scope.id,
+ environment_scope: 'production'
+ }])
+ end
+
+ it 'deletes an existing feature flag scope' do
+ strategy = create(:operations_strategy, feature_flag: feature_flag, name: 'default', parameters: {})
+ scope = create(:operations_scope, strategy: strategy, environment_scope: '*')
+ params = {
+ strategies: [{
+ id: strategy.id,
+ scopes: [{
+ id: scope.id,
+ _destroy: true
+ }]
+ }]
+ }
+
+ put api("/projects/#{project.id}/feature_flags/feature1", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/feature_flag')
+ expect(feature_flag.reload.strategies.first.scopes.count).to eq(0)
+ end
+ end
+ end
+
+ describe 'DELETE /projects/:id/feature_flags/:name' do
+ subject do
+ delete api("/projects/#{project.id}/feature_flags/#{feature_flag.name}", user),
+ params: params
+ end
+
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let(:params) { {} }
+
+ it 'destroys the feature flag' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(-1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'returns version' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['version']).to eq('legacy_flag')
+ end
+
+ it 'does not return version when new version flags are disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.key?('version')).to eq(false)
+ end
+
+ context 'with a version 2 feature flag' do
+ let!(:feature_flag) { create(:operations_feature_flag, :new_version_flag, project: project) }
+
+ it 'destroys the flag' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(-1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'returns a 404 if the feature is disabled' do
+ stub_feature_flags(feature_flags_new_version: false)
+
+ expect { subject }.not_to change { Operations::FeatureFlag.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/feature_flags_user_lists_spec.rb b/spec/requests/api/feature_flags_user_lists_spec.rb
new file mode 100644
index 00000000000..469210040dd
--- /dev/null
+++ b/spec/requests/api/feature_flags_user_lists_spec.rb
@@ -0,0 +1,371 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::FeatureFlagsUserLists do
+ let_it_be(:project, refind: true) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ def create_list(name: 'mylist', user_xids: 'user1')
+ create(:operations_feature_flag_user_list, project: project, name: name, user_xids: user_xids)
+ end
+
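+ # Disabling these project features makes the feature flags feature unavailable,
+ # which the 'feature is unavailable' examples below depend on.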
+ def disable_repository(project)
+ project.project_feature.update!(
+ repository_access_level: ::ProjectFeature::DISABLED,
+ merge_requests_access_level: ::ProjectFeature::DISABLED,
+ builds_access_level: ::ProjectFeature::DISABLED
+ )
+ end
+
+ describe 'GET /projects/:id/feature_flags_user_lists' do
+ it 'forbids the request for a reporter' do
+ get api("/projects/#{project.id}/feature_flags_user_lists", reporter)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns forbidden if the feature is unavailable' do
+ disable_repository(project)
+
+ get api("/projects/#{project.id}/feature_flags_user_lists", developer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns all the user lists' do
+ create_list(name: 'list_a', user_xids: 'user1')
+ create_list(name: 'list_b', user_xids: 'user1,user2,user3')
+
+ get api("/projects/#{project.id}/feature_flags_user_lists", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.map { |list| list['name'] }.sort).to eq(%w[list_a list_b])
+ end
+
+ it 'returns all the data for a user list' do
+ user_list = create_list(name: 'list_a', user_xids: 'user1')
+
+ get api("/projects/#{project.id}/feature_flags_user_lists", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq([{
+ 'id' => user_list.id,
+ 'iid' => user_list.iid,
+ 'project_id' => project.id,
+ 'created_at' => user_list.created_at.as_json,
+ 'updated_at' => user_list.updated_at.as_json,
+ 'name' => 'list_a',
+ 'user_xids' => 'user1',
+ 'path' => project_feature_flags_user_list_path(user_list.project, user_list),
+ 'edit_path' => edit_project_feature_flags_user_list_path(user_list.project, user_list)
+ }])
+ end
+
+ it 'paginates user lists' do
+ create_list(name: 'list_a', user_xids: 'user1')
+ create_list(name: 'list_b', user_xids: 'user1,user2,user3')
+
+ get api("/projects/#{project.id}/feature_flags_user_lists?page=2&per_page=1", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.map { |list| list['name'] }).to eq(['list_b'])
+ end
+
+ it 'returns the user lists for only the specified project' do
+ create(:operations_feature_flag_user_list, project: project, name: 'list')
+ other_project = create(:project)
+ create(:operations_feature_flag_user_list, project: other_project, name: 'other_list')
+
+ get api("/projects/#{project.id}/feature_flags_user_lists", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.map { |list| list['name'] }).to eq(['list'])
+ end
+
+ it 'returns an empty list' do
+ get api("/projects/#{project.id}/feature_flags_user_lists", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq([])
+ end
+ end
+
+ describe 'GET /projects/:id/feature_flags_user_lists/:iid' do
+ it 'forbids the request for a reporter' do
+ list = create_list
+
+ get api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", reporter)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns forbidden if the feature is unavailable' do
+ disable_repository(project)
+ list = create_list
+
+ get api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns the user list' do
+ list = create_list(name: 'testers', user_xids: 'test1,test2')
+
+ get api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq({
+ 'name' => 'testers',
+ 'user_xids' => 'test1,test2',
+ 'id' => list.id,
+ 'iid' => list.iid,
+ 'project_id' => project.id,
+ 'created_at' => list.created_at.as_json,
+ 'updated_at' => list.updated_at.as_json,
+ 'path' => project_feature_flags_user_list_path(list.project, list),
+ 'edit_path' => edit_project_feature_flags_user_list_path(list.project, list)
+ })
+ end
+
+ it 'returns the correct user list identified by the iid' do
+ create_list(name: 'list_a', user_xids: 'test1')
+ list_b = create_list(name: 'list_b', user_xids: 'test2')
+
+ get api("/projects/#{project.id}/feature_flags_user_lists/#{list_b.iid}", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq('list_b')
+ end
+
+ it 'scopes the iid search to the project' do
+ other_project = create(:project)
+ other_project.add_developer(developer)
+ create(:operations_feature_flag_user_list, project: other_project, name: 'other_list')
+ list = create(:operations_feature_flag_user_list, project: project, name: 'list')
+
+ get api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['name']).to eq('list')
+ end
+
+ it 'returns not found when the list does not exist' do
+ get api("/projects/#{project.id}/feature_flags_user_lists/1", developer)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response).to eq({ 'message' => '404 Not found' })
+ end
+ end
+
+ describe 'POST /projects/:id/feature_flags_user_lists' do
+ it 'forbids the request for a reporter' do
+ post api("/projects/#{project.id}/feature_flags_user_lists", reporter), params: {
+ name: 'mylist', user_xids: 'user1'
+ }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(project.operations_feature_flags_user_lists.count).to eq(0)
+ end
+
+ it 'returns forbidden if the feature is unavailable' do
+ disable_repository(project)
+
+ post api("/projects/#{project.id}/feature_flags_user_lists", developer), params: {
+ name: 'mylist', user_xids: 'user1'
+ }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'creates the list' do
+ post api("/projects/#{project.id}/feature_flags_user_lists", developer), params: {
+ name: 'mylist', user_xids: 'user1'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response.slice('name', 'user_xids', 'project_id', 'iid')).to eq({
+ 'name' => 'mylist',
+ 'user_xids' => 'user1',
+ 'project_id' => project.id,
+ 'iid' => 1
+ })
+ expect(project.operations_feature_flags_user_lists.count).to eq(1)
+ expect(project.operations_feature_flags_user_lists.last.name).to eq('mylist')
+ end
+
+ it 'requires name' do
+ post api("/projects/#{project.id}/feature_flags_user_lists", developer), params: {
+ user_xids: 'user1'
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'message' => 'name is missing' })
+ expect(project.operations_feature_flags_user_lists.count).to eq(0)
+ end
+
+ it 'requires user_xids' do
+ post api("/projects/#{project.id}/feature_flags_user_lists", developer), params: {
+ name: 'empty_list'
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'message' => 'user_xids is missing' })
+ expect(project.operations_feature_flags_user_lists.count).to eq(0)
+ end
+
+ it 'returns an error when name is already taken' do
+ create_list(name: 'myname')
+ post api("/projects/#{project.id}/feature_flags_user_lists", developer), params: {
+ name: 'myname', user_xids: 'a'
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'message' => ['Name has already been taken'] })
+ expect(project.operations_feature_flags_user_lists.count).to eq(1)
+ end
+
+ it 'does not create a list for a project of which the developer is not a member' do
+ other_project = create(:project)
+
+ post api("/projects/#{other_project.id}/feature_flags_user_lists", developer), params: {
+ name: 'mylist', user_xids: 'user1'
+ }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(other_project.operations_feature_flags_user_lists.count).to eq(0)
+ expect(project.operations_feature_flags_user_lists.count).to eq(0)
+ end
+ end
+
+ describe 'PUT /projects/:id/feature_flags_user_lists/:iid' do
+ it 'forbids the request for a reporter' do
+ list = create_list(name: 'original_name')
+
+ put api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", reporter), params: {
+ name: 'mylist'
+ }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(list.reload.name).to eq('original_name')
+ end
+
+ it 'returns forbidden if the feature is unavailable' do
+ list = create_list(name: 'original_name')
+ disable_repository(project)
+
+ put api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer), params: {
+ name: 'mylist', user_xids: '456,789'
+ }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'updates the list' do
+ list = create_list(name: 'original_name', user_xids: '123')
+
+ put api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer), params: {
+ name: 'mylist', user_xids: '456,789'
+ }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.slice('name', 'user_xids')).to eq({
+ 'name' => 'mylist',
+ 'user_xids' => '456,789'
+ })
+ expect(list.reload.name).to eq('mylist')
+ end
+
+ it 'preserves attributes not listed in the request' do
+ list = create_list(name: 'original_name', user_xids: '123')
+
+ put api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer), params: {}
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.slice('name', 'user_xids')).to eq({
+ 'name' => 'original_name',
+ 'user_xids' => '123'
+ })
+ expect(list.reload.name).to eq('original_name')
+ expect(list.reload.user_xids).to eq('123')
+ end
+
+ it 'returns an error when the update is invalid' do
+ create_list(name: 'taken', user_xids: '123')
+ list = create_list(name: 'original_name', user_xids: '123')
+
+ put api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer), params: {
+ name: 'taken'
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({ 'message' => ['Name has already been taken'] })
+ end
+
+ it 'returns not found when the list does not exist' do
+ list = create_list(name: 'original_name', user_xids: '123')
+
+ put api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid + 1}", developer), params: {
+ name: 'new_name'
+ }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response).to eq({ 'message' => '404 Not found' })
+ end
+ end
+
+ describe 'DELETE /projects/:id/feature_flags_user_lists/:iid' do
+ it 'forbids the request for a reporter' do
+ list = create_list
+
+ delete api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", reporter)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(project.operations_feature_flags_user_lists.count).to eq(1)
+ end
+
+ it 'returns forbidden if the feature is unavailable' do
+ list = create_list
+ disable_repository(project)
+
+ delete api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns not found when the list does not exist' do
+ delete api("/projects/#{project.id}/feature_flags_user_lists/1", developer)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response).to eq({ 'message' => '404 Not found' })
+ end
+
+ it 'deletes the list' do
+ list = create_list
+
+ delete api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.body).to be_blank
+ expect(project.operations_feature_flags_user_lists.count).to eq(0)
+ end
+
+ it 'does not delete the list if it is associated with a strategy' do
+ list = create_list
+ feature_flag = create(:operations_feature_flag, :new_version_flag, project: project)
+ create(:operations_strategy, feature_flag: feature_flag, name: 'gitlabUserList', user_list: list)
+
+ delete api("/projects/#{project.id}/feature_flags_user_lists/#{list.iid}", developer)
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ expect(json_response).to eq({ 'message' => ['User list is associated with a strategy'] })
+ expect(list.reload).to be_persisted
+ end
+ end
+end
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index 2746e777306..3f443b4f92b 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -12,6 +12,8 @@ RSpec.describe API::Features, stub_feature_flags: false do
Flipper.register(:perf_team) do |actor|
actor.respond_to?(:admin) && actor.admin?
end
+
+ skip_feature_flags_yaml_validation
end
describe 'GET /features' do
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index bb4e88f97f8..f77f127ddc8 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -747,7 +747,7 @@ RSpec.describe API::Files do
it "updates existing file in project repo with accepts correct last commit id" do
last_commit = Gitlab::Git::Commit
- .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ .last_for_path(project.repository, 'master', Addressable::URI.unencode_component(file_path))
params_with_correct_id = params.merge(last_commit_id: last_commit.id)
put api(route(file_path), user), params: params_with_correct_id
@@ -757,7 +757,7 @@ RSpec.describe API::Files do
it "returns 400 when file path is invalid" do
last_commit = Gitlab::Git::Commit
- .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ .last_for_path(project.repository, 'master', Addressable::URI.unencode_component(file_path))
params_with_correct_id = params.merge(last_commit_id: last_commit.id)
put api(route(rouge_file_path), user), params: params_with_correct_id
@@ -769,7 +769,7 @@ RSpec.describe API::Files do
it_behaves_like 'when path is absolute' do
let(:last_commit) do
Gitlab::Git::Commit
- .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ .last_for_path(project.repository, 'master', Addressable::URI.unencode_component(file_path))
end
let(:params_with_correct_id) { params.merge(last_commit_id: last_commit.id) }
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index ed852fe75c7..2cb686167f1 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -4,79 +4,432 @@ require 'spec_helper'
RSpec.describe API::GenericPackages do
let_it_be(:personal_access_token) { create(:personal_access_token) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+ let(:workhorse_header) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
+ let(:user) { personal_access_token.user }
+ let(:ci_build) { create(:ci_build, :running, user: user) }
- describe 'GET /api/v4/projects/:id/packages/generic/ping' do
- let(:user) { personal_access_token.user }
- let(:auth_token) { personal_access_token.token }
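+ # Maps the parameterized :authenticate_with value to the request headers each example sends.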
+ def auth_header
+ return {} if user_role == :anonymous
+ case authenticate_with
+ when :personal_access_token
+ personal_access_token_header
+ when :job_token
+ job_token_header
+ when :invalid_personal_access_token
+ personal_access_token_header('wrong token')
+ when :invalid_job_token
+ job_token_header('wrong token')
+ end
+ end
+
+ def personal_access_token_header(value = nil)
+ { Gitlab::Auth::AuthFinders::PRIVATE_TOKEN_HEADER => value || personal_access_token.token }
+ end
+
+ def job_token_header(value = nil)
+ { Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER => value || ci_build.token }
+ end
+
+ shared_examples 'secure endpoint' do
before do
project.add_developer(user)
end
- context 'packages feature is disabled' do
- it 'responds with 404 Not Found' do
- stub_packages_setting(enabled: false)
+ it 'rejects malicious request' do
+ subject
- ping(personal_access_token: auth_token)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
- expect(response).to have_gitlab_http_status(:not_found)
+ describe 'PUT /api/v4/projects/:id/packages/generic/:package_name/:package_version/:file_name/authorize' do
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
+ 'PUBLIC' | :developer | true | :personal_access_token | :success
+ 'PUBLIC' | :guest | true | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :guest | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :anonymous | false | :none | :unauthorized
+ 'PRIVATE' | :developer | true | :personal_access_token | :success
+ 'PRIVATE' | :guest | true | :personal_access_token | :forbidden
+ 'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :personal_access_token | :not_found
+ 'PRIVATE' | :guest | false | :personal_access_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :anonymous | false | :none | :unauthorized
+ 'PUBLIC' | :developer | true | :job_token | :success
+ 'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
+ 'PUBLIC' | :developer | false | :job_token | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | true | :job_token | :success
+ 'PRIVATE' | :developer | true | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | false | :job_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_job_token | :unauthorized
+ end
+
+ with_them do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility, false))
+ project.send("add_#{user_role}", user) if member? && user_role != :anonymous
+ end
+
+ it "responds with #{params[:expected_status]}" do
+ authorize_upload_file(workhorse_header.merge(auth_header))
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
+ end
+
+ context 'application security' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_name, :param_value) do
+ :package_name | 'my-package/../'
+ :package_name | 'my-package%2f%2e%2e%2f'
+ :file_name | '../.ssh%2fauthorized_keys'
+ :file_name | '%2e%2e%2f.ssh%2fauthorized_keys'
+ end
+
+ with_them do
+ subject { authorize_upload_file(workhorse_header.merge(personal_access_token_header), param_name => param_value) }
+
+ it_behaves_like 'secure endpoint'
end
end
context 'generic_packages feature flag is disabled' do
it 'responds with 404 Not Found' do
stub_feature_flags(generic_packages: false)
+ project.add_developer(user)
- ping(personal_access_token: auth_token)
+ authorize_upload_file(workhorse_header.merge(personal_access_token_header))
expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'generic_packages feature flag is enabled' do
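+ # Issues the pre-upload PUT .../authorize request with the given headers and path parameters.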
+ def authorize_upload_file(request_headers, package_name: 'mypackage', file_name: 'myfile.tar.gz')
+ url = "/projects/#{project.id}/packages/generic/#{package_name}/0.0.1/#{file_name}/authorize"
+
+ put api(url), headers: request_headers
+ end
+ end
+
+ describe 'PUT /api/v4/projects/:id/packages/generic/:package_name/:package_version/:file_name' do
+ include WorkhorseHelpers
+
+ let(:file_upload) { fixture_file_upload('spec/fixtures/packages/generic/myfile.tar.gz') }
+ let(:params) { { file: file_upload } }
+
+ context 'authentication' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
+ 'PUBLIC' | :guest | true | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :guest | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :anonymous | false | :none | :unauthorized
+ 'PRIVATE' | :guest | true | :personal_access_token | :forbidden
+ 'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :personal_access_token | :not_found
+ 'PRIVATE' | :guest | false | :personal_access_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :anonymous | false | :none | :unauthorized
+ 'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
+ 'PUBLIC' | :developer | false | :job_token | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | true | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | false | :job_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_job_token | :unauthorized
+ end
+
+ with_them do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility, false))
+ project.send("add_#{user_role}", user) if member? && user_role != :anonymous
+ end
+
+ it "responds with #{params[:expected_status]}" do
+ headers = workhorse_header.merge(auth_header)
+
+ upload_file(params, headers)
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
+ end
+
+ context 'when user can upload packages and has valid credentials' do
before do
- stub_feature_flags(generic_packages: true)
+ project.add_developer(user)
end
- context 'authenticating using personal access token' do
- it 'responds with 200 OK when valid personal access token is provided' do
- ping(personal_access_token: auth_token)
+ it 'creates package and package file when valid personal access token is used' do
+ headers = workhorse_header.merge(personal_access_token_header)
+
+ expect { upload_file(params, headers) }
+ .to change { project.packages.generic.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:created)
- expect(response).to have_gitlab_http_status(:ok)
+ package = project.packages.generic.last
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_info).to be_nil
+
+ package_file = package.package_files.last
+ expect(package_file.file_name).to eq('myfile.tar.gz')
end
+ end
+
+ it 'creates package, package file, and package build info when valid job token is used' do
+ headers = workhorse_header.merge(job_token_header)
+
+ expect { upload_file(params, headers) }
+ .to change { project.packages.generic.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
- it 'responds with 401 Unauthorized when invalid personal access token provided' do
- ping(personal_access_token: 'invalid-token')
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:created)
- expect(response).to have_gitlab_http_status(:unauthorized)
+ package = project.packages.generic.last
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_info.pipeline).to eq(ci_build.pipeline)
+
+ package_file = package.package_files.last
+ expect(package_file.file_name).to eq('myfile.tar.gz')
end
end
- context 'authenticating using job token' do
- it 'responds with 200 OK when valid job token is provided' do
- job_token = create(:ci_build, :running, user: user).token
+ context 'event tracking' do
+ subject { upload_file(params, workhorse_header.merge(personal_access_token_header)) }
+
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ end
+
+ it 'rejects request without a file from workhorse' do
+ headers = workhorse_header.merge(personal_access_token_header)
+ upload_file({}, headers)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects request without an auth token' do
+ upload_file(params, workhorse_header)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'rejects request without workhorse rewritten fields' do
+ headers = workhorse_header.merge(personal_access_token_header)
+ upload_file(params, headers, send_rewritten_field: false)
- ping(job_token: job_token)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
- expect(response).to have_gitlab_http_status(:ok)
+ it 'rejects request if file size is too large' do
+ allow_next_instance_of(UploadedFile) do |uploaded_file|
+ allow(uploaded_file).to receive(:size).and_return(project.actual_limits.generic_packages_max_file_size + 1)
end
- it 'responds with 401 Unauthorized when invalid job token provided' do
- ping(job_token: 'invalid-token')
+ headers = workhorse_header.merge(personal_access_token_header)
+ upload_file(params, headers)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects request without workhorse header' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).once
- expect(response).to have_gitlab_http_status(:unauthorized)
+ upload_file(params, personal_access_token_header)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'application security' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_name, :param_value) do
+ :package_name | 'my-package/../'
+ :package_name | 'my-package%2f%2e%2e%2f'
+ :file_name | '../.ssh%2fauthorized_keys'
+ :file_name | '%2e%2e%2f.ssh%2fauthorized_keys'
+ end
+
+ with_them do
+ subject { upload_file(params, workhorse_header.merge(personal_access_token_header), param_name => param_value) }
+
+ it_behaves_like 'secure endpoint'
+ end
+ end
+
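+ # Uploads through workhorse_finalize so the request carries the Workhorse-rewritten
+ # file fields (toggled by send_rewritten_field).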
+ def upload_file(params, request_headers, send_rewritten_field: true, package_name: 'mypackage', file_name: 'myfile.tar.gz')
+ url = "/projects/#{project.id}/packages/generic/#{package_name}/0.0.1/#{file_name}"
+
+ workhorse_finalize(
+ api(url),
+ method: :put,
+ file_key: :file,
+ params: params,
+ headers: request_headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+ end
+
+ describe 'GET /api/v4/projects/:id/packages/generic/:package_name/:package_version/:file_name' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:package) { create(:generic_package, project: project) }
+ let_it_be(:package_file) { create(:package_file, :generic, package: package) }
+
+ context 'authentication' do
+ where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
+ 'PUBLIC' | :developer | true | :personal_access_token | :success
+ 'PUBLIC' | :guest | true | :personal_access_token | :success
+ 'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :personal_access_token | :success
+ 'PUBLIC' | :guest | false | :personal_access_token | :success
+ 'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :anonymous | false | :none | :unauthorized
+ 'PRIVATE' | :developer | true | :personal_access_token | :success
+ 'PRIVATE' | :guest | true | :personal_access_token | :forbidden
+ 'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :personal_access_token | :not_found
+ 'PRIVATE' | :guest | false | :personal_access_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :anonymous | false | :none | :unauthorized
+ 'PUBLIC' | :developer | true | :job_token | :success
+ 'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
+ 'PUBLIC' | :developer | false | :job_token | :success
+ 'PUBLIC' | :developer | false | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | true | :job_token | :success
+ 'PRIVATE' | :developer | true | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | false | :job_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_job_token | :unauthorized
+ end
+
+ with_them do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility, false))
+ project.send("add_#{user_role}", user) if member? && user_role != :anonymous
+ end
+
+ it "responds with #{params[:expected_status]}" do
+ download_file(auth_header)
+
+ expect(response).to have_gitlab_http_status(expected_status)
end
end
end
- def ping(personal_access_token: nil, job_token: nil)
- headers = {
- Gitlab::Auth::AuthFinders::PRIVATE_TOKEN_HEADER => personal_access_token.presence,
- Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER => job_token.presence
- }.compact
+ context 'event tracking' do
+ before do
+ project.add_developer(user)
+ end
+
+ subject { download_file(personal_access_token_header) }
+
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ end
+
+ it 'rejects a malicious file name request with path traversal' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, file_name: '../.ssh%2fauthorized_keys')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects a malicious file name request with encoded path traversal' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, file_name: '%2e%2e%2f.ssh%2fauthorized_keys')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects a malicious package name request with path traversal' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, package_name: 'my-package/../')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects a malicious package name request with encoded path traversal' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, package_name: 'my-package%2f%2e%2e%2f')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ context 'application security' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_name, :param_value) do
+ :package_name | 'my-package/../'
+ :package_name | 'my-package%2f%2e%2e%2f'
+ :file_name | '../.ssh%2fauthorized_keys'
+ :file_name | '%2e%2e%2f.ssh%2fauthorized_keys'
+ end
+
+ with_them do
+ subject { download_file(personal_access_token_header, param_name => param_value) }
+
+ it_behaves_like 'secure endpoint'
+ end
+ end
+
+ it 'responds with 404 Not Found for a non-existent package' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, package_name: 'no-such-package')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'responds with 404 Not Found for a non-existent package file' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, file_name: 'no-such-file')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
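+ # Fetches a package file, defaulting to the package and file created in the let_it_be setup.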
+ def download_file(request_headers, package_name: nil, file_name: nil)
+ package_name ||= package.name
+ file_name ||= package_file.file_name
+ url = "/projects/#{project.id}/packages/generic/#{package_name}/#{package.version}/#{file_name}"
- get api('/projects/%d/packages/generic/ping' % project.id), headers: headers
+ get api(url), headers: request_headers
end
end
end
diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
index 0838900eaba..5d5b963fed5 100644
--- a/spec/requests/api/graphql/boards/board_lists_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe 'get board lists' do
let_it_be(:user) { create(:user) }
let_it_be(:unauth_user) { create(:user) }
- let_it_be(:project) { create(:project, creator_id: user.id, namespace: user.namespace ) }
let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, creator_id: user.id, group: group) }
let_it_be(:project_label) { create(:label, project: project, name: 'Development') }
let_it_be(:project_label2) { create(:label, project: project, name: 'Testing') }
let_it_be(:group_label) { create(:group_label, group: group, name: 'Development') }
@@ -111,12 +111,19 @@ RSpec.describe 'get board lists' do
board_parent.add_reporter(user)
end
- it 'finds the correct list' do
+ it 'returns the correct list with issue count for matching issue filters' do
label_list = create(:list, board: board, label: label, position: 10)
+ create(:issue, project: project, labels: [label, label2])
+ create(:issue, project: project, labels: [label])
- post_graphql(query("id: \"#{global_id_of(label_list)}\""), current_user: user)
+ post_graphql(query(id: global_id_of(label_list), issueFilters: { labelName: label2.title }), current_user: user)
- expect(lists_data[0]['node']['title']).to eq label_list.title
+ aggregate_failures do
+ list_node = lists_data[0]['node']
+
+ expect(list_node['title']).to eq label_list.title
+ expect(list_node['issuesCount']).to eq 1
+ end
end
end
end
diff --git a/spec/requests/api/graphql/gitlab_schema_spec.rb b/spec/requests/api/graphql/gitlab_schema_spec.rb
index ee7dba545be..fe1c7c15de2 100644
--- a/spec/requests/api/graphql/gitlab_schema_spec.rb
+++ b/spec/requests/api/graphql/gitlab_schema_spec.rb
@@ -190,7 +190,9 @@ RSpec.describe 'GitlabSchema configurations' do
variables: {}.to_s,
complexity: 181,
depth: 13,
- duration_s: 7
+ duration_s: 7,
+ used_fields: an_instance_of(Array),
+ used_deprecated_fields: an_instance_of(Array)
}
expect_any_instance_of(Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer).to receive(:duration).and_return(7)
diff --git a/spec/requests/api/graphql/group/merge_requests_spec.rb b/spec/requests/api/graphql/group/merge_requests_spec.rb
new file mode 100644
index 00000000000..e9a5e558b1d
--- /dev/null
+++ b/spec/requests/api/graphql/group/merge_requests_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# Based on ee/spec/requests/api/epics_spec.rb
+# Should follow closely in order to ensure all situations are covered
+RSpec.describe 'Query.group.mergeRequests' do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:sub_group) { create(:group, parent: group) }
+
+ let_it_be(:project_a) { create(:project, :repository, group: group) }
+ let_it_be(:project_b) { create(:project, :repository, group: group) }
+ let_it_be(:project_c) { create(:project, :repository, group: sub_group) }
+ let_it_be(:project_x) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, developer_projects: [project_x]) }
+
+ let_it_be(:mr_attrs) do
+ { target_branch: 'master' }
+ end
+
+ let_it_be(:mr_traits) do
+ [:unique_branches, :unique_author]
+ end
+
+ let_it_be(:mrs_a, reload: true) { create_list(:merge_request, 2, *mr_traits, **mr_attrs, source_project: project_a) }
+ let_it_be(:mrs_b, reload: true) { create_list(:merge_request, 2, *mr_traits, **mr_attrs, source_project: project_b) }
+ let_it_be(:mrs_c, reload: true) { create_list(:merge_request, 2, *mr_traits, **mr_attrs, source_project: project_c) }
+ let_it_be(:other_mr) { create(:merge_request, source_project: project_x) }
+
+ let(:mrs_data) { graphql_data_at(:group, :merge_requests, :nodes) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ def expected_mrs(mrs)
+ mrs.map { |mr| a_hash_including('id' => global_id_of(mr)) }
+ end
+
+ describe 'not passing any arguments' do
+ let(:query) do
+ <<~GQL
+ query($path: ID!) {
+ group(fullPath: $path) {
+ mergeRequests { nodes { id } }
+ }
+ }
+ GQL
+ end
+
+ it 'can find all merge requests in the group, excluding sub-groups' do
+ post_graphql(query, current_user: user, variables: { path: group.full_path })
+
+ expect(mrs_data).to match_array(expected_mrs(mrs_a + mrs_b))
+ end
+ end
+
+ describe 'restricting by author' do
+ let(:query) do
+ <<~GQL
+ query($path: ID!, $user: String) {
+ group(fullPath: $path) {
+ mergeRequests(authorUsername: $user) { nodes { id author { username } } }
+ }
+ }
+ GQL
+ end
+
+ let(:author) { mrs_b.first.author }
+
+ it 'can find all merge requests with user as author' do
+ post_graphql(query, current_user: user, variables: { user: author.username, path: group.full_path })
+
+ expect(mrs_data).to match_array(expected_mrs([mrs_b.first]))
+ end
+ end
+
+ describe 'restricting by assignee' do
+ let(:query) do
+ <<~GQL
+ query($path: ID!, $user: String) {
+ group(fullPath: $path) {
+ mergeRequests(assigneeUsername: $user) { nodes { id } }
+ }
+ }
+ GQL
+ end
+
+ let_it_be(:assignee) { create(:user) }
+
+ before_all do
+ mrs_b.second.assignees << assignee
+ mrs_a.first.assignees << assignee
+ end
+
+ it 'can find all merge requests assigned to user' do
+ post_graphql(query, current_user: user, variables: { user: assignee.username, path: group.full_path })
+
+ expect(mrs_data).to match_array(expected_mrs([mrs_a.first, mrs_b.second]))
+ end
+ end
+
+ describe 'passing include_subgroups: true' do
+ let(:query) do
+ <<~GQL
+ query($path: ID!) {
+ group(fullPath: $path) {
+ mergeRequests(includeSubgroups: true) { nodes { id } }
+ }
+ }
+ GQL
+ end
+
+ it 'can find all merge requests in the group, including sub-groups' do
+ post_graphql(query, current_user: user, variables: { path: group.full_path })
+
+ expect(mrs_data).to match_array(expected_mrs(mrs_a + mrs_b + mrs_c))
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/instance_statistics_measurements_spec.rb b/spec/requests/api/graphql/instance_statistics_measurements_spec.rb
index b8cbe54534a..5d7dbcf2e3c 100644
--- a/spec/requests/api/graphql/instance_statistics_measurements_spec.rb
+++ b/spec/requests/api/graphql/instance_statistics_measurements_spec.rb
@@ -9,13 +9,16 @@ RSpec.describe 'InstanceStatisticsMeasurements' do
let!(:instance_statistics_measurement_1) { create(:instance_statistics_measurement, :project_count, recorded_at: 20.days.ago, count: 5) }
let!(:instance_statistics_measurement_2) { create(:instance_statistics_measurement, :project_count, recorded_at: 10.days.ago, count: 10) }
- let(:query) { graphql_query_for(:instanceStatisticsMeasurements, 'identifier: PROJECTS', 'nodes { count }') }
+ let(:query) { graphql_query_for(:instanceStatisticsMeasurements, 'identifier: PROJECTS', 'nodes { count identifier }') }
before do
post_graphql(query, current_user: current_user)
end
it 'returns measurement objects' do
- expect(graphql_data.dig('instanceStatisticsMeasurements', 'nodes')).to eq([{ "count" => 10 }, { "count" => 5 }])
+ expect(graphql_data.dig('instanceStatisticsMeasurements', 'nodes')).to eq([
+ { "count" => 10, 'identifier' => 'PROJECTS' },
+ { "count" => 5, 'identifier' => 'PROJECTS' }
+ ])
end
end
diff --git a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
index 1d38bb39d59..3aaebb5095a 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
@@ -45,8 +45,9 @@ RSpec.describe 'Adding an AwardEmoji' do
it_behaves_like 'a mutation that does not create an AwardEmoji'
- it_behaves_like 'a mutation that returns top-level errors',
- errors: ['Cannot award emoji to this resource']
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/was provided invalid value for awardableId/) }
+ end
end
context 'when the given awardable is an Awardable but still cannot be awarded an emoji' do
diff --git a/spec/requests/api/graphql/mutations/award_emojis/remove_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/remove_spec.rb
index c6e8800de1f..7cd39f93ae7 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/remove_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/remove_spec.rb
@@ -50,8 +50,9 @@ RSpec.describe 'Removing an AwardEmoji' do
it_behaves_like 'a mutation that does not destroy an AwardEmoji'
- it_behaves_like 'a mutation that returns top-level errors',
- errors: ['Cannot award emoji to this resource']
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/was provided invalid value for awardableId/) }
+ end
end
context 'when the given awardable is an Awardable' do
diff --git a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
index 2df59ce97ca..6910ad80a11 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
@@ -44,8 +44,9 @@ RSpec.describe 'Toggling an AwardEmoji' do
it_behaves_like 'a mutation that does not create or destroy an AwardEmoji'
- it_behaves_like 'a mutation that returns top-level errors',
- errors: ['Cannot award emoji to this resource']
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/was provided invalid value for awardableId/) }
+ end
end
context 'when the given awardable is an Awardable but still cannot be awarded an emoji' do
diff --git a/spec/requests/api/graphql/mutations/boards/create_spec.rb b/spec/requests/api/graphql/mutations/boards/create_spec.rb
new file mode 100644
index 00000000000..c5f981262ea
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/boards/create_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Boards::Create do
+ let_it_be(:parent) { create(:project) }
+ let(:project_path) { parent.full_path }
+ let(:params) do
+ {
+ project_path: project_path,
+ name: name
+ }
+ end
+
+ it_behaves_like 'boards create mutation'
+end
diff --git a/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb b/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb
new file mode 100644
index 00000000000..42f690f53ed
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Boards::Lists::Destroy do
+ include GraphqlHelpers
+
+ let_it_be(:current_user, reload: true) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:list) { create(:list, board: board) }
+ let(:mutation) do
+ variables = {
+ list_id: GitlabSchema.id_from_object(list).to_s
+ }
+
+ graphql_mutation(:destroy_board_list, variables)
+ end
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ def mutation_response
+ graphql_mutation_response(:destroy_board_list)
+ end
+
+ context 'when the user does not have permission' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not destroy the list' do
+ expect { subject }.not_to change { List.count }
+ end
+ end
+
+ context 'when the user has permission' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ context 'when given id is not for a list' do
+ let_it_be(:list) { build_stubbed(:issue, project: project) }
+
+ it 'returns an error' do
+ subject
+
+ expect(graphql_errors.first['message']).to include('does not represent an instance of List')
+ end
+ end
+
+ context 'when everything is ok' do
+ it 'destroys the list' do
+ expect { subject }.to change { List.count }.from(2).to(1)
+ end
+
+ it 'returns an empty list' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to have_key('list')
+ expect(mutation_response['list']).to be_nil
+ end
+ end
+
+ context 'when the list is not destroyable' do
+ let_it_be(:list) { create(:list, board: board, list_type: :backlog) }
+
+ it 'does not destroy the list' do
+ expect { subject }.not_to change { List.count }.from(3)
+ end
+
+    it 'returns an error and a non-nil list' do
+ subject
+
+ expect(mutation_response['errors']).not_to be_empty
+ expect(mutation_response['list']).not_to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/create_spec.rb b/spec/requests/api/graphql/mutations/issues/create_spec.rb
new file mode 100644
index 00000000000..39b408faa90
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/issues/create_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Create an issue' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:assignee1) { create(:user) }
+ let_it_be(:assignee2) { create(:user) }
+ let_it_be(:project_label1) { create(:label, project: project) }
+ let_it_be(:project_label2) { create(:label, project: project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:new_label1) { FFaker::Lorem.word }
+ let_it_be(:new_label2) { FFaker::Lorem.word }
+
+ let(:input) do
+ {
+ 'title' => 'new title',
+ 'description' => 'new description',
+ 'confidential' => true,
+ 'dueDate' => Date.tomorrow.strftime('%Y-%m-%d')
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:createIssue, input.merge('projectPath' => project.full_path, 'locked' => true)) }
+
+ let(:mutation_response) { graphql_mutation_response(:create_issue) }
+
+ context 'the user is not allowed to create an issue' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to create an issue' do
+ before do
+ project.add_developer(current_user)
+ end
+
+    it 'creates the issue' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['issue']).to include(input)
+ expect(mutation_response['issue']).to include('discussionLocked' => true)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/move_spec.rb b/spec/requests/api/graphql/mutations/issues/move_spec.rb
new file mode 100644
index 00000000000..5bbaff61edd
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/issues/move_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Moving an issue' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:target_project) { create(:project) }
+
+ let(:mutation) do
+ variables = {
+ project_path: issue.project.full_path,
+ target_project_path: target_project.full_path,
+ iid: issue.iid.to_s
+ }
+
+ graphql_mutation(:issue_move, variables,
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ issue {
+ title
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:issue_move)
+ end
+
+ context 'when the user is not allowed to read source project' do
+ it 'returns an error' do
+ error = "The resource that you are attempting to access does not exist or you don't have permission to perform this action"
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(graphql_errors).to include(a_hash_including('message' => error))
+ end
+ end
+
+ context 'when the user is not allowed to move issue to target project' do
+ before do
+ issue.project.add_developer(user)
+ end
+
+ it 'returns an error' do
+ error = "Cannot move issue due to insufficient permissions!"
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors'][0]).to eq(error)
+ end
+ end
+
+ context 'when the user is allowed to move issue' do
+ before do
+ issue.project.add_developer(user)
+ target_project.add_developer(user)
+ end
+
+ it 'moves the issue' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response.dig('issue', 'title')).to eq(issue.title)
+ expect(issue.reload.state).to eq('closed')
+ expect(target_project.issues.find_by_title(issue.title)).to be_present
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/update_spec.rb b/spec/requests/api/graphql/mutations/issues/update_spec.rb
index af52f9d57a3..71f25dbbe49 100644
--- a/spec/requests/api/graphql/mutations/issues/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/update_spec.rb
@@ -10,13 +10,15 @@ RSpec.describe 'Update of an existing issue' do
let_it_be(:issue) { create(:issue, project: project) }
let(:input) do
{
- project_path: project.full_path,
- iid: issue.iid.to_s,
- locked: true
+ 'iid' => issue.iid.to_s,
+ 'title' => 'new title',
+ 'description' => 'new description',
+ 'confidential' => true,
+ 'dueDate' => Date.tomorrow.strftime('%Y-%m-%d')
}
end
- let(:mutation) { graphql_mutation(:update_issue, input) }
+ let(:mutation) { graphql_mutation(:update_issue, input.merge(project_path: project.full_path, locked: true)) }
let(:mutation_response) { graphql_mutation_response(:update_issue) }
context 'the user is not allowed to update issue' do
@@ -32,9 +34,8 @@ RSpec.describe 'Update of an existing issue' do
post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['issue']).to include(
- 'discussionLocked' => true
- )
+ expect(mutation_response['issue']).to include(input)
+ expect(mutation_response['issue']).to include('discussionLocked' => true)
end
end
end
diff --git a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
index 10ca2cf1cf8..81d13b29dde 100644
--- a/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/metrics/dashboard/annotations/create_spec.rb
@@ -101,7 +101,9 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create do
graphql_mutation(:create_annotation, variables)
end
- it_behaves_like 'a mutation that returns top-level errors', errors: ['invalid_id is not a valid GitLab ID.']
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/is not a valid Global ID/) }
+ end
end
end
end
@@ -109,7 +111,7 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create do
context 'when annotation source is cluster' do
let(:mutation) do
variables = {
- cluster_id: GitlabSchema.id_from_object(cluster).to_s,
+ cluster_id: cluster.to_global_id.to_s,
starting_at: starting_at,
ending_at: ending_at,
dashboard_path: dashboard_path,
@@ -188,15 +190,17 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create do
graphql_mutation(:create_annotation, variables)
end
- it_behaves_like 'a mutation that returns top-level errors', errors: ['invalid_id is not a valid GitLab ID.']
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/is not a valid Global ID/) }
+ end
end
end
context 'when both environment_id and cluster_id are provided' do
let(:mutation) do
variables = {
- environment_id: GitlabSchema.id_from_object(environment).to_s,
- cluster_id: GitlabSchema.id_from_object(cluster).to_s,
+ environment_id: environment.to_global_id.to_s,
+ cluster_id: cluster.to_global_id.to_s,
starting_at: starting_at,
ending_at: ending_at,
dashboard_path: dashboard_path,
@@ -210,14 +214,14 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create do
end
context 'when a non-cluster or environment id is provided' do
+ let(:gid) { { environment_id: project.to_global_id.to_s } }
let(:mutation) do
variables = {
- environment_id: GitlabSchema.id_from_object(project).to_s,
starting_at: starting_at,
ending_at: ending_at,
dashboard_path: dashboard_path,
description: description
- }
+ }.merge!(gid)
graphql_mutation(:create_annotation, variables)
end
@@ -226,6 +230,18 @@ RSpec.describe Mutations::Metrics::Dashboard::Annotations::Create do
project.add_developer(current_user)
end
- it_behaves_like 'a mutation that returns top-level errors', errors: [described_class::INVALID_ANNOTATION_SOURCE_ERROR]
+ describe 'non-environment id' do
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/does not represent an instance of Environment/) }
+ end
+ end
+
+ describe 'non-cluster id' do
+ let(:gid) { { cluster_id: project.to_global_id.to_s } }
+
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/does not represent an instance of Clusters::Cluster/) }
+ end
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
index 391ced7dc98..6d761eb0a54 100644
--- a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
@@ -60,6 +60,14 @@ RSpec.describe 'Adding a Note' do
expect(mutation_response['note']['discussion']['id']).to eq(discussion.to_global_id.to_s)
end
+
+ context 'when the discussion_id is not for a Discussion' do
+ let(:discussion) { create(:issue) }
+
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/ does not represent an instance of Discussion/) }
+ end
+ end
end
end
end
diff --git a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
index 0c00906d6bf..efa2ceb65c2 100644
--- a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
@@ -178,6 +178,12 @@ RSpec.describe 'Updating an image DiffNote' do
it_behaves_like 'a mutation that returns top-level errors', errors: ['body or position arguments are required']
end
+ context 'when the resource is not a Note' do
+ let(:diff_note) { note }
+
+ it_behaves_like 'a Note mutation when the given resource id is not for a Note'
+ end
+
context 'when resource is not a DiffNote on an image' do
let!(:diff_note) { create(:diff_note_on_merge_request, note: original_body) }
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index 1bb446de708..d2fa3cfc24f 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -76,21 +76,25 @@ RSpec.describe 'Creating a Snippet' do
expect(mutation_response['snippet']).to be_nil
end
+
+ it_behaves_like 'spam flag is present'
end
shared_examples 'creates snippet' do
- it 'returns the created Snippet' do
+ it 'returns the created Snippet', :aggregate_failures do
expect do
subject
end.to change { Snippet.count }.by(1)
+ snippet = Snippet.last
+ created_file_1 = snippet.repository.blob_at('HEAD', file_1[:filePath])
+ created_file_2 = snippet.repository.blob_at('HEAD', file_2[:filePath])
+
+ expect(created_file_1.data).to match(file_1[:content])
+ expect(created_file_2.data).to match(file_2[:content])
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
expect(mutation_response['snippet']['visibilityLevel']).to eq(visibility_level)
- expect(mutation_response['snippet']['blobs'][0]['plainData']).to match(file_1[:content])
- expect(mutation_response['snippet']['blobs'][0]['fileName']).to match(file_1[:file_path])
- expect(mutation_response['snippet']['blobs'][1]['plainData']).to match(file_2[:content])
- expect(mutation_response['snippet']['blobs'][1]['fileName']).to match(file_2[:file_path])
end
context 'when action is invalid' do
@@ -101,6 +105,10 @@ RSpec.describe 'Creating a Snippet' do
end
it_behaves_like 'snippet edit usage data counters'
+ it_behaves_like 'spam flag is present'
+ it_behaves_like 'can raise spam flag' do
+ let(:service) { Snippets::CreateService }
+ end
end
context 'with PersonalSnippet' do
@@ -140,6 +148,9 @@ RSpec.describe 'Creating a Snippet' do
it_behaves_like 'a mutation that returns errors in the response', errors: ["Title can't be blank"]
it_behaves_like 'does not create snippet'
+ it_behaves_like 'can raise spam flag' do
+ let(:service) { Snippets::CreateService }
+ end
end
context 'when there are non-ActiveRecord errors' do
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 58ce74b9263..21d403c6f73 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -37,6 +37,8 @@ RSpec.describe 'Updating a Snippet' do
graphql_mutation_response(:update_snippet)
end
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
shared_examples 'graphql update actions' do
context 'when the user does not have permission' do
let(:current_user) { create(:user) }
@@ -46,14 +48,14 @@ RSpec.describe 'Updating a Snippet' do
it 'does not update the Snippet' do
expect do
- post_graphql_mutation(mutation, current_user: current_user)
+ subject
end.not_to change { snippet.reload }
end
end
context 'when the user has permission' do
it 'updates the snippet record' do
- post_graphql_mutation(mutation, current_user: current_user)
+ subject
expect(snippet.reload.title).to eq(updated_title)
end
@@ -65,7 +67,7 @@ RSpec.describe 'Updating a Snippet' do
expect(blob_to_update.data).not_to eq updated_content
expect(blob_to_delete).to be_present
- post_graphql_mutation(mutation, current_user: current_user)
+ subject
blob_to_update = blob_at(updated_file)
blob_to_delete = blob_at(deleted_file)
@@ -73,20 +75,25 @@ RSpec.describe 'Updating a Snippet' do
aggregate_failures do
expect(blob_to_update.data).to eq updated_content
expect(blob_to_delete).to be_nil
- expect(blob_in_mutation_response(updated_file)['plainData']).to match(updated_content)
expect(mutation_response['snippet']['title']).to eq(updated_title)
expect(mutation_response['snippet']['description']).to eq(updated_description)
expect(mutation_response['snippet']['visibilityLevel']).to eq('public')
end
end
+ it_behaves_like 'can raise spam flag' do
+ let(:service) { Snippets::UpdateService }
+ end
+
+ it_behaves_like 'spam flag is present'
+
context 'when there are ActiveRecord validation errors' do
let(:updated_title) { '' }
it_behaves_like 'a mutation that returns errors in the response', errors: ["Title can't be blank"]
it 'does not update the Snippet' do
- post_graphql_mutation(mutation, current_user: current_user)
+ subject
expect(snippet.reload.title).to eq(original_title)
end
@@ -95,21 +102,21 @@ RSpec.describe 'Updating a Snippet' do
blob_to_update = blob_at(updated_file)
blob_to_delete = blob_at(deleted_file)
- post_graphql_mutation(mutation, current_user: current_user)
+ subject
aggregate_failures do
expect(blob_at(updated_file).data).to eq blob_to_update.data
expect(blob_at(deleted_file).data).to eq blob_to_delete.data
- expect(blob_in_mutation_response(deleted_file)['plainData']).not_to be_nil
expect(mutation_response['snippet']['title']).to eq(original_title)
expect(mutation_response['snippet']['description']).to eq(original_description)
expect(mutation_response['snippet']['visibilityLevel']).to eq('private')
end
end
- end
- def blob_in_mutation_response(filename)
- mutation_response['snippet']['blobs'].select { |blob| blob['name'] == filename }[0]
+ it_behaves_like 'spam flag is present'
+ it_behaves_like 'can raise spam flag' do
+ let(:service) { Snippets::UpdateService }
+ end
end
def blob_at(filename)
@@ -150,7 +157,7 @@ RSpec.describe 'Updating a Snippet' do
context 'when the author is not a member of the project' do
it 'returns an error' do
- post_graphql_mutation(mutation, current_user: current_user)
+ subject
errors = json_response['errors']
expect(errors.first['message']).to eq(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
@@ -168,7 +175,7 @@ RSpec.describe 'Updating a Snippet' do
it 'returns an error' do
project.project_feature.update_attribute(:snippets_access_level, ProjectFeature::DISABLED)
- post_graphql_mutation(mutation, current_user: current_user)
+ subject
errors = json_response['errors']
expect(errors.first['message']).to eq(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
diff --git a/spec/requests/api/graphql/mutations/todos/mark_done_spec.rb b/spec/requests/api/graphql/mutations/todos/mark_done_spec.rb
index 8bf8b96aff5..8a9a0b9e845 100644
--- a/spec/requests/api/graphql/mutations/todos/mark_done_spec.rb
+++ b/spec/requests/api/graphql/mutations/todos/mark_done_spec.rb
@@ -76,15 +76,15 @@ RSpec.describe 'Marking todos done' do
end
context 'when using an invalid gid' do
- let(:input) { { id: 'invalid_gid' } }
- let(:invalid_gid_error) { 'invalid_gid is not a valid GitLab ID.' }
+ let(:input) { { id: GitlabSchema.id_from_object(author).to_s } }
+ let(:invalid_gid_error) { /"#{input[:id]}" does not represent an instance of #{todo1.class}/ }
it 'contains the expected error' do
post_graphql_mutation(mutation, current_user: current_user)
errors = json_response['errors']
expect(errors).not_to be_blank
- expect(errors.first['message']).to eq(invalid_gid_error)
+ expect(errors.first['message']).to match(invalid_gid_error)
expect(todo1.reload.state).to eq('pending')
expect(todo2.reload.state).to eq('done')
diff --git a/spec/requests/api/graphql/mutations/todos/restore_spec.rb b/spec/requests/api/graphql/mutations/todos/restore_spec.rb
index 8451dcdf587..a58c7fc69fc 100644
--- a/spec/requests/api/graphql/mutations/todos/restore_spec.rb
+++ b/spec/requests/api/graphql/mutations/todos/restore_spec.rb
@@ -76,15 +76,15 @@ RSpec.describe 'Restoring Todos' do
end
context 'when using an invalid gid' do
- let(:input) { { id: 'invalid_gid' } }
- let(:invalid_gid_error) { 'invalid_gid is not a valid GitLab ID.' }
+ let(:input) { { id: GitlabSchema.id_from_object(author).to_s } }
+ let(:invalid_gid_error) { /"#{input[:id]}" does not represent an instance of #{todo1.class}/ }
it 'contains the expected error' do
post_graphql_mutation(mutation, current_user: current_user)
errors = json_response['errors']
expect(errors).not_to be_blank
- expect(errors.first['message']).to eq(invalid_gid_error)
+ expect(errors.first['message']).to match(invalid_gid_error)
expect(todo1.reload.state).to eq('done')
expect(todo2.reload.state).to eq('pending')
diff --git a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
index d3a2e6a1deb..8deed75a466 100644
--- a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
@@ -139,6 +139,19 @@ RSpec.describe 'getting Alert Management Alerts' do
it { expect(alerts.size).to eq(0) }
end
end
+
+ context 'assignee_username' do
+ let(:alert) { triggered_alert }
+ let(:assignee) { alert.assignees.first! }
+ let(:params) { { assignee_username: assignee.username } }
+
+ it_behaves_like 'a working graphql query'
+
+ specify do
+ expect(alerts.size).to eq(1)
+ expect(first_alert['iid']).to eq(alert.iid.to_s)
+ end
+ end
end
end
end
diff --git a/spec/requests/api/graphql/project/issue/designs/notes_spec.rb b/spec/requests/api/graphql/project/issue/designs/notes_spec.rb
index 65191e057c7..e25453510d5 100644
--- a/spec/requests/api/graphql/project/issue/designs/notes_spec.rb
+++ b/spec/requests/api/graphql/project/issue/designs/notes_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe 'Getting designs related to an issue' do
post_graphql(query(note_fields), current_user: nil)
designs_data = graphql_data['project']['issue']['designs']['designs']
- design_data = designs_data['edges'].first['node']
- note_data = design_data['notes']['edges'].first['node']
+ design_data = designs_data['nodes'].first
+ note_data = design_data['notes']['nodes'].first
expect(note_data['id']).to eq(note.to_global_id.to_s)
end
@@ -40,14 +40,10 @@ RSpec.describe 'Getting designs related to an issue' do
def query(note_fields = all_graphql_fields_for(Note))
design_node = <<~NODE
designs {
- edges {
- node {
- notes {
- edges {
- node {
- #{note_fields}
- }
- }
+ nodes {
+ notes {
+ nodes {
+ #{note_fields}
}
}
}
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index 5d4276f47ca..40fec6ba068 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -53,16 +53,37 @@ RSpec.describe 'getting an issue list for a project' do
context 'when limiting the number of results' do
let(:query) do
- graphql_query_for(
- 'project',
- { 'fullPath' => project.full_path },
- "issues(first: 1) { #{fields} }"
- )
+ <<~GQL
+ query($path: ID!, $n: Int) {
+ project(fullPath: $path) {
+ issues(first: $n) { #{fields} }
+ }
+ }
+ GQL
+ end
+
+ let(:issue_limit) { 1 }
+ let(:variables) do
+ { path: project.full_path, n: issue_limit }
end
it_behaves_like 'a working graphql query' do
before do
- post_graphql(query, current_user: current_user)
+ post_graphql(query, current_user: current_user, variables: variables)
+ end
+
+ it 'only returns N issues' do
+ expect(issues_data.size).to eq(issue_limit)
+ end
+ end
+
+    context 'when no limit is provided' do
+ let(:issue_limit) { nil }
+
+ it 'returns all issues' do
+ post_graphql(query, current_user: current_user, variables: variables)
+
+ expect(issues_data.size).to be > 1
end
end
@@ -71,7 +92,7 @@ RSpec.describe 'getting an issue list for a project' do
# Newest first, we only want to see the newest checked
expect(Ability).not_to receive(:allowed?).with(current_user, :read_issue, issues.first)
- post_graphql(query, current_user: current_user)
+ post_graphql(query, current_user: current_user, variables: variables)
end
end
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index 22b003501a1..c737e0b8caf 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
let_it_be(:merge_request_b) { create(:merge_request, :closed, :unique_branches, source_project: project) }
let_it_be(:merge_request_c) { create(:labeled_merge_request, :closed, :unique_branches, source_project: project, labels: [label]) }
let_it_be(:merge_request_d) { create(:merge_request, :locked, :unique_branches, source_project: project) }
+ let_it_be(:merge_request_e) { create(:merge_request, :unique_branches, source_project: project) }
let(:results) { graphql_data.dig('project', 'mergeRequests', 'nodes') }
@@ -118,7 +119,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
context 'there are no search params' do
let(:search_params) { nil }
- let(:mrs) { [merge_request_a, merge_request_b, merge_request_c, merge_request_d] }
+ let(:mrs) { [merge_request_a, merge_request_b, merge_request_c, merge_request_d, merge_request_e] }
it_behaves_like 'searching with parameters'
end
@@ -172,6 +173,28 @@ RSpec.describe 'getting merge request listings nested in a project' do
it_behaves_like 'searching with parameters'
end
+ context 'when requesting `approved_by`' do
+ let(:search_params) { { iids: [merge_request_a.iid.to_s, merge_request_b.iid.to_s] } }
+ let(:extra_iid_for_second_query) { merge_request_c.iid.to_s }
+ let(:requested_fields) { query_graphql_field(:approved_by, nil, query_graphql_field(:nodes, nil, [:username])) }
+
+ def execute_query
+ query = query_merge_requests(requested_fields)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'exposes approver username' do
+ merge_request_a.approved_by_users << current_user
+
+ execute_query
+
+ user_data = { 'username' => current_user.username }
+ expect(results).to include(a_hash_including('approvedBy' => { 'nodes' => array_including(user_data) }))
+ end
+
+ include_examples 'N+1 query check'
+ end
+
describe 'fields' do
let(:requested_fields) { nil }
let(:extra_iid_for_second_query) { merge_request_c.iid.to_s }
@@ -209,7 +232,19 @@ RSpec.describe 'getting merge request listings nested in a project' do
include_examples 'N+1 query check'
end
+
+ context 'when requesting `user_notes_count`' do
+ let(:requested_fields) { [:user_notes_count] }
+
+ before do
+ create_list(:note_on_merge_request, 2, noteable: merge_request_a, project: project)
+ create(:note_on_merge_request, noteable: merge_request_c, project: project)
+ end
+
+ include_examples 'N+1 query check'
+ end
end
+
describe 'sorting and pagination' do
let(:data_path) { [:project, :mergeRequests] }
@@ -241,16 +276,50 @@ RSpec.describe 'getting merge request listings nested in a project' do
let(:expected_results) do
[
merge_request_b,
- merge_request_c,
merge_request_d,
+ merge_request_c,
+ merge_request_e,
merge_request_a
].map(&:to_gid).map(&:to_s)
end
before do
- merge_request_c.metrics.update!(merged_at: 5.days.ago)
+ five_days_ago = 5.days.ago
+
+ merge_request_d.metrics.update!(merged_at: five_days_ago)
+
+ # same merged_at, the second order column will decide (merge_request.id)
+ merge_request_c.metrics.update!(merged_at: five_days_ago)
+
merge_request_b.metrics.update!(merged_at: 1.day.ago)
end
+
+ context 'when paginating backwards' do
+ let(:params) { 'first: 2, sort: MERGED_AT_DESC' }
+ let(:page_info) { 'pageInfo { startCursor endCursor }' }
+
+ before do
+ post_graphql(pagination_query(params, page_info), current_user: current_user)
+ end
+
+ it 'paginates backwards correctly' do
+ # first page
+ first_page_response_data = graphql_dig_at(Gitlab::Json.parse(response.body), :data, *data_path, :edges)
+ end_cursor = graphql_dig_at(Gitlab::Json.parse(response.body), :data, :project, :mergeRequests, :pageInfo, :endCursor)
+
+ # second page
+ params = "first: 2, after: \"#{end_cursor}\", sort: MERGED_AT_DESC"
+ post_graphql(pagination_query(params, page_info), current_user: current_user)
+ start_cursor = graphql_dig_at(Gitlab::Json.parse(response.body), :data, :project, :mergeRequests, :pageInfo, :start_cursor)
+
+ # going back to the first page
+
+ params = "last: 2, before: \"#{start_cursor}\", sort: MERGED_AT_DESC"
+ post_graphql(pagination_query(params, page_info), current_user: current_user)
+ backward_paginated_response_data = graphql_dig_at(Gitlab::Json.parse(response.body), :data, *data_path, :edges)
+ expect(first_page_response_data).to eq(backward_paginated_response_data)
+ end
+ end
end
end
end
diff --git a/spec/requests/api/graphql/project/milestones_spec.rb b/spec/requests/api/graphql/project/milestones_spec.rb
new file mode 100644
index 00000000000..2fede4c7285
--- /dev/null
+++ b/spec/requests/api/graphql/project/milestones_spec.rb
@@ -0,0 +1,202 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting milestone listings nested in a project' do
+ include GraphqlHelpers
+
+ let_it_be(:today) { Time.now.utc.to_date }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:current_user) { create(:user) }
+
+ let_it_be(:no_dates) { create(:milestone, project: project, title: 'no dates') }
+ let_it_be(:no_end) { create(:milestone, project: project, title: 'no end', start_date: today - 10.days) }
+ let_it_be(:no_start) { create(:milestone, project: project, title: 'no start', due_date: today - 5.days) }
+ let_it_be(:fully_past) { create(:milestone, project: project, title: 'past', start_date: today - 10.days, due_date: today - 5.days) }
+ let_it_be(:covers_today) { create(:milestone, project: project, title: 'present', start_date: today - 5.days, due_date: today + 5.days) }
+ let_it_be(:fully_future) { create(:milestone, project: project, title: 'future', start_date: today + 5.days, due_date: today + 10.days) }
+ let_it_be(:closed) { create(:milestone, :closed, project: project) }
+
+ let(:results) { graphql_data_at(:project, :milestones, :nodes) }
+
+ let(:search_params) { nil }
+
+ def query_milestones(fields)
+ graphql_query_for(
+ :project,
+ { full_path: project.full_path },
+ query_graphql_field(:milestones, search_params, [
+ query_graphql_field(:nodes, nil, %i[id title])
+ ])
+ )
+ end
+
+ def result_list(expected)
+ expected.map do |milestone|
+ a_hash_including('id' => global_id_of(milestone))
+ end
+ end
+
+ let(:query) do
+ query_milestones(all_graphql_fields_for('Milestone', max_depth: 1))
+ end
+
+ let(:all_milestones) do
+ [no_dates, no_end, no_start, fully_past, fully_future, covers_today, closed]
+ end
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ shared_examples 'searching with parameters' do
+ it 'finds the right milestones' do
+ post_graphql(query, current_user: current_user)
+
+ expect(results).to match_array(result_list(expected))
+ end
+ end
+
+ context 'there are no search params' do
+ let(:search_params) { nil }
+ let(:expected) { all_milestones }
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'the search params do not match anything' do
+ let(:search_params) { { title: 'wibble' } }
+ let(:expected) { [] }
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'searching by state:closed' do
+ let(:search_params) { { state: :closed } }
+ let(:expected) { [closed] }
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'searching by state:active' do
+ let(:search_params) { { state: :active } }
+ let(:expected) { all_milestones - [closed] }
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'searching by title' do
+ let(:search_params) { { title: 'no start' } }
+ let(:expected) { [no_start] }
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'searching by search_title' do
+ let(:search_params) { { search_title: 'no' } }
+ let(:expected) { [no_dates, no_start, no_end] }
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'searching by containing_date' do
+ let(:search_params) { { containing_date: (today - 7.days).iso8601 } }
+ let(:expected) { [no_start, no_end, fully_past] }
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'searching by containing_date = today' do
+ let(:search_params) { { containing_date: today.iso8601 } }
+ let(:expected) { [no_end, covers_today] }
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'searching by custom range' do
+ let(:expected) { [no_end, fully_future] }
+ let(:search_params) do
+ {
+ start_date: (today + 6.days).iso8601,
+ end_date: (today + 7.days).iso8601
+ }
+ end
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ context 'using timeframe argument' do
+ let(:expected) { [no_end, fully_future] }
+ let(:search_params) do
+ {
+ timeframe: {
+ start: (today + 6.days).iso8601,
+ end: (today + 7.days).iso8601
+ }
+ }
+ end
+
+ it_behaves_like 'searching with parameters'
+ end
+
+ describe 'timeframe validations' do
+ let(:vars) do
+ {
+ path: project.full_path,
+ start: (today + 6.days).iso8601,
+ end: (today + 7.days).iso8601
+ }
+ end
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ query = <<~GQL
+ query($path: ID!, $start: Date!, $end: Date!) {
+ project(fullPath: $path) {
+ milestones(timeframe: { start: $start, end: $end }) {
+ nodes { id }
+ }
+ }
+ }
+ GQL
+
+ post_graphql(query, current_user: current_user, variables: vars)
+ end
+ end
+
+ it 'is invalid to provide timeframe and start_date/end_date' do
+ query = <<~GQL
+ query($path: ID!, $tstart: Date!, $tend: Date!, $start: Time!, $end: Time!) {
+ project(fullPath: $path) {
+ milestones(timeframe: { start: $tstart, end: $tend }, startDate: $start, endDate: $end) {
+ nodes { id }
+ }
+ }
+ }
+ GQL
+
+ post_graphql(query, current_user: current_user,
+ variables: vars.merge(vars.transform_keys { |k| :"t#{k}" }))
+
+ expect(graphql_errors).to contain_exactly(a_hash_including('message' => include('deprecated in favor of timeframe')))
+ end
+
+ it 'is invalid to invert the timeframe arguments' do
+ query = <<~GQL
+ query($path: ID!, $start: Date!, $end: Date!) {
+ project(fullPath: $path) {
+ milestones(timeframe: { start: $end, end: $start }) {
+ nodes { id }
+ }
+ }
+ }
+ GQL
+
+ post_graphql(query, current_user: current_user, variables: vars)
+
+ expect(graphql_errors).to contain_exactly(a_hash_including('message' => include('start must be before end')))
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/user_query_spec.rb b/spec/requests/api/graphql/user_query_spec.rb
index 2f4dc0a9160..79debd0b7ef 100644
--- a/spec/requests/api/graphql/user_query_spec.rb
+++ b/spec/requests/api/graphql/user_query_spec.rb
@@ -29,15 +29,15 @@ RSpec.describe 'getting user information' do
let_it_be(:unauthorized_user) { create(:user) }
let_it_be(:assigned_mr) do
- create(:merge_request, :unique_branches,
+ create(:merge_request, :unique_branches, :unique_author,
source_project: project_a, assignees: [user])
end
let_it_be(:assigned_mr_b) do
- create(:merge_request, :unique_branches,
+ create(:merge_request, :unique_branches, :unique_author,
source_project: project_b, assignees: [user])
end
let_it_be(:assigned_mr_c) do
- create(:merge_request, :unique_branches,
+ create(:merge_request, :unique_branches, :unique_author,
source_project: project_b, assignees: [user])
end
let_it_be(:authored_mr) do
@@ -133,6 +133,17 @@ RSpec.describe 'getting user information' do
)
end
end
+
+ context 'filtering by author' do
+ let(:author) { assigned_mr_b.author }
+ let(:mr_args) { { author_username: author.username } }
+
+ it 'finds the authored mrs' do
+ expect(assigned_mrs).to contain_exactly(
+ a_hash_including('id' => global_id_of(assigned_mr_b))
+ )
+ end
+ end
end
context 'the current user does not have access' do
@@ -172,6 +183,23 @@ RSpec.describe 'getting user information' do
end
end
+ context 'filtering by assignee' do
+ let(:assignee) { create(:user) }
+ let(:mr_args) { { assignee_username: assignee.username } }
+
+ it 'finds the assigned mrs' do
+ authored_mr.assignees << assignee
+ authored_mr_c.assignees << assignee
+
+ post_graphql(query, current_user: current_user)
+
+ expect(authored_mrs).to contain_exactly(
+ a_hash_including('id' => global_id_of(authored_mr)),
+ a_hash_including('id' => global_id_of(authored_mr_c))
+ )
+ end
+ end
+
context 'filtering by project path and IID' do
let(:mr_args) do
{ project_path: project_b.full_path, iids: [authored_mr_b.iid.to_s] }
@@ -253,8 +281,10 @@ RSpec.describe 'getting user information' do
let(:current_user) { user }
it 'can be found' do
- expect(assigned_mrs).to include(
- a_hash_including('id' => global_id_of(assigned_mr))
+ expect(assigned_mrs).to contain_exactly(
+ a_hash_including('id' => global_id_of(assigned_mr)),
+ a_hash_including('id' => global_id_of(assigned_mr_b)),
+ a_hash_including('id' => global_id_of(assigned_mr_c))
)
end
end
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index ff1a5aa1540..94a66f54e4d 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -9,7 +9,15 @@ RSpec.describe 'GraphQL' do
context 'logging' do
shared_examples 'logging a graphql query' do
let(:expected_params) do
- { query_string: query, variables: variables.to_s, duration_s: anything, depth: 1, complexity: 1 }
+ {
+ query_string: query,
+ variables: variables.to_s,
+ duration_s: anything,
+ depth: 1,
+ complexity: 1,
+ used_fields: ['Query.echo'],
+ used_deprecated_fields: []
+ }
end
it 'logs a query with the expected params' do
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
index 068af1485e2..eb21ae9468c 100644
--- a/spec/requests/api/group_clusters_spec.rb
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -172,6 +172,7 @@ RSpec.describe API::GroupClusters do
name: 'test-cluster',
domain: 'domain.example.com',
managed: false,
+ namespace_per_environment: false,
platform_kubernetes_attributes: platform_kubernetes_attributes,
management_project_id: management_project_id
}
@@ -206,6 +207,7 @@ RSpec.describe API::GroupClusters do
expect(cluster_result.domain).to eq('domain.example.com')
expect(cluster_result.managed).to be_falsy
expect(cluster_result.management_project_id).to eq management_project_id
+ expect(cluster_result.namespace_per_environment).to eq(false)
expect(platform_kubernetes.rbac?).to be_truthy
expect(platform_kubernetes.api_url).to eq(api_url)
expect(platform_kubernetes.token).to eq('sample-token')
@@ -237,6 +239,22 @@ RSpec.describe API::GroupClusters do
end
end
+ context 'when namespace_per_environment is not set' do
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ domain: 'domain.example.com',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ it 'defaults to true' do
+ cluster_result = Clusters::Cluster.find(json_response['id'])
+
+ expect(cluster_result).to be_namespace_per_environment
+ end
+ end
+
context 'current user does not have access to management_project_id' do
let(:management_project_id) { create(:project).id }
diff --git a/spec/requests/api/group_container_repositories_spec.rb b/spec/requests/api/group_container_repositories_spec.rb
index 3128becae6d..4584ef37bd0 100644
--- a/spec/requests/api/group_container_repositories_spec.rb
+++ b/spec/requests/api/group_container_repositories_spec.rb
@@ -25,7 +25,6 @@ RSpec.describe API::GroupContainerRepositories do
group.add_reporter(reporter)
group.add_guest(guest)
- stub_feature_flags(container_registry_api: true)
stub_container_registry_config(enabled: true)
root_repository
@@ -44,7 +43,7 @@ RSpec.describe API::GroupContainerRepositories do
let(:object) { group }
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_repositories'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
context 'with invalid group id' do
let(:url) { "/groups/#{non_existing_record_id}/registry/repositories" }
diff --git a/spec/requests/api/group_packages_spec.rb b/spec/requests/api/group_packages_spec.rb
index f67cafbd8f5..72ba25c59af 100644
--- a/spec/requests/api/group_packages_spec.rb
+++ b/spec/requests/api/group_packages_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe API::GroupPackages do
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
- it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
+ it_behaves_like 'returns packages', :group, :reporter
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
context 'with subgroup' do
@@ -88,7 +88,7 @@ RSpec.describe API::GroupPackages do
it_behaves_like 'returns packages with subgroups', :group, :owner
it_behaves_like 'returns packages with subgroups', :group, :maintainer
it_behaves_like 'returns packages with subgroups', :group, :developer
- it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
+ it_behaves_like 'returns packages with subgroups', :group, :reporter
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
context 'excluding subgroup' do
@@ -97,7 +97,7 @@ RSpec.describe API::GroupPackages do
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
- it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
+ it_behaves_like 'returns packages', :group, :reporter
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
end
end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index da423e986c3..c7756a4fae5 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -1391,6 +1391,139 @@ RSpec.describe API::Groups do
end
end
+ describe 'GET /groups/:id/descendant_groups' do
+ let_it_be(:child_group1) { create(:group, parent: group1) }
+ let_it_be(:private_child_group1) { create(:group, :private, parent: group1) }
+ let_it_be(:sub_child_group1) { create(:group, parent: child_group1) }
+ let_it_be(:child_group2) { create(:group, :private, parent: group2) }
+ let_it_be(:sub_child_group2) { create(:group, :private, parent: child_group2) }
+ let(:response_groups) { json_response.map { |group| group['name'] } }
+
+ context 'when unauthenticated' do
+ it 'returns only public descendants' do
+ get api("/groups/#{group1.id}/descendant_groups")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ expect(response_groups).to contain_exactly(child_group1.name, sub_child_group1.name)
+ end
+
+ it 'returns 404 for a private group' do
+ get api("/groups/#{group2.id}/descendant_groups")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when authenticated as user' do
+ context 'when user is not member of a public group' do
+ it 'returns no descendants for the public group' do
+ get api("/groups/#{group1.id}/descendant_groups", user2)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(0)
+ end
+
+ context 'when using all_available in request' do
+ it 'returns public descendants' do
+ get api("/groups/#{group1.id}/descendant_groups", user2), params: { all_available: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ expect(response_groups).to contain_exactly(child_group1.name, sub_child_group1.name)
+ end
+ end
+ end
+
+ context 'when user is not member of a private group' do
+ it 'returns 404 for the private group' do
+ get api("/groups/#{group2.id}/descendant_groups", user1)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user is member of public group' do
+ before do
+ group1.add_guest(user2)
+ end
+
+ it 'returns private descendants' do
+ get api("/groups/#{group1.id}/descendant_groups", user2)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(3)
+ expect(response_groups).to contain_exactly(child_group1.name, sub_child_group1.name, private_child_group1.name)
+ end
+
+ context 'when using statistics in request' do
+ it 'does not include statistics' do
+ get api("/groups/#{group1.id}/descendant_groups", user2), params: { statistics: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.first).not_to include 'statistics'
+ end
+ end
+ end
+
+ context 'when user is member of private group' do
+ before do
+ group2.add_guest(user1)
+ end
+
+ it 'returns descendants' do
+ get api("/groups/#{group2.id}/descendant_groups", user1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ expect(response_groups).to contain_exactly(child_group2.name, sub_child_group2.name)
+ end
+ end
+ end
+
+ context 'when authenticated as admin' do
+ it 'returns private descendants of a public group' do
+ get api("/groups/#{group1.id}/descendant_groups", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(3)
+ end
+
+ it 'returns descendants of a private group' do
+ get api("/groups/#{group2.id}/descendant_groups", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ end
+
+ it 'does not include statistics by default' do
+ get api("/groups/#{group1.id}/descendant_groups", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.first).not_to include('statistics')
+ end
+
+ it 'includes statistics if requested' do
+ get api("/groups/#{group1.id}/descendant_groups", admin), params: { statistics: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.first).to include('statistics')
+ end
+ end
+ end
+
describe "POST /groups" do
it_behaves_like 'group avatar upload' do
def make_upload_request
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 9c0ea14e3e3..91d10791541 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe API::Helpers do
include described_class
include TermsHelper
- let(:user) { create(:user) }
+ let_it_be(:user, reload: true) { create(:user) }
let(:admin) { create(:admin) }
let(:key) { create(:key, user: user) }
@@ -243,6 +243,67 @@ RSpec.describe API::Helpers do
end
end
end
+
+ describe "when authenticating using a job token" do
+ let_it_be(:job, reload: true) do
+ create(:ci_build, user: user, status: :running)
+ end
+
+ let(:route_authentication_setting) { {} }
+
+ before do
+ allow_any_instance_of(self.class).to receive(:route_authentication_setting)
+ .and_return(route_authentication_setting)
+ end
+
+ context 'when route is allowed to be authenticated' do
+ let(:route_authentication_setting) { { job_token_allowed: true } }
+
+ it "returns a 401 response for an invalid token" do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = 'invalid token'
+
+ expect { current_user }.to raise_error /401/
+ end
+
+ it "returns a 401 response for a job that's not running" do
+ job.update!(status: :success)
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+
+ expect { current_user }.to raise_error /401/
+ end
+
+ it "returns a 403 response for a user without access" do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+ allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(false)
+
+ expect { current_user }.to raise_error /403/
+ end
+
+ it 'returns a 403 response for a user who is blocked' do
+ user.block!
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+
+ expect { current_user }.to raise_error /403/
+ end
+
+ it "sets current_user" do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+
+ expect(current_user).to eq(user)
+ end
+ end
+
+ context 'when route is not allowed to be authenticated' do
+ let(:route_authentication_setting) { { job_token_allowed: false } }
+
+ it "sets current_user to nil" do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+ allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(true)
+
+ expect(current_user).to be_nil
+ end
+ end
+ end
end
describe '.handle_api_exception' do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 4a0a7c81781..ab5f09305ce 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe API::Internal::Base do
+ include APIInternalBaseHelpers
+
let_it_be(:user, reload: true) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :repository, :wiki_repo) }
let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
@@ -48,43 +50,63 @@ RSpec.describe API::Internal::Base do
end
end
- describe 'GET /internal/two_factor_recovery_codes' do
- it 'returns an error message when the key does not exist' do
- post api('/internal/two_factor_recovery_codes'),
- params: {
- secret_token: secret_token,
- key_id: non_existing_record_id
- }
+ shared_examples 'actor key validations' do
+ context 'key id is not provided' do
+ let(:key_id) { nil }
- expect(json_response['success']).to be_falsey
- expect(json_response['message']).to eq('Could not find the given key')
+ it 'returns an error message' do
+ subject
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Could not find a user without a key')
+ end
end
- it 'returns an error message when the key is a deploy key' do
- deploy_key = create(:deploy_key)
+ context 'key does not exist' do
+ let(:key_id) { non_existing_record_id }
- post api('/internal/two_factor_recovery_codes'),
- params: {
- secret_token: secret_token,
- key_id: deploy_key.id
- }
+ it 'returns an error message' do
+ subject
- expect(json_response['success']).to be_falsey
- expect(json_response['message']).to eq('Deploy keys cannot be used to retrieve recovery codes')
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Could not find the given key')
+ end
+ end
+
+ context 'key without user' do
+ let(:key_id) { create(:key, user: nil).id }
+
+ it 'returns an error message' do
+ subject
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Could not find a user for the given key')
+ end
end
+ end
- it 'returns an error message when the user does not exist' do
- key_without_user = create(:key, user: nil)
+ describe 'GET /internal/two_factor_recovery_codes' do
+ let(:key_id) { key.id }
+ subject do
post api('/internal/two_factor_recovery_codes'),
params: {
secret_token: secret_token,
- key_id: key_without_user.id
+ key_id: key_id
}
+ end
- expect(json_response['success']).to be_falsey
- expect(json_response['message']).to eq('Could not find a user for the given key')
- expect(json_response['recovery_codes']).to be_nil
+ it_behaves_like 'actor key validations'
+
+ context 'key is a deploy key' do
+ let(:key_id) { create(:deploy_key).id }
+
+ it 'returns an error message' do
+ subject
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Deploy keys cannot be used to retrieve recovery codes')
+ end
end
context 'when two-factor is enabled' do
@@ -93,11 +115,7 @@ RSpec.describe API::Internal::Base do
allow_any_instance_of(User)
.to receive(:generate_otp_backup_codes!).and_return(%w(119135e5a3ebce8e 34bd7b74adbc8861))
- post api('/internal/two_factor_recovery_codes'),
- params: {
- secret_token: secret_token,
- key_id: key.id
- }
+ subject
expect(json_response['success']).to be_truthy
expect(json_response['recovery_codes']).to match_array(%w(119135e5a3ebce8e 34bd7b74adbc8861))
@@ -108,11 +126,7 @@ RSpec.describe API::Internal::Base do
it 'returns an error message' do
allow_any_instance_of(User).to receive(:two_factor_enabled?).and_return(false)
- post api('/internal/two_factor_recovery_codes'),
- params: {
- secret_token: secret_token,
- key_id: key.id
- }
+ subject
expect(json_response['success']).to be_falsey
expect(json_response['recovery_codes']).to be_nil
@@ -121,42 +135,27 @@ RSpec.describe API::Internal::Base do
end
describe 'POST /internal/personal_access_token' do
- it 'returns an error message when the key does not exist' do
- post api('/internal/personal_access_token'),
- params: {
- secret_token: secret_token,
- key_id: non_existing_record_id
- }
-
- expect(json_response['success']).to be_falsey
- expect(json_response['message']).to eq('Could not find the given key')
- end
-
- it 'returns an error message when the key is a deploy key' do
- deploy_key = create(:deploy_key)
+ let(:key_id) { key.id }
+ subject do
post api('/internal/personal_access_token'),
params: {
secret_token: secret_token,
- key_id: deploy_key.id
+ key_id: key_id
}
-
- expect(json_response['success']).to be_falsey
- expect(json_response['message']).to eq('Deploy keys cannot be used to create personal access tokens')
end
- it 'returns an error message when the user does not exist' do
- key_without_user = create(:key, user: nil)
+ it_behaves_like 'actor key validations'
- post api('/internal/personal_access_token'),
- params: {
- secret_token: secret_token,
- key_id: key_without_user.id
- }
+ context 'key is a deploy key' do
+ let(:key_id) { create(:deploy_key).id }
- expect(json_response['success']).to be_falsey
- expect(json_response['message']).to eq('Could not find a user for the given key')
- expect(json_response['token']).to be_nil
+ it 'returns an error message' do
+ subject
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Deploy keys cannot be used to create personal access tokens')
+ end
end
it 'returns an error message when given a non-existent user' do
@@ -459,7 +458,7 @@ RSpec.describe API::Internal::Base do
end
it_behaves_like 'sets hook env' do
- let(:gl_repository) { Gitlab::GlRepository::WIKI.identifier_for_container(project) }
+ let(:gl_repository) { Gitlab::GlRepository::WIKI.identifier_for_container(project.wiki) }
end
end
@@ -1207,86 +1206,157 @@ RSpec.describe API::Internal::Base do
end
end
- def gl_repository_for(container)
- case container
- when ProjectWiki
- Gitlab::GlRepository::WIKI.identifier_for_container(container.project)
- when Project
- Gitlab::GlRepository::PROJECT.identifier_for_container(container)
- when Snippet
- Gitlab::GlRepository::SNIPPET.identifier_for_container(container)
- else
- nil
+ describe 'POST /internal/two_factor_config' do
+ let(:key_id) { key.id }
+
+ before do
+ stub_feature_flags(two_factor_for_cli: true)
end
- end
- def full_path_for(container)
- case container
- when PersonalSnippet
- "snippets/#{container.id}"
- when ProjectSnippet
- "#{container.project.full_path}/snippets/#{container.id}"
- else
- container.full_path
+ subject do
+ post api('/internal/two_factor_config'),
+ params: {
+ secret_token: secret_token,
+ key_id: key_id
+ }
end
- end
- def pull(key, container, protocol = 'ssh')
- post(
- api("/internal/allowed"),
- params: {
- key_id: key.id,
- project: full_path_for(container),
- gl_repository: gl_repository_for(container),
- action: 'git-upload-pack',
- secret_token: secret_token,
- protocol: protocol
- }
- )
- end
+ it_behaves_like 'actor key validations'
- def push(key, container, protocol = 'ssh', env: nil, changes: nil)
- push_with_path(key,
- full_path: full_path_for(container),
- gl_repository: gl_repository_for(container),
- protocol: protocol,
- env: env,
- changes: changes)
- end
+ context 'when the key is a deploy key' do
+ let(:key) { create(:deploy_key) }
- def push_with_path(key, full_path:, gl_repository: nil, protocol: 'ssh', env: nil, changes: nil)
- changes ||= 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
+      it 'does not require two factor' do
+ subject
- params = {
- changes: changes,
- key_id: key.id,
- project: full_path,
- action: 'git-receive-pack',
- secret_token: secret_token,
- protocol: protocol,
- env: env
- }
- params[:gl_repository] = gl_repository if gl_repository
+ expect(json_response['success']).to be_truthy
+ expect(json_response['two_factor_required']).to be_falsey
+ end
+ end
- post(
- api("/internal/allowed"),
- params: params
- )
+ context 'when two-factor is enabled' do
+ it 'returns user two factor config' do
+ allow_any_instance_of(User).to receive(:two_factor_enabled?).and_return(true)
+
+ subject
+
+ expect(json_response['success']).to be_truthy
+ expect(json_response['two_factor_required']).to be_truthy
+ end
+ end
+
+ context 'when two-factor is not enabled' do
+      it 'returns two factor not required' do
+ allow_any_instance_of(User).to receive(:two_factor_enabled?).and_return(false)
+
+ subject
+
+ expect(json_response['success']).to be_truthy
+ expect(json_response['two_factor_required']).to be_falsey
+ end
+ end
+
+ context 'two_factor_for_cli feature is disabled' do
+ before do
+ stub_feature_flags(two_factor_for_cli: false)
+ end
+
+ context 'when two-factor is enabled for the user' do
+          it 'does not return two factor config' do
+ allow_any_instance_of(User).to receive(:two_factor_enabled?).and_return(true)
+
+ subject
+
+ expect(json_response['success']).to be_falsey
+ end
+ end
+ end
end
- def archive(key, container)
- post(
- api("/internal/allowed"),
- params: {
- ref: 'master',
- key_id: key.id,
- project: full_path_for(container),
- gl_repository: gl_repository_for(container),
- action: 'git-upload-archive',
- secret_token: secret_token,
- protocol: 'ssh'
- }
- )
+ describe 'POST /internal/two_factor_otp_check' do
+ let(:key_id) { key.id }
+    let(:otp) { '123456' }
+
+ before do
+ stub_feature_flags(two_factor_for_cli: true)
+ end
+
+ subject do
+ post api('/internal/two_factor_otp_check'),
+ params: {
+ secret_token: secret_token,
+ key_id: key_id,
+ otp_attempt: otp
+ }
+ end
+
+ it_behaves_like 'actor key validations'
+
+ context 'when the key is a deploy key' do
+ let(:key_id) { create(:deploy_key).id }
+
+ it 'returns an error message' do
+ subject
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Deploy keys cannot be used for Two Factor')
+ end
+ end
+
+    context 'when two-factor is enabled' do
+ before do
+ allow_any_instance_of(User).to receive(:two_factor_enabled?).and_return(true)
+ end
+
+ context 'when the OTP is valid' do
+ it 'returns success' do
+ allow_any_instance_of(Users::ValidateOtpService).to receive(:execute).with(otp).and_return(status: :success)
+
+ subject
+
+ expect(json_response['success']).to be_truthy
+ end
+ end
+
+ context 'when the OTP is invalid' do
+ it 'is not success' do
+ allow_any_instance_of(Users::ValidateOtpService).to receive(:execute).with(otp).and_return(status: :error)
+
+ subject
+
+ expect(json_response['success']).to be_falsey
+ end
+ end
+ end
+
+    context 'when two-factor is disabled' do
+ before do
+ allow_any_instance_of(User).to receive(:two_factor_enabled?).and_return(false)
+ end
+
+ it 'returns an error message' do
+ subject
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq 'Two-factor authentication is not enabled for this user'
+ end
+ end
+
+    context 'when the two_factor_for_cli feature is disabled' do
+ before do
+ stub_feature_flags(two_factor_for_cli: false)
+ end
+
+ context 'when two-factor is enabled for the user' do
+        it 'returns an unsuccessful response' do
+ allow_any_instance_of(User).to receive(:two_factor_enabled?).and_return(true)
+
+ subject
+
+ expect(json_response['success']).to be_falsey
+ end
+ end
+ end
end
def lfs_auth_project(project)
diff --git a/spec/requests/api/internal/lfs_spec.rb b/spec/requests/api/internal/lfs_spec.rb
new file mode 100644
index 00000000000..4739ec62992
--- /dev/null
+++ b/spec/requests/api/internal/lfs_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Internal::Lfs do
+ include APIInternalBaseHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:lfs_object) { create(:lfs_object, :with_file) }
+ let_it_be(:lfs_objects_project) { create(:lfs_objects_project, project: project, lfs_object: lfs_object) }
+ let_it_be(:gl_repository) { "project-#{project.id}" }
+ let_it_be(:filename) { lfs_object.file.path }
+
+ let(:secret_token) { Gitlab::Shell.secret_token }
+
+ describe 'GET /internal/lfs' do
+ let(:valid_params) do
+ { oid: lfs_object.oid, gl_repository: gl_repository, secret_token: secret_token }
+ end
+
+ context 'with invalid auth' do
+      let(:invalid_params) { valid_params.merge(secret_token: 'invalid_token') }
+
+      it 'returns 401' do
+        get api("/internal/lfs"), params: invalid_params
+
+        expect(response).to have_gitlab_http_status(:unauthorized)
+      end
+ end
+
+ context 'with valid auth' do
+ context 'LFS in local storage' do
+ it 'sends the file' do
+ get api("/internal/lfs"), params: valid_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Type']).to eq('application/octet-stream')
+ expect(response.headers['Content-Length'].to_i).to eq(File.stat(filename).size)
+ expect(response.body).to eq(File.open(filename, 'rb', &:read))
+ end
+
+ # https://www.rubydoc.info/github/rack/rack/master/Rack/Sendfile
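+      # With X-Sendfile the Rails app returns an empty body plus an X-Sendfile header
+      # pointing at the file on disk, and the fronting web server streams the file
+      # itself; hence the zero Content-Length and empty body asserted below.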
+ it 'delegates sending to Web server' do
+ get api("/internal/lfs"), params: valid_params, env: { 'HTTP_X_SENDFILE_TYPE' => 'X-Sendfile' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Type']).to eq('application/octet-stream')
+ expect(response.headers['Content-Length'].to_i).to eq(0)
+ expect(response.headers['X-Sendfile']).to be_present
+ expect(response.body).to eq("")
+ end
+
+      it 'returns 404 for unknown file' do
+ params = valid_params.merge(oid: SecureRandom.hex)
+
+ get api("/internal/lfs"), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 404 if LFS object does not belong to project' do
+ other_lfs = create(:lfs_object, :with_file)
+ params = valid_params.merge(oid: other_lfs.oid)
+
+ get api("/internal/lfs"), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'LFS in object storage' do
+ let!(:lfs_object2) { create(:lfs_object, :with_file) }
+ let!(:lfs_objects_project2) { create(:lfs_objects_project, project: project, lfs_object: lfs_object2) }
+ let(:valid_params) do
+ { oid: lfs_object2.oid, gl_repository: gl_repository, secret_token: secret_token }
+ end
+
+ before do
+ stub_lfs_object_storage(enabled: true)
+ lfs_object2.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
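+      # For objects migrated to remote storage the app does not stream the bytes; it sets
+      # Gitlab::Workhorse::SEND_DATA_HEADER with a "send-url:" instruction so Workhorse
+      # fetches the object-storage URL and proxies it to the client.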
+ it 'notifies Workhorse to send the file' do
+ get api("/internal/lfs"), params: valid_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
+ expect(response.headers['Content-Type']).to eq('application/octet-stream')
+ expect(response.headers['Content-Length'].to_i).to eq(0)
+ expect(response.body).to eq("")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 2d57146fbc9..c1498e03f76 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -465,12 +465,14 @@ RSpec.describe API::Jobs do
end
context 'find proper job' do
+ let(:job_with_artifacts) { job }
+
shared_examples 'a valid file' do
context 'when artifacts are stored locally', :sidekiq_might_not_need_inline do
let(:download_headers) do
{ 'Content-Transfer-Encoding' => 'binary',
'Content-Disposition' =>
- %Q(attachment; filename="#{job.artifacts_file.filename}"; filename*=UTF-8''#{job.artifacts_file.filename}) }
+          %Q(attachment; filename="#{job_with_artifacts.artifacts_file.filename}"; filename*=UTF-8''#{job_with_artifacts.artifacts_file.filename}) }
end
it { expect(response).to have_gitlab_http_status(:ok) }
@@ -518,6 +520,18 @@ RSpec.describe API::Jobs do
it_behaves_like 'a valid file'
end
+
+ context 'with job name in a child pipeline' do
+ let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
+ let!(:child_job) { create(:ci_build, :artifacts, :success, name: 'rspec', pipeline: child_pipeline) }
+ let(:job_with_artifacts) { child_job }
+
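+      # The artifact lookup by ref and job name is expected to also find jobs in
+      # child pipelines, so the shared example is pointed at the child job's artifact here.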
+ before do
+ get_for_ref('master', child_job.name)
+ end
+
+ it_behaves_like 'a valid file'
+ end
end
end
diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb
index 4c60c8bd2a3..9890cdc20c0 100644
--- a/spec/requests/api/lint_spec.rb
+++ b/spec/requests/api/lint_spec.rb
@@ -17,23 +17,52 @@ RSpec.describe API::Lint do
expect(json_response['status']).to eq('valid')
expect(json_response['errors']).to eq([])
end
+
+ it 'outputs expanded yaml content' do
+ post api('/ci/lint'), params: { content: yaml_content, include_merged_yaml: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key('merged_yaml')
+ end
end
context 'with an invalid .gitlab_ci.yml' do
- it 'responds with errors about invalid syntax' do
- post api('/ci/lint'), params: { content: 'invalid content' }
+ context 'with invalid syntax' do
+ let(:yaml_content) { 'invalid content' }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['status']).to eq('invalid')
- expect(json_response['errors']).to eq(['Invalid configuration format'])
+ it 'responds with errors about invalid syntax' do
+ post api('/ci/lint'), params: { content: yaml_content }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['status']).to eq('invalid')
+ expect(json_response['errors']).to eq(['Invalid configuration format'])
+ end
+
+ it 'outputs expanded yaml content' do
+ post api('/ci/lint'), params: { content: yaml_content, include_merged_yaml: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key('merged_yaml')
+ end
end
- it "responds with errors about invalid configuration" do
- post api('/ci/lint'), params: { content: '{ image: "ruby:2.7", services: ["postgres"] }' }
+ context 'with invalid configuration' do
+ let(:yaml_content) { '{ image: "ruby:2.7", services: ["postgres"] }' }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['status']).to eq('invalid')
- expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
+ it 'responds with errors about invalid configuration' do
+ post api('/ci/lint'), params: { content: yaml_content }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['status']).to eq('invalid')
+ expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
+ end
+
+ it 'outputs expanded yaml content' do
+ post api('/ci/lint'), params: { content: yaml_content, include_merged_yaml: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key('merged_yaml')
+ end
end
end
@@ -46,4 +75,204 @@ RSpec.describe API::Lint do
end
end
end
+
+ describe 'GET /projects/:id/ci/lint' do
+ subject(:ci_lint) { get api("/projects/#{project.id}/ci/lint", api_user), params: { dry_run: dry_run } }
+
+ let(:project) { create(:project, :repository) }
+ let(:dry_run) { nil }
+
+ RSpec.shared_examples 'valid config' do
+ it 'passes validation' do
+ ci_lint
+
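+      # The endpoint should return the fully expanded configuration: the included
+      # file's jobs merged into the root config, with the `include` keyword resolved away.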
+ included_config = YAML.safe_load(included_content, [Symbol])
+ root_config = YAML.safe_load(yaml_content, [Symbol])
+ expected_yaml = included_config.merge(root_config).except(:include).to_yaml
+
+ expect(response).to have_gitlab_http_status(:ok)
+      expect(json_response).to be_a Hash
+ expect(json_response['merged_yaml']).to eq(expected_yaml)
+ expect(json_response['valid']).to eq(true)
+ expect(json_response['errors']).to eq([])
+ end
+ end
+
+ RSpec.shared_examples 'invalid config' do
+ it 'responds with errors about invalid configuration' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['merged_yaml']).to eq(yaml_content)
+ expect(json_response['valid']).to eq(false)
+ expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
+ end
+ end
+
+ context 'when unauthenticated' do
+ let_it_be(:api_user) { nil }
+
+ it 'returns authentication error' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when authenticated as non-member' do
+ let_it_be(:api_user) { create(:user) }
+
+ let(:yaml_content) do
+ { include: { local: 'another-gitlab-ci.yml' }, test: { stage: 'test', script: 'echo 1' } }.to_yaml
+ end
+
+ context 'when project is private' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ stub_ci_pipeline_yaml_file(yaml_content)
+ end
+
+ it 'returns authentication error' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when project is public' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ context 'when running as dry run' do
+ let(:dry_run) { true }
+
+ before do
+ stub_ci_pipeline_yaml_file(yaml_content)
+ end
+
+ it 'returns pipeline creation error' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['merged_yaml']).to eq(nil)
+ expect(json_response['valid']).to eq(false)
+ expect(json_response['errors']).to eq(['Insufficient permissions to create a new pipeline'])
+ end
+ end
+
+ context 'when running static validation' do
+ let(:dry_run) { false }
+
+ let(:included_content) do
+ { another_test: { stage: 'test', script: 'echo 1' } }.to_yaml
+ end
+
+ before do
+ project.repository.create_file(
+ project.creator,
+ '.gitlab-ci.yml',
+ yaml_content,
+ message: 'Automatically created .gitlab-ci.yml',
+ branch_name: 'master'
+ )
+
+ project.repository.create_file(
+ project.creator,
+ 'another-gitlab-ci.yml',
+ included_content,
+ message: 'Automatically created another-gitlab-ci.yml',
+ branch_name: 'master'
+ )
+ end
+
+ it_behaves_like 'valid config'
+ end
+ end
+ end
+
+ context 'when authenticated as project guest' do
+ let_it_be(:api_user) { create(:user) }
+
+ before do
+ project.add_guest(api_user)
+ end
+
+ it 'returns authentication error' do
+ ci_lint
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when authenticated as project developer' do
+ let_it_be(:api_user) { create(:user) }
+
+ before do
+ project.add_developer(api_user)
+ end
+
+ context 'with valid .gitlab-ci.yml content' do
+ let(:yaml_content) do
+ { include: { local: 'another-gitlab-ci.yml' }, test: { stage: 'test', script: 'echo 1' } }.to_yaml
+ end
+
+ let(:included_content) do
+ { another_test: { stage: 'test', script: 'echo 1' } }.to_yaml
+ end
+
+ before do
+ project.repository.create_file(
+ project.creator,
+ '.gitlab-ci.yml',
+ yaml_content,
+ message: 'Automatically created .gitlab-ci.yml',
+ branch_name: 'master'
+ )
+
+ project.repository.create_file(
+ project.creator,
+ 'another-gitlab-ci.yml',
+ included_content,
+ message: 'Automatically created another-gitlab-ci.yml',
+ branch_name: 'master'
+ )
+ end
+
+ context 'when running as dry run' do
+ let(:dry_run) { true }
+
+ it_behaves_like 'valid config'
+ end
+
+ context 'when running static validation' do
+ let(:dry_run) { false }
+
+ it_behaves_like 'valid config'
+ end
+ end
+
+ context 'with invalid .gitlab-ci.yml content' do
+ let(:yaml_content) do
+ { image: 'ruby:2.7', services: ['postgres'] }.to_yaml
+ end
+
+ before do
+ stub_ci_pipeline_yaml_file(yaml_content)
+ end
+
+ context 'when running as dry run' do
+ let(:dry_run) { true }
+
+ it_behaves_like 'invalid config'
+ end
+
+ context 'when running static validation' do
+ let(:dry_run) { false }
+
+ it_behaves_like 'invalid config'
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 0a23aed109b..37748fe5ea7 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -15,10 +15,13 @@ RSpec.describe API::MavenPackages do
let_it_be(:job, reload: true) { create(:ci_build, user: user, status: :running) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
+ let_it_be(:deploy_token_for_group) { create(:deploy_token, :group, read_package_registry: true, write_package_registry: true) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: deploy_token_for_group, group: group) }
let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
let(:headers_with_token) { headers.merge('Private-Token' => personal_access_token.token) }
+ let(:group_deploy_token_headers) { { Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => deploy_token_for_group.token } }
let(:headers_with_deploy_token) do
headers.merge(
@@ -36,7 +39,7 @@ RSpec.describe API::MavenPackages do
context 'with jar file' do
let_it_be(:package_file) { jar_file }
- it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
end
end
@@ -342,6 +345,17 @@ RSpec.describe API::MavenPackages do
it_behaves_like 'downloads with a job token'
it_behaves_like 'downloads with a deploy token'
+
+ context 'with group deploy token' do
+ subject { download_file_with_token(package_file.file_name, {}, group_deploy_token_headers) }
+
+ it 'returns the file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+ end
end
def download_file(file_name, params = {}, request_headers = headers)
@@ -548,7 +562,7 @@ RSpec.describe API::MavenPackages do
allow(uploaded_file).to receive(:size).and_return(project.actual_limits.maven_max_file_size + 1)
end
- upload_file_with_token(params)
+ upload_file_with_token(params: params)
expect(response).to have_gitlab_http_status(:bad_request)
end
@@ -563,19 +577,19 @@ RSpec.describe API::MavenPackages do
context 'without workhorse header' do
let(:workhorse_header) { {} }
- subject { upload_file_with_token(params) }
+ subject { upload_file_with_token(params: params) }
it_behaves_like 'package workhorse uploads'
end
context 'event tracking' do
- subject { upload_file_with_token(params) }
+ subject { upload_file_with_token(params: params) }
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
end
it 'creates package and stores package file' do
- expect { upload_file_with_token(params) }.to change { project.packages.count }.by(1)
+ expect { upload_file_with_token(params: params) }.to change { project.packages.count }.by(1)
.and change { Packages::Maven::Metadatum.count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
@@ -584,7 +598,7 @@ RSpec.describe API::MavenPackages do
end
it 'allows upload with running job token' do
- upload_file(params.merge(job_token: job.token))
+ upload_file(params: params.merge(job_token: job.token))
expect(response).to have_gitlab_http_status(:ok)
expect(project.reload.packages.last.build_info.pipeline).to eq job.pipeline
@@ -592,13 +606,13 @@ RSpec.describe API::MavenPackages do
it 'rejects upload without running job token' do
job.update!(status: :failed)
- upload_file(params.merge(job_token: job.token))
+ upload_file(params: params.merge(job_token: job.token))
expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'allows upload with deploy token' do
- upload_file(params, headers_with_deploy_token)
+ upload_file(params: params, request_headers: headers_with_deploy_token)
expect(response).to have_gitlab_http_status(:ok)
end
@@ -612,7 +626,10 @@ RSpec.describe API::MavenPackages do
# We force the id of the deploy token and the user to be the same
unauthorized_deploy_token.update!(id: another_user.id)
- upload_file(params, headers.merge(Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => unauthorized_deploy_token.token))
+ upload_file(
+ params: params,
+ request_headers: headers.merge(Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => unauthorized_deploy_token.token)
+ )
expect(response).to have_gitlab_http_status(:forbidden)
end
@@ -621,16 +638,43 @@ RSpec.describe API::MavenPackages do
let(:version) { '$%123' }
it 'rejects request' do
- expect { upload_file_with_token(params) }.not_to change { project.packages.count }
+ expect { upload_file_with_token(params: params) }.not_to change { project.packages.count }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to include('Validation failed')
end
end
+
+ context 'for sha1 file' do
+ let(:dummy_package) { double(Packages::Package) }
+
+ it 'checks the sha1' do
+ # The sha verification done by the maven api is between:
+ # - the sha256 set by workhorse helpers
+ # - the sha256 of the sha1 of the uploaded package file
+        # We're going to send `file_upload` as the sha1 and stub the package file's sha1
+        # so that both sha256 values end up the same
+ expect(::Packages::PackageFileFinder).to receive(:new).and_return(double(execute!: dummy_package))
+ expect(dummy_package).to receive(:file_sha1).and_return(File.read(file_upload.path))
+
+ upload_file_with_token(params: params, file_extension: 'jar.sha1')
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'for md5 file' do
+ it 'returns an empty body' do
+ upload_file_with_token(params: params, file_extension: 'jar.md5')
+
+ expect(response.body).to eq('')
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
- def upload_file(params = {}, request_headers = headers)
- url = "/projects/#{project.id}/packages/maven/com/example/my-app/#{version}/my-app-1.0-20180724.124855-1.jar"
+ def upload_file(params: {}, request_headers: headers, file_extension: 'jar')
+ url = "/projects/#{project.id}/packages/maven/com/example/my-app/#{version}/my-app-1.0-20180724.124855-1.#{file_extension}"
workhorse_finalize(
api(url),
method: :put,
@@ -641,8 +685,8 @@ RSpec.describe API::MavenPackages do
)
end
- def upload_file_with_token(params = {}, request_headers = headers_with_token)
- upload_file(params, request_headers)
+ def upload_file_with_token(params: {}, request_headers: headers_with_token, file_extension: 'jar')
+ upload_file(params: params, request_headers: request_headers, file_extension: file_extension)
end
end
end
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index 55b2447fc68..047b9423906 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -196,6 +196,7 @@ RSpec.describe API::Members do
# Member attributes
expect(json_response['access_level']).to eq(Member::DEVELOPER)
+ expect(json_response['created_at'].to_time).to be_like_time(developer.created_at)
end
end
end
@@ -251,6 +252,36 @@ RSpec.describe API::Members do
expect(json_response['id']).to eq(stranger.id)
expect(json_response['access_level']).to eq(Member::DEVELOPER)
end
+
+ describe 'executes the Members::CreateService for multiple user_ids' do
+    it 'returns success when it successfully creates all members' do
+ expect do
+ user_ids = [stranger.id, access_requester.id].join(',')
+
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
+ params: { user_id: user_ids, access_level: Member::DEVELOPER }
+
+ expect(response).to have_gitlab_http_status(:created)
+ end.to change { source.members.count }.by(2)
+ expect(json_response['status']).to eq('success')
+ end
+
+    it 'returns the error message if there was an error adding members to the group' do
+ error_message = 'Unable to find User ID'
+ user_ids = [stranger.id, access_requester.id].join(',')
+
+ allow_next_instance_of(::Members::CreateService) do |service|
+ expect(service).to receive(:execute).with(source).and_return({ status: :error, message: error_message })
+ end
+
+ expect do
+ post api("/#{source_type.pluralize}/#{source.id}/members", maintainer),
+ params: { user_id: user_ids, access_level: Member::DEVELOPER }
+ end.not_to change { source.members.count }
+ expect(json_response['status']).to eq('error')
+ expect(json_response['message']).to eq(error_message)
+ end
+ end
end
context 'access levels' do
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 2757c56e0fe..506607f4cc2 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -856,6 +856,55 @@ RSpec.describe API::MergeRequests do
expect(json_response.first['id']).to eq merge_request_closed.id
end
+ context 'when filtering by deployments' do
+ let_it_be(:mr) do
+ create(:merge_request, :merged, source_project: project, target_project: project)
+ end
+
+ before do
+ env = create(:environment, project: project, name: 'staging')
+ deploy = create(:deployment, :success, environment: env, deployable: nil)
+
+ deploy.link_merge_requests(MergeRequest.where(id: mr.id))
+ end
+
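+      # Linking the deployment to the merge request is what backs the environment,
+      # deployed_after and deployed_before filters exercised below.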
+ it 'supports getting merge requests deployed to an environment' do
+ get api(endpoint_path, user), params: { environment: 'staging' }
+
+ expect(json_response.first['id']).to eq mr.id
+ end
+
+ it 'does not return merge requests for an environment without deployments' do
+ get api(endpoint_path, user), params: { environment: 'bla' }
+
+ expect_empty_array_response
+ end
+
+ it 'supports getting merge requests deployed after a date' do
+ get api(endpoint_path, user), params: { deployed_after: '1990-01-01' }
+
+ expect(json_response.first['id']).to eq mr.id
+ end
+
+ it 'does not return merge requests not deployed after a given date' do
+ get api(endpoint_path, user), params: { deployed_after: '2100-01-01' }
+
+ expect_empty_array_response
+ end
+
+ it 'supports getting merge requests deployed before a date' do
+ get api(endpoint_path, user), params: { deployed_before: '2100-01-01' }
+
+ expect(json_response.first['id']).to eq mr.id
+ end
+
+ it 'does not return merge requests not deployed before a given date' do
+ get api(endpoint_path, user), params: { deployed_before: '1990-01-01' }
+
+ expect_empty_array_response
+ end
+ end
+
context 'a project which enforces all discussions to be resolved' do
let_it_be(:project) { create(:project, :repository, only_allow_merge_if_all_discussions_are_resolved: true) }
@@ -1140,7 +1189,7 @@ RSpec.describe API::MergeRequests do
context 'when a merge request has more than the changes limit' do
it "returns a string indicating that more changes were made" do
- stub_const('Commit::DIFF_HARD_LIMIT_FILES', 5)
+ allow(Commit).to receive(:diff_hard_limit_files).and_return(5)
merge_request_overflow = create(:merge_request, :simple,
author: user,
diff --git a/spec/requests/api/npm_packages_spec.rb b/spec/requests/api/npm_packages_spec.rb
index 108ea84b7e6..8a3ccd7c6e3 100644
--- a/spec/requests/api/npm_packages_spec.rb
+++ b/spec/requests/api/npm_packages_spec.rb
@@ -88,12 +88,16 @@ RSpec.describe API::NpmPackages do
it_behaves_like 'returning the npm package info'
context 'with unknown package' do
+ subject { get api("/packages/npm/unknown") }
+
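+      # Unknown packages are forwarded to the public npm registry, and the forwarding
+      # is tracked as an npm_request_forward event.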
it 'returns a redirect' do
- get api("/packages/npm/unknown")
+ subject
expect(response).to have_gitlab_http_status(:found)
expect(response.headers['Location']).to eq('https://registry.npmjs.org/unknown')
end
+
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'npm_request_forward'
end
end
@@ -193,7 +197,7 @@ RSpec.describe API::NpmPackages do
expect(response.media_type).to eq('application/octet-stream')
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
end
context 'private project' do
@@ -301,7 +305,7 @@ RSpec.describe API::NpmPackages do
context 'with access token' do
subject { upload_package_with_token(package_name, params) }
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
it 'creates npm package with file' do
expect { subject }
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index ff35e380476..7b37862af74 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -171,6 +171,7 @@ RSpec.describe API::ProjectClusters do
name: 'test-cluster',
domain: 'domain.example.com',
managed: false,
+ namespace_per_environment: false,
platform_kubernetes_attributes: platform_kubernetes_attributes,
management_project_id: management_project_id
}
@@ -202,6 +203,7 @@ RSpec.describe API::ProjectClusters do
expect(cluster_result.domain).to eq('domain.example.com')
expect(cluster_result.managed).to be_falsy
expect(cluster_result.management_project_id).to eq management_project_id
+ expect(cluster_result.namespace_per_environment).to eq(false)
expect(platform_kubernetes.rbac?).to be_truthy
expect(platform_kubernetes.api_url).to eq(api_url)
expect(platform_kubernetes.namespace).to eq(namespace)
@@ -235,6 +237,22 @@ RSpec.describe API::ProjectClusters do
end
end
+ context 'when namespace_per_environment is not set' do
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ domain: 'domain.example.com',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ it 'defaults to true' do
+ cluster_result = Clusters::Cluster.find(json_response['id'])
+
+ expect(cluster_result).to be_namespace_per_environment
+ end
+ end
+
context 'current user does not have access to management_project_id' do
let(:management_project_id) { create(:project).id }
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 6cf0619cde4..34476b10576 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -31,7 +31,6 @@ RSpec.describe API::ProjectContainerRepositories do
project.add_reporter(reporter)
project.add_guest(guest)
- stub_feature_flags(container_registry_api: true)
stub_container_registry_config(enabled: true)
root_repository
@@ -45,7 +44,7 @@ RSpec.describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :guest, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_repositories'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
it_behaves_like 'returns repositories for allowed users', :reporter, 'project' do
let(:object) { project }
@@ -57,7 +56,7 @@ RSpec.describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :developer, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a gitlab tracking event', described_class.name, 'delete_repository'
+ it_behaves_like 'a package tracking event', described_class.name, 'delete_repository'
context 'for maintainer' do
let(:api_user) { maintainer }
@@ -86,7 +85,7 @@ RSpec.describe API::ProjectContainerRepositories do
stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA latest))
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_tags'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_tags'
it 'returns a list of tags' do
subject
@@ -114,7 +113,7 @@ RSpec.describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :developer, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a gitlab tracking event', described_class.name, 'delete_tag_bulk'
+ it_behaves_like 'a package tracking event', described_class.name, 'delete_tag_bulk'
end
context 'for maintainer' do
diff --git a/spec/requests/api/project_packages_spec.rb b/spec/requests/api/project_packages_spec.rb
index 2f0d0fc87ec..4c8599d1a20 100644
--- a/spec/requests/api/project_packages_spec.rb
+++ b/spec/requests/api/project_packages_spec.rb
@@ -23,6 +23,19 @@ RSpec.describe API::ProjectPackages do
it_behaves_like 'returns packages', :project, :no_type
end
+ context 'with conan package' do
+ let!(:conan_package) { create(:conan_package, project: project) }
+
+ it 'uses the conan recipe as the package name' do
+ subject
+
+ response_conan_package = json_response.find { |package| package['id'] == conan_package.id }
+
+ expect(response_conan_package['name']).to eq(conan_package.conan_recipe)
+ expect(response_conan_package['conan_package_name']).to eq(conan_package.name)
+ end
+ end
+
context 'project is private' do
let(:project) { create(:project, :private) }
diff --git a/spec/requests/api/project_repository_storage_moves_spec.rb b/spec/requests/api/project_repository_storage_moves_spec.rb
index 4c9e058ef13..ecf4c75b52f 100644
--- a/spec/requests/api/project_repository_storage_moves_spec.rb
+++ b/spec/requests/api/project_repository_storage_moves_spec.rb
@@ -145,10 +145,17 @@ RSpec.describe API::ProjectRepositoryStorageMoves do
context 'destination_storage_name is missing' do
let(:destination_storage_name) { nil }
- it 'returns a validation error' do
+ it 'schedules a project repository storage move' do
create_project_repository_storage_move
- expect(response).to have_gitlab_http_status(:bad_request)
+ storage_move = project.repository_storage_moves.last
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response).to match_response_schema('public_api/v4/project_repository_storage_move')
+ expect(json_response['id']).to eq(storage_move.id)
+ expect(json_response['state']).to eq('scheduled')
+ expect(json_response['source_storage_name']).to eq('default')
+ expect(json_response['destination_storage_name']).to be_present
end
end
end
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index 08c88873078..6a9cf6e16e2 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -6,21 +6,16 @@ RSpec.describe API::ProjectSnippets do
include SnippetHelpers
let_it_be(:project) { create(:project, :public) }
- let_it_be(:user) { create(:user) }
- let_it_be(:admin) { create(:admin) }
let_it_be(:project_no_snippets) { create(:project, :snippets_disabled) }
-
- before do
- project_no_snippets.add_developer(admin)
- project_no_snippets.add_developer(user)
- end
+ let_it_be(:user) { create(:user, developer_projects: [project_no_snippets]) }
+ let_it_be(:admin) { create(:admin, developer_projects: [project_no_snippets]) }
+ let_it_be(:public_snippet, reload: true) { create(:project_snippet, :public, :repository, project: project) }
describe "GET /projects/:project_id/snippets/:id/user_agent_detail" do
- let(:snippet) { create(:project_snippet, :public, project: project) }
- let!(:user_agent_detail) { create(:user_agent_detail, subject: snippet) }
+ let_it_be(:user_agent_detail) { create(:user_agent_detail, subject: public_snippet) }
it 'exposes known attributes' do
- get api("/projects/#{project.id}/snippets/#{snippet.id}/user_agent_detail", admin)
+ get api("/projects/#{project.id}/snippets/#{public_snippet.id}/user_agent_detail", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['user_agent']).to eq(user_agent_detail.user_agent)
@@ -31,29 +26,27 @@ RSpec.describe API::ProjectSnippets do
it 'respects project scoping' do
other_project = create(:project)
- get api("/projects/#{other_project.id}/snippets/#{snippet.id}/user_agent_detail", admin)
+ get api("/projects/#{other_project.id}/snippets/#{public_snippet.id}/user_agent_detail", admin)
expect(response).to have_gitlab_http_status(:not_found)
end
it "returns unauthorized for non-admin users" do
- get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/user_agent_detail", user)
+ get api("/projects/#{public_snippet.project.id}/snippets/#{public_snippet.id}/user_agent_detail", user)
expect(response).to have_gitlab_http_status(:forbidden)
end
context 'with snippets disabled' do
it_behaves_like '403 response' do
- let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/123/user_agent_detail", admin) }
+ let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/user_agent_detail", admin) }
end
end
end
describe 'GET /projects/:project_id/snippets/' do
- let(:user) { create(:user) }
-
it 'returns all snippets available to team member' do
project.add_developer(user)
- public_snippet = create(:project_snippet, :public, project: project)
+
internal_snippet = create(:project_snippet, :internal, project: project)
private_snippet = create(:project_snippet, :private, project: project)
@@ -62,8 +55,7 @@ RSpec.describe API::ProjectSnippets do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.size).to eq(3)
- expect(json_response.map { |snippet| snippet['id'] }).to include(public_snippet.id, internal_snippet.id, private_snippet.id)
+ expect(json_response.map { |snippet| snippet['id'] }).to contain_exactly(public_snippet.id, internal_snippet.id, private_snippet.id)
expect(json_response.last).to have_key('web_url')
end
@@ -75,7 +67,7 @@ RSpec.describe API::ProjectSnippets do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.size).to eq(0)
+ expect(json_response.map { |snippet| snippet['id'] }).to contain_exactly(public_snippet.id)
end
context 'with snippets disabled' do
@@ -86,8 +78,7 @@ RSpec.describe API::ProjectSnippets do
end
describe 'GET /projects/:project_id/snippets/:id' do
- let_it_be(:user) { create(:user) }
- let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project) }
+ let(:snippet) { public_snippet }
it 'returns snippet json' do
get api("/projects/#{project.id}/snippets/#{snippet.id}", user)
@@ -113,12 +104,12 @@ RSpec.describe API::ProjectSnippets do
context 'with snippets disabled' do
it_behaves_like '403 response' do
- let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/123", user) }
+ let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", user) }
end
end
- it_behaves_like 'snippet_multiple_files feature disabled' do
- subject { get api("/projects/#{project.id}/snippets/#{snippet.id}", user) }
+ it_behaves_like 'project snippet access levels' do
+ let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}" }
end
end
@@ -133,37 +124,35 @@ RSpec.describe API::ProjectSnippets do
let(:file_path) { 'file_1.rb' }
let(:file_content) { 'puts "hello world"' }
- let(:params) { base_params.merge(file_params) }
let(:file_params) { { files: [{ file_path: file_path, content: file_content }] } }
+ let(:params) { base_params.merge(file_params) }
+
+ subject { post api("/projects/#{project.id}/snippets/", actor), params: params }
shared_examples 'project snippet repository actions' do
let(:snippet) { ProjectSnippet.find(json_response['id']) }
- it 'creates repository' do
- subject
-
- expect(snippet.repository.exists?).to be_truthy
- end
-
it 'commits the files to the repository' do
subject
- blob = snippet.repository.blob_at('master', file_path)
+ aggregate_failures do
+ expect(snippet.repository.exists?).to be_truthy
+
+ blob = snippet.repository.blob_at('master', file_path)
- expect(blob.data).to eq file_content
+ expect(blob.data).to eq file_content
+ end
end
end
context 'with an external user' do
- let(:user) { create(:user, :external) }
+ let(:actor) { create(:user, :external) }
context 'that belongs to the project' do
- before do
- project.add_developer(user)
- end
-
it 'creates a new snippet' do
- post api("/projects/#{project.id}/snippets/", user), params: params
+ project.add_developer(actor)
+
+ subject
expect(response).to have_gitlab_http_status(:created)
end
@@ -171,7 +160,7 @@ RSpec.describe API::ProjectSnippets do
context 'that does not belong to the project' do
it 'does not create a new snippet' do
- post api("/projects/#{project.id}/snippets/", user), params: params
+ subject
expect(response).to have_gitlab_http_status(:forbidden)
end
@@ -179,16 +168,17 @@ RSpec.describe API::ProjectSnippets do
end
context 'with a regular user' do
- let(:user) { create(:user) }
+ let(:actor) { user }
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::PRIVATE])
params['visibility'] = 'internal'
end
- subject { post api("/projects/#{project.id}/snippets/", user), params: params }
-
it 'creates a new snippet' do
subject
@@ -205,7 +195,7 @@ RSpec.describe API::ProjectSnippets do
end
context 'with an admin' do
- subject { post api("/projects/#{project.id}/snippets/", admin), params: params }
+ let(:actor) { admin }
it 'creates a new snippet' do
subject
@@ -244,6 +234,8 @@ RSpec.describe API::ProjectSnippets do
end
context 'when save fails because the repository could not be created' do
+ let(:actor) { admin }
+
before do
allow_next_instance_of(Snippets::CreateService) do |instance|
allow(instance).to receive(:create_repository).and_raise(Snippets::CreateService::CreateRepositoryError)
@@ -251,43 +243,44 @@ RSpec.describe API::ProjectSnippets do
end
it 'returns 400' do
- post api("/projects/#{project.id}/snippets", admin), params: params
+ subject
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when the snippet is spam' do
- def create_snippet(project, snippet_params = {})
- project.add_developer(user)
-
- post api("/projects/#{project.id}/snippets", user), params: params.merge(snippet_params)
- end
+ let(:actor) { user }
before do
allow_next_instance_of(Spam::AkismetService) do |instance|
allow(instance).to receive(:spam?).and_return(true)
end
+
+ project.add_developer(user)
end
context 'when the snippet is private' do
it 'creates the snippet' do
- expect { create_snippet(project, visibility: 'private') }
- .to change { Snippet.count }.by(1)
+ params['visibility'] = 'private'
+
+ expect { subject }.to change { Snippet.count }.by(1)
end
end
context 'when the snippet is public' do
- it 'rejects the snippet' do
- expect { create_snippet(project, visibility: 'public') }
- .not_to change { Snippet.count }
+ before do
+ params['visibility'] = 'public'
+        end
+
+        it 'rejects the snippet' do
+ expect { subject }.not_to change { Snippet.count }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq({ "error" => "Spam detected" })
end
it 'creates a spam log' do
- expect { create_snippet(project, visibility: 'public') }
+ expect { subject }
.to log_spam(title: 'Test Title', user_id: user.id, noteable_type: 'ProjectSnippet')
end
end
@@ -363,7 +356,7 @@ RSpec.describe API::ProjectSnippets do
context 'with snippets disabled' do
it_behaves_like '403 response' do
- let(:request) { put api("/projects/#{project_no_snippets.id}/snippets/123", admin), params: { description: 'foo' } }
+ let(:request) { put api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin), params: { description: 'foo' } }
end
end
@@ -373,7 +366,7 @@ RSpec.describe API::ProjectSnippets do
end
describe 'DELETE /projects/:project_id/snippets/:id/' do
- let(:snippet) { create(:project_snippet, author: admin, project: project) }
+ let_it_be(:snippet, refind: true) { public_snippet }
it 'deletes snippet' do
delete api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin)
@@ -394,13 +387,13 @@ RSpec.describe API::ProjectSnippets do
context 'with snippets disabled' do
it_behaves_like '403 response' do
- let(:request) { delete api("/projects/#{project_no_snippets.id}/snippets/123", admin) }
+ let(:request) { delete api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}", admin) }
end
end
end
describe 'GET /projects/:project_id/snippets/:id/raw' do
- let_it_be(:snippet) { create(:project_snippet, :repository, author: admin, project: project) }
+ let_it_be(:snippet) { create(:project_snippet, :repository, :public, author: admin, project: project) }
it 'returns raw text' do
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", admin)
@@ -416,9 +409,13 @@ RSpec.describe API::ProjectSnippets do
expect(json_response['message']).to eq('404 Snippet Not Found')
end
+ it_behaves_like 'project snippet access levels' do
+ let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw" }
+ end
+
context 'with snippets disabled' do
it_behaves_like '403 response' do
- let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/123/raw", admin) }
+ let(:request) { get api("/projects/#{project_no_snippets.id}/snippets/#{non_existing_record_id}/raw", admin) }
end
end
@@ -435,5 +432,9 @@ RSpec.describe API::ProjectSnippets do
it_behaves_like 'raw snippet files' do
let(:api_path) { "/projects/#{snippet.project.id}/snippets/#{snippet_id}/files/#{ref}/#{file_path}/raw" }
end
+
+ it_behaves_like 'project snippet access levels' do
+ let(:path) { "/projects/#{snippet.project.id}/snippets/#{snippet.id}/files/master/%2Egitattributes/raw" }
+ end
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 831b0d6e678..2abcb39a1c8 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1615,6 +1615,7 @@ RSpec.describe API::Projects do
expect(json_response['allow_merge_on_skipped_pipeline']).to eq(project.allow_merge_on_skipped_pipeline)
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
+ expect(json_response['ci_forward_deployment_enabled']).to eq(project.ci_forward_deployment_enabled)
expect(json_response['merge_method']).to eq(project.merge_method.to_s)
expect(json_response['readme_url']).to eq(project.readme_url)
expect(json_response).to have_key 'packages_enabled'
@@ -2607,6 +2608,7 @@ RSpec.describe API::Projects do
merge_requests_enabled: true,
merge_method: 'ff',
ci_default_git_depth: 20,
+ ci_forward_deployment_enabled: false,
description: 'new description' }
put api("/projects/#{project3.id}", user4), params: project_param
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index e72ac002f6b..72a470dca4b 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -23,24 +23,24 @@ RSpec.describe API::PypiPackages do
using RSpec::Parameterized::TableSyntax
where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | true | true | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | true | false | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | true | false | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | false | true | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | false | true | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | false | false | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | false | false | 'PyPi package versions' | :success
- 'PUBLIC' | :anonymous | false | true | 'PyPi package versions' | :success
- 'PRIVATE' | :developer | true | true | 'PyPi package versions' | :success
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :developer | true | true | 'PyPI package versions' | :success
+ 'PUBLIC' | :guest | true | true | 'PyPI package versions' | :success
+ 'PUBLIC' | :developer | true | false | 'PyPI package versions' | :success
+ 'PUBLIC' | :guest | true | false | 'PyPI package versions' | :success
+ 'PUBLIC' | :developer | false | true | 'PyPI package versions' | :success
+ 'PUBLIC' | :guest | false | true | 'PyPI package versions' | :success
+ 'PUBLIC' | :developer | false | false | 'PyPI package versions' | :success
+ 'PUBLIC' | :guest | false | false | 'PyPI package versions' | :success
+ 'PUBLIC' | :anonymous | false | true | 'PyPI package versions' | :success
+ 'PRIVATE' | :developer | true | true | 'PyPI package versions' | :success
+ 'PRIVATE' | :guest | true | true | 'process PyPI api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPI api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPI api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPI api request' | :unauthorized
end
with_them do
@@ -57,6 +57,16 @@ RSpec.describe API::PypiPackages do
end
end
+ context 'with a normalized package name' do
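+      # PyPI (PEP 503) normalizes package names: runs of '.', '-' and '_' collapse to a
+      # single '-', so a package created as 'my.package' should be served at
+      # .../simple/my-package.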
+ let_it_be(:package) { create(:pypi_package, project: project, name: 'my.package') }
+ let(:url) { "/projects/#{project.id}/packages/pypi/simple/my-package" }
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
+
+ subject { get api(url), headers: headers }
+
+ it_behaves_like 'PyPI package versions', :developer, :success
+ end
+
it_behaves_like 'deploy token for package GET requests'
it_behaves_like 'job token for package GET requests'
@@ -76,24 +86,24 @@ RSpec.describe API::PypiPackages do
using RSpec::Parameterized::TableSyntax
where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process PyPi api request' | :success
- 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :success
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :developer | true | true | 'process PyPI api request' | :success
+ 'PUBLIC' | :guest | true | true | 'process PyPI api request' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'process PyPI api request' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'process PyPI api request' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process PyPI api request' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'process PyPI api request' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'process PyPI api request' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'process PyPI api request' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process PyPI api request' | :success
+ 'PRIVATE' | :guest | true | true | 'process PyPI api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPI api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPI api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPI api request' | :unauthorized
end
with_them do
@@ -142,24 +152,24 @@ RSpec.describe API::PypiPackages do
using RSpec::Parameterized::TableSyntax
where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package creation' | :created
- 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :created
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :developer | true | true | 'PyPI package creation' | :created
+ 'PUBLIC' | :guest | true | true | 'process PyPI api request' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'process PyPI api request' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'process PyPI api request' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process PyPI api request' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'process PyPI api request' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'process PyPI api request' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'process PyPI api request' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process PyPI api request' | :created
+ 'PRIVATE' | :guest | true | true | 'process PyPI api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPI api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPI api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPI api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPI api request' | :unauthorized
end
with_them do
@@ -185,7 +195,7 @@ RSpec.describe API::PypiPackages do
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
- it_behaves_like 'process PyPi api request', :developer, :bad_request, true
+ it_behaves_like 'process PyPI api request', :developer, :bad_request, true
end
context 'with an invalid package' do
@@ -232,24 +242,24 @@ RSpec.describe API::PypiPackages do
using RSpec::Parameterized::TableSyntax
where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package download' | :success
- 'PUBLIC' | :guest | true | true | 'PyPi package download' | :success
- 'PUBLIC' | :developer | true | false | 'PyPi package download' | :success
- 'PUBLIC' | :guest | true | false | 'PyPi package download' | :success
- 'PUBLIC' | :developer | false | true | 'PyPi package download' | :success
- 'PUBLIC' | :guest | false | true | 'PyPi package download' | :success
- 'PUBLIC' | :developer | false | false | 'PyPi package download' | :success
- 'PUBLIC' | :guest | false | false | 'PyPi package download' | :success
- 'PUBLIC' | :anonymous | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | true | true | 'PyPi package download' | :success
- 'PRIVATE' | :guest | true | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | true | false | 'PyPi package download' | :success
- 'PRIVATE' | :guest | true | false | 'PyPi package download' | :success
- 'PRIVATE' | :developer | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :guest | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | false | false | 'PyPi package download' | :success
- 'PRIVATE' | :guest | false | false | 'PyPi package download' | :success
- 'PRIVATE' | :anonymous | false | true | 'PyPi package download' | :success
+ 'PUBLIC' | :developer | true | true | 'PyPI package download' | :success
+ 'PUBLIC' | :guest | true | true | 'PyPI package download' | :success
+ 'PUBLIC' | :developer | true | false | 'PyPI package download' | :success
+ 'PUBLIC' | :guest | true | false | 'PyPI package download' | :success
+ 'PUBLIC' | :developer | false | true | 'PyPI package download' | :success
+ 'PUBLIC' | :guest | false | true | 'PyPI package download' | :success
+ 'PUBLIC' | :developer | false | false | 'PyPI package download' | :success
+ 'PUBLIC' | :guest | false | false | 'PyPI package download' | :success
+ 'PUBLIC' | :anonymous | false | true | 'PyPI package download' | :success
+ 'PRIVATE' | :developer | true | true | 'PyPI package download' | :success
+ 'PRIVATE' | :guest | true | true | 'PyPI package download' | :success
+ 'PRIVATE' | :developer | true | false | 'PyPI package download' | :success
+ 'PRIVATE' | :guest | true | false | 'PyPI package download' | :success
+ 'PRIVATE' | :developer | false | true | 'PyPI package download' | :success
+ 'PRIVATE' | :guest | false | true | 'PyPI package download' | :success
+ 'PRIVATE' | :developer | false | false | 'PyPI package download' | :success
+ 'PRIVATE' | :guest | false | false | 'PyPI package download' | :success
+ 'PRIVATE' | :anonymous | false | true | 'PyPI package download' | :success
end
with_them do
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 779ae983886..e78d05835f2 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -53,6 +53,49 @@ RSpec.describe API::Releases do
expect(json_response.second['tag_name']).to eq(release_1.tag)
end
+ RSpec.shared_examples 'release sorting' do |order_by|
+ subject { get api(url, access_level), params: { sort: sort, order_by: order_by } }
+
+ context "sorting by #{order_by}" do
+ context 'ascending order' do
+ let(:sort) { 'asc' }
+
+ it 'returns the sorted releases' do
+ subject
+
+ expect(json_response.map { |release| release['name'] }).to eq(releases.map(&:name))
+ end
+ end
+
+ context 'descending order' do
+ let(:sort) { 'desc' }
+
+ it 'returns the sorted releases' do
+ subject
+
+ expect(json_response.map { |release| release['name'] }).to eq(releases.reverse.map(&:name))
+ end
+ end
+ end
+ end
+
+ context 'return releases in sorted order' do
+ before do
+ release_2.update_attribute(:created_at, 3.days.ago)
+ end
+
+ let(:url) { "/projects/#{project.id}/releases" }
+ let(:access_level) { maintainer }
+
+ it_behaves_like 'release sorting', 'released_at' do
+ let(:releases) { [release_1, release_2] }
+ end
+
+ it_behaves_like 'release sorting', 'created_at' do
+ let(:releases) { [release_2, release_1] }
+ end
+ end
+
it 'matches response schema' do
get api("/projects/#{project.id}/releases", maintainer)
@@ -259,7 +302,7 @@ RSpec.describe API::Releases do
end
it '#collected_at' do
- Timecop.freeze(Time.now.round) do
+ travel_to(Time.now.round) do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['evidences'].first['collected_at'].to_datetime.to_i).to be_within(1.minute).of(release.evidences.first.created_at.to_i)
@@ -476,7 +519,7 @@ RSpec.describe API::Releases do
it 'sets the released_at to the current time if the released_at parameter is not provided' do
now = Time.zone.parse('2015-08-25 06:00:00Z')
- Timecop.freeze(now) do
+ travel_to(now) do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(project.releases.last.released_at).to eq(now)
@@ -598,7 +641,7 @@ RSpec.describe API::Releases do
end
end
- context 'when create two assets' do
+ context 'when creating two assets' do
let(:params) do
base_params.merge({
assets: {
@@ -758,6 +801,65 @@ RSpec.describe API::Releases do
expect(response).to have_gitlab_http_status(:conflict)
end
end
+
+ context 'with milestones' do
+ let(:subject) { post api("/projects/#{project.id}/releases", maintainer), params: params }
+ let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
+ let(:returned_milestones) { json_response['milestones'].map {|m| m['title']} }
+
+ before do
+ params.merge!(milestone_params)
+
+ subject
+ end
+
+ context 'with a project milestone' do
+ let(:milestone_params) { { milestones: [milestone.title] } }
+
+ it 'adds the milestone' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(returned_milestones).to match_array(['v1.0'])
+ end
+ end
+
+ context 'with multiple milestones' do
+ let(:milestone2) { create(:milestone, project: project, title: 'm2') }
+ let(:milestone_params) { { milestones: [milestone.title, milestone2.title] } }
+
+ it 'adds all milestones' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(returned_milestones).to match_array(['v1.0', 'm2'])
+ end
+ end
+
+ context 'with an empty milestone' do
+ let(:milestone_params) { { milestones: [] } }
+
+ it 'removes all milestones' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['milestones']).to be_nil
+ end
+ end
+
+ context 'with a non-existent milestone' do
+ let(:milestone_params) { { milestones: ['xyz'] } }
+
+ it 'returns a 400 error when the milestone is not found' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq("Milestone(s) not found: xyz")
+ end
+ end
+
+ context 'with a milestone from a different project' do
+ let(:milestone) { create(:milestone, title: 'v1.0') }
+ let(:milestone_params) { { milestones: [milestone.title] } }
+
+ it 'returns a 400 error when the milestone is not found' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq("Milestone(s) not found: v1.0")
+ end
+ end
+ end
end
describe 'PUT /projects/:id/releases/:tag_name' do
@@ -863,6 +965,83 @@ RSpec.describe API::Releases do
end
end
end
+
+ context 'with milestones' do
+ let(:returned_milestones) { json_response['milestones'].map {|m| m['title']} }
+
+ subject { put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params }
+
+ context 'when a milestone is passed in' do
+ let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
+ let(:milestone_title) { milestone.title }
+ let(:params) { { milestones: [milestone_title] } }
+
+ before do
+ release.milestones << milestone
+ end
+
+ context 'a different milestone' do
+ let(:milestone_title) { 'v2.0' }
+ let!(:milestone2) { create(:milestone, project: project, title: milestone_title) }
+
+ it 'replaces the milestone' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(returned_milestones).to match_array(['v2.0'])
+ end
+ end
+
+ context 'an identical milestone' do
+ let(:milestone_title) { 'v1.0' }
+
+ it 'does not change the milestone' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(returned_milestones).to match_array(['v1.0'])
+ end
+ end
+
+ context 'an empty milestone' do
+ let(:milestone_title) { nil }
+
+ it 'removes the milestone' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['milestones']).to be_nil
+ end
+ end
+
+ context 'multiple milestones' do
+ context 'with one new' do
+ let!(:milestone2) { create(:milestone, project: project, title: 'milestone2') }
+ let(:params) { { milestones: [milestone.title, milestone2.title] } }
+
+ it 'adds the new milestone' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(returned_milestones).to match_array(['v1.0', 'milestone2'])
+ end
+ end
+
+ context 'with all new' do
+ let!(:milestone2) { create(:milestone, project: project, title: 'milestone2') }
+ let!(:milestone3) { create(:milestone, project: project, title: 'milestone3') }
+ let(:params) { { milestones: [milestone2.title, milestone3.title] } }
+
+ it 'replaces the milestones' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(returned_milestones).to match_array(%w(milestone2 milestone3))
+ end
+ end
+ end
+ end
+ end
end
describe 'DELETE /projects/:id/releases/:tag_name' do
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 36707f32d04..45bce8c8a5c 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -402,7 +402,9 @@ RSpec.describe API::Repositories do
end
it "returns an empty string when the diff overflows" do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: 2, max_lines: 2 })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: 2, max_lines: 2 })
get api(route, current_user), params: { from: 'master', to: 'feature' }
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index af6731f3015..05cfad9cc62 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -58,6 +58,17 @@ RSpec.describe API::Search do
end
end
+ shared_examples 'filter by confidentiality' do |scope:, search:|
+ it 'respects confidentiality filtering' do
+ get api(endpoint, user), params: { scope: scope, search: search, confidential: confidential.to_s }
+
+ documents = Gitlab::Json.parse(response.body)
+
+ expect(documents.count).to eq(1)
+ expect(documents.first['confidential']).to eq(confidential)
+ end
+ end
+
describe 'GET /search' do
let(:endpoint) { '/search' }
@@ -137,6 +148,26 @@ RSpec.describe API::Search do
include_examples 'filter by state', scope: :issues, search: 'awesome'
end
end
+
+ context 'filter by confidentiality' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: true)
+ create(:issue, project: project, author: user, title: 'awesome non-confidential issue')
+ create(:issue, :confidential, project: project, author: user, title: 'awesome confidential issue')
+ end
+
+ context 'confidential: true' do
+ let(:confidential) { true }
+
+ include_examples 'filter by confidentiality', scope: :issues, search: 'awesome'
+ end
+
+ context 'confidential: false' do
+ let(:confidential) { false }
+
+ include_examples 'filter by confidentiality', scope: :issues, search: 'awesome'
+ end
+ end
end
context 'for merge_requests scope' do
@@ -231,18 +262,6 @@ RSpec.describe API::Search do
it_behaves_like 'pagination', scope: :users
it_behaves_like 'ping counters', scope: :users
-
- context 'when users search feature is disabled' do
- before do
- stub_feature_flags(users_search: false)
-
- get api(endpoint, user), params: { scope: 'users', search: 'billy' }
- end
-
- it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
end
context 'for snippet_titles scope' do
@@ -416,18 +435,6 @@ RSpec.describe API::Search do
include_examples 'pagination', scope: :users
end
-
- context 'when users search feature is disabled' do
- before do
- stub_feature_flags(users_search: false)
-
- get api(endpoint, user), params: { scope: 'users', search: 'billy' }
- end
-
- it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
end
context 'for users scope with group path as id' do
@@ -589,18 +596,6 @@ RSpec.describe API::Search do
include_examples 'pagination', scope: :users
end
-
- context 'when users search feature is disabled' do
- before do
- stub_feature_flags(users_search: false)
-
- get api(endpoint, user), params: { scope: 'users', search: 'billy' }
- end
-
- it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
end
context 'for notes scope' do
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index 5528a0c094f..63ed57c5045 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -264,4 +264,34 @@ RSpec.describe API::Services do
expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
end
end
+
+ describe 'Hangouts Chat service' do
+ let(:service_name) { 'hangouts-chat' }
+ let(:params) do
+ {
+ webhook: 'https://hook.example.com',
+ branches_to_be_notified: 'default'
+ }
+ end
+
+ before do
+ project.create_hangouts_chat_service(
+ active: true,
+ properties: params
+ )
+ end
+
+ it 'accepts branches_to_be_notified for update', :aggregate_failures do
+ put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(branches_to_be_notified: 'all')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties']['branches_to_be_notified']).to eq('all')
+ end
+
+ it 'only requires the webhook param' do
+ put api("/projects/#{project.id}/services/#{service_name}", user), params: { webhook: 'https://hook.example.com' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index ef12f6dbed3..8b5f74df8f8 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -96,6 +96,7 @@ RSpec.describe API::Settings, 'Settings' do
help_page_text: 'custom help text',
help_page_hide_commercial_content: true,
help_page_support_url: 'http://example.com/help',
+ help_page_documentation_base_url: 'https://docs.gitlab.com',
project_export_enabled: false,
rsa_key_restriction: ApplicationSetting::FORBIDDEN_KEY_VALUE,
dsa_key_restriction: 2048,
@@ -138,6 +139,7 @@ RSpec.describe API::Settings, 'Settings' do
expect(json_response['help_page_text']).to eq('custom help text')
expect(json_response['help_page_hide_commercial_content']).to be_truthy
expect(json_response['help_page_support_url']).to eq('http://example.com/help')
+ expect(json_response['help_page_documentation_base_url']).to eq('https://docs.gitlab.com')
expect(json_response['project_export_enabled']).to be_falsey
expect(json_response['rsa_key_restriction']).to eq(ApplicationSetting::FORBIDDEN_KEY_VALUE)
expect(json_response['dsa_key_restriction']).to eq(2048)
@@ -413,6 +415,14 @@ RSpec.describe API::Settings, 'Settings' do
end
end
+ it 'supports legacy admin_notification_email' do
+ put api('/application/settings', admin),
+ params: { admin_notification_email: 'test@example.com' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['abuse_notification_email']).to eq('test@example.com')
+ end
+
context "missing sourcegraph_url value when sourcegraph_enabled is true" do
it "returns a blank parameter error message" do
put api("/application/settings", admin), params: { sourcegraph_enabled: true }
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 8d77026d26c..227c53f8fb9 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -2,18 +2,28 @@
require 'spec_helper'
-RSpec.describe API::Snippets do
+RSpec.describe API::Snippets, factory_default: :keep do
include SnippetHelpers
- let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
- describe 'GET /snippets/' do
- it 'returns snippets available' do
- public_snippet = create(:personal_snippet, :repository, :public, author: user)
- private_snippet = create(:personal_snippet, :repository, :private, author: user)
- internal_snippet = create(:personal_snippet, :repository, :internal, author: user)
+ let_it_be(:public_snippet) { create(:personal_snippet, :repository, :public, author: user) }
+ let_it_be(:private_snippet) { create(:personal_snippet, :repository, :private, author: user) }
+ let_it_be(:internal_snippet) { create(:personal_snippet, :repository, :internal, author: user) }
+
+ let_it_be(:user_token) { create(:personal_access_token, user: user) }
+ let_it_be(:other_user_token) { create(:personal_access_token, user: other_user) }
+ let_it_be(:project) do
+ create_default(:project, :public).tap do |p|
+ p.add_maintainer(user)
+ end
+ end
- get api("/snippets/", user)
+ describe 'GET /snippets/' do
+ it 'returns snippets available for user' do
+ get api("/snippets/", personal_access_token: user_token)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -29,9 +39,7 @@ RSpec.describe API::Snippets do
end
it 'hides private snippets from regular user' do
- create(:personal_snippet, :private)
-
- get api("/snippets/", user)
+ get api("/snippets/", personal_access_token: other_user_token)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -39,21 +47,17 @@ RSpec.describe API::Snippets do
expect(json_response.size).to eq(0)
end
- it 'returns 404 for non-authenticated' do
- create(:personal_snippet, :internal)
-
+ it 'returns 401 for non-authenticated' do
get api("/snippets/")
expect(response).to have_gitlab_http_status(:unauthorized)
end
it 'does not return snippets related to a project with disabled feature visibility' do
- project = create(:project)
- create(:project_member, project: project, user: user)
- public_snippet = create(:personal_snippet, :public, author: user, project: project)
+ public_snippet = create(:project_snippet, :public, author: user, project: project)
project.project_feature.update_attribute(:snippets_access_level, 0)
- get api("/snippets/", user)
+ get api("/snippets/", personal_access_token: user_token)
json_response.each do |snippet|
expect(snippet["id"]).not_to eq(public_snippet.id)
@@ -62,10 +66,6 @@ RSpec.describe API::Snippets do
end
describe 'GET /snippets/public' do
- let_it_be(:other_user) { create(:user) }
- let_it_be(:public_snippet) { create(:personal_snippet, :repository, :public, author: user) }
- let_it_be(:private_snippet) { create(:personal_snippet, :repository, :private, author: user) }
- let_it_be(:internal_snippet) { create(:personal_snippet, :repository, :internal, author: user) }
let_it_be(:public_snippet_other) { create(:personal_snippet, :repository, :public, author: other_user) }
let_it_be(:private_snippet_other) { create(:personal_snippet, :repository, :private, author: other_user) }
let_it_be(:internal_snippet_other) { create(:personal_snippet, :repository, :internal, author: other_user) }
@@ -73,8 +73,10 @@ RSpec.describe API::Snippets do
let_it_be(:private_snippet_project) { create(:project_snippet, :repository, :private, author: user) }
let_it_be(:internal_snippet_project) { create(:project_snippet, :repository, :internal, author: user) }
- it 'returns all snippets with public visibility from all users' do
- get api("/snippets/public", user)
+ let(:path) { "/snippets/public" }
+
+ it 'returns only public snippets from all users when authenticated' do
+ get api(path, personal_access_token: user_token)
aggregate_failures do
expect(response).to have_gitlab_http_status(:ok)
@@ -90,20 +92,23 @@ RSpec.describe API::Snippets do
expect(json_response[1]['files'].first).to eq snippet_blob_file(public_snippet.blobs.first)
end
end
- end
-
- describe 'GET /snippets/:id/raw' do
- let_it_be(:author) { create(:user) }
- let_it_be(:snippet) { create(:personal_snippet, :repository, :private, author: author) }
it 'requires authentication' do
- get api("/snippets/#{snippet.id}", nil)
+ get api(path, nil)
expect(response).to have_gitlab_http_status(:unauthorized)
end
+ end
+
+ describe 'GET /snippets/:id/raw' do
+ let(:snippet) { private_snippet }
+
+ it_behaves_like 'snippet access with different users' do
+ let(:path) { "/snippets/#{snippet.id}/raw" }
+ end
it 'returns raw text' do
- get api("/snippets/#{snippet.id}/raw", author)
+ get api("/snippets/#{snippet.id}/raw", personal_access_token: user_token)
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq 'text/plain'
@@ -113,69 +118,37 @@ RSpec.describe API::Snippets do
it 'returns 404 for invalid snippet id' do
snippet.destroy!
- get api("/snippets/#{snippet.id}/raw", author)
+ get api("/snippets/#{snippet.id}/raw", personal_access_token: user_token)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
- it 'hides private snippets from ordinary users' do
- get api("/snippets/#{snippet.id}/raw", user)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
-
- it 'shows internal snippets to ordinary users' do
- internal_snippet = create(:personal_snippet, :internal, author: author)
-
- get api("/snippets/#{internal_snippet.id}/raw", user)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
it_behaves_like 'snippet blob content' do
- let_it_be(:snippet_with_empty_repo) { create(:personal_snippet, :empty_repo, :private, author: author) }
+ let_it_be(:snippet_with_empty_repo) { create(:personal_snippet, :empty_repo, :private, author: user) }
- subject { get api("/snippets/#{snippet.id}/raw", snippet.author) }
+ subject { get api("/snippets/#{snippet.id}/raw", snippet.author, personal_access_token: user_token) }
end
end
describe 'GET /snippets/:id/files/:ref/:file_path/raw' do
- let_it_be(:snippet) { create(:personal_snippet, :repository, :private) }
+ let_it_be(:snippet) { private_snippet }
it_behaves_like 'raw snippet files' do
let(:api_path) { "/snippets/#{snippet_id}/files/#{ref}/#{file_path}/raw" }
end
- end
-
- describe 'GET /snippets/:id' do
- let_it_be(:admin) { create(:user, :admin) }
- let_it_be(:author) { create(:user) }
- let_it_be(:private_snippet) { create(:personal_snippet, :repository, :private, author: author) }
- let_it_be(:internal_snippet) { create(:personal_snippet, :repository, :internal, author: author) }
- let(:snippet) { private_snippet }
- subject { get api("/snippets/#{snippet.id}", user) }
-
- it 'hides private snippets from an ordinary user' do
- subject
-
- expect(response).to have_gitlab_http_status(:not_found)
+ it_behaves_like 'snippet access with different users' do
+ let(:path) { "/snippets/#{snippet.id}/files/master/%2Egitattributes/raw" }
end
+ end
- context 'without a user' do
- let(:user) { nil }
+ describe 'GET /snippets/:id' do
+ let(:snippet_id) { private_snippet.id }
- it 'requires authentication' do
- subject
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
- end
+ subject { get api("/snippets/#{snippet_id}", personal_access_token: user_token) }
context 'with the author' do
- let(:user) { author }
-
it 'returns snippet json' do
subject
@@ -191,18 +164,10 @@ RSpec.describe API::Snippets do
end
end
- context 'with an admin' do
- let(:user) { admin }
-
- it 'shows private snippets to an admin' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'returns 404 for invalid snippet id' do
- private_snippet.destroy!
+ context 'with a non-existent snippet ID' do
+ let(:snippet_id) { 0 }
+ it 'returns 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
@@ -210,18 +175,8 @@ RSpec.describe API::Snippets do
end
end
- context 'with an internal snippet' do
- let(:snippet) { internal_snippet }
-
- it 'shows internal snippets to an ordinary user' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- it_behaves_like 'snippet_multiple_files feature disabled' do
- let(:user) { author }
+ it_behaves_like 'snippet access with different users' do
+ let(:path) { "/snippets/#{snippet.id}" }
end
end
@@ -241,7 +196,7 @@ RSpec.describe API::Snippets do
let(:file_params) { { files: [{ file_path: file_path, content: file_content }] } }
let(:extra_params) { {} }
- subject { post api("/snippets/", user), params: params }
+ subject { post api("/snippets/", personal_access_token: user_token), params: params }
shared_examples 'snippet creation' do
let(:snippet) { Snippet.find(json_response["id"]) }
@@ -305,12 +260,9 @@ RSpec.describe API::Snippets do
it_behaves_like 'snippet creation'
- it_behaves_like 'snippet_multiple_files feature disabled' do
- let(:snippet) { Snippet.find(json_response["id"]) }
- end
-
context 'with an external user' do
let(:user) { create(:user, :external) }
+ let(:user_token) { create(:personal_access_token, user: user) }
it 'does not create a new snippet' do
subject
@@ -384,8 +336,6 @@ RSpec.describe API::Snippets do
end
describe 'PUT /snippets/:id' do
- let_it_be(:other_user) { create(:user) }
-
let(:visibility_level) { Snippet::PUBLIC }
let(:snippet) do
create(:personal_snippet, :repository, author: user, visibility_level: visibility_level)
@@ -465,11 +415,10 @@ RSpec.describe API::Snippets do
end
context "when admin" do
- let(:admin) { create(:admin) }
- let(:token) { create(:personal_access_token, user: admin, scopes: [:sudo]) }
+ let_it_be(:token) { create(:personal_access_token, user: admin, scopes: [:sudo]) }
subject do
- put api("/snippets/#{snippet.id}", admin, personal_access_token: token), params: { visibility: 'private', sudo: user.id }
+ put api("/snippets/#{snippet.id}", personal_access_token: token), params: { visibility: 'private', sudo: user.id }
end
context 'when sudo is defined' do
@@ -496,34 +445,32 @@ RSpec.describe API::Snippets do
end
describe 'DELETE /snippets/:id' do
- let!(:public_snippet) { create(:personal_snippet, :public, author: user) }
-
it 'deletes snippet' do
expect do
- delete api("/snippets/#{public_snippet.id}", user)
+ delete api("/snippets/#{public_snippet.id}", personal_access_token: user_token)
expect(response).to have_gitlab_http_status(:no_content)
end.to change { PersonalSnippet.count }.by(-1)
end
it 'returns 404 for invalid snippet id' do
- delete api("/snippets/#{non_existing_record_id}", user)
+ delete api("/snippets/#{non_existing_record_id}", personal_access_token: user_token)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
it_behaves_like '412 response' do
- let(:request) { api("/snippets/#{public_snippet.id}", user) }
+ let(:request) { api("/snippets/#{public_snippet.id}", personal_access_token: user_token) }
end
end
describe "GET /snippets/:id/user_agent_detail" do
- let(:admin) { create(:admin) }
- let(:snippet) { create(:personal_snippet, :public, author: user) }
- let!(:user_agent_detail) { create(:user_agent_detail, subject: snippet) }
+ let(:snippet) { public_snippet }
it 'exposes known attributes' do
+ user_agent_detail = create(:user_agent_detail, subject: snippet)
+
get api("/snippets/#{snippet.id}/user_agent_detail", admin)
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb
index 8d128bd911f..aff41ff5974 100644
--- a/spec/requests/api/terraform/state_spec.rb
+++ b/spec/requests/api/terraform/state_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe API::Terraform::State do
let(:state_path) { "/projects/#{project_id}/terraform/state/#{state_name}" }
before do
- stub_terraform_state_object_storage(Terraform::StateUploader)
+ stub_terraform_state_object_storage
end
describe 'GET /projects/:id/terraform/state/:name' do
diff --git a/spec/requests/api/terraform/state_version_spec.rb b/spec/requests/api/terraform/state_version_spec.rb
new file mode 100644
index 00000000000..ade0aacf805
--- /dev/null
+++ b/spec/requests/api/terraform/state_version_spec.rb
@@ -0,0 +1,210 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Terraform::StateVersion do
+ include HttpBasicAuthHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user, developer_projects: [project]) }
+ let_it_be(:maintainer) { create(:user, maintainer_projects: [project]) }
+ let_it_be(:user_without_access) { create(:user) }
+
+ let_it_be(:state) { create(:terraform_state, project: project) }
+
+ let!(:versions) { create_list(:terraform_state_version, 3, terraform_state: state) }
+
+ let(:current_user) { maintainer }
+ let(:auth_header) { user_basic_auth_header(current_user) }
+ let(:project_id) { project.id }
+ let(:state_name) { state.name }
+ let(:version) { versions.last }
+ let(:version_serial) { version.version }
+ let(:state_version_path) { "/projects/#{project_id}/terraform/state/#{state_name}/versions/#{version_serial}" }
+
+ describe 'GET /projects/:id/terraform/state/:name/versions/:serial' do
+ subject(:request) { get api(state_version_path), headers: auth_header }
+
+ context 'with invalid authentication' do
+ let(:auth_header) { basic_auth_header('bad', 'token') }
+
+ it 'returns unauthorized status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'with no authentication' do
+ let(:auth_header) { nil }
+
+ it 'returns unauthorized status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'personal access token authentication' do
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'returns the state contents at the given version' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(version.file.read)
+ end
+
+ context 'for a project that does not exist' do
+ let(:project_id) { '0000' }
+
+ it 'returns not found status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns the state contents at the given version' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(version.file.read)
+ end
+ end
+
+ context 'with no permissions' do
+ let(:current_user) { user_without_access }
+
+ it 'returns not found status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'job token authentication' do
+ let(:auth_header) { job_basic_auth_header(job) }
+
+ context 'with maintainer permissions' do
+ let(:job) { create(:ci_build, status: :running, project: project, user: maintainer) }
+
+ it 'returns the state contents at the given version' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(version.file.read)
+ end
+
+ it 'returns unauthorized status if the job is not running' do
+ job.update!(status: :failed)
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ context 'for a project that does not exist' do
+ let(:project_id) { '0000' }
+
+ it 'returns not found status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:job) { create(:ci_build, status: :running, project: project, user: developer) }
+
+ it 'returns the state contents at the given version' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(version.file.read)
+ end
+ end
+
+ context 'with no permissions' do
+ let(:current_user) { user_without_access }
+ let(:job) { create(:ci_build, status: :running, user: current_user) }
+
+ it 'returns not found status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
+ describe 'DELETE /projects/:id/terraform/state/:name/versions/:serial' do
+ subject(:request) { delete api(state_version_path), headers: auth_header }
+
+ context 'with invalid authentication' do
+ let(:auth_header) { basic_auth_header('bad', 'token') }
+
+ it 'returns unauthorized status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'with no authentication' do
+ let(:auth_header) { nil }
+
+ it 'returns unauthorized status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'deletes the version' do
+ expect { request }.to change { Terraform::StateVersion.count }.by(-1)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ context 'version does not exist' do
+ let(:version_serial) { -1 }
+
+ it 'does not delete a version' do
+ expect { request }.to change { Terraform::StateVersion.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden status' do
+ expect { request }.to change { Terraform::StateVersion.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'with no permissions' do
+ let(:current_user) { user_without_access }
+
+ it 'returns not found status' do
+ expect { request }.to change { Terraform::StateVersion.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/unleash_spec.rb b/spec/requests/api/unleash_spec.rb
new file mode 100644
index 00000000000..0b70d62b093
--- /dev/null
+++ b/spec/requests/api/unleash_spec.rb
@@ -0,0 +1,608 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Unleash do
+ include FeatureFlagHelpers
+
+ let_it_be(:project, refind: true) { create(:project) }
+ let(:project_id) { project.id }
+ let(:params) { }
+ let(:headers) { }
+
+ shared_examples 'authenticated request' do
+ context 'when using instance id' do
+ let(:client) { create(:operations_feature_flags_client, project: project) }
+ let(:params) { { instance_id: client.token } }
+
+ it 'responds with OK' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when repository is disabled' do
+ before do
+ project.project_feature.update!(
+ repository_access_level: ::ProjectFeature::DISABLED,
+ merge_requests_access_level: ::ProjectFeature::DISABLED,
+ builds_access_level: ::ProjectFeature::DISABLED
+ )
+ end
+
+ it 'responds with OK' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when repository is private' do
+ before do
+ project.project_feature.update!(
+ repository_access_level: ::ProjectFeature::PRIVATE,
+ merge_requests_access_level: ::ProjectFeature::DISABLED,
+ builds_access_level: ::ProjectFeature::DISABLED
+ )
+ end
+
+ it 'responds with OK' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
+ context 'when using header' do
+ let(:client) { create(:operations_feature_flags_client, project: project) }
+ let(:headers) { { "UNLEASH-INSTANCEID" => client.token }}
+
+ it 'responds with OK' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when using bogus instance id' do
+ let(:params) { { instance_id: 'token' } }
+
+ it 'responds with unauthorized' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when using a non-existent project' do
+ let(:project_id) { -5000 }
+ let(:params) { { instance_id: 'token' } }
+
+ it 'responds with unauthorized' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ shared_examples_for 'support multiple environments' do
+ let!(:client) { create(:operations_feature_flags_client, project: project) }
+ let!(:base_headers) { { "UNLEASH-INSTANCEID" => client.token } }
+ let!(:headers) { base_headers.merge({ "UNLEASH-APPNAME" => "test" }) }
+
+ let!(:feature_flag_1) do
+ create(:operations_feature_flag, name: "feature_flag_1", project: project, active: true)
+ end
+
+ let!(:feature_flag_2) do
+ create(:operations_feature_flag, name: "feature_flag_2", project: project, active: false)
+ end
+
+ before do
+ create_scope(feature_flag_1, 'production', false)
+ create_scope(feature_flag_2, 'review/*', true)
+ end
+
+ it 'does not have an N+1 problem' do
+ control_count = ActiveRecord::QueryRecorder.new { get api(features_url), headers: headers }.count
+
+ create(:operations_feature_flag, name: "feature_flag_3", project: project, active: true)
+
+ expect { get api(features_url), headers: headers }.not_to exceed_query_limit(control_count)
+ end
+
+ context 'when app name is staging' do
+ let(:headers) { base_headers.merge({ "UNLEASH-APPNAME" => "staging" }) }
+
+ it 'returns correct active values' do
+ subject
+
+ feature_flag_1 = json_response['features'].find { |f| f['name'] == 'feature_flag_1' }
+ feature_flag_2 = json_response['features'].find { |f| f['name'] == 'feature_flag_2' }
+
+ expect(feature_flag_1['enabled']).to eq(true)
+ expect(feature_flag_2['enabled']).to eq(false)
+ end
+ end
+
+ context 'when app name is production' do
+ let(:headers) { base_headers.merge({ "UNLEASH-APPNAME" => "production" }) }
+
+ it 'returns correct active values' do
+ subject
+
+ feature_flag_1 = json_response['features'].find { |f| f['name'] == 'feature_flag_1' }
+ feature_flag_2 = json_response['features'].find { |f| f['name'] == 'feature_flag_2' }
+
+ expect(feature_flag_1['enabled']).to eq(false)
+ expect(feature_flag_2['enabled']).to eq(false)
+ end
+ end
+
+ context 'when app name is review/patch-1' do
+ let(:headers) { base_headers.merge({ "UNLEASH-APPNAME" => "review/patch-1" }) }
+
+ it 'returns correct active values' do
+ subject
+
+ feature_flag_1 = json_response['features'].find { |f| f['name'] == 'feature_flag_1' }
+ feature_flag_2 = json_response['features'].find { |f| f['name'] == 'feature_flag_2' }
+
+ expect(feature_flag_1['enabled']).to eq(true)
+ expect(feature_flag_2['enabled']).to eq(false)
+ end
+ end
+
+ context 'when app name is empty' do
+ let(:headers) { base_headers }
+
+ it 'returns empty list' do
+ subject
+
+ expect(json_response['features'].count).to eq(0)
+ end
+ end
+ end
+
+ %w(/feature_flags/unleash/:project_id/features /feature_flags/unleash/:project_id/client/features).each do |features_endpoint|
+ describe "GET #{features_endpoint}" do
+ let(:features_url) { features_endpoint.sub(':project_id', project_id.to_s) }
+ let(:client) { create(:operations_feature_flags_client, project: project) }
+
+ subject { get api(features_url), params: params, headers: headers }
+
+ it_behaves_like 'authenticated request'
+
+ context 'with version 1 (legacy) feature flags' do
+ let(:feature_flag) { create(:operations_feature_flag, project: project, name: 'feature1', active: true, version: 1) }
+
+ it_behaves_like 'support multiple environments'
+
+ context 'with a list of feature flags' do
+ let(:headers) { { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "production" } }
+ let!(:enabled_feature_flag) { create(:operations_feature_flag, project: project, name: 'feature1', active: true, version: 1) }
+ let!(:disabled_feature_flag) { create(:operations_feature_flag, project: project, name: 'feature2', active: false, version: 1) }
+
+ it 'responds with a list of features' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['version']).to eq(1)
+ expect(json_response['features']).not_to be_empty
+ expect(json_response['features'].map { |f| f['name'] }.sort).to eq(%w[feature1 feature2])
+ expect(json_response['features'].sort_by {|f| f['name'] }.map { |f| f['enabled'] }).to eq([true, false])
+ end
+
+ it 'matches json schema' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('unleash/unleash')
+ end
+ end
+
+ it 'returns a feature flag strategy' do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag,
+ environment_scope: 'sandbox',
+ active: true,
+ strategies: [{ name: "gradualRolloutUserId",
+ parameters: { groupId: "default", percentage: "50" } }])
+ headers = { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "sandbox" }
+
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['enabled']).to eq(true)
+ strategies = json_response['features'].first['strategies']
+ expect(strategies).to eq([{
+ "name" => "gradualRolloutUserId",
+ "parameters" => {
+ "percentage" => "50",
+ "groupId" => "default"
+ }
+ }])
+ end
+
+ it 'returns a default strategy for a scope' do
+ create(:operations_feature_flag_scope, feature_flag: feature_flag, environment_scope: 'sandbox', active: true)
+ headers = { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "sandbox" }
+
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['enabled']).to eq(true)
+ strategies = json_response['features'].first['strategies']
+ expect(strategies).to eq([{ "name" => "default", "parameters" => {} }])
+ end
+
+ it 'returns multiple strategies for a feature flag' do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag,
+ environment_scope: 'staging',
+ active: true,
+ strategies: [{ name: "userWithId", parameters: { userIds: "max,fred" } },
+ { name: "gradualRolloutUserId",
+ parameters: { groupId: "default", percentage: "50" } }])
+ headers = { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "staging" }
+
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['enabled']).to eq(true)
+ strategies = json_response['features'].first['strategies'].sort_by { |s| s['name'] }
+ expect(strategies).to eq([{
+ "name" => "gradualRolloutUserId",
+ "parameters" => {
+ "percentage" => "50",
+ "groupId" => "default"
+ }
+ }, {
+ "name" => "userWithId",
+ "parameters" => {
+ "userIds" => "max,fred"
+ }
+ }])
+ end
+
+ it 'returns a disabled feature when the flag is disabled' do
+ flag = create(:operations_feature_flag, project: project, name: 'test_feature', active: false, version: 1)
+ create(:operations_feature_flag_scope, feature_flag: flag, environment_scope: 'production', active: true)
+ headers = { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "production" }
+
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['enabled']).to eq(false)
+ end
+
+ context "with an inactive scope" do
+ let!(:scope) { create(:operations_feature_flag_scope, feature_flag: feature_flag, environment_scope: 'production', active: false, strategies: [{ name: "default", parameters: {} }]) }
+ let(:headers) { { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "production" } }
+
+ it 'returns a disabled feature' do
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ feature_json = json_response['features'].first
+ expect(feature_json['enabled']).to eq(false)
+ expect(feature_json['strategies']).to eq([{ 'name' => 'default', 'parameters' => {} }])
+ end
+ end
+ end
+
+ context 'with version 2 feature flags' do
+ it 'does not return a flag without any strategies' do
+ create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to be_empty
+ end
+
+ it 'returns a flag with a default strategy' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns a flag with a userWithId strategy' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'user123,user456' })
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user123,user456' }
+ }]
+ }])
+ end
+
+ it 'returns a flag with multiple strategies' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'user_a,user_b' })
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'gradualRolloutUserId', parameters: { groupId: 'default', percentage: '45' })
+ create(:operations_scope, strategy: strategy_a, environment_scope: 'production')
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].map { |f| f['name'] }.sort).to eq(['feature1'])
+ features_json = json_response['features'].map do |feature|
+ feature.merge(feature.slice('strategies').transform_values { |v| v.sort_by { |s| s['name'] } })
+ end
+ expect(features_json).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'gradualRolloutUserId',
+ 'parameters' => { 'groupId' => 'default', 'percentage' => '45' }
+ }, {
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user_a,user_b' }
+ }]
+ }])
+ end
+
+ it 'returns only flags matching the environment scope' do
+ feature_flag_a = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag_a)
+ create(:operations_scope, strategy: strategy_a, environment_scope: 'production')
+ feature_flag_b = create(:operations_feature_flag, project: project,
+ name: 'feature2', active: true, version: 2)
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag_b)
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'staging')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'staging' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].map { |f| f['name'] }.sort).to eq(['feature2'])
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature2',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns only strategies matching the environment scope' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'user2,user8,user4' })
+ create(:operations_scope, strategy: strategy_a, environment_scope: 'production')
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'staging')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user2,user8,user4' }
+ }]
+ }])
+ end
+
+ it 'returns only flags for the given project' do
+ project_b = create(:project)
+ feature_flag_a = create(:operations_feature_flag, project: project, name: 'feature_a', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag_a)
+ create(:operations_scope, strategy: strategy_a, environment_scope: 'sandbox')
+ feature_flag_b = create(:operations_feature_flag, project: project_b, name: 'feature_b', active: true, version: 2)
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag_b)
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'sandbox')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'sandbox' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature_a',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns all strategies with a matching scope' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'user2,user8,user4' })
+ create(:operations_scope, strategy: strategy_a, environment_scope: '*')
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'review/*')
+ strategy_c = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'gradualRolloutUserId', parameters: { groupId: 'default', percentage: '15' })
+ create(:operations_scope, strategy: strategy_c, environment_scope: 'review/patch-1')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'review/patch-1' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['strategies'].sort_by { |s| s['name'] }).to eq([{
+ 'name' => 'default',
+ 'parameters' => {}
+ }, {
+ 'name' => 'gradualRolloutUserId',
+ 'parameters' => { 'groupId' => 'default', 'percentage' => '15' }
+ }, {
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user2,user8,user4' }
+ }])
+ end
+
+ it 'returns a strategy with more than one matching scope' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+ create(:operations_scope, strategy: strategy, environment_scope: '*')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns a disabled flag with a matching scope' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'myfeature', active: false, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'myfeature',
+ 'enabled' => false,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns a userWithId strategy for a gitlabUserList strategy' do
+ feature_flag = create(:operations_feature_flag, :new_version_flag, project: project,
+ name: 'myfeature', active: true)
+ user_list = create(:operations_feature_flag_user_list, project: project,
+ name: 'My List', user_xids: 'user1,user2')
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'gitlabUserList', parameters: {}, user_list: user_list)
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'myfeature',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user1,user2' }
+ }]
+ }])
+ end
+ end
+
+ context 'when mixing version 1 and version 2 feature flags' do
+ it 'returns both types of flags when both match' do
+ feature_flag_a = create(:operations_feature_flag, project: project,
+ name: 'feature_a', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag_a,
+ name: 'userWithId', parameters: { userIds: 'user8' })
+ create(:operations_scope, strategy: strategy, environment_scope: 'staging')
+ feature_flag_b = create(:operations_feature_flag, project: project,
+ name: 'feature_b', active: true, version: 1)
+ create(:operations_feature_flag_scope, feature_flag: feature_flag_b,
+ active: true, strategies: [{ name: 'default', parameters: {} }], environment_scope: 'staging')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'staging' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].sort_by {|f| f['name']}).to eq([{
+ 'name' => 'feature_a',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user8' }
+ }]
+ }, {
+ 'name' => 'feature_b',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns legacy flags when only legacy flags match' do
+ feature_flag_a = create(:operations_feature_flag, project: project,
+ name: 'feature_a', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag_a,
+ name: 'userWithId', parameters: { userIds: 'user8' })
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+ feature_flag_b = create(:operations_feature_flag, project: project,
+ name: 'feature_b', active: true, version: 1)
+ create(:operations_feature_flag_scope, feature_flag: feature_flag_b,
+ active: true, strategies: [{ name: 'default', parameters: {} }], environment_scope: 'staging')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'staging' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature_b',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+ end
+ end
+ end
+
+ describe 'POST /feature_flags/unleash/:project_id/client/register' do
+ subject { post api("/feature_flags/unleash/#{project_id}/client/register"), params: params, headers: headers }
+
+ it_behaves_like 'authenticated request'
+ end
+
+ describe 'POST /feature_flags/unleash/:project_id/client/metrics' do
+ subject { post api("/feature_flags/unleash/#{project_id}/client/metrics"), params: params, headers: headers }
+
+ it_behaves_like 'authenticated request'
+ end
+end
diff --git a/spec/requests/api/usage_data_spec.rb b/spec/requests/api/usage_data_spec.rb
index 46dd54dcc73..4f4f386e9db 100644
--- a/spec/requests/api/usage_data_spec.rb
+++ b/spec/requests/api/usage_data_spec.rb
@@ -66,6 +66,10 @@ RSpec.describe API::UsageData do
end
context 'with unknown event' do
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
it 'returns status ok' do
expect(Gitlab::Redis::HLL).not_to receive(:add)
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 806b586ef49..7330c89fe77 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1460,39 +1460,47 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
end
describe 'GET /user/:id/gpg_keys' do
- context 'when unauthenticated' do
- it 'returns authentication error' do
- get api("/users/#{user.id}/gpg_keys")
+ it 'returns 404 for non-existing user' do
+ get api('/users/0/gpg_keys')
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 User Not Found')
end
- context 'when authenticated' do
- it 'returns 404 for non-existing user' do
- get api('/users/0/gpg_keys', admin)
+ it 'returns array of GPG keys' do
+ user.gpg_keys << gpg_key
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('404 User Not Found')
- end
+ get api("/users/#{user.id}/gpg_keys")
- it 'returns 404 error if key not foud' do
- delete api("/users/#{user.id}/gpg_keys/#{non_existing_record_id}", admin)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.first['key']).to eq(gpg_key.key)
+ end
+ end
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('404 GPG Key Not Found')
- end
+ describe 'GET /user/:id/gpg_keys/:key_id' do
+ it 'returns 404 for non-existing user' do
+ get api('/users/0/gpg_keys/1')
- it 'returns array of GPG keys' do
- user.gpg_keys << gpg_key
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 User Not Found')
+ end
- get api("/users/#{user.id}/gpg_keys", admin)
+ it 'returns 404 for non-existing key' do
+ get api("/users/#{user.id}/gpg_keys/0")
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.first['key']).to eq(gpg_key.key)
- end
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 GPG Key Not Found')
+ end
+
+ it 'returns a single GPG key' do
+ user.gpg_keys << gpg_key
+
+ get api("/users/#{user.id}/gpg_keys/#{gpg_key.id}")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['key']).to eq(gpg_key.key)
end
end
@@ -2308,23 +2316,31 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
end
describe 'POST /users/:id/activate' do
+ subject(:activate) { post api("/users/#{user_id}/activate", api_user) }
+
+ let(:user_id) { user.id }
+
context 'performed by a non-admin user' do
+ let(:api_user) { user }
+
it 'is not authorized to perform the action' do
- post api("/users/#{user.id}/activate", user)
+ activate
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'performed by an admin user' do
+ let(:api_user) { admin }
+
context 'for a deactivated user' do
before do
user.deactivate
-
- post api("/users/#{user.id}/activate", admin)
end
it 'activates a deactivated user' do
+ activate
+
expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('active')
end
@@ -2333,11 +2349,11 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for an active user' do
before do
user.activate
-
- post api("/users/#{user.id}/activate", admin)
end
it 'returns 201' do
+ activate
+
expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('active')
end
@@ -2346,11 +2362,11 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for a blocked user' do
before do
user.block
-
- post api("/users/#{user.id}/activate", admin)
end
it 'returns 403' do
+ activate
+
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('blocked')
@@ -2360,11 +2376,11 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for a ldap blocked user' do
before do
user.ldap_block
-
- post api("/users/#{user.id}/activate", admin)
end
it 'returns 403' do
+ activate
+
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('ldap_blocked')
@@ -2372,8 +2388,10 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
end
context 'for a user that does not exist' do
+ let(:user_id) { 0 }
+
before do
- post api("/users/0/activate", admin)
+ activate
end
it_behaves_like '404'
@@ -2382,15 +2400,23 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
end
describe 'POST /users/:id/deactivate' do
+ subject(:deactivate) { post api("/users/#{user_id}/deactivate", api_user) }
+
+ let(:user_id) { user.id }
+
context 'performed by a non-admin user' do
+ let(:api_user) { user }
+
it 'is not authorized to perform the action' do
- post api("/users/#{user.id}/deactivate", user)
+ deactivate
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'performed by an admin user' do
+ let(:api_user) { admin }
+
context 'for an active user' do
let(:activity) { {} }
let(:user) { create(:user, **activity) }
@@ -2398,11 +2424,9 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'with no recent activity' do
let(:activity) { { last_activity_on: ::User::MINIMUM_INACTIVE_DAYS.next.days.ago } }
- before do
- post api("/users/#{user.id}/deactivate", admin)
- end
-
it 'deactivates an active user' do
+ deactivate
+
expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('deactivated')
end
@@ -2411,11 +2435,9 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'with recent activity' do
let(:activity) { { last_activity_on: ::User::MINIMUM_INACTIVE_DAYS.pred.days.ago } }
- before do
- post api("/users/#{user.id}/deactivate", admin)
- end
-
it 'does not deactivate an active user' do
+ deactivate
+
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq("403 Forbidden - The user you are trying to deactivate has been active in the past #{::User::MINIMUM_INACTIVE_DAYS} days and cannot be deactivated")
expect(user.reload.state).to eq('active')
@@ -2426,11 +2448,11 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for a deactivated user' do
before do
user.deactivate
-
- post api("/users/#{user.id}/deactivate", admin)
end
it 'returns 201' do
+ deactivate
+
expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('deactivated')
end
@@ -2439,11 +2461,11 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for a blocked user' do
before do
user.block
-
- post api("/users/#{user.id}/deactivate", admin)
end
it 'returns 403' do
+ deactivate
+
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user cannot be deactivated by the API')
expect(user.reload.state).to eq('blocked')
@@ -2453,20 +2475,33 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for a ldap blocked user' do
before do
user.ldap_block
-
- post api("/users/#{user.id}/deactivate", admin)
end
it 'returns 403' do
+ deactivate
+
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user cannot be deactivated by the API')
expect(user.reload.state).to eq('ldap_blocked')
end
end
+ context 'for an internal user' do
+ let(:user) { User.alert_bot }
+
+ it 'returns 403' do
+ deactivate
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('403 Forbidden - An internal user cannot be deactivated by the API')
+ end
+ end
+
context 'for a user that does not exist' do
+ let(:user_id) { 0 }
+
before do
- post api("/users/0/deactivate", admin)
+ deactivate
end
it_behaves_like '404'
@@ -2506,6 +2541,15 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
expect(json_response['message']).to eq('404 User Not Found')
end
+ it 'returns a 403 error if user is internal' do
+ internal_user = create(:user, :bot)
+
+ post api("/users/#{internal_user.id}/block", admin)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['message']).to eq('An internal user cannot be blocked')
+ end
+
it 'returns a 201 if user is already blocked' do
post api("/users/#{blocked_user.id}/block", admin)
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index dbba2b35d74..a3bfa7ea33c 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'Git HTTP requests' do
shared_examples_for 'pulls are allowed' do
it 'allows pulls' do
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
@@ -99,7 +99,7 @@ RSpec.describe 'Git HTTP requests' do
shared_examples_for 'pushes are allowed' do
it 'allows pushes', :sidekiq_might_not_need_inline do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
@@ -259,7 +259,7 @@ RSpec.describe 'Git HTTP requests' do
it_behaves_like 'pulls are allowed'
it 'rejects pushes with 403 Forbidden' do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq(git_access_wiki_error(:write_to_wiki))
end
@@ -347,7 +347,7 @@ RSpec.describe 'Git HTTP requests' do
end
it 'rejects pushes with 403 Forbidden' do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq(git_access_error(:receive_pack_disabled_over_http))
end
@@ -358,7 +358,7 @@ RSpec.describe 'Git HTTP requests' do
it "rejects pushes with 403 Forbidden" do
allow(Gitlab.config.gitlab_shell).to receive(:upload_pack).and_return(false)
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq(git_access_error(:upload_pack_disabled_over_http))
end
@@ -370,7 +370,7 @@ RSpec.describe 'Git HTTP requests' do
it_behaves_like 'pulls are allowed'
it 'rejects pushes with 403 Forbidden' do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq('You are not allowed to push code to this project.')
end
@@ -485,7 +485,7 @@ RSpec.describe 'Git HTTP requests' do
user.block
project.add_maintainer(user)
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -507,7 +507,7 @@ RSpec.describe 'Git HTTP requests' do
it "resets the IP in Rack Attack on download" do
expect(Rack::Attack::Allow2Ban).to receive(:reset).twice
- download(path, env) do
+ download(path, **env) do
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
@@ -516,7 +516,7 @@ RSpec.describe 'Git HTTP requests' do
it "resets the IP in Rack Attack on upload" do
expect(Rack::Attack::Allow2Ban).to receive(:reset).twice
- upload(path, env) do
+ upload(path, **env) do
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
@@ -525,7 +525,7 @@ RSpec.describe 'Git HTTP requests' do
it 'updates the user last activity', :clean_gitlab_redis_shared_state do
expect(user.last_activity_on).to be_nil
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(user.reload.last_activity_on).to eql(Date.today)
end
end
@@ -699,7 +699,7 @@ RSpec.describe 'Git HTTP requests' do
end
it 'uploads get status 404 with "project was moved" message' do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -917,11 +917,11 @@ RSpec.describe 'Git HTTP requests' do
expect(response).to have_gitlab_http_status(:forbidden)
end
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
end
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
end
end
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 4338bfa3759..3f57b8ba67b 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'value stream analytics events' do
project.add_developer(user)
3.times do |count|
- Timecop.freeze(Time.now + count.days) do
+ travel_to(Time.now + count.days) do
create_cycle
end
end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 444ee478cbb..9fdafc06695 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe 'Rack Attack global throttles' do
expect_rejection { get url_that_does_not_require_authentication }
- Timecop.travel(period.from_now) do
+ travel_to(period.from_now) do
requests_per_period.times do
get url_that_does_not_require_authentication
expect(response).to have_gitlab_http_status(:ok)
diff --git a/spec/requests/request_profiler_spec.rb b/spec/requests/request_profiler_spec.rb
index 7f9999bf3d2..72689595480 100644
--- a/spec/requests/request_profiler_spec.rb
+++ b/spec/requests/request_profiler_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Request Profiler' do
time = Time.now
path = "/#{project.full_path}"
- Timecop.freeze(time) do
+ travel_to(time) do
get path, params: {}, headers: { 'X-Profile-Token' => Gitlab::RequestProfiler.profile_token, 'X-Profile-Mode' => profile_type }
end
diff --git a/spec/requests/user_activity_spec.rb b/spec/requests/user_activity_spec.rb
index 6f0726dbdc9..148bb2d6fae 100644
--- a/spec/requests/user_activity_spec.rb
+++ b/spec/requests/user_activity_spec.rb
@@ -3,18 +3,6 @@
require 'spec_helper'
RSpec.describe 'Update of user activity' do
- let(:user) { create(:user, last_activity_on: nil) }
-
- before do
- group = create(:group, name: 'group')
- project = create(:project, :public, namespace: group, name: 'project')
-
- create(:issue, project: project, iid: 10)
- create(:merge_request, source_project: project, iid: 15)
-
- project.add_maintainer(user)
- end
-
paths_to_visit = [
'/group',
'/group/project',
@@ -30,85 +18,5 @@ RSpec.describe 'Update of user activity' do
'/group/project/-/merge_requests/15'
]
- context 'without an authenticated user' do
- it 'does not set the last activity cookie' do
- get "/group/project"
-
- expect(response.cookies['user_last_activity_on']).to be_nil
- end
- end
-
- context 'with an authenticated user' do
- before do
- login_as(user)
- end
-
- context 'with a POST request' do
- it 'does not set the last activity cookie' do
- post "/group/project/archive"
-
- expect(response.cookies['user_last_activity_on']).to be_nil
- end
- end
-
- paths_to_visit.each do |path|
- context "on GET to #{path}" do
- it 'updates the last activity date' do
- expect(Users::ActivityService).to receive(:new).and_call_original
-
- get path
-
- expect(user.last_activity_on).to eq(Date.today)
- end
-
- context 'when calling it twice' do
- it 'updates last_activity_on just once' do
- expect(Users::ActivityService).to receive(:new).once.and_call_original
-
- 2.times do
- get path
- end
- end
- end
-
- context 'when last_activity_on is nil' do
- before do
- user.update_attribute(:last_activity_on, nil)
- end
-
- it 'updates the last activity date' do
- expect(user.last_activity_on).to be_nil
-
- get path
-
- expect(user.last_activity_on).to eq(Date.today)
- end
- end
-
- context 'when last_activity_on is stale' do
- before do
- user.update_attribute(:last_activity_on, 2.days.ago.to_date)
- end
-
- it 'updates the last activity date' do
- get path
-
- expect(user.last_activity_on).to eq(Date.today)
- end
- end
-
- context 'when last_activity_on is up to date' do
- before do
- user.update_attribute(:last_activity_on, Date.today)
- end
-
- it 'does not try to update it' do
- expect(Users::ActivityService).not_to receive(:new)
-
- get path
- end
- end
- end
- end
- end
+ it_behaves_like 'updating of user activity', paths_to_visit
end
diff --git a/spec/requests/user_sends_null_bytes_spec.rb b/spec/requests/user_sends_null_bytes_spec.rb
new file mode 100644
index 00000000000..1ddfad40996
--- /dev/null
+++ b/spec/requests/user_sends_null_bytes_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User sends null bytes as params' do
+ let(:null_byte) { "\u0000" }
+
+ it 'raises a 400 error' do
+ post '/nonexistent', params: { a: "A #{null_byte} nasty string" }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to eq('Bad Request')
+ end
+end
diff --git a/spec/requests/whats_new_controller_spec.rb b/spec/requests/whats_new_controller_spec.rb
new file mode 100644
index 00000000000..29500a7b5f9
--- /dev/null
+++ b/spec/requests/whats_new_controller_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WhatsNewController do
+ describe 'whats_new_path' do
+ before do
+ allow_any_instance_of(WhatsNewController).to receive(:whats_new_most_recent_release_items).and_return('items')
+ end
+
+ context 'with whats_new_drawer feature enabled' do
+ before do
+ stub_feature_flags(whats_new_drawer: true)
+ end
+
+ it 'is successful' do
+ get whats_new_path, xhr: true
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with whats_new_drawer feature disabled' do
+ before do
+ stub_feature_flags(whats_new_drawer: false)
+ end
+
+ it 'returns a 404' do
+ get whats_new_path, xhr: true
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/routing/admin_routing_spec.rb b/spec/routing/admin_routing_spec.rb
index fedafff0d1b..9374df0c4a2 100644
--- a/spec/routing/admin_routing_spec.rb
+++ b/spec/routing/admin_routing_spec.rb
@@ -184,3 +184,9 @@ RSpec.describe Admin::PlanLimitsController, "routing" do
expect(post("/admin/plan_limits")).to route_to('admin/plan_limits#create')
end
end
+
+RSpec.describe Admin::RunnersController, "routing" do
+ it "to #runner_setup_scripts" do
+ expect(get("/admin/runners/runner_setup_scripts")).to route_to('admin/runners#runner_setup_scripts')
+ end
+end
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index f4d5f899519..9de99b73d23 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -43,6 +43,10 @@ RSpec.shared_examples 'groups routing' do
expect(get("/groups/#{group_path}/-/milestones")).to route_to('groups/milestones#index', group_id: group_path)
end
+ it "to #runner_setup_scripts" do
+ expect(get("/groups/#{group_path}/-/settings/ci_cd/runner_setup_scripts")).to route_to('groups/settings/ci_cd#runner_setup_scripts', group_id: group_path)
+ end
+
it 'routes to the avatars controller' do
expect(delete("/groups/#{group_path}/-/avatar"))
.to route_to(group_id: group_path,
diff --git a/spec/routing/instance_statistics_routing_spec.rb b/spec/routing/instance_statistics_routing_spec.rb
deleted file mode 100644
index 7eec807fb0b..00000000000
--- a/spec/routing/instance_statistics_routing_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Instance Statistics', 'routing' do
- include RSpec::Rails::RequestExampleGroup
-
- it "routes '/-/instance_statistics' to dev ops report" do
- expect(get('/-/instance_statistics')).to redirect_to('/admin/dev_ops_report')
- end
-end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index b80baf0aa13..a683dc28f4f 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -306,12 +306,9 @@ RSpec.describe 'project routing' do
end
# raw_project_snippet GET /:project_id/snippets/:id/raw(.:format) snippets#raw
# project_snippets GET /:project_id/snippets(.:format) snippets#index
- # POST /:project_id/snippets(.:format) snippets#create
# new_project_snippet GET /:project_id/snippets/new(.:format) snippets#new
# edit_project_snippet GET /:project_id/snippets/:id/edit(.:format) snippets#edit
# project_snippet GET /:project_id/snippets/:id(.:format) snippets#show
- # PUT /:project_id/snippets/:id(.:format) snippets#update
- # DELETE /:project_id/snippets/:id(.:format) snippets#destroy
describe SnippetsController, 'routing' do
it 'to #raw' do
expect(get('/gitlab/gitlabhq/-/snippets/1/raw')).to route_to('projects/snippets#raw', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
@@ -321,10 +318,6 @@ RSpec.describe 'project routing' do
expect(get('/gitlab/gitlabhq/-/snippets')).to route_to('projects/snippets#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
- it 'to #create' do
- expect(post('/gitlab/gitlabhq/-/snippets')).to route_to('projects/snippets#create', namespace_id: 'gitlab', project_id: 'gitlabhq')
- end
-
it 'to #new' do
expect(get('/gitlab/gitlabhq/-/snippets/new')).to route_to('projects/snippets#new', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
@@ -337,14 +330,6 @@ RSpec.describe 'project routing' do
expect(get('/gitlab/gitlabhq/-/snippets/1')).to route_to('projects/snippets#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
- it 'to #update' do
- expect(put('/gitlab/gitlabhq/-/snippets/1')).to route_to('projects/snippets#update', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
- end
-
- it 'to #destroy' do
- expect(delete('/gitlab/gitlabhq/-/snippets/1')).to route_to('projects/snippets#destroy', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
- end
-
it 'to #show from unscope routing' do
expect(get('/gitlab/gitlabhq/snippets/1')).to route_to('projects/snippets#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
@@ -731,6 +716,12 @@ RSpec.describe 'project routing' do
end
end
+ describe Projects::Settings::CiCdController, 'routing' do
+ it "to #runner_setup_scripts" do
+ expect(get("/gitlab/gitlabhq/-/settings/ci_cd/runner_setup_scripts")).to route_to('projects/settings/ci_cd#runner_setup_scripts', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+ end
+
describe Projects::TemplatesController, 'routing' do
describe '#show' do
def show_with_template_type(template_type)
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 722e687838f..f665dc31ee4 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -61,12 +61,9 @@ RSpec.describe "Mounted Apps", "routing" do
end
# snippets GET /snippets(.:format) snippets#index
-# POST /snippets(.:format) snippets#create
# new_snippet GET /snippets/new(.:format) snippets#new
# edit_snippet GET /snippets/:id/edit(.:format) snippets#edit
# snippet GET /snippets/:id(.:format) snippets#show
-# PUT /snippets/:id(.:format) snippets#update
-# DELETE /snippets/:id(.:format) snippets#destroy
RSpec.describe SnippetsController, "routing" do
it "to #raw" do
expect(get("/-/snippets/1/raw")).to route_to('snippets#raw', id: '1')
@@ -76,10 +73,6 @@ RSpec.describe SnippetsController, "routing" do
expect(get("/-/snippets")).to route_to('snippets#index')
end
- it "to #create" do
- expect(post("/-/snippets")).to route_to('snippets#create')
- end
-
it "to #new" do
expect(get("/-/snippets/new")).to route_to('snippets#new')
end
@@ -92,14 +85,6 @@ RSpec.describe SnippetsController, "routing" do
expect(get("/-/snippets/1")).to route_to('snippets#show', id: '1')
end
- it "to #update" do
- expect(put("/-/snippets/1")).to route_to('snippets#update', id: '1')
- end
-
- it "to #destroy" do
- expect(delete("/-/snippets/1")).to route_to('snippets#destroy', id: '1')
- end
-
it 'to #show from unscoped routing' do
expect(get("/snippets/1")).to route_to('snippets#show', id: '1')
end
@@ -119,9 +104,9 @@ RSpec.describe HelpController, "routing" do
path: 'user/markdown',
format: 'md')
- path = '/help/workflow/protected_branches/protected_branches1.png'
+ path = '/help/user/markdown/markdown_logo.png'
expect(get(path)).to route_to('help#show',
- path: 'workflow/protected_branches/protected_branches1',
+ path: 'user/markdown/markdown_logo',
format: 'png')
end
end
@@ -148,6 +133,10 @@ RSpec.describe ProfilesController, "routing" do
it "to #show" do
expect(get("/profile")).to route_to('profiles#show')
end
+
+ it 'to #show from scope routing' do
+ expect(get("/-/profile")).to route_to('profiles#show')
+ end
end
# profile_preferences GET /profile/preferences(.:format) profiles/preferences#show
@@ -374,3 +363,9 @@ RSpec.describe Snippets::BlobsController, "routing" do
.to route_to('snippets/blobs#raw', snippet_id: '1', ref: 'master', path: 'lib/version.rb')
end
end
+
+RSpec.describe RunnerSetupController, 'routing' do
+ it 'to #platforms' do
+ expect(get("/-/runner_setup/platforms")).to route_to('runner_setup#platforms')
+ end
+end
diff --git a/spec/rubocop/cop/api/base_spec.rb b/spec/rubocop/cop/api/base_spec.rb
new file mode 100644
index 00000000000..893bcf49627
--- /dev/null
+++ b/spec/rubocop/cop/api/base_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+require_relative '../../../../rubocop/cop/api/base'
+
+RSpec.describe RuboCop::Cop::API::Base, type: :rubocop do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ let(:corrected) do
+ <<~CORRECTED
+ class SomeAPI < ::API::Base
+ end
+ CORRECTED
+ end
+
+ ['Grape::API', '::Grape::API', 'Grape::API::Instance', '::Grape::API::Instance'].each do |offense|
+ it "adds an offense when inheriting from #{offense}" do
+ expect_offense(<<~CODE)
+ class SomeAPI < #{offense}
+ #{'^' * offense.length} #{described_class::MSG}
+ end
+ CODE
+
+ expect_correction(corrected)
+ end
+ end
+
+  it 'does not add an offense when inheriting from ::API::Base' do
+ expect_no_offenses(corrected)
+ end
+end
diff --git a/spec/rubocop/cop/api/grape_api_instance_spec.rb b/spec/rubocop/cop/api/grape_api_instance_spec.rb
deleted file mode 100644
index 74f175cb707..00000000000
--- a/spec/rubocop/cop/api/grape_api_instance_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-require 'rubocop'
-require_relative '../../../../rubocop/cop/api/grape_api_instance'
-
-RSpec.describe RuboCop::Cop::API::GrapeAPIInstance do
- include CopHelper
-
- subject(:cop) { described_class.new }
-
- it 'adds an offense when inheriting from Grape::API' do
- inspect_source(<<~CODE)
- class SomeAPI < Grape::API
- end
- CODE
-
- expect(cop.offenses.size).to eq(1)
- end
-
- it 'does not add an offense when inheriting from Grape::API::Instance' do
- inspect_source(<<~CODE)
- class SomeAPI < Grape::API::Instance
- end
- CODE
-
- expect(cop.offenses.size).to be_zero
- end
-end
diff --git a/spec/rubocop/cop/code_reuse/active_record_spec.rb b/spec/rubocop/cop/code_reuse/active_record_spec.rb
index 25eca185f26..e15b9e11aed 100644
--- a/spec/rubocop/cop/code_reuse/active_record_spec.rb
+++ b/spec/rubocop/cop/code_reuse/active_record_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe RuboCop::Cop::CodeReuse::ActiveRecord, type: :rubocop do
SOURCE
end
- it 'autocorrects offenses in instance methods by whitelisting them' do
+ it 'autocorrects offenses in instance methods by allowing them' do
corrected = autocorrect_source(<<~SOURCE)
def foo
User.where
@@ -100,7 +100,7 @@ RSpec.describe RuboCop::Cop::CodeReuse::ActiveRecord, type: :rubocop do
SOURCE
end
- it 'autocorrects offenses in class methods by whitelisting them' do
+ it 'autocorrects offenses in class methods by allowing them' do
corrected = autocorrect_source(<<~SOURCE)
def self.foo
User.where
@@ -116,7 +116,7 @@ RSpec.describe RuboCop::Cop::CodeReuse::ActiveRecord, type: :rubocop do
SOURCE
end
- it 'autocorrects offenses in blocks by whitelisting them' do
+ it 'autocorrects offenses in blocks by allowing them' do
corrected = autocorrect_source(<<~SOURCE)
get '/' do
User.where
diff --git a/spec/rubocop/cop/graphql/gid_expected_type_spec.rb b/spec/rubocop/cop/graphql/gid_expected_type_spec.rb
new file mode 100644
index 00000000000..a81af2aea5d
--- /dev/null
+++ b/spec/rubocop/cop/graphql/gid_expected_type_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+
+require_relative '../../../../rubocop/cop/graphql/gid_expected_type'
+
+RSpec.describe RuboCop::Cop::Graphql::GIDExpectedType, type: :rubocop do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ it 'adds an offense when there is no expected_type parameter' do
+ inspect_source(<<~TYPE)
+ GitlabSchema.object_from_id(received_id)
+ TYPE
+
+ expect(cop.offenses.size).to eq 1
+ end
+
+ it 'does not add an offense for calls that have an expected_type parameter' do
+ expect_no_offenses(<<~TYPE.strip)
+ GitlabSchema.object_from_id("some_id", expected_type: SomeClass)
+ TYPE
+ end
+end
diff --git a/spec/rubocop/cop/graphql/id_type_spec.rb b/spec/rubocop/cop/graphql/id_type_spec.rb
new file mode 100644
index 00000000000..8767412e282
--- /dev/null
+++ b/spec/rubocop/cop/graphql/id_type_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+
+require_relative '../../../../rubocop/cop/graphql/id_type'
+
+RSpec.describe RuboCop::Cop::Graphql::IDType, type: :rubocop do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ it 'adds an offense when GraphQL::ID_TYPE is used as a param to #argument' do
+ inspect_source(<<~TYPE)
+ argument :some_arg, GraphQL::ID_TYPE, some: other, params: do_not_matter
+ TYPE
+
+ expect(cop.offenses.size).to eq 1
+ end
+
+ context 'whitelisted arguments' do
+ RuboCop::Cop::Graphql::IDType::WHITELISTED_ARGUMENTS.each do |arg|
+ it "does not add an offense for calls to #argument with #{arg} as argument name" do
+ expect_no_offenses(<<~TYPE.strip)
+ argument #{arg}, GraphQL::ID_TYPE, some: other, params: do_not_matter
+ TYPE
+ end
+ end
+ end
+
+ it 'does not add an offense for calls to #argument without GraphQL::ID_TYPE' do
+ expect_no_offenses(<<~TYPE.strip)
+ argument :some_arg, ::Types::GlobalIDType[::Awardable], some: other, params: do_not_matter
+ TYPE
+ end
+end
diff --git a/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb b/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
index b43d44dba65..aaf191a1b6b 100644
--- a/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
+++ b/spec/rubocop/cop/migration/add_concurrent_foreign_key_spec.rb
@@ -36,5 +36,15 @@ RSpec.describe RuboCop::Cop::Migration::AddConcurrentForeignKey, type: :rubocop
expect(cop.offenses).to be_empty
end
+
+ it 'does not register an offense when `add_foreign_key` is within `with_lock_retries`' do
+ inspect_source <<~RUBY
+ with_lock_retries do
+ add_foreign_key :key, :projects, column: :project_id, on_delete: :cascade
+ end
+ RUBY
+
+ expect(cop.offenses).to be_empty
+ end
end
end
diff --git a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
index 5f0ca419548..0bea7bd7a0c 100644
--- a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
+++ b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
@@ -129,6 +129,28 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns, type: :rubocop do
end
end
+ context 'when text columns are used for encryption' do
+ it 'registers no offenses' do
+ expect_no_offenses(<<~RUBY)
+ class TestTextLimits < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+ disable_ddl_transaction!
+
+ def up
+ create_table :test_text_limits, id: false do |t|
+ t.integer :test_id, null: false
+ t.text :encrypted_name
+ end
+
+ add_column :encrypted_test_text_limits, :encrypted_email, :text
+ add_column_with_default :encrypted_test_text_limits, :encrypted_role, :text, default: 'default'
+ change_column_type_concurrently :encrypted_test_text_limits, :encrypted_test_id, :text
+ end
+ end
+ RUBY
+ end
+ end
+
context 'on down' do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
diff --git a/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb b/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
index fa4acc62226..93f43b0feb0 100644
--- a/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
+++ b/spec/rubocop/cop/migration/create_table_with_foreign_keys_spec.rb
@@ -83,7 +83,7 @@ RSpec.describe RuboCop::Cop::Migration::CreateTableWithForeignKeys, type: :ruboc
context 'with more than one foreign keys' do
let(:offense) do
'Creating a table with more than one foreign key at once violates our migration style guide. ' \
- 'For more details check the https://docs.gitlab.com/ce/development/migration_style_guide.html#examples'
+ 'For more details check the https://docs.gitlab.com/ee/development/migration_style_guide.html#examples'
end
shared_examples 'target to high traffic table' do |dsl_method, table_name|
diff --git a/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb b/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb
index 11e4d784617..607daf0c9f0 100644
--- a/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb
+++ b/spec/rubocop/cop/migration/with_lock_retries_disallowed_method_spec.rb
@@ -53,6 +53,22 @@ RSpec.describe RuboCop::Cop::Migration::WithLockRetriesDisallowedMethod, type: :
expect(cop.offenses.size).to eq(0)
end
+
+ describe 'for `add_foreign_key`' do
+      it 'registers an offense when more than one FK is added' do
+ message = described_class::MSG_ONLY_ONE_FK_ALLOWED
+
+ expect_offense <<~RUBY
+ with_lock_retries do
+ add_foreign_key :imports, :projects, column: :project_id, on_delete: :cascade
+ ^^^^^^^^^^^^^^^ #{message}
+ add_column :projects, :name, :text
+ add_foreign_key :imports, :users, column: :user_id, on_delete: :cascade
+ ^^^^^^^^^^^^^^^ #{message}
+ end
+ RUBY
+ end
+ end
end
context 'outside of migration' do
diff --git a/spec/rubocop/cop/rspec/expect_gitlab_tracking_spec.rb b/spec/rubocop/cop/rspec/expect_gitlab_tracking_spec.rb
new file mode 100644
index 00000000000..f7adc1373df
--- /dev/null
+++ b/spec/rubocop/cop/rspec/expect_gitlab_tracking_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/rspec/expect_gitlab_tracking'
+
+RSpec.describe RuboCop::Cop::RSpec::ExpectGitlabTracking do
+ include CopHelper
+
+ let(:source_file) { 'spec/foo_spec.rb' }
+
+ subject(:cop) { described_class.new }
+
+ good_samples = [
+ 'expect_snowplow_event(category: nil, action: nil)',
+ 'expect_snowplow_event(category: "EventCategory", action: "event_action")',
+ 'expect_snowplow_event(category: "EventCategory", action: "event_action", label: "label", property: "property")',
+ 'expect_no_snowplow_event'
+ ]
+
+ bad_samples = [
+ 'expect(Gitlab::Tracking).to receive(:event)',
+ 'expect(Gitlab::Tracking).to_not receive(:event)',
+ 'expect(Gitlab::Tracking).not_to receive(:event)',
+ 'expect(Gitlab::Tracking).to_not receive(:event).with("EventCategory", "event_action")',
+ 'expect(Gitlab::Tracking).not_to receive(:event).with("EventCategory", "event_action")',
+ 'expect(Gitlab::Tracking).to receive(:event).with("EventCategory", "event_action", label: "label", property: "property")',
+ 'expect(Gitlab::Tracking).to have_received(:event).with("EventCategory", "event_action")',
+ 'expect(Gitlab::Tracking).to_not have_received(:event).with("EventCategory", "event_action")',
+ 'expect(Gitlab::Tracking).not_to have_received(:event).with("EventCategory", "event_action")',
+ 'allow(Gitlab::Tracking).to receive(:event).and_call_original'
+ ]
+
+ good_samples.each do |good|
+ context "good: #{good}" do
+ it 'does not register an offense' do
+ inspect_source(good)
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+ end
+
+ bad_samples.each do |bad|
+ context "bad: #{bad}" do
+ it 'registers an offense', :aggregate_failures do
+ inspect_source(bad, source_file)
+
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([1])
+ expect(cop.highlights).to eq([bad])
+
+ msg = cop.offenses.first.message
+
+ expect(msg).to match(
+ /Do not expect directly on `Gitlab::Tracking#event`/
+ )
+ expect(msg).to match(/add the `snowplow` annotation/)
+ expect(msg).to match(/use `expect_snowplow_event` instead/)
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb b/spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb
new file mode 100644
index 00000000000..70dbe086127
--- /dev/null
+++ b/spec/rubocop/cop/rspec/factory_bot/inline_association_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+require 'rubocop'
+
+require_relative '../../../../../rubocop/cop/rspec/factory_bot/inline_association'
+
+RSpec.describe RuboCop::Cop::RSpec::FactoryBot::InlineAssociation, type: :rubocop do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ shared_examples 'offense' do |code_snippet, autocorrected|
+ # We allow `create` or `FactoryBot.create` or `::FactoryBot.create`
+ let(:type) { code_snippet[/^(?:::)?(?:FactoryBot\.)?(\w+)/, 1] }
+ let(:offense_marker) { '^' * code_snippet.size }
+ let(:offense_msg) { msg(type) }
+ let(:offense) { "#{offense_marker} #{offense_msg}" }
+ let(:pristine_source) { source.sub(offense, '') }
+ let(:source) do
+ <<~RUBY
+ FactoryBot.define do
+ factory :project do
+ attribute { #{code_snippet} }
+ #{offense}
+ end
+ end
+ RUBY
+ end
+
+ it 'registers an offense' do
+ expect_offense(source)
+ end
+
+ it 'autocorrects the source' do
+ corrected = autocorrect_source(pristine_source)
+
+ expect(corrected).not_to include(code_snippet)
+ expect(corrected).to include(autocorrected)
+ end
+ end
+
+ shared_examples 'no offense' do |code_snippet|
+ first_line = code_snippet.lines.first.chomp
+
+ context "for `#{first_line}`" do
+ it 'does not register any offenses' do
+ expect_no_offenses <<~RUBY
+ FactoryBot.define do
+ factory :project do
+ #{code_snippet}
+ end
+ end
+ RUBY
+ end
+ end
+ end
+
+ context 'offenses' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:code_snippet, :autocorrected) do
+ # create
+ 'create(:user)' | 'association(:user)'
+ 'FactoryBot.create(:user)' | 'association(:user)'
+ '::FactoryBot.create(:user)' | 'association(:user)'
+ 'create(:user, :admin)' | 'association(:user, :admin)'
+ 'create(:user, name: "any")' | 'association(:user, name: "any")'
+ # build
+ 'build(:user)' | 'association(:user)'
+ 'FactoryBot.build(:user)' | 'association(:user)'
+ '::FactoryBot.build(:user)' | 'association(:user)'
+ 'build(:user, :admin)' | 'association(:user, :admin)'
+ 'build(:user, name: "any")' | 'association(:user, name: "any")'
+ end
+
+ with_them do
+ include_examples 'offense', params[:code_snippet], params[:autocorrected]
+ end
+
+ it 'recognizes `add_attribute`' do
+ expect_offense <<~RUBY
+ FactoryBot.define do
+ factory :project, class: 'Project' do
+ add_attribute(:method) { create(:user) }
+ ^^^^^^^^^^^^^ #{msg(:create)}
+ end
+ end
+ RUBY
+ end
+
+ it 'recognizes `transient` attributes' do
+ expect_offense <<~RUBY
+ FactoryBot.define do
+ factory :project, class: 'Project' do
+ transient do
+ creator { create(:user) }
+ ^^^^^^^^^^^^^ #{msg(:create)}
+ end
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'no offenses' do
+ include_examples 'no offense', 'association(:user)'
+ include_examples 'no offense', 'association(:user, :admin)'
+ include_examples 'no offense', 'association(:user, name: "any")'
+
+ include_examples 'no offense', <<~RUBY
+ after(:build) do |object|
+ object.user = create(:user)
+ end
+ RUBY
+
+ include_examples 'no offense', <<~RUBY
+ initialize_with do
+ create(:user)
+ end
+ RUBY
+
+ include_examples 'no offense', <<~RUBY
+ user_id { create(:user).id }
+ RUBY
+ end
+
+ def msg(type)
+ format(described_class::MSG, type: type)
+ end
+end
diff --git a/spec/rubocop/cop/rspec/timecop_travel_spec.rb b/spec/rubocop/cop/rspec/timecop_travel_spec.rb
new file mode 100644
index 00000000000..25a8127d40e
--- /dev/null
+++ b/spec/rubocop/cop/rspec/timecop_travel_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/rspec/timecop_travel'
+
+RSpec.describe RuboCop::Cop::RSpec::TimecopTravel, type: :rubocop do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'when calling Timecop.travel' do
+ let(:source) do
+ <<~SRC
+ Timecop.travel(1.day.ago) { create(:issue) }
+ SRC
+ end
+
+ let(:corrected_source) do
+ <<~SRC
+ travel_to(1.day.ago) { create(:issue) }
+ SRC
+ end
+
+    it 'registers an offense' do
+ inspect_source(source)
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'can autocorrect the source' do
+ expect(autocorrect_source(source)).to eq(corrected_source)
+ end
+ end
+
+ context 'when calling a different method on Timecop' do
+ let(:source) do
+ <<~SRC
+ Timecop.freeze { create(:issue) }
+ SRC
+ end
+
+    it 'does not register an offense' do
+ inspect_source(source)
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+end
diff --git a/spec/serializers/blob_entity_spec.rb b/spec/serializers/blob_entity_spec.rb
index b8c8c4c17de..27c62967755 100644
--- a/spec/serializers/blob_entity_spec.rb
+++ b/spec/serializers/blob_entity_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe BlobEntity do
name: blob.name,
mode: "100644",
readable_text: true,
- icon: "file-text-o",
+ icon: "doc-text",
url: "/#{project.full_path}/-/blob/master/bar/branch-test.txt"
})
end
diff --git a/spec/serializers/ci/trigger_entity_spec.rb b/spec/serializers/ci/trigger_entity_spec.rb
new file mode 100644
index 00000000000..b2f3337d166
--- /dev/null
+++ b/spec/serializers/ci/trigger_entity_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TriggerEntity do
+ let(:project) { create(:project) }
+ let(:trigger) { create(:ci_trigger, project: project, token: '237f3604900a4cd71ed06ef13e57b96d') }
+ let(:user) { create(:user) }
+ let(:entity) { described_class.new(trigger, current_user: user, project: project) }
+
+ describe '#as_json' do
+ let(:as_json) { entity.as_json }
+ let(:project_trigger_path) { "/#{project.full_path}/-/triggers/#{trigger.id}" }
+
+ it 'contains required fields' do
+ expect(as_json).to include(
+ :description, :owner, :last_used, :token, :has_token_exposed, :can_access_project
+ )
+ end
+
+ it 'contains user fields' do
+ expect(as_json[:owner].to_json).to match_schema('entities/user')
+ end
+
+ context 'when current user can manage triggers' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns short_token as token' do
+ expect(as_json[:token]).to eq(trigger.short_token)
+ end
+
+ it 'contains project_trigger_path' do
+ expect(as_json[:project_trigger_path]).to eq(project_trigger_path)
+ end
+
+ it 'does not contain edit_project_trigger_path' do
+ expect(as_json).not_to include(:edit_project_trigger_path)
+ end
+
+ it 'returns has_token_exposed' do
+ expect(as_json[:has_token_exposed]).to eq(false)
+ end
+ end
+
+ context 'when current user is the owner of the trigger' do
+ before do
+ project.add_maintainer(user)
+ trigger.update!(owner: user)
+ end
+
+ it 'returns token as token' do
+ expect(as_json[:token]).to eq(trigger.token)
+ end
+
+ it 'contains project_trigger_path' do
+ expect(as_json[:project_trigger_path]).to eq(project_trigger_path)
+ end
+
+ it 'contains edit_project_trigger_path' do
+ expect(as_json[:edit_project_trigger_path]).to eq("#{project_trigger_path}/edit")
+ end
+
+ it 'returns has_token_exposed' do
+ expect(as_json[:has_token_exposed]).to eq(true)
+ end
+ end
+ end
+end
diff --git a/spec/serializers/ci/trigger_serializer_spec.rb b/spec/serializers/ci/trigger_serializer_spec.rb
new file mode 100644
index 00000000000..a669a8c3ed0
--- /dev/null
+++ b/spec/serializers/ci/trigger_serializer_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TriggerSerializer do
+ describe '#represent' do
+ let(:represent) { described_class.new.represent(trigger) }
+
+ let(:trigger) { build_stubbed(:ci_trigger) }
+
+ it 'matches schema' do
+ expect(represent.to_json).to match_schema('entities/trigger')
+ end
+ end
+end
diff --git a/spec/serializers/cluster_serializer_spec.rb b/spec/serializers/cluster_serializer_spec.rb
index 04999975276..e65e97b6ae0 100644
--- a/spec/serializers/cluster_serializer_spec.rb
+++ b/spec/serializers/cluster_serializer_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe ClusterSerializer do
:cluster_type,
:enabled,
:environment_scope,
+ :id,
:gitlab_managed_apps_logs_path,
:enable_advanced_logs_querying,
:kubernetes_errors,
diff --git a/spec/serializers/deployment_entity_spec.rb b/spec/serializers/deployment_entity_spec.rb
index 27673b905d3..588675f5232 100644
--- a/spec/serializers/deployment_entity_spec.rb
+++ b/spec/serializers/deployment_entity_spec.rb
@@ -30,6 +30,10 @@ RSpec.describe DeploymentEntity do
expect(subject[:ref][:name]).to eq 'master'
end
+ it 'exposes status' do
+ expect(subject).to include(:status)
+ end
+
it 'exposes creation date' do
expect(subject).to include(:created_at)
end
diff --git a/spec/serializers/diff_file_base_entity_spec.rb b/spec/serializers/diff_file_base_entity_spec.rb
index 94c39e11790..99dbaff4b7e 100644
--- a/spec/serializers/diff_file_base_entity_spec.rb
+++ b/spec/serializers/diff_file_base_entity_spec.rb
@@ -3,10 +3,24 @@
require 'spec_helper'
RSpec.describe DiffFileBaseEntity do
- let(:project) { create(:project, :repository) }
+ include ProjectForksHelper
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
let(:repository) { project.repository }
let(:entity) { described_class.new(diff_file, options).as_json }
+ shared_examples 'nil if removed source branch' do |key|
+ before do
+ allow(merge_request).to receive(:source_branch_exists?).and_return(false)
+ end
+
+ specify do
+ expect(entity[key]).to eq(nil)
+ end
+ end
+
context 'submodule information for a' do
let(:commit_sha) { "" }
let(:commit) { project.commit(commit_sha) }
@@ -67,7 +81,7 @@ RSpec.describe DiffFileBaseEntity do
context 'edit_path' do
let(:diff_file) { merge_request.diffs.diff_files.to_a.last }
- let(:options) { { request: EntityRequest.new(current_user: create(:user)), merge_request: merge_request } }
+ let(:options) { { request: EntityRequest.new(current_user: user), merge_request: merge_request } }
let(:params) { {} }
shared_examples 'a diff file edit path to the source branch' do
@@ -81,16 +95,7 @@ RSpec.describe DiffFileBaseEntity do
let(:params) { { from_merge_request_iid: merge_request.iid } }
it_behaves_like 'a diff file edit path to the source branch'
-
- context 'removed source branch' do
- before do
- allow(merge_request).to receive(:source_branch_exists?).and_return(false)
- end
-
- it do
- expect(entity[:edit_path]).to eq(nil)
- end
- end
+ it_behaves_like 'nil if removed source branch', :edit_path
end
context 'closed' do
@@ -118,4 +123,30 @@ RSpec.describe DiffFileBaseEntity do
end
end
end
+
+ context 'ide_edit_path' do
+ let(:source_project) { project }
+ let(:merge_request) { create(:merge_request, iid: 123, target_project: target_project, source_project: source_project) }
+ let(:diff_file) { merge_request.diffs.diff_files.to_a.last }
+ let(:options) { { request: EntityRequest.new(current_user: user), merge_request: merge_request } }
+ let(:expected_merge_request_path) { "/-/ide/project/#{source_project.full_path}/merge_requests/#{merge_request.iid}" }
+
+ context 'when source_project and target_project are the same' do
+ let(:target_project) { source_project }
+
+ it_behaves_like 'nil if removed source branch', :ide_edit_path
+
+ it 'returns the merge_request ide route' do
+ expect(entity[:ide_edit_path]).to eq expected_merge_request_path
+ end
+ end
+
+ context 'when source_project and target_project are different' do
+ let(:target_project) { fork_project(source_project, source_project.owner, repository: true) }
+
+ it 'returns the merge_request ide route with the target_project as param' do
+ expect(entity[:ide_edit_path]).to eq("#{expected_merge_request_path}?target_project=#{ERB::Util.url_encode(target_project.full_path)}")
+ end
+ end
+ end
end
diff --git a/spec/serializers/diffs_entity_spec.rb b/spec/serializers/diffs_entity_spec.rb
index 7c59e4aed83..5928a1c24b3 100644
--- a/spec/serializers/diffs_entity_spec.rb
+++ b/spec/serializers/diffs_entity_spec.rb
@@ -68,15 +68,5 @@ RSpec.describe DiffsEntity do
end
end
end
-
- context 'when code_navigation feature flag is disabled' do
- it 'does not include code navigation properties' do
- stub_feature_flags(code_navigation: false)
-
- expect(Gitlab::CodeNavigationPath).not_to receive(:new)
-
- expect(subject).not_to include(:definition_path_prefix)
- end
- end
end
end
diff --git a/spec/serializers/discussion_entity_spec.rb b/spec/serializers/discussion_entity_spec.rb
index 306a4fa43a9..e1734d5290f 100644
--- a/spec/serializers/discussion_entity_spec.rb
+++ b/spec/serializers/discussion_entity_spec.rb
@@ -79,13 +79,5 @@ RSpec.describe DiscussionEntity do
:active
)
end
-
- context 'diff_head_compare feature is disabled' do
- it 'does not expose positions and line_codes attributes' do
- stub_feature_flags(merge_ref_head_comments: false)
-
- expect(subject.keys).not_to include(:positions, :line_codes)
- end
- end
end
end
diff --git a/spec/serializers/feature_flag_entity_spec.rb b/spec/serializers/feature_flag_entity_spec.rb
new file mode 100644
index 00000000000..21ecfe59c31
--- /dev/null
+++ b/spec/serializers/feature_flag_entity_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagEntity do
+ let(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let(:project) { create(:project) }
+ let(:request) { double('request', current_user: user) }
+ let(:user) { create(:user) }
+ let(:entity) { described_class.new(feature_flag, request: request) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ subject { entity.as_json }
+
+ it 'has feature flag attributes' do
+ expect(subject).to include(:id, :active, :created_at, :updated_at,
+ :description, :name, :edit_path, :destroy_path)
+ end
+end
diff --git a/spec/serializers/feature_flag_serializer_spec.rb b/spec/serializers/feature_flag_serializer_spec.rb
new file mode 100644
index 00000000000..fab8ca93b1b
--- /dev/null
+++ b/spec/serializers/feature_flag_serializer_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagSerializer do
+ let(:serializer) { described_class.new(project: project, current_user: user) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:feature_flags) { create_list(:operations_feature_flag, 3) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ describe '#represent' do
+ subject { serializer.represent(feature_flags) }
+
+ it 'includes feature flag attributes' do
+ is_expected.to all(include(:id, :active, :created_at, :updated_at,
+ :description, :name))
+ end
+ end
+end
diff --git a/spec/serializers/feature_flag_summary_entity_spec.rb b/spec/serializers/feature_flag_summary_entity_spec.rb
new file mode 100644
index 00000000000..385a9deb2d7
--- /dev/null
+++ b/spec/serializers/feature_flag_summary_entity_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagSummaryEntity do
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let(:project) { create(:project) }
+ let(:request) { double('request', current_user: user) }
+ let(:user) { create(:user) }
+ let(:entity) { described_class.new(project, request: request) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ subject { entity.as_json }
+
+ it 'has summary information' do
+ expect(subject).to include(:count)
+ end
+end
diff --git a/spec/serializers/feature_flag_summary_serializer_spec.rb b/spec/serializers/feature_flag_summary_serializer_spec.rb
new file mode 100644
index 00000000000..79cef6765f7
--- /dev/null
+++ b/spec/serializers/feature_flag_summary_serializer_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagSummarySerializer do
+ let(:serializer) { described_class.new(project: project, current_user: user) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let!(:feature_flags) { create(:operations_feature_flag, project: project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ describe '#represent' do
+ subject { serializer.represent(project) }
+
+ it 'has summary information' do
+ expect(subject).to include(:count)
+ end
+ end
+end
diff --git a/spec/serializers/feature_flags_client_serializer_spec.rb b/spec/serializers/feature_flags_client_serializer_spec.rb
new file mode 100644
index 00000000000..3746142a3f1
--- /dev/null
+++ b/spec/serializers/feature_flags_client_serializer_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagsClientSerializer do
+ let(:project) { create(:project) }
+ let(:feature_flags_client) { project.create_operations_feature_flags_client! }
+ let(:serializer) { described_class.new }
+
+ describe '#represent_token' do
+ subject { serializer.represent_token(feature_flags_client).to_json }
+
+ it 'includes feature flags client token' do
+ expect(subject).to match_schema('feature_flags_client_token')
+ end
+ end
+end
diff --git a/spec/serializers/group_group_link_entity_spec.rb b/spec/serializers/group_group_link_entity_spec.rb
index 8384563e3e6..9affe4af381 100644
--- a/spec/serializers/group_group_link_entity_spec.rb
+++ b/spec/serializers/group_group_link_entity_spec.rb
@@ -5,9 +5,27 @@ require 'spec_helper'
RSpec.describe GroupGroupLinkEntity do
include_context 'group_group_link'
- subject(:json) { described_class.new(group_group_link).to_json }
+ let_it_be(:current_user) { create(:user) }
+ let(:entity) { described_class.new(group_group_link) }
+
+ before do
+ allow(entity).to receive(:current_user).and_return(current_user)
+ end
it 'matches json schema' do
- expect(json).to match_schema('entities/group_group_link')
+ expect(entity.to_json).to match_schema('entities/group_group_link')
+ end
+
+ context 'a user with :admin_group_member permissions' do
+ before do
+ allow(entity).to receive(:can?).with(current_user, :admin_group_member, shared_group).and_return(true)
+ end
+
+ it 'sets `can_update` and `can_remove` to `true`' do
+ json = entity.as_json
+
+ expect(json[:can_update]).to be true
+ expect(json[:can_remove]).to be true
+ end
end
end
diff --git a/spec/serializers/import/bulk_import_entity_spec.rb b/spec/serializers/import/bulk_import_entity_spec.rb
new file mode 100644
index 00000000000..f35684bef20
--- /dev/null
+++ b/spec/serializers/import/bulk_import_entity_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::BulkImportEntity do
+ let(:importable_data) do
+ {
+ 'id' => 1,
+ 'full_name' => 'test',
+ 'full_path' => 'full/path/test',
+ 'foo' => 'bar'
+ }
+ end
+
+ subject { described_class.represent(importable_data).as_json }
+
+ %w[id full_name full_path].each do |attribute|
+ it "exposes #{attribute}" do
+ expect(subject[attribute.to_sym]).to eq(importable_data[attribute])
+ end
+ end
+
+ it 'does not expose unspecified attributes' do
+ expect(subject[:foo]).to be_nil
+ end
+end
diff --git a/spec/serializers/label_serializer_spec.rb b/spec/serializers/label_serializer_spec.rb
index ae1466b16e5..40249450f7f 100644
--- a/spec/serializers/label_serializer_spec.rb
+++ b/spec/serializers/label_serializer_spec.rb
@@ -37,11 +37,12 @@ RSpec.describe LabelSerializer do
subject { serializer.represent_appearance(resource) }
it 'serializes only attributes used for appearance' do
- expect(subject.keys).to eq([:id, :title, :color, :text_color])
+ expect(subject.keys).to eq([:id, :title, :color, :project_id, :text_color])
expect(subject[:id]).to eq(resource.id)
expect(subject[:title]).to eq(resource.title)
expect(subject[:color]).to eq(resource.color)
expect(subject[:text_color]).to eq(resource.text_color)
+ expect(subject[:project_id]).to eq(resource.project_id)
end
end
end
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 51564de6041..031dc729a79 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -3,12 +3,11 @@
require 'spec_helper'
RSpec.describe MergeRequestPollCachedWidgetEntity do
- include ProjectForksHelper
using RSpec::Parameterized::TableSyntax
- let(:project) { create :project, :repository }
- let(:resource) { create(:merge_request, source_project: project, target_project: project) }
- let(:user) { create(:user) }
+ let_it_be(:project, refind: true) { create :project, :repository }
+ let_it_be(:resource, refind: true) { create(:merge_request, source_project: project, target_project: project) }
+ let_it_be(:user) { create(:user) }
let(:request) { double('request', current_user: user, project: project) }
@@ -174,8 +173,6 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
end
context 'when auto merge is not enabled' do
- let(:resource) { create(:merge_request) }
-
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_falsy
end
@@ -215,15 +212,55 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
expect(subject[:commits_without_merge_commits].size).to eq(12)
end
end
+ end
- context 'when merge request is not mergeable' do
- before do
- allow(resource).to receive(:mergeable?).and_return(false)
+ describe 'pipeline' do
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) }
+
+ before do
+ allow_any_instance_of(MergeRequestPresenter).to receive(:can?).and_call_original
+ allow_any_instance_of(MergeRequestPresenter).to receive(:can?).with(user, :read_pipeline, anything).and_return(can_access)
+ end
+
+ context 'when user has access to pipelines' do
+ let(:can_access) { true }
+
+ context 'when is up to date' do
+ let(:req) { double('request', current_user: user, project: project) }
+
+ it 'returns pipeline' do
+ pipeline_payload =
+ MergeRequests::PipelineEntity
+ .represent(pipeline, request: req)
+ .as_json
+
+ expect(subject[:pipeline]).to eq(pipeline_payload)
+ end
+
+ context 'when merge_request_cached_pipeline_serializer is disabled' do
+ it 'does not return pipeline' do
+ stub_feature_flags(merge_request_cached_pipeline_serializer: false)
+
+ expect(subject[:pipeline]).to be_nil
+ end
+ end
+ end
+
+ context 'when user does not have access to pipelines' do
+ let(:can_access) { false }
+ let(:req) { double('request', current_user: user, project: project) }
+
+ it 'does not have pipeline' do
+ expect(subject[:pipeline]).to eq(nil)
+ end
end
- it 'does not have default_squash_commit_message and commits_without_merge_commits' do
- expect(subject[:default_squash_commit_message]).to eq(nil)
- expect(subject[:commits_without_merge_commits]).to eq(nil)
+ context 'when is not up to date' do
+ it 'returns nil' do
+ pipeline.update!(sha: "not up to date")
+
+ expect(subject[:pipeline]).to eq(nil)
+ end
end
end
end
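The hunk above moves these specs from let to let_it_be(..., refind: true). As a minimal sketch (not part of this commit), let_it_be comes from the test-prof gem used by GitLab's suite: the record is created once for the whole example group, and refind: true reloads a fresh instance before each example so per-example mutations do not leak. The factory and attribute names below mirror the spec; the example itself is illustrative.

# Illustrative sketch of the let_it_be / refind pattern adopted above.
require 'spec_helper'

RSpec.describe 'let_it_be example' do
  # Created once per example group instead of once per example.
  let_it_be(:project) { create(:project, :repository) }

  # refind: true re-fetches the record before every example, discarding
  # in-memory changes made by a previous example.
  let_it_be(:merge_request, refind: true) do
    create(:merge_request, source_project: project, target_project: project)
  end

  it 'sees a freshly loaded record in each example' do
    expect(merge_request).to be_persisted
  end
end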
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 161940dd01a..1e5a8915da0 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -44,20 +44,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
expect(subject[:merge_pipeline]).to eq(pipeline_payload)
end
- context 'when merge_request_short_pipeline_serializer is disabled' do
- it 'returns detailed info about pipeline' do
- stub_feature_flags(merge_request_short_pipeline_serializer: false)
-
- pipeline.reload
- pipeline_payload =
- PipelineDetailsEntity
- .represent(pipeline, request: request)
- .as_json
-
- expect(subject[:merge_pipeline]).to eq(pipeline_payload)
- end
- end
-
context 'when user cannot read pipelines on target project' do
before do
project.add_guest(user)
@@ -236,21 +222,16 @@ RSpec.describe MergeRequestPollWidgetEntity do
context 'when is up to date' do
let(:req) { double('request', current_user: user, project: project) }
- it 'returns pipeline' do
- pipeline_payload =
- MergeRequests::PipelineEntity
- .represent(pipeline, request: req)
- .as_json
-
- expect(subject[:pipeline]).to eq(pipeline_payload)
+ it 'does not return pipeline' do
+ expect(subject[:pipeline]).to be_nil
end
- context 'when merge_request_short_pipeline_serializer is disabled' do
+ context 'when merge_request_cached_pipeline_serializer is disabled' do
it 'returns detailed info about pipeline' do
- stub_feature_flags(merge_request_short_pipeline_serializer: false)
+ stub_feature_flags(merge_request_cached_pipeline_serializer: false)
pipeline_payload =
- PipelineDetailsEntity
+ MergeRequests::PipelineEntity
.represent(pipeline, request: req)
.as_json
@@ -276,10 +257,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
let(:result) { false }
let(:req) { double('request', current_user: user, project: project) }
- it 'does not have pipeline' do
- expect(subject[:pipeline]).to eq(nil)
- end
-
it 'does not return ci_status' do
expect(subject[:ci_status]).to eq(nil)
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 1432c4499ae..5cad35eaedf 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -88,25 +88,53 @@ RSpec.describe MergeRequestWidgetEntity do
end
describe 'codequality report artifacts', :request_store do
+ let(:merge_base_pipeline) { create(:ci_pipeline, :with_codequality_report, project: project) }
+
before do
project.add_developer(user)
allow(resource).to receive_messages(
+ merge_base_pipeline: merge_base_pipeline,
base_pipeline: pipeline,
head_pipeline: pipeline
)
end
- context "with report artifacts" do
+ context 'with report artifacts' do
let(:pipeline) { create(:ci_pipeline, :with_codequality_report, project: project) }
+ let(:generic_job_id) { pipeline.builds.first.id }
+ let(:merge_base_job_id) { merge_base_pipeline.builds.first.id }
- it "has data entry" do
- expect(subject).to include(:codeclimate)
+ it 'has head_path and base_path entries' do
+ expect(subject[:codeclimate][:head_path]).to be_present
+ expect(subject[:codeclimate][:base_path]).to be_present
+ end
+
+ context 'on pipelines for merged results' do
+ let(:pipeline) { create(:ci_pipeline, :merged_result_pipeline, :with_codequality_report, project: project) }
+
+ context 'with merge_base_pipelines enabled' do
+ it 'returns URLs from the head_pipeline and merge_base_pipeline' do
+ expect(subject[:codeclimate][:head_path]).to include("/jobs/#{generic_job_id}/artifacts/download?file_type=codequality")
+ expect(subject[:codeclimate][:base_path]).to include("/jobs/#{merge_base_job_id}/artifacts/download?file_type=codequality")
+ end
+ end
+
+ context 'with merge_base_pipelines disabled' do
+ before do
+ stub_feature_flags(merge_base_pipelines: false)
+ end
+
+ it 'returns URLs from the head_pipeline and base_pipeline' do
+ expect(subject[:codeclimate][:head_path]).to include("/jobs/#{generic_job_id}/artifacts/download?file_type=codequality")
+ expect(subject[:codeclimate][:base_path]).to include("/jobs/#{generic_job_id}/artifacts/download?file_type=codequality")
+ end
+ end
end
end
- context "without artifacts" do
- it "does not have data entry" do
+ context 'without artifacts' do
+ it 'does not have data entry' do
expect(subject).not_to include(:codeclimate)
end
end
@@ -271,9 +299,7 @@ RSpec.describe MergeRequestWidgetEntity do
describe 'user callouts' do
context 'when suggest pipeline feature is enabled' do
- before do
- stub_feature_flags(suggest_pipeline: true)
- end
+ subject { described_class.new(resource, request: request, experiment_enabled: :suggest_pipeline).as_json }
it 'provides a valid path value for user callout path' do
expect(subject[:user_callouts_path]).to eq '/-/user_callouts'
@@ -307,10 +333,6 @@ RSpec.describe MergeRequestWidgetEntity do
end
context 'when suggest pipeline feature is not enabled' do
- before do
- stub_feature_flags(suggest_pipeline: false)
- end
-
it 'provides no valid value for user callout path' do
expect(subject[:user_callouts_path]).to be_nil
end
@@ -354,4 +376,8 @@ RSpec.describe MergeRequestWidgetEntity do
expect(entity[:rebase_path]).to be_nil
end
end
+
+ it 'has security_reports_docs_path' do
+ expect(subject[:security_reports_docs_path]).not_to be_nil
+ end
end
diff --git a/spec/serializers/paginated_diff_entity_spec.rb b/spec/serializers/paginated_diff_entity_spec.rb
index a2c58baed55..821ed34d3ec 100644
--- a/spec/serializers/paginated_diff_entity_spec.rb
+++ b/spec/serializers/paginated_diff_entity_spec.rb
@@ -31,14 +31,4 @@ RSpec.describe PaginatedDiffEntity do
total_pages: 7
)
end
-
- context 'when code_navigation feature flag is disabled' do
- it 'does not execute Gitlab::CodeNavigationPath' do
- stub_feature_flags(code_navigation: false)
-
- expect(Gitlab::CodeNavigationPath).not_to receive(:new)
-
- subject
- end
- end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index b42a4f6ad3f..e0f6ab68034 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -155,7 +155,7 @@ RSpec.describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expected_queries = Gitlab.ee? ? 43 : 40
+ expected_queries = Gitlab.ee? ? 39 : 36
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
@@ -176,7 +176,7 @@ RSpec.describe PipelineSerializer do
# pipeline. With the same ref this check is cached but if refs are
# different then there is an extra query per ref
# https://gitlab.com/gitlab-org/gitlab-foss/issues/46368
- expected_queries = Gitlab.ee? ? 49 : 46
+ expected_queries = Gitlab.ee? ? 42 : 39
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
@@ -199,11 +199,10 @@ RSpec.describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
- # 99 queries by default + 2 related to preloading
- # :source_pipeline and :source_job
# Existing numbers are high and require performance optimization
+ # Ongoing issue:
# https://gitlab.com/gitlab-org/gitlab/-/issues/225156
- expected_queries = Gitlab.ee? ? 95 : 86
+ expected_queries = Gitlab.ee? ? 85 : 76
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
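The expected_queries assertions above rely on GitLab's ActiveRecord::QueryRecorder helper, visible in the surrounding context lines. A minimal sketch of that pattern follows; the block under measurement is a stand-in, and only the recorder API mirrors the spec.

# Sketch of the QueryRecorder pattern used in the hunk above.
require 'spec_helper'

RSpec.describe 'query counting example' do
  it 'stays within the expected number of queries', :request_store do
    recorded = ActiveRecord::QueryRecorder.new do
      Project.take # stand-in for the serializer call being measured
    end

    expect(recorded.count).to be_within(2).of(1)
    expect(recorded.cached_count).to eq(0)
  end
end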
diff --git a/spec/serializers/test_case_entity_spec.rb b/spec/serializers/test_case_entity_spec.rb
index 32e9562f4c1..45e63e3feec 100644
--- a/spec/serializers/test_case_entity_spec.rb
+++ b/spec/serializers/test_case_entity_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe TestCaseEntity do
expect(subject[:status]).to eq('success')
expect(subject[:name]).to eq('Test#sum when a is 1 and b is 3 returns summary')
expect(subject[:classname]).to eq('spec.test_spec')
+ expect(subject[:file]).to eq('./spec/test_spec.rb')
expect(subject[:execution_time]).to eq(1.11)
end
end
@@ -30,6 +31,7 @@ RSpec.describe TestCaseEntity do
expect(subject[:status]).to eq('failed')
expect(subject[:name]).to eq('Test#sum when a is 1 and b is 3 returns summary')
expect(subject[:classname]).to eq('spec.test_spec')
+ expect(subject[:file]).to eq('./spec/test_spec.rb')
expect(subject[:execution_time]).to eq(2.22)
end
end
diff --git a/spec/services/admin/propagate_integration_service_spec.rb b/spec/services/admin/propagate_integration_service_spec.rb
index 49d974b7154..5df4d9db8b1 100644
--- a/spec/services/admin/propagate_integration_service_spec.rb
+++ b/spec/services/admin/propagate_integration_service_spec.rb
@@ -10,129 +10,73 @@ RSpec.describe Admin::PropagateIntegrationService do
stub_jira_service_test
end
- let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance created_at updated_at default] }
- let!(:project) { create(:project) }
- let!(:group) { create(:group) }
- let!(:instance_integration) do
- JiraService.create!(
- instance: true,
- active: true,
- push_events: true,
- url: 'http://update-jira.instance.com',
- username: 'user',
- password: 'secret'
- )
- end
+ let(:group) { create(:group) }
- let!(:inherited_integration) do
- JiraService.create!(
- project: create(:project),
- inherit_from_id: instance_integration.id,
- instance: false,
- active: true,
- push_events: false,
- url: 'http://jira.instance.com',
- username: 'user',
- password: 'secret'
- )
+ let_it_be(:project) { create(:project) }
+ let_it_be(:instance_integration) { create(:jira_service, :instance) }
+ let_it_be(:not_inherited_integration) { create(:jira_service, project: project) }
+ let_it_be(:inherited_integration) do
+ create(:jira_service, project: create(:project), inherit_from_id: instance_integration.id)
end
-
- let!(:not_inherited_integration) do
- JiraService.create!(
- project: create(:project),
- inherit_from_id: nil,
- instance: false,
- active: true,
- push_events: false,
- url: 'http://jira.instance.com',
- username: 'user',
- password: 'secret'
- )
+ let_it_be(:different_type_inherited_integration) do
+ create(:redmine_service, project: project, inherit_from_id: instance_integration.id)
end
- let!(:different_type_inherited_integration) do
- BambooService.create!(
- project: create(:project),
- inherit_from_id: instance_integration.id,
- instance: false,
- active: true,
- push_events: false,
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: 'password',
- build_key: 'build'
- )
- end
+ context 'with inherited integration' do
+ let(:integration) { inherited_integration }
- shared_examples 'inherits settings from integration' do
- it 'updates the inherited integrations' do
- described_class.propagate(instance_integration)
+ it 'calls to PropagateIntegrationInheritWorker' do

+ expect(PropagateIntegrationInheritWorker).to receive(:perform_async)
+ .with(instance_integration.id, inherited_integration.id, inherited_integration.id)
- expect(integration.reload.inherit_from_id).to eq(instance_integration.id)
- expect(integration.attributes.except(*excluded_attributes))
- .to eq(instance_integration.attributes.except(*excluded_attributes))
+ described_class.propagate(instance_integration)
end
+ end
- context 'integration with data fields' do
- let(:excluded_attributes) { %w[id service_id created_at updated_at] }
+ context 'with a project without integration' do
+ let(:another_project) { create(:project) }
- it 'updates the data fields from inherited integrations' do
- described_class.propagate(instance_integration)
+ it 'calls to PropagateIntegrationProjectWorker' do
+ expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
+ .with(instance_integration.id, another_project.id, another_project.id)
- expect(integration.reload.data_fields.attributes.except(*excluded_attributes))
- .to eq(instance_integration.data_fields.attributes.except(*excluded_attributes))
- end
+ described_class.propagate(instance_integration)
end
end
- shared_examples 'does not inherit settings from integration' do
- it 'does not update the not inherited integrations' do
- described_class.propagate(instance_integration)
+ context 'with a group without integration' do
+ it 'calls to PropagateIntegrationGroupWorker' do
+ expect(PropagateIntegrationGroupWorker).to receive(:perform_async)
+ .with(instance_integration.id, group.id, group.id)
- expect(integration.reload.attributes.except(*excluded_attributes))
- .not_to eq(instance_integration.attributes.except(*excluded_attributes))
+ described_class.propagate(instance_integration)
end
end
- context 'update only inherited integrations' do
- it_behaves_like 'inherits settings from integration' do
- let(:integration) { inherited_integration }
- end
-
- it_behaves_like 'does not inherit settings from integration' do
- let(:integration) { not_inherited_integration }
- end
+ context 'for a group-level integration' do
+ let(:group_integration) { create(:jira_service, group: group, project: nil) }
- it_behaves_like 'does not inherit settings from integration' do
- let(:integration) { different_type_inherited_integration }
- end
+ context 'with a project without integration' do
+ let(:another_project) { create(:project, group: group) }
- it_behaves_like 'inherits settings from integration' do
- let(:integration) { project.jira_service }
- end
+ it 'calls to PropagateIntegrationProjectWorker' do
+ expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
+ .with(group_integration.id, another_project.id, another_project.id)
- it_behaves_like 'inherits settings from integration' do
- let(:integration) { Service.find_by(group_id: group.id) }
+ described_class.propagate(group_integration)
+ end
end
- end
- it 'updates project#has_external_issue_tracker for issue tracker services' do
- described_class.propagate(instance_integration)
+ context 'with a group without integration' do
+ let(:subgroup) { create(:group, parent: group) }
- expect(project.reload.has_external_issue_tracker).to eq(true)
- end
-
- it 'updates project#has_external_wiki for external wiki services' do
- instance_integration = ExternalWikiService.create!(
- instance: true,
- active: true,
- push_events: false,
- external_wiki_url: 'http://external-wiki-url.com'
- )
-
- described_class.propagate(instance_integration)
+ it 'calls to PropagateIntegrationGroupWorker' do
+ expect(PropagateIntegrationGroupWorker).to receive(:perform_async)
+ .with(group_integration.id, subgroup.id, subgroup.id)
- expect(project.reload.has_external_wiki).to eq(true)
+ described_class.propagate(group_integration)
+ end
+ end
end
end
end
diff --git a/spec/services/admin/propagate_service_template_spec.rb b/spec/services/admin/propagate_service_template_spec.rb
index 15654653095..d95d31ceaea 100644
--- a/spec/services/admin/propagate_service_template_spec.rb
+++ b/spec/services/admin/propagate_service_template_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Admin::PropagateServiceTemplate do
describe '.propagate' do
+ let_it_be(:project) { create(:project) }
let!(:service_template) do
PushoverService.create!(
template: true,
@@ -19,124 +20,40 @@ RSpec.describe Admin::PropagateServiceTemplate do
)
end
- let!(:project) { create(:project) }
- let(:excluded_attributes) { %w[id project_id template created_at updated_at default] }
-
- it 'creates services for projects' do
- expect(project.pushover_service).to be_nil
+ it 'calls to PropagateIntegrationProjectWorker' do
+ expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
+ .with(service_template.id, project.id, project.id)
described_class.propagate(service_template)
-
- expect(project.reload.pushover_service).to be_present
- end
-
- it 'creates services for a project that has another service' do
- BambooService.create!(
- active: true,
- project: project,
- properties: {
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: 'password',
- build_key: 'build'
- }
- )
-
- expect(project.pushover_service).to be_nil
-
- described_class.propagate(service_template)
-
- expect(project.reload.pushover_service).to be_present
- end
-
- it 'does not create the service if it exists already' do
- other_service = BambooService.create!(
- template: true,
- active: true,
- properties: {
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: 'password',
- build_key: 'build'
- }
- )
-
- Service.build_from_integration(project.id, service_template).save!
- Service.build_from_integration(project.id, other_service).save!
-
- expect { described_class.propagate(service_template) }
- .not_to change { Service.count }
end
- it 'creates the service containing the template attributes' do
- described_class.propagate(service_template)
-
- expect(project.pushover_service.properties).to eq(service_template.properties)
-
- expect(project.pushover_service.attributes.except(*excluded_attributes))
- .to eq(service_template.attributes.except(*excluded_attributes))
- end
-
- context 'service with data fields' do
- include JiraServiceHelper
-
- let(:service_template) do
- stub_jira_service_test
-
- JiraService.create!(
- template: true,
+ context 'with a project that has another service' do
+ before do
+ BambooService.create!(
active: true,
- push_events: false,
- url: 'http://jira.instance.com',
- username: 'user',
- password: 'secret'
+ project: project,
+ properties: {
+ bamboo_url: 'http://gitlab.com',
+ username: 'mic',
+ password: 'password',
+ build_key: 'build'
+ }
)
end
- it 'creates the service containing the template attributes' do
- described_class.propagate(service_template)
-
- expect(project.jira_service.attributes.except(*excluded_attributes))
- .to eq(service_template.attributes.except(*excluded_attributes))
-
- excluded_attributes = %w[id service_id created_at updated_at]
- expect(project.jira_service.data_fields.attributes.except(*excluded_attributes))
- .to eq(service_template.data_fields.attributes.except(*excluded_attributes))
- end
- end
-
- describe 'bulk update', :use_sql_query_cache do
- let(:project_total) { 5 }
-
- before do
- stub_const('Admin::PropagateServiceTemplate::BATCH_SIZE', 3)
-
- project_total.times { create(:project) }
+ it 'calls to PropagateIntegrationProjectWorker' do
+ expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
+ .with(service_template.id, project.id, project.id)
described_class.propagate(service_template)
end
-
- it 'creates services for all projects' do
- expect(Service.all.reload.count).to eq(project_total + 2)
- end
- end
-
- describe 'external tracker' do
- it 'updates the project external tracker' do
- service_template.update!(category: 'issue_tracker')
-
- expect { described_class.propagate(service_template) }
- .to change { project.reload.has_external_issue_tracker }.to(true)
- end
end
- describe 'external wiki' do
- it 'updates the project external tracker' do
- service_template.update!(type: 'ExternalWikiService')
+ it 'does not create the service if it exists already' do
+ Service.build_from_integration(service_template, project_id: project.id).save!
- expect { described_class.propagate(service_template) }
- .to change { project.reload.has_external_wiki }.to(true)
- end
+ expect { described_class.propagate(service_template) }
+ .not_to change { Service.count }
end
end
end
diff --git a/spec/services/alert_management/alerts/update_service_spec.rb b/spec/services/alert_management/alerts/update_service_spec.rb
index ee04fc55984..4b47efca9ed 100644
--- a/spec/services/alert_management/alerts/update_service_spec.rb
+++ b/spec/services/alert_management/alerts/update_service_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe AlertManagement::Alerts::UpdateService do
context 'when a status is included' do
let(:params) { { status: new_status } }
- let(:new_status) { AlertManagement::Alert::STATUSES[:acknowledged] }
+ let(:new_status) { :acknowledged }
it 'successfully changes the status' do
expect { response }.to change { alert.acknowledged? }.to(true)
@@ -171,13 +171,13 @@ RSpec.describe AlertManagement::Alerts::UpdateService do
it_behaves_like 'adds a system note'
context 'with unknown status' do
- let(:new_status) { -1 }
+ let(:new_status) { :unknown_status }
it_behaves_like 'error response', 'Invalid status'
end
context 'with resolving status' do
- let(:new_status) { AlertManagement::Alert::STATUSES[:resolved] }
+ let(:new_status) { :resolved }
it 'changes the status' do
expect { response }.to change { alert.resolved? }.to(true)
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
index b14cc65506a..ae0b8d6d7ac 100644
--- a/spec/services/alert_management/process_prometheus_alert_service_spec.rb
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -117,15 +117,19 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
end
context 'when alert cannot be created' do
+ let(:errors) { double(messages: { hosts: ['hosts array is over 255 chars'] })}
+
before do
- payload['annotations']['title'] = 'description' * 50
+ allow(service).to receive(:alert).and_call_original
+ allow(service).to receive_message_chain(:alert, :save).and_return(false)
+ allow(service).to receive_message_chain(:alert, :errors).and_return(errors)
end
it 'writes a warning to the log' do
expect(Gitlab::AppLogger).to receive(:warn).with(
message: 'Unable to create AlertManagement::Alert',
project_id: project.id,
- alert_errors: { title: ["is too long (maximum is 200 characters)"] }
+ alert_errors: { hosts: ['hosts array is over 255 chars'] }
)
execute
@@ -148,28 +152,20 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
expect { execute }.to change { alert.reload.resolved? }.to(true)
end
- [true, false].each do |state_tracking_enabled|
- context 'existing issue' do
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
- end
-
- let!(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint) }
+ context 'existing issue' do
+ let!(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint) }
- it 'closes the issue' do
- issue = alert.issue
+ it 'closes the issue' do
+ issue = alert.issue
- expect { execute }
- .to change { issue.reload.state }
- .from('opened')
- .to('closed')
- end
+ expect { execute }
+ .to change { issue.reload.state }
+ .from('opened')
+ .to('closed')
+ end
- if state_tracking_enabled
- specify { expect { execute }.to change(ResourceStateEvent, :count).by(1) }
- else
- specify { expect { execute }.to change(Note, :count).by(1) }
- end
+ it 'creates a resource state event' do
+ expect { execute }.to change(ResourceStateEvent, :count).by(1)
end
end
end
diff --git a/spec/services/audit_event_service_spec.rb b/spec/services/audit_event_service_spec.rb
index 93de2a23edc..3317fcf8444 100644
--- a/spec/services/audit_event_service_spec.rb
+++ b/spec/services/audit_event_service_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe AuditEventService do
let(:audit_service) { described_class.new(user, user, with: 'standard') }
it 'creates an authentication event' do
- expect(AuthenticationEvent).to receive(:create).with(
+ expect(AuthenticationEvent).to receive(:new).with(
user: user,
user_name: user.name,
ip_address: user.current_sign_in_ip,
@@ -67,6 +67,17 @@ RSpec.describe AuditEventService do
audit_service.for_authentication.security_event
end
+
+ it 'tracks exceptions when the event cannot be created' do
+ allow(user).to receive_messages(current_sign_in_ip: 'invalid IP')
+
+ expect(Gitlab::ErrorTracking).to(
+ receive(:track_exception)
+ .with(ActiveRecord::RecordInvalid, audit_event_type: 'AuthenticationEvent').and_call_original
+ )
+
+ audit_service.for_authentication.security_event
+ end
end
end
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
new file mode 100644
index 00000000000..5d896f78b35
--- /dev/null
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkCreateIntegrationService do
+ include JiraServiceHelper
+
+ before do
+ stub_jira_service_test
+ end
+
+ let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
+ let!(:instance_integration) { create(:jira_service, :instance) }
+ let!(:template_integration) { create(:jira_service, :template) }
+
+ shared_examples 'creates integration from batch ids' do
+ it 'updates the inherited integrations' do
+ described_class.new(integration, batch, association).execute
+
+ expect(created_integration.attributes.except(*excluded_attributes))
+ .to eq(integration.attributes.except(*excluded_attributes))
+ end
+
+ context 'integration with data fields' do
+ let(:excluded_attributes) { %w[id service_id created_at updated_at] }
+
+ it 'updates the data fields from inherited integrations' do
+ described_class.new(integration, batch, association).execute
+
+ expect(created_integration.reload.data_fields.attributes.except(*excluded_attributes))
+ .to eq(integration.data_fields.attributes.except(*excluded_attributes))
+ end
+ end
+ end
+
+ shared_examples 'updates inherit_from_id' do
+ it 'updates inherit_from_id attributes' do
+ described_class.new(integration, batch, association).execute
+
+ expect(created_integration.reload.inherit_from_id).to eq(integration.id)
+ end
+ end
+
+ shared_examples 'runs project callbacks' do
+ it 'updates projects#has_external_issue_tracker for issue tracker services' do
+ described_class.new(integration, batch, association).execute
+
+ expect(project.reload.has_external_issue_tracker).to eq(true)
+ end
+
+ context 'with an external wiki integration' do
+ let(:integration) do
+ ExternalWikiService.create!(
+ instance: true,
+ active: true,
+ push_events: false,
+ external_wiki_url: 'http://external-wiki-url.com'
+ )
+ end
+
+ it 'updates projects#has_external_wiki for external wiki services' do
+ described_class.new(integration, batch, association).execute
+
+ expect(project.reload.has_external_wiki).to eq(true)
+ end
+ end
+ end
+
+ context 'with an instance-level integration' do
+ let(:integration) { instance_integration }
+
+ context 'with a project association' do
+ let!(:project) { create(:project) }
+ let(:created_integration) { project.jira_service }
+ let(:batch) { Project.all }
+ let(:association) { 'project' }
+
+ it_behaves_like 'creates integration from batch ids'
+ it_behaves_like 'updates inherit_from_id'
+ it_behaves_like 'runs project callbacks'
+ end
+
+ context 'with a group association' do
+ let!(:group) { create(:group) }
+ let(:created_integration) { Service.find_by(group: group) }
+ let(:batch) { Group.all }
+ let(:association) { 'group' }
+
+ it_behaves_like 'creates integration from batch ids'
+ it_behaves_like 'updates inherit_from_id'
+ end
+ end
+
+ context 'with a template integration' do
+ let(:integration) { template_integration }
+
+ context 'with a project association' do
+ let!(:project) { create(:project) }
+ let(:created_integration) { project.jira_service }
+ let(:batch) { Project.all }
+ let(:association) { 'project' }
+
+ it_behaves_like 'creates integration from batch ids'
+ it_behaves_like 'runs project callbacks'
+ end
+ end
+end
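The new spec above is built from shared_examples blocks that are parameterised purely through let definitions (integration, batch, association, created_integration). A minimal, self-contained sketch of that RSpec composition pattern, with illustrative names only:

# Illustrative sketch: shared examples parameterised via let, as in the spec above.
require 'spec_helper'

RSpec.shared_examples 'doubles the input' do
  it 'returns twice the value' do
    expect(subject).to eq(value * 2)
  end
end

RSpec.describe 'shared example composition' do
  subject { value * 2 }

  context 'with a small value' do
    let(:value) { 2 }

    it_behaves_like 'doubles the input'
  end

  context 'with a larger value' do
    let(:value) { 21 }

    it_behaves_like 'doubles the input'
  end
end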
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
new file mode 100644
index 00000000000..2f0bfd31600
--- /dev/null
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkUpdateIntegrationService do
+ include JiraServiceHelper
+
+ before do
+ stub_jira_service_test
+ end
+
+ let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
+ let!(:instance_integration) do
+ JiraService.create!(
+ instance: true,
+ active: true,
+ push_events: true,
+ url: 'http://update-jira.instance.com',
+ username: 'user',
+ password: 'secret'
+ )
+ end
+
+ let!(:integration) do
+ JiraService.create!(
+ project: create(:project),
+ inherit_from_id: instance_integration.id,
+ instance: false,
+ active: true,
+ push_events: false,
+ url: 'http://jira.instance.com',
+ username: 'user',
+ password: 'secret'
+ )
+ end
+
+ context 'with inherited integration' do
+ it 'updates the integration' do
+ described_class.new(instance_integration, Service.inherit_from_id(instance_integration.id)).execute
+
+ expect(integration.reload.inherit_from_id).to eq(instance_integration.id)
+ expect(integration.attributes.except(*excluded_attributes))
+ .to eq(instance_integration.attributes.except(*excluded_attributes))
+ end
+
+ context 'with integration with data fields' do
+ let(:excluded_attributes) { %w[id service_id created_at updated_at] }
+
+ it 'updates the data fields from the integration' do
+ described_class.new(instance_integration, Service.inherit_from_id(instance_integration.id)).execute
+
+ expect(integration.reload.data_fields.attributes.except(*excluded_attributes))
+ .to eq(instance_integration.data_fields.attributes.except(*excluded_attributes))
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/build_report_result_service_spec.rb b/spec/services/ci/build_report_result_service_spec.rb
index 70bcf74ba43..134b662a72a 100644
--- a/spec/services/ci/build_report_result_service_spec.rb
+++ b/spec/services/ci/build_report_result_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::BuildReportResultService do
- describe "#execute" do
+ describe '#execute', :clean_gitlab_redis_shared_state do
subject(:build_report_result) { described_class.new.execute(build) }
context 'when build is finished' do
@@ -17,6 +17,25 @@ RSpec.describe Ci::BuildReportResultService do
expect(build_report_result.tests_skipped).to eq(0)
expect(build_report_result.tests_duration).to eq(0.010284)
expect(Ci::BuildReportResult.count).to eq(1)
+
+ unique_test_cases_parsed = Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(
+ event_names: described_class::EVENT_NAME,
+ start_date: 2.weeks.ago,
+ end_date: 2.weeks.from_now
+ )
+ expect(unique_test_cases_parsed).to eq(4)
+ end
+
+ context 'when feature flag for tracking is disabled' do
+ before do
+ stub_feature_flags(track_unique_test_cases_parsed: false)
+ end
+
+ it 'creates the report but does not track the event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ expect(build_report_result.tests_name).to eq("test")
+ expect(Ci::BuildReportResult.count).to eq(1)
+ end
end
context 'when data has already been persisted' do
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index a6ea30e4703..0cc380439a7 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -325,20 +325,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
expect(bridge.reload).to be_success
end
-
- context 'when FF ci_child_of_child_pipeline is disabled' do
- before do
- stub_feature_flags(ci_child_of_child_pipeline: false)
- end
-
- it 'does not create a further child pipeline' do
- expect { service.execute(bridge) }
- .not_to change { Ci::Pipeline.count }
-
- expect(bridge.reload).to be_failed
- expect(bridge.failure_reason).to eq 'bridge_pipeline_is_child_pipeline'
- end
- end
end
context 'when upstream pipeline has a parent pipeline, which has a parent pipeline' do
diff --git a/spec/services/ci/create_pipeline_service/cache_spec.rb b/spec/services/ci/create_pipeline_service/cache_spec.rb
index 614e46f1b1a..1438c2e4aa0 100644
--- a/spec/services/ci/create_pipeline_service/cache_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cache_spec.rb
@@ -36,7 +36,8 @@ RSpec.describe Ci::CreatePipelineService do
'key' => 'a-key',
'paths' => ['logs/', 'binaries/'],
'policy' => 'pull-push',
- 'untracked' => true
+ 'untracked' => true,
+ 'when' => 'on_success'
}
expect(pipeline).to be_persisted
@@ -67,7 +68,8 @@ RSpec.describe Ci::CreatePipelineService do
expected = {
'key' => /[a-f0-9]{40}/,
'paths' => ['logs/'],
- 'policy' => 'pull-push'
+ 'policy' => 'pull-push',
+ 'when' => 'on_success'
}
expect(pipeline).to be_persisted
@@ -82,7 +84,8 @@ RSpec.describe Ci::CreatePipelineService do
expected = {
'key' => /default/,
'paths' => ['logs/'],
- 'policy' => 'pull-push'
+ 'policy' => 'pull-push',
+ 'when' => 'on_success'
}
expect(pipeline).to be_persisted
@@ -114,7 +117,8 @@ RSpec.describe Ci::CreatePipelineService do
expected = {
'key' => /\$ENV_VAR-[a-f0-9]{40}/,
'paths' => ['logs/'],
- 'policy' => 'pull-push'
+ 'policy' => 'pull-push',
+ 'when' => 'on_success'
}
expect(pipeline).to be_persisted
@@ -129,7 +133,8 @@ RSpec.describe Ci::CreatePipelineService do
expected = {
'key' => /\$ENV_VAR-default/,
'paths' => ['logs/'],
- 'policy' => 'pull-push'
+ 'policy' => 'pull-push',
+ 'when' => 'on_success'
}
expect(pipeline).to be_persisted
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 016a5dfd18b..fb6cdf55be3 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
shared_examples 'successful creation' do
- it 'creates bridge jobs correctly' do
+ it 'creates bridge jobs correctly', :aggregate_failures do
pipeline = create_pipeline!
test = pipeline.statuses.find_by(name: 'test')
@@ -221,6 +221,65 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
end
end
end
+
+ context 'when including configs from a project' do
+ context 'when specifying all attributes' do
+ let(:config) do
+ <<~YAML
+ test:
+ script: rspec
+ deploy:
+ variables:
+ CROSS: downstream
+ stage: deploy
+ trigger:
+ include:
+ - project: my-namespace/my-project
+ file: 'path/to/child.yml'
+ ref: 'master'
+ YAML
+ end
+
+ it_behaves_like 'successful creation' do
+ let(:expected_bridge_options) do
+ {
+ 'trigger' => {
+ 'include' => [
+ {
+ 'file' => 'path/to/child.yml',
+ 'project' => 'my-namespace/my-project',
+ 'ref' => 'master'
+ }
+ ]
+ }
+ }
+ end
+ end
+ end
+
+ context 'without specifying file' do
+ let(:config) do
+ <<~YAML
+ test:
+ script: rspec
+ deploy:
+ variables:
+ CROSS: downstream
+ stage: deploy
+ trigger:
+ include:
+ - project: my-namespace/my-project
+ ref: 'master'
+ YAML
+ end
+
+ it_behaves_like 'creation failure' do
+ let(:expected_error) do
+ /include config must specify the file where to fetch the config from/
+ end
+ end
+ end
+ end
end
def create_pipeline!
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index e0893ed6de3..c28c3449485 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -731,30 +731,11 @@ RSpec.describe Ci::CreatePipelineService do
.and_call_original
end
- context 'when ci_pipeline_rewind_iid is enabled' do
- before do
- stub_feature_flags(ci_pipeline_rewind_iid: true)
- end
-
- it 'rewinds iid' do
- result = execute_service
-
- expect(result).not_to be_persisted
- expect(internal_id.last_value).to eq(0)
- end
- end
-
- context 'when ci_pipeline_rewind_iid is disabled' do
- before do
- stub_feature_flags(ci_pipeline_rewind_iid: false)
- end
-
- it 'does not rewind iid' do
- result = execute_service
+ it 'rewinds iid' do
+ result = execute_service
- expect(result).not_to be_persisted
- expect(internal_id.last_value).to eq(1)
- end
+ expect(result).not_to be_persisted
+ expect(internal_id.last_value).to eq(0)
end
end
end
diff --git a/spec/services/ci/delete_objects_service_spec.rb b/spec/services/ci/delete_objects_service_spec.rb
new file mode 100644
index 00000000000..448f8979681
--- /dev/null
+++ b/spec/services/ci/delete_objects_service_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DeleteObjectsService, :aggregate_failures do
+ let(:service) { described_class.new }
+ let(:artifact) { create(:ci_job_artifact, :archive) }
+ let(:data) { [artifact] }
+
+ describe '#execute' do
+ before do
+ Ci::DeletedObject.bulk_import(data)
+ # We disable the check because the specs are wrapped in a transaction
+ allow(service).to receive(:transaction_open?).and_return(false)
+ end
+
+ subject(:execute) { service.execute }
+
+ it 'deletes records' do
+ expect { execute }.to change { Ci::DeletedObject.count }.by(-1)
+ end
+
+ it 'deletes files' do
+ expect { execute }.to change { artifact.file.exists? }
+ end
+
+ context 'when trying to execute without records' do
+ let(:data) { [] }
+
+ it 'does not change the number of objects' do
+ expect { execute }.not_to change { Ci::DeletedObject.count }
+ end
+ end
+
+ context 'when trying to remove the same file multiple times' do
+ let(:objects) { Ci::DeletedObject.all.to_a }
+
+ before do
+ expect(service).to receive(:load_next_batch).twice.and_return(objects)
+ end
+
+ it 'executes successfully' do
+ 2.times { expect(service.execute).to be_truthy }
+ end
+ end
+
+ context 'with artifacts both ready and not ready for deletion' do
+ let(:data) { [] }
+
+ let_it_be(:past_ready) { create(:ci_deleted_object, pick_up_at: 2.days.ago) }
+ let_it_be(:ready) { create(:ci_deleted_object, pick_up_at: 1.day.ago) }
+
+ it 'skips records with pick_up_at in the future' do
+ not_ready = create(:ci_deleted_object, pick_up_at: 1.day.from_now)
+
+ expect { execute }.to change { Ci::DeletedObject.count }.from(3).to(1)
+ expect(not_ready.reload.present?).to be_truthy
+ end
+
+ it 'limits the number of records removed' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ expect { execute }.to change { Ci::DeletedObject.count }.by(-1)
+ end
+
+ it 'removes records in order' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+
+ execute
+
+ expect { past_ready.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect(ready.reload.present?).to be_truthy
+ end
+
+ it 'updates pick_up_at timestamp' do
+ allow(service).to receive(:destroy_everything)
+
+ execute
+
+ expect(past_ready.reload.pick_up_at).to be_like_time(10.minutes.from_now)
+ end
+
+ it 'does not delete objects for which file deletion has failed' do
+ expect(past_ready)
+ .to receive(:delete_file_from_storage)
+ .and_return(false)
+
+ expect(service)
+ .to receive(:load_next_batch)
+ .and_return([past_ready, ready])
+
+ expect { execute }.to change { Ci::DeletedObject.count }.from(2).to(1)
+ expect(past_ready.reload.present?).to be_truthy
+ end
+ end
+
+ context 'with an open database transaction' do
+ it 'raises an exception and does not remove records' do
+ expect(service).to receive(:transaction_open?).and_return(true)
+
+ expect { execute }
+ .to raise_error(Ci::DeleteObjectsService::TransactionInProgressError)
+ .and change { Ci::DeletedObject.count }.by(0)
+ end
+ end
+ end
+
+ describe '#remaining_batches_count' do
+ subject { service.remaining_batches_count(max_batch_count: 3) }
+
+ context 'when there is less than one batch size' do
+ before do
+ Ci::DeletedObject.bulk_import(data)
+ end
+
+ it { is_expected.to eq(1) }
+ end
+
+ context 'when there is more than one batch size' do
+ before do
+ objects_scope = double
+
+ expect(Ci::DeletedObject)
+ .to receive(:ready_for_destruction)
+ .and_return(objects_scope)
+
+ expect(objects_scope).to receive(:size).and_return(110)
+ end
+
+ it { is_expected.to eq(2) }
+ end
+ end
+end
diff --git a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
index 1c96be42a2f..3d5329811ad 100644
--- a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
+++ b/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
@@ -9,9 +9,10 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
subject { service.execute }
let(:service) { described_class.new }
- let!(:artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
- before do
+ let_it_be(:artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
+
+ before(:all) do
artifact.job.pipeline.unlocked!
end
@@ -38,7 +39,9 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
end
context 'when artifact is not expired' do
- let!(:artifact) { create(:ci_job_artifact, expire_at: 1.day.since) }
+ before do
+ artifact.update_column(:expire_at, 1.day.since)
+ end
it 'does not destroy expired job artifacts' do
expect { subject }.not_to change { Ci::JobArtifact.count }
@@ -46,7 +49,9 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
end
context 'when artifact is permanent' do
- let!(:artifact) { create(:ci_job_artifact, expire_at: nil) }
+ before do
+ artifact.update_column(:expire_at, nil)
+ end
it 'does not destroy expired job artifacts' do
expect { subject }.not_to change { Ci::JobArtifact.count }
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index b5d664947de..8df5d0bc159 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -26,9 +26,11 @@ RSpec.describe Ci::ExpirePipelineCacheService do
project = merge_request.target_project
merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
+ merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"
allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch)
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path)
+ expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_widget_path)
subject.execute(merge_request.all_pipelines.last)
end
diff --git a/spec/services/ci/list_config_variables_service_spec.rb b/spec/services/ci/list_config_variables_service_spec.rb
new file mode 100644
index 00000000000..5cc0481768b
--- /dev/null
+++ b/spec/services/ci/list_config_variables_service_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ListConfigVariablesService do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:service) { described_class.new(project) }
+ let(:result) { YAML.dump(ci_config) }
+
+ subject { service.execute(sha) }
+
+ before do
+ stub_gitlab_ci_yml_for_sha(sha, result)
+ end
+
+ context 'when sending a valid sha' do
+ let(:sha) { 'master' }
+ let(:ci_config) do
+ {
+ variables: {
+ KEY1: { value: 'val 1', description: 'description 1' },
+ KEY2: { value: 'val 2', description: '' },
+ KEY3: { value: 'val 3' },
+ KEY4: 'val 4'
+ },
+ test: {
+ stage: 'test',
+ script: 'echo'
+ }
+ }
+ end
+
+ it 'returns variable list' do
+ expect(subject['KEY1']).to eq({ value: 'val 1', description: 'description 1' })
+ expect(subject['KEY2']).to eq({ value: 'val 2', description: '' })
+ expect(subject['KEY3']).to eq({ value: 'val 3', description: nil })
+ expect(subject['KEY4']).to eq({ value: 'val 4', description: nil })
+ end
+ end
+
+ context 'when sending an invalid sha' do
+ let(:sha) { 'invalid-sha' }
+ let(:ci_config) { nil }
+
+ it 'returns empty json' do
+ expect(subject).to eq({})
+ end
+ end
+
+ context 'when sending an invalid config' do
+ let(:sha) { 'master' }
+ let(:ci_config) do
+ {
+ variables: {
+ KEY1: { value: 'val 1', description: 'description 1' }
+ },
+ test: {
+ stage: 'invalid',
+ script: 'echo'
+ }
+ }
+ end
+
+ it 'returns empty result' do
+ expect(subject).to eq({})
+ end
+ end
+
+ private
+
+ def stub_gitlab_ci_yml_for_sha(sha, result)
+ allow_any_instance_of(Repository)
+ .to receive(:gitlab_ci_yml_for)
+ .with(sha, '.gitlab-ci.yml')
+ .and_return(result)
+ end
+end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index 7de22b6a4cc..bbd7422b435 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -259,14 +259,14 @@ RSpec.shared_examples 'Pipeline Processing Service' do
expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
- Timecop.travel 2.minutes.from_now do
+ travel_to 2.minutes.from_now do
enqueue_scheduled('rollout10%')
end
succeed_pending
expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'success', 'rollout100%': 'scheduled' })
- Timecop.travel 2.minutes.from_now do
+ travel_to 2.minutes.from_now do
enqueue_scheduled('rollout100%')
end
succeed_pending
@@ -330,7 +330,7 @@ RSpec.shared_examples 'Pipeline Processing Service' do
expect(builds_names_and_statuses).to eq({ 'build': 'success', 'rollout10%': 'scheduled' })
- Timecop.travel 2.minutes.from_now do
+ travel_to 2.minutes.from_now do
enqueue_scheduled('rollout10%')
end
fail_running_or_pending
@@ -398,7 +398,7 @@ RSpec.shared_examples 'Pipeline Processing Service' do
expect(process_pipeline).to be_truthy
expect(builds_names_and_statuses).to eq({ 'delayed1': 'scheduled', 'delayed2': 'scheduled' })
- Timecop.travel 2.minutes.from_now do
+ travel_to 2.minutes.from_now do
enqueue_scheduled('delayed1')
end
@@ -419,7 +419,7 @@ RSpec.shared_examples 'Pipeline Processing Service' do
expect(process_pipeline).to be_truthy
expect(builds_names_and_statuses).to eq({ 'delayed': 'scheduled' })
- Timecop.travel 2.minutes.from_now do
+ travel_to 2.minutes.from_now do
enqueue_scheduled('delayed')
end
fail_running_or_pending
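The hunks above swap Timecop.travel for travel_to, the block form from ActiveSupport::Testing::TimeHelpers that GitLab's spec setup already includes. A brief, illustrative sketch of that form; the assertions are examples, not this commit's code.

# Sketch of the travel_to block form adopted above (ActiveSupport::Testing::TimeHelpers).
require 'spec_helper'

RSpec.describe 'time travel example' do
  include ActiveSupport::Testing::TimeHelpers

  it 'shifts the clock only for the duration of the block' do
    original = Time.current

    travel_to(2.minutes.from_now) do
      # Inside the block, "now" is stubbed to the target time.
      expect(Time.current).to be > original + 100
    end

    # Once the block returns, the stubbed time is removed automatically.
    expect(Time.current).to be < original + 100
  end
end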
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
index 77645298bc7..2936d6fae4d 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
@@ -43,12 +43,12 @@ RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
{
pipeline: pipeline.status,
stages: pipeline.stages.pluck(:name, :status).to_h,
- jobs: pipeline.statuses.latest.pluck(:name, :status).to_h
+ jobs: pipeline.latest_statuses.pluck(:name, :status).to_h
}
end
def event_on_jobs(event, job_names)
- statuses = pipeline.statuses.latest.by_name(job_names).to_a
+ statuses = pipeline.latest_statuses.by_name(job_names).to_a
expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
statuses.each { |status| status.public_send("#{event}!") }
diff --git a/spec/services/ci/pipelines/create_artifact_service_spec.rb b/spec/services/ci/pipelines/create_artifact_service_spec.rb
index d5e9cf83a6d..6f177889ed3 100644
--- a/spec/services/ci/pipelines/create_artifact_service_spec.rb
+++ b/spec/services/ci/pipelines/create_artifact_service_spec.rb
@@ -35,16 +35,6 @@ RSpec.describe ::Ci::Pipelines::CreateArtifactService do
end
end
- context 'when feature is disabled' do
- it 'does not create a pipeline artifact' do
- stub_feature_flags(coverage_report_view: false)
-
- subject
-
- expect(Ci::PipelineArtifact.count).to eq(0)
- end
- end
-
context 'when pipeline artifact has already been created' do
it 'does not raise an error and does not persist the same artifact twice' do
expect { 2.times { described_class.new.execute(pipeline) } }.not_to raise_error(ActiveRecord::RecordNotUnique)
diff --git a/spec/services/ci/play_bridge_service_spec.rb b/spec/services/ci/play_bridge_service_spec.rb
new file mode 100644
index 00000000000..0482ad4d76f
--- /dev/null
+++ b/spec/services/ci/play_bridge_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PlayBridgeService, '#execute' do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:downstream_project) { create(:project) }
+ let(:bridge) { create(:ci_bridge, :playable, pipeline: pipeline, downstream: downstream_project) }
+ let(:instance) { described_class.new(project, user) }
+
+ subject(:execute_service) { instance.execute(bridge) }
+
+ context 'when user can run the bridge' do
+ before do
+ allow(instance).to receive(:can?).with(user, :play_job, bridge).and_return(true)
+ end
+
+ it 'marks the bridge pending' do
+ execute_service
+
+ expect(bridge.reload).to be_pending
+ end
+
+ it 'enqueues Ci::CreateCrossProjectPipelineWorker' do
+ expect(::Ci::CreateCrossProjectPipelineWorker).to receive(:perform_async).with(bridge.id)
+
+ execute_service
+ end
+
+ it "updates bridge's user" do
+ execute_service
+
+ expect(bridge.reload.user).to eq(user)
+ end
+
+ context 'when bridge is not playable' do
+ let(:bridge) { create(:ci_bridge, :failed, pipeline: pipeline, downstream: downstream_project) }
+
+ it 'raises StateMachines::InvalidTransition' do
+ expect { execute_service }.to raise_error StateMachines::InvalidTransition
+ end
+ end
+ end
+
+ context 'when user can not run the bridge' do
+ before do
+ allow(instance).to receive(:can?).with(user, :play_job, bridge).and_return(false)
+ end
+
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute_service }.to raise_error Gitlab::Access::AccessDeniedError
+ end
+ end
+end
diff --git a/spec/services/ci/play_manual_stage_service_spec.rb b/spec/services/ci/play_manual_stage_service_spec.rb
index e30ec8bfda5..3e2a95ee975 100644
--- a/spec/services/ci/play_manual_stage_service_spec.rb
+++ b/spec/services/ci/play_manual_stage_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Ci::PlayManualStageService, '#execute' do
let(:current_user) { create(:user) }
let(:pipeline) { create(:ci_pipeline, user: current_user) }
let(:project) { pipeline.project }
+ let(:downstream_project) { create(:project) }
let(:service) { described_class.new(project, current_user, pipeline: pipeline) }
let(:stage_status) { 'manual' }
@@ -18,40 +19,42 @@ RSpec.describe Ci::PlayManualStageService, '#execute' do
before do
project.add_maintainer(current_user)
+ downstream_project.add_maintainer(current_user)
create_builds_for_stage(status: stage_status)
+ create_bridge_for_stage(status: stage_status)
end
- context 'when pipeline has manual builds' do
+ context 'when pipeline has manual processables' do
before do
service.execute(stage)
end
- it 'starts manual builds from pipeline' do
- expect(pipeline.builds.manual.count).to eq(0)
+ it 'starts manual processables from pipeline' do
+ expect(pipeline.processables.manual.count).to eq(0)
end
- it 'updates manual builds' do
- pipeline.builds.each do |build|
- expect(build.user).to eq(current_user)
+ it 'updates manual processables' do
+ pipeline.processables.each do |processable|
+ expect(processable.user).to eq(current_user)
end
end
end
- context 'when pipeline has no manual builds' do
+ context 'when pipeline has no manual processables' do
let(:stage_status) { 'failed' }
before do
service.execute(stage)
end
- it 'does not update the builds' do
- expect(pipeline.builds.failed.count).to eq(3)
+ it 'does not update the processables' do
+ expect(pipeline.processables.failed.count).to eq(4)
end
end
- context 'when user does not have permission on a specific build' do
+ context 'when user does not have permission on a specific processable' do
before do
- allow_next_instance_of(Ci::Build) do |instance|
+ allow_next_instance_of(Ci::Processable) do |instance|
allow(instance).to receive(:play).and_raise(Gitlab::Access::AccessDeniedError)
end
@@ -60,12 +63,14 @@ RSpec.describe Ci::PlayManualStageService, '#execute' do
it 'logs the error' do
expect(Gitlab::AppLogger).to receive(:error)
- .exactly(stage.builds.manual.count)
+ .exactly(stage.processables.manual.count)
service.execute(stage)
end
end
+ private
+
def create_builds_for_stage(options)
options.merge!({
when: 'manual',
@@ -77,4 +82,17 @@ RSpec.describe Ci::PlayManualStageService, '#execute' do
create_list(:ci_build, 3, options)
end
+
+ def create_bridge_for_stage(options)
+ options.merge!({
+ when: 'manual',
+ pipeline: pipeline,
+ stage: stage.name,
+ stage_id: stage.id,
+ user: pipeline.user,
+ downstream: downstream_project
+ })
+
+ create(:ci_bridge, options)
+ end
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 51741440075..81d56a0e42a 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -3,25 +3,32 @@
require 'spec_helper'
RSpec.describe Ci::RetryBuildService do
- let_it_be(:user) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:developer) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) do
create(:ci_pipeline, project: project,
sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
end
- let(:stage) do
+ let_it_be(:stage) do
create(:ci_stage_entity, project: project,
pipeline: pipeline,
name: 'test')
end
- let(:build) { create(:ci_build, pipeline: pipeline, stage_id: stage.id) }
+ let_it_be_with_refind(:build) { create(:ci_build, pipeline: pipeline, stage_id: stage.id) }
+ let(:user) { developer }
let(:service) do
described_class.new(project, user)
end
+ before_all do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
clone_accessors = described_class.clone_accessors
reject_accessors =
@@ -39,7 +46,8 @@ RSpec.describe Ci::RetryBuildService do
job_variables waiting_for_resource_at job_artifacts_metrics_referee
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
- job_artifacts_requirements job_artifacts_coverage_fuzzing].freeze
+ job_artifacts_requirements job_artifacts_coverage_fuzzing
+ job_artifacts_api_fuzzing].freeze
ignore_accessors =
%i[type lock_version target_url base_tags trace_sections
@@ -53,9 +61,9 @@ RSpec.describe Ci::RetryBuildService do
pipeline_id report_results pending_state pages_deployments].freeze
shared_examples 'build duplication' do
- let(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
+ let_it_be(:another_pipeline) { create(:ci_empty_pipeline, project: project) }
- let(:build) do
+ let_it_be(:build) do
create(:ci_build, :failed, :expired, :erased, :queued, :coverage, :tags,
:allowed_to_fail, :on_tag, :triggered, :teardown_environment, :resource_group,
description: 'my-job', stage: 'test', stage_id: stage.id,
@@ -63,7 +71,7 @@ RSpec.describe Ci::RetryBuildService do
scheduled_at: 10.seconds.since)
end
- before do
+ before_all do
# Test the correct behaviour of the deprecated artifact because it can still be in use
stub_feature_flags(drop_license_management_artifact: false)
@@ -81,8 +89,6 @@ RSpec.describe Ci::RetryBuildService do
create(:ci_job_variable, job: build)
create(:ci_build_need, build: build)
-
- build.reload
end
describe 'clone accessors' do
@@ -154,7 +160,7 @@ RSpec.describe Ci::RetryBuildService do
describe '#execute' do
let(:new_build) do
- Timecop.freeze(1.second.from_now) do
+ travel_to(1.second.from_now) do
service.execute(build)
end
end
@@ -162,8 +168,6 @@ RSpec.describe Ci::RetryBuildService do
context 'when user has ability to execute build' do
before do
stub_not_protect_default_branch
-
- project.add_developer(user)
end
it_behaves_like 'build duplication'
@@ -235,7 +239,6 @@ RSpec.describe Ci::RetryBuildService do
context 'when the pipeline is a child pipeline and the bridge is depended' do
let!(:parent_pipeline) { create(:ci_pipeline, project: project) }
- let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:bridge) { create(:ci_bridge, :strategy_depend, pipeline: parent_pipeline, status: 'success') }
let!(:source_pipeline) { create(:ci_sources_pipeline, pipeline: pipeline, source_job: bridge) }
@@ -248,6 +251,8 @@ RSpec.describe Ci::RetryBuildService do
end
context 'when user does not have ability to execute build' do
+ let(:user) { reporter }
+
it 'raises an error' do
expect { service.execute(build) }
.to raise_error Gitlab::Access::AccessDeniedError
@@ -257,7 +262,7 @@ RSpec.describe Ci::RetryBuildService do
describe '#reprocess' do
let(:new_build) do
- Timecop.freeze(1.second.from_now) do
+ travel_to(1.second.from_now) do
service.reprocess!(build)
end
end
@@ -265,8 +270,6 @@ RSpec.describe Ci::RetryBuildService do
context 'when user has ability to execute build' do
before do
stub_not_protect_default_branch
-
- project.add_developer(user)
end
it_behaves_like 'build duplication'
@@ -316,6 +319,8 @@ RSpec.describe Ci::RetryBuildService do
end
context 'when user does not have ability to execute build' do
+ let(:user) { reporter }
+
it 'raises an error' do
expect { service.reprocess!(build) }
.to raise_error Gitlab::Access::AccessDeniedError
diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb
index 0f4c0fa5ecb..ebccfdc5140 100644
--- a/spec/services/ci/update_build_queue_service_spec.rb
+++ b/spec/services/ci/update_build_queue_service_spec.rb
@@ -45,21 +45,7 @@ RSpec.describe Ci::UpdateBuildQueueService do
runner.update!(contacted_at: Ci::Runner.recent_queue_deadline)
end
- context 'when ci_update_queues_for_online_runners is enabled' do
- before do
- stub_feature_flags(ci_update_queues_for_online_runners: true)
- end
-
- it_behaves_like 'does not refresh runner'
- end
-
- context 'when ci_update_queues_for_online_runners is disabled' do
- before do
- stub_feature_flags(ci_update_queues_for_online_runners: false)
- end
-
- it_behaves_like 'refreshes runner'
- end
+ it_behaves_like 'does not refresh runner'
end
end
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
index f5ad732bf7e..2545909bf56 100644
--- a/spec/services/ci/update_build_state_service_spec.rb
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -83,9 +83,26 @@ RSpec.describe Ci::UpdateBuildStateService do
{ checksum: 'crc32:12345678', state: 'failed', failure_reason: 'script_failure' }
end
+ context 'when build does not have associated trace chunks' do
+ it 'updates a build status' do
+ result = subject.execute
+
+ expect(build).to be_failed
+ expect(result.status).to eq 200
+ end
+
+ it 'does not increment invalid trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_trace_operation)
+ .with(operation: :invalid)
+ end
+ end
+
context 'when build trace has been migrated' do
before do
- create(:ci_build_trace_chunk, :database_with_data, build: build)
+ create(:ci_build_trace_chunk, :persisted, build: build, initial_data: 'abcd')
end
it 'updates a build state' do
@@ -100,6 +117,12 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(result.status).to eq 200
end
+ it 'does not set a backoff value' do
+ result = subject.execute
+
+ expect(result.backoff).to be_nil
+ end
+
it 'increments trace finalized operation metric' do
execute_with_stubbed_metrics!
@@ -107,6 +130,60 @@ RSpec.describe Ci::UpdateBuildStateService do
.to have_received(:increment_trace_operation)
.with(operation: :finalized)
end
+
+ it 'records migration duration in a histogram' do
+ freeze_time do
+ create(:ci_build_pending_state, build: build, created_at: 0.5.seconds.ago)
+
+ execute_with_stubbed_metrics!
+ end
+
+ expect(metrics)
+ .to have_received(:observe_migration_duration)
+ .with(0.5)
+ end
+
+ context 'when trace checksum is not valid' do
+ it 'increments invalid trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :invalid)
+ end
+ end
+
+ context 'when trace checksum is valid' do
+ let(:params) { { checksum: 'crc32:ed82cd11', state: 'success' } }
+
+ it 'does not increment invalid trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_trace_operation)
+ .with(operation: :invalid)
+ end
+ end
+
+ context 'when the build trace lock cannot be acquired' do
+ it 'accepts a state update request' do
+ build.trace.lock do
+ result = subject.execute
+
+ expect(result.status).to eq 202
+ end
+ end
+
+ it 'increments locked trace metric' do
+ build.trace.lock do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :locked)
+ end
+ end
+ end
end
context 'when build trace has not been migrated yet' do
@@ -126,6 +203,12 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(result.status).to eq 202
end
+ it 'sets a request backoff value' do
+ result = subject.execute
+
+ expect(result.backoff.to_i).to be > 0
+ end
+
it 'schedules live chunks for migration' do
expect(Ci::BuildTraceChunkFlushWorker)
.to receive(:perform_async)
@@ -134,14 +217,6 @@ RSpec.describe Ci::UpdateBuildStateService do
subject.execute
end
- it 'increments trace accepted operation metric' do
- execute_with_stubbed_metrics!
-
- expect(metrics)
- .to have_received(:increment_trace_operation)
- .with(operation: :accepted)
- end
-
it 'creates a pending state record' do
subject.execute
@@ -153,6 +228,22 @@ RSpec.describe Ci::UpdateBuildStateService do
end
end
+ it 'increments trace accepted operation metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :accepted)
+ end
+
+ it 'does not increment invalid trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_trace_operation)
+ .with(operation: :invalid)
+ end
+
context 'when build pending state is outdated' do
before do
build.create_pending_state(
diff --git a/spec/services/clusters/gcp/finalize_creation_service_spec.rb b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
index be362dc6e23..d8c95a70bd0 100644
--- a/spec/services/clusters/gcp/finalize_creation_service_spec.rb
+++ b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
@@ -83,12 +83,7 @@ RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
shared_context 'kubernetes information successfully fetched' do
before do
stub_cloud_platform_get_zone_cluster(
- provider.gcp_project_id, provider.zone, cluster.name,
- {
- endpoint: endpoint,
- username: username,
- password: password
- }
+ provider.gcp_project_id, provider.zone, cluster.name, { endpoint: endpoint, username: username, password: password }
)
stub_kubeclient_discover(api_url)
@@ -101,11 +96,9 @@ RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: secret_name,
- token: Base64.encode64(token),
- namespace: 'default'
- }
+ metadata_name: secret_name,
+ token: Base64.encode64(token),
+ namespace: 'default'
)
stub_kubeclient_put_cluster_role_binding(api_url, 'gitlab-admin')
diff --git a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
index b4402aadc88..f26177a56d0 100644
--- a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
+++ b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
@@ -26,27 +26,21 @@ RSpec.describe Clusters::Kubernetes::ConfigureIstioIngressService, '#execute' do
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: "#{namespace}-token",
- token: Base64.encode64('sample-token'),
- namespace: namespace
- }
+ metadata_name: "#{namespace}-token",
+ token: Base64.encode64('sample-token'),
+ namespace: namespace
)
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: 'istio-ingressgateway-ca-certs',
- namespace: 'istio-system'
- }
+ metadata_name: 'istio-ingressgateway-ca-certs',
+ namespace: 'istio-system'
)
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: 'istio-ingressgateway-certs',
- namespace: 'istio-system'
- }
+ metadata_name: 'istio-ingressgateway-certs',
+ namespace: 'istio-system'
)
stub_kubeclient_put_secret(api_url, 'istio-ingressgateway-ca-certs', namespace: 'istio-system')
diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
index ee10c59390e..7e3f1fdb379 100644
--- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
@@ -41,11 +41,9 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute'
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: "#{namespace}-token",
- token: Base64.encode64('sample-token'),
- namespace: namespace
- }
+ metadata_name: "#{namespace}-token",
+ token: Base64.encode64('sample-token'),
+ namespace: namespace
)
end
diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
index f3fa6c2c0bb..257e2e53733 100644
--- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
@@ -160,26 +160,60 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
it_behaves_like 'creates service account and token'
- it 'creates a namespaced role binding with edit access' do
- subject
+ context 'when the kubernetes_cluster_namespace_role_admin feature flag is enabled' do
+ before do
+ stub_feature_flags(kubernetes_cluster_namespace_role_admin: true)
+ end
+
+ it 'creates a namespaced role binding with admin access' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{role_binding_name}").with(
+ body: hash_including(
+ metadata: { name: "gitlab-#{namespace}", namespace: "#{namespace}" },
+ roleRef: {
+ apiGroup: 'rbac.authorization.k8s.io',
+ kind: 'ClusterRole',
+ name: 'admin'
+ },
+ subjects: [
+ {
+ kind: 'ServiceAccount',
+ name: service_account_name,
+ namespace: namespace
+ }
+ ]
+ )
+ )
+ end
+ end
- expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{role_binding_name}").with(
- body: hash_including(
- metadata: { name: "gitlab-#{namespace}", namespace: "#{namespace}" },
- roleRef: {
- apiGroup: 'rbac.authorization.k8s.io',
- kind: 'ClusterRole',
- name: 'edit'
- },
- subjects: [
- {
- kind: 'ServiceAccount',
- name: service_account_name,
- namespace: namespace
- }
- ]
+ context 'when the kubernetes_cluster_namespace_role_admin feature flag is disabled' do
+ before do
+ stub_feature_flags(kubernetes_cluster_namespace_role_admin: false)
+ end
+
+ it 'creates a namespaced role binding with edit access' do
+ subject
+
+ expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{role_binding_name}").with(
+ body: hash_including(
+ metadata: { name: "gitlab-#{namespace}", namespace: "#{namespace}" },
+ roleRef: {
+ apiGroup: 'rbac.authorization.k8s.io',
+ kind: 'ClusterRole',
+ name: 'edit'
+ },
+ subjects: [
+ {
+ kind: 'ServiceAccount',
+ name: service_account_name,
+ namespace: namespace
+ }
+ ]
+ )
)
- )
+ end
end
it 'creates a role binding granting crossplane database permissions to the service account' do
diff --git a/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb b/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb
index c4daae9dbf0..03c402fb066 100644
--- a/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb
+++ b/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb
@@ -31,11 +31,9 @@ RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService do
before do
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: service_account_token_name,
- namespace: namespace,
- token: token
- }
+ metadata_name: service_account_token_name,
+ namespace: namespace,
+ token: token
)
end
@@ -54,11 +52,9 @@ RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService do
before do
stub_kubeclient_get_secret_not_found_then_found(
api_url,
- {
- metadata_name: service_account_token_name,
- namespace: namespace,
- token: token
- }
+ metadata_name: service_account_token_name,
+ namespace: namespace,
+ token: token
)
end
@@ -79,11 +75,9 @@ RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService do
before do
stub_kubeclient_get_secret_missing_token_then_with_token(
api_url,
- {
- metadata_name: service_account_token_name,
- namespace: namespace,
- token: token
- }
+ metadata_name: service_account_token_name,
+ namespace: namespace,
+ token: token
)
end
@@ -96,11 +90,9 @@ RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService do
before do
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: service_account_token_name,
- namespace: namespace,
- token: nil
- }
+ metadata_name: service_account_token_name,
+ namespace: namespace,
+ token: nil
)
end
diff --git a/spec/services/deployments/create_service_spec.rb b/spec/services/deployments/create_service_spec.rb
index d1f977c28d3..2d157c9d114 100644
--- a/spec/services/deployments/create_service_spec.rb
+++ b/spec/services/deployments/create_service_spec.rb
@@ -19,8 +19,9 @@ RSpec.describe Deployments::CreateService do
status: 'success'
)
- expect(Deployments::SuccessWorker).to receive(:perform_async)
- expect(Deployments::FinishedWorker).to receive(:perform_async)
+ expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
+ expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
+ expect(Deployments::ExecuteHooksWorker).to receive(:perform_async)
expect(service.execute).to be_persisted
end
@@ -34,8 +35,9 @@ RSpec.describe Deployments::CreateService do
tag: false
)
- expect(Deployments::SuccessWorker).not_to receive(:perform_async)
- expect(Deployments::FinishedWorker).not_to receive(:perform_async)
+ expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
+ expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
+ expect(Deployments::ExecuteHooksWorker).not_to receive(:perform_async)
expect(service.execute).to be_persisted
end
diff --git a/spec/services/deployments/after_create_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 6cdb4c88191..92488c62315 100644
--- a/spec/services/deployments/after_create_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Deployments::AfterCreateService do
+RSpec.describe Deployments::UpdateEnvironmentService do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:options) { { name: 'production' } }
@@ -31,7 +31,8 @@ RSpec.describe Deployments::AfterCreateService do
subject(:service) { described_class.new(deployment) }
before do
- allow(Deployments::FinishedWorker).to receive(:perform_async)
+ allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
+ allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
job.success! # Create/Succeed deployment
end
@@ -100,8 +101,8 @@ RSpec.describe Deployments::AfterCreateService do
end
before do
- environment.update(name: 'review-apps/master')
- job.update(environment: 'review-apps/$CI_COMMIT_REF_NAME')
+ environment.update!(name: 'review-apps/master')
+ job.update!(environment: 'review-apps/$CI_COMMIT_REF_NAME')
end
it 'does not create a new environment' do
@@ -241,7 +242,7 @@ RSpec.describe Deployments::AfterCreateService do
end
it 'does not raise errors if the merge request does not have a metrics record' do
- merge_request.metrics.destroy
+ merge_request.metrics.destroy!
expect(merge_request.reload.metrics).to be_nil
expect { service.execute }.not_to raise_error
@@ -257,7 +258,7 @@ RSpec.describe Deployments::AfterCreateService do
expect(merge_request.reload.metrics.first_deployed_to_production_at).to be_like_time(deployment.finished_at)
# Current deploy
- Timecop.travel(12.hours.from_now) do
+ travel_to(12.hours.from_now) do
service.execute
expect(merge_request.reload.metrics.first_deployed_to_production_at).to be_like_time(deployment.finished_at)
@@ -269,14 +270,14 @@ RSpec.describe Deployments::AfterCreateService do
it "does not overwrite the older 'first_deployed_to_production_at' time" do
# Previous deploy
time = 5.minutes.from_now
- Timecop.freeze(time) { service.execute }
+ travel_to(time) { service.execute }
expect(merge_request.reload.metrics.merged_at).to be < merge_request.reload.metrics.first_deployed_to_production_at
previous_time = merge_request.reload.metrics.first_deployed_to_production_at
# Current deploy
- Timecop.freeze(time + 12.hours) { service.execute }
+ travel_to(time + 12.hours) { service.execute }
expect(merge_request.reload.metrics.first_deployed_to_production_at).to eq(previous_time)
end
diff --git a/spec/services/design_management/copy_design_collection/copy_service_spec.rb b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
new file mode 100644
index 00000000000..e93e5f13fea
--- /dev/null
+++ b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
@@ -0,0 +1,259 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitlab_redis_shared_state do
+ include DesignManagementTestHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue, refind: true) { create(:issue, project: project) }
+ let(:target_issue) { create(:issue) }
+
+ subject { described_class.new(project, user, issue: issue, target_issue: target_issue).execute }
+
+ before do
+ enable_design_management
+ end
+
+ shared_examples 'service error' do |message:|
+ it 'returns an error response', :aggregate_failures do
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to eq(message)
+ end
+ end
+
+ shared_examples 'service success' do
+ it 'returns a success response', :aggregate_failures do
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_success
+ end
+ end
+
+ include_examples 'service error', message: 'User cannot copy design collection to issue'
+
+ context 'when user has permission to read the design collection' do
+ before_all do
+ project.add_reporter(user)
+ end
+
+ include_examples 'service error', message: 'User cannot copy design collection to issue'
+
+ context 'when the user also has permission to admin the target issue' do
+ let(:target_repository) { target_issue.project.design_repository }
+
+ before do
+ target_issue.project.add_reporter(user)
+ end
+
+ include_examples 'service error', message: 'Target design collection must first be queued'
+
+ context 'when the target design collection has been queued' do
+ before do
+ target_issue.design_collection.start_copy!
+ end
+
+ include_examples 'service error', message: 'Design collection has no designs'
+
+ context 'when design collection has designs' do
+ let_it_be(:designs) do
+ create_list(:design, 3, :with_lfs_file, :with_relative_position, issue: issue, project: project)
+ end
+
+ context 'when target issue already has designs' do
+ before do
+ create(:design, issue: target_issue, project: target_issue.project)
+ end
+
+ include_examples 'service error', message: 'Target design collection already has designs'
+ end
+
+ include_examples 'service success'
+
+ it 'creates a design repository for the target project' do
+ expect { subject }.to change { target_repository.exists? }.from(false).to(true)
+ end
+
+ context 'when the target project already has a design repository' do
+ before do
+ target_repository.create_if_not_exists
+ end
+
+ include_examples 'service success'
+ end
+
+ it 'copies the designs correctly', :aggregate_failures do
+ expect { subject }.to change { target_issue.designs.count }.by(3)
+
+ old_designs = issue.designs.ordered
+ new_designs = target_issue.designs.ordered
+
+ new_designs.zip(old_designs).each do |new_design, old_design|
+ expect(new_design).to have_attributes(
+ filename: old_design.filename,
+ relative_position: old_design.relative_position,
+ issue: target_issue,
+ project: target_issue.project
+ )
+ end
+ end
+
+ it 'copies the design versions correctly', :aggregate_failures do
+ expect { subject }.to change { target_issue.design_versions.count }.by(3)
+
+ old_versions = issue.design_versions.ordered
+ new_versions = target_issue.design_versions.ordered
+
+ new_versions.zip(old_versions).each do |new_version, old_version|
+ expect(new_version).to have_attributes(
+ created_at: old_version.created_at,
+ author_id: old_version.author_id
+ )
+ expect(new_version.designs.pluck(:filename)).to eq(old_version.designs.pluck(:filename))
+ expect(new_version.actions.pluck(:event)).to eq(old_version.actions.pluck(:event))
+ end
+ end
+
+ it 'copies the design actions correctly', :aggregate_failures do
+ expect { subject }.to change { DesignManagement::Action.count }.by(3)
+
+ old_actions = issue.design_versions.ordered.flat_map(&:actions)
+ new_actions = target_issue.design_versions.ordered.flat_map(&:actions)
+
+ new_actions.zip(old_actions).each do |new_action, old_action|
+ # One way to verify that the versions linked to the actions are
+ # correct is to compare design filenames, as the SHAs change.
+ new_design_filenames = new_action.version.designs.ordered.pluck(:filename)
+ old_design_filenames = old_action.version.designs.ordered.pluck(:filename)
+
+ expect(new_design_filenames).to eq(old_design_filenames)
+ expect(new_action.event).to eq(old_action.event)
+ expect(new_action.design.filename).to eq(old_action.design.filename)
+ end
+ end
+
+ it 'copies design notes correctly', :aggregate_failures, :sidekiq_inline do
+ old_notes = [
+ create(:diff_note_on_design, note: 'first note', noteable: designs.first, project: project, author: create(:user)),
+ create(:diff_note_on_design, note: 'second note', noteable: designs.first, project: project, author: create(:user))
+ ]
+ matchers = old_notes.map do |note|
+ have_attributes(
+ note.attributes.slice(
+ :type,
+ :author_id,
+ :note,
+ :position
+ )
+ )
+ end
+
+ expect { subject }.to change { Note.count }.by(2)
+
+ new_notes = target_issue.designs.first.notes.fresh
+
+ expect(new_notes).to match_array(matchers)
+ end
+
+ it 'links the LfsObjects' do
+ expect { subject }.to change { target_issue.project.lfs_objects.count }.by(3)
+ end
+
+ it 'copies the Git repository data', :aggregate_failures do
+ subject
+
+ commit_shas = target_repository.commits('master', limit: 99).map(&:id)
+
+ expect(commit_shas).to include(*target_issue.design_versions.ordered.pluck(:sha))
+ end
+
+ it 'creates a master branch if none previously existed' do
+ expect { subject }.to change { target_repository.branch_names }.from([]).to(['master'])
+ end
+
+ it 'leaves the design collection in the correct copy state' do
+ subject
+
+ expect(target_issue.design_collection).to be_copy_ready
+ end
+
+ describe 'rollback' do
+ before do
+ # Ensure the very last step throws an error
+ expect_next_instance_of(described_class) do |service|
+ expect(service).to receive(:finalize!).and_raise
+ end
+ end
+
+ include_examples 'service error', message: 'Designs were unable to be copied successfully'
+
+ it 'rolls back all created PostgreSQL data', :aggregate_failures do
+ expect { subject }.not_to change {
+ [
+ DesignManagement::Design.count,
+ DesignManagement::Action.count,
+ DesignManagement::Version.count,
+ Note.count
+ ]
+ }
+
+ collections = [
+ target_issue.design_collection,
+ target_issue.designs,
+ target_issue.design_versions
+ ]
+
+ expect(collections).to all(be_empty)
+ end
+
+ it 'does not alter master branch', :aggregate_failures do
+ # Add some Git data to the target_repository, so we are testing
+ # that any original data remains
+ issue_2 = create(:issue, project: target_issue.project)
+ create(:design, :with_file, issue: issue_2, project: target_issue.project)
+
+ expect { subject }.not_to change {
+ target_repository.commits('master', limit: 10).size
+ }.from(1)
+ end
+
+ it 'sets the design collection copy state' do
+ subject
+
+ expect(target_issue.design_collection).to be_copy_error
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe 'Alert if schema changes', :aggregate_failures do
+ let_it_be(:config_file) { Rails.root.join('lib/gitlab/design_management/copy_design_collection_model_attributes.yml') }
+ let_it_be(:config) { YAML.load_file(config_file).symbolize_keys }
+
+ %w(Design Action Version).each do |model|
+ specify do
+ attributes = config["#{model.downcase}_attributes".to_sym] || []
+ ignored_attributes = config["ignore_#{model.downcase}_attributes".to_sym]
+
+ expect(attributes + ignored_attributes).to contain_exactly(
+ *DesignManagement.const_get(model, false).column_names
+ ), failure_message(model)
+ end
+ end
+
+ def failure_message(model)
+ <<-MSG
+ The schema of the `#{model}` model has changed.
+
+ `#{described_class.name}` refers to specific lists of attributes of `#{model}` to either
+ copy or ignore, so that we continue to copy designs correctly after schema changes.
+
+ Please update:
+ #{config_file}
+ to reflect the latest changes to `#{model}`. See that file for more information.
+ MSG
+ end
+ end
+end
diff --git a/spec/services/design_management/copy_design_collection/queue_service_spec.rb b/spec/services/design_management/copy_design_collection/queue_service_spec.rb
new file mode 100644
index 00000000000..2d9ea4633a0
--- /dev/null
+++ b/spec/services/design_management/copy_design_collection/queue_service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe DesignManagement::CopyDesignCollection::QueueService, :clean_gitlab_redis_shared_state do
+ include DesignManagementTestHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:target_issue, refind: true) { create(:issue) }
+ let_it_be(:design) { create(:design, issue: issue, project: issue.project) }
+
+ subject { described_class.new(user, issue, target_issue).execute }
+
+ before do
+ enable_design_management
+ end
+
+ it 'returns an error if user does not have permission' do
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to eq('User cannot copy designs to issue')
+ end
+
+ context 'when user has permission' do
+ before_all do
+ issue.project.add_reporter(user)
+ target_issue.project.add_reporter(user)
+ end
+
+ it 'returns an error if design collection copy_state is not queuable' do
+ target_issue.design_collection.start_copy!
+
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to eq('Target design collection copy state must be `ready`')
+ end
+
+ it 'sets the design collection copy state' do
+ expect { subject }.to change { target_issue.design_collection.copy_state }.from('ready').to('in_progress')
+ end
+
+ it 'queues a DesignManagement::CopyDesignCollectionWorker' do
+ expect { subject }.to change(DesignManagement::CopyDesignCollectionWorker.jobs, :size).by(1)
+ end
+
+ it 'returns success' do
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_success
+ end
+ end
+end
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index ace63b6e59c..ed161b4c8ff 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -80,6 +80,16 @@ RSpec.describe DesignManagement::DeleteDesignsService do
expect { run_service rescue nil }
.not_to change { [counter.totals, Event.count] }
end
+
+ it 'does not log any UsageData metrics' do
+ redis_hll = ::Gitlab::UsageDataCounters::HLLRedisCounter
+ event = Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED
+
+ expect { run_service rescue nil }
+ .not_to change { redis_hll.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }
+
+ run_service rescue nil
+ end
end
context 'one design is passed' do
@@ -98,6 +108,12 @@ RSpec.describe DesignManagement::DeleteDesignsService do
expect { run_service }.to change { counter.read(:delete) }.by(1)
end
+ it 'updates UsageData for removed designs' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_removed_action).with(author: user)
+
+ run_service
+ end
+
it 'creates an event in the activity stream' do
expect { run_service }
.to change { Event.count }.by(1)
@@ -105,7 +121,7 @@ RSpec.describe DesignManagement::DeleteDesignsService do
end
it 'informs the new-version-worker' do
- expect(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer)
+ expect(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer, false)
run_service
end
diff --git a/spec/services/design_management/generate_image_versions_service_spec.rb b/spec/services/design_management/generate_image_versions_service_spec.rb
index 631eec97e5a..749030af97d 100644
--- a/spec/services/design_management/generate_image_versions_service_spec.rb
+++ b/spec/services/design_management/generate_image_versions_service_spec.rb
@@ -52,25 +52,50 @@ RSpec.describe DesignManagement::GenerateImageVersionsService do
end
context 'when an error is encountered when generating the image versions' do
- before do
- expect_next_instance_of(DesignManagement::DesignV432x230Uploader) do |uploader|
- expect(uploader).to receive(:cache!).and_raise(CarrierWave::DownloadError, 'foo')
+ context "CarrierWave::IntegrityError" do
+ before do
+ expect_next_instance_of(DesignManagement::DesignV432x230Uploader) do |uploader|
+ expect(uploader).to receive(:cache!).and_raise(CarrierWave::IntegrityError, 'foo')
+ end
+ end
+
+ it 'logs the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(CarrierWave::IntegrityError),
+ project_id: project.id, version_id: version.id, design_id: version.designs.first.id
+ )
+
+ described_class.new(version).execute
end
- end
- it 'logs the error' do
- expect(Gitlab::AppLogger).to receive(:error).with('foo')
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error).with('foo')
- described_class.new(version).execute
+ described_class.new(version).execute
+ end
end
- it 'tracks the error' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- instance_of(CarrierWave::DownloadError),
- project_id: project.id, version_id: version.id, design_id: version.designs.first.id
- )
+ context "CarrierWave::UploadError" do
+ before do
+ expect_next_instance_of(DesignManagement::DesignV432x230Uploader) do |uploader|
+ expect(uploader).to receive(:cache!).and_raise(CarrierWave::UploadError, 'foo')
+ end
+ end
- described_class.new(version).execute
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error).with('foo')
+
+ described_class.new(version).execute
+ end
+
+ it 'tracks the error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(CarrierWave::UploadError),
+ project_id: project.id, version_id: version.id, design_id: version.designs.first.id
+ )
+
+ described_class.new(version).execute
+ end
end
end
end
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index abba5de2c27..f36e68c8dbd 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
allow(::DesignManagement::NewVersionWorker)
- .to receive(:perform_async).with(Integer).and_return(nil)
+ .to receive(:perform_async).with(Integer, false).and_return(nil)
end
def run_service(files_to_upload = nil)
@@ -102,9 +102,11 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
end
- it 'creates a commit, an event in the activity stream and updates the creation count' do
+ it 'creates a commit, an event in the activity stream and updates the creation count', :aggregate_failures do
counter = Gitlab::UsageDataCounters::DesignsCounter
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_added_action).with(author: user)
+
expect { run_service }
.to change { Event.count }.by(1)
.and change { Event.for_design.created_action.count }.by(1)
@@ -128,6 +130,25 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect { run_parallel(blocks) }.to change(DesignManagement::Version, :count).by(parellism)
end
+ context 'when the design collection is in the process of being copied', :clean_gitlab_redis_shared_state do
+ before do
+ issue.design_collection.start_copy!
+ end
+
+ it_behaves_like 'a service error'
+ end
+
+ context 'when the design collection has a copy error', :clean_gitlab_redis_shared_state do
+ before do
+ issue.design_collection.copy_state = 'error'
+ issue.design_collection.send(:set_stored_copy_state!)
+ end
+
+ it 'resets the copy state' do
+ expect { run_service }.to change { issue.design_collection.copy_state }.from('error').to('ready')
+ end
+ end
+
describe 'the response' do
it 'includes designs with the expected properties' do
updated_designs = response[:designs]
@@ -171,6 +192,12 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect(updated_designs.first.versions.size).to eq(2)
end
+ it 'updates UsageData for changed designs' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_modified_action).with(author: user)
+
+ run_service
+ end
+
it 'records the correct events' do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect { run_service }
@@ -220,7 +247,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect(::DesignManagement::NewVersionWorker)
- .to receive(:perform_async).once.with(Integer).and_return(nil)
+ .to receive(:perform_async).once.with(Integer, false).and_return(nil)
expect { run_service }
.to change { Event.count }.by(2)
@@ -254,7 +281,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
design_repository.has_visible_content?
expect(::DesignManagement::NewVersionWorker)
- .to receive(:perform_async).once.with(Integer).and_return(nil)
+ .to receive(:perform_async).once.with(Integer, false).and_return(nil)
expect { service.execute }
.to change { issue.designs.count }.from(0).to(2)
@@ -271,6 +298,14 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect(response[:message]).to match(/only \d+ files are allowed simultaneously/i)
end
end
+
+ context 'when uploading duplicate files' do
+ let(:files) { [rails_sample, dk_png, rails_sample] }
+
+ it 'returns the correct error' do
+ expect(response[:message]).to match('Duplicate filenames are not allowed!')
+ end
+ end
end
context 'when the user is not allowed to upload designs' do
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
new file mode 100644
index 00000000000..2cd19000f99
--- /dev/null
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::CreateService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let(:user) { developer }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#execute' do
+ subject do
+ described_class.new(project, user, params).execute
+ end
+
+ let(:feature_flag) { subject[:feature_flag] }
+
+ context 'when feature flag can not be created' do
+ let(:params) { {} }
+
+ it 'returns status error' do
+ expect(subject[:status]).to eq(:error)
+ end
+
+ it 'returns validation errors' do
+ expect(subject[:message]).to include("Name can't be blank")
+ end
+
+ it 'does not create audit log' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+
+ context 'when feature flag is saved correctly' do
+ let(:params) do
+ {
+ name: 'feature_flag',
+ description: 'description',
+ scopes_attributes: [{ environment_scope: '*', active: true },
+ { environment_scope: 'production', active: false }]
+ }
+ end
+
+ it 'returns status success' do
+ expect(subject[:status]).to eq(:success)
+ end
+
+ it 'creates feature flag' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(1)
+ end
+
+ it 'creates audit event' do
+ expected_message = 'Created feature flag <strong>feature_flag</strong> '\
+ 'with description <strong>"description"</strong>. '\
+ 'Created rule <strong>*</strong> and set it as <strong>active</strong> '\
+ 'with strategies <strong>[{"name"=>"default", "parameters"=>{}}]</strong>. '\
+ 'Created rule <strong>production</strong> and set it as <strong>inactive</strong> '\
+ 'with strategies <strong>[{"name"=>"default", "parameters"=>{}}]</strong>.'
+
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(AuditEvent.last.details[:custom_message]).to eq(expected_message)
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Access Denied')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/destroy_service_spec.rb b/spec/services/feature_flags/destroy_service_spec.rb
new file mode 100644
index 00000000000..b35de02c628
--- /dev/null
+++ b/spec/services/feature_flags/destroy_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::DestroyService do
+ include FeatureFlagHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let(:user) { developer }
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#execute' do
+ subject { described_class.new(project, user, params).execute(feature_flag) }
+
+ let(:audit_event_message) { AuditEvent.last.details[:custom_message] }
+ let(:params) { {} }
+
+ it 'returns status success' do
+ expect(subject[:status]).to eq(:success)
+ end
+
+ it 'destroys feature flag' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(-1)
+ end
+
+ it 'creates audit log' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to eq("Deleted feature flag <strong>#{feature_flag.name}</strong>.")
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Access Denied')
+ end
+ end
+
+ context 'when feature flag can not be destroyed' do
+ before do
+ allow(feature_flag).to receive(:destroy).and_return(false)
+ end
+
+ it 'returns status error' do
+ expect(subject[:status]).to eq(:error)
+ end
+
+ it 'does not create audit log' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/disable_service_spec.rb b/spec/services/feature_flags/disable_service_spec.rb
new file mode 100644
index 00000000000..de0f70bf552
--- /dev/null
+++ b/spec/services/feature_flags/disable_service_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::DisableService do
+ include FeatureFlagHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let(:params) { {} }
+ let(:service) { described_class.new(project, user, params) }
+
+ before_all do
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'with params to disable default strategy on prd scope' do
+ let(:params) do
+ {
+ name: 'awesome',
+ environment_scope: 'prd',
+ strategy: { name: 'userWithId', parameters: { 'userIds': 'User:1' } }.deep_stringify_keys
+ }
+ end
+
+ context 'when there is a persisted feature flag' do
+ let!(:feature_flag) { create_flag(project, params[:name]) }
+
+ context 'when there is a persisted scope' do
+ let!(:scope) do
+ create_scope(feature_flag, params[:environment_scope], true, strategies)
+ end
+
+ context 'when there is a persisted strategy' do
+ let(:strategies) do
+ [
+ { name: 'userWithId', parameters: { 'userIds': 'User:1' } }.deep_stringify_keys,
+ { name: 'userWithId', parameters: { 'userIds': 'User:2' } }.deep_stringify_keys
+ ]
+ end
+
+ it 'deletes the specified strategy' do
+ subject
+
+ scope.reload
+ expect(scope.strategies.count).to eq(1)
+ expect(scope.strategies).not_to include(params[:strategy])
+ end
+
+ context 'when strategies will be empty' do
+ let(:strategies) { [params[:strategy]] }
+
+ it 'deletes the persisted scope' do
+ subject
+
+ expect(feature_flag.scopes.exists?(environment_scope: params[:environment_scope]))
+ .to eq(false)
+ end
+ end
+ end
+
+ context 'when there is no persisted strategy' do
+ let(:strategies) { [{ name: 'default', parameters: {} }] }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include('Strategy not found')
+ end
+ end
+ end
+
+ context 'when there is no persisted scope' do
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include('Feature Flag Scope not found')
+ end
+ end
+ end
+
+ context 'when there is no persisted feature flag' do
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include('Feature Flag not found')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/enable_service_spec.rb b/spec/services/feature_flags/enable_service_spec.rb
new file mode 100644
index 00000000000..88c8028f6c5
--- /dev/null
+++ b/spec/services/feature_flags/enable_service_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::EnableService do
+ include FeatureFlagHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let(:params) { {} }
+ let(:service) { described_class.new(project, user, params) }
+
+ before_all do
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'with params to enable default strategy on prd scope' do
+ let(:params) do
+ {
+ name: 'awesome',
+ environment_scope: 'prd',
+ strategy: { name: 'default', parameters: {} }.stringify_keys
+ }
+ end
+
+ context 'when there is no persisted feature flag' do
+ it 'creates a new feature flag with scope' do
+ feature_flag = subject[:feature_flag]
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(subject[:status]).to eq(:success)
+ expect(feature_flag.name).to eq(params[:name])
+ expect(feature_flag.default_scope).not_to be_active
+ expect(scope).to be_active
+ expect(scope.strategies).to include(params[:strategy])
+ end
+
+ context 'when params include default scope' do
+ let(:params) do
+ {
+ name: 'awesome',
+ environment_scope: '*',
+ strategy: { name: 'userWithId', parameters: { 'userIds': 'abc' } }.deep_stringify_keys
+ }
+ end
+
+ it 'creates a new feature flag with an active default scope with the specified strategy' do
+ feature_flag = subject[:feature_flag]
+ expect(subject[:status]).to eq(:success)
+ expect(feature_flag.default_scope).to be_active
+ expect(feature_flag.default_scope.strategies).to include(params[:strategy])
+ end
+ end
+ end
+
+ context 'when there is a persisted feature flag' do
+ let!(:feature_flag) { create_flag(project, params[:name]) }
+
+ context 'when there is no persisted scope' do
+ it 'creates a new scope for the persisted feature flag' do
+ feature_flag = subject[:feature_flag]
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(subject[:status]).to eq(:success)
+ expect(feature_flag.name).to eq(params[:name])
+ expect(scope).to be_active
+ expect(scope.strategies).to include(params[:strategy])
+ end
+ end
+
+ context 'when there is a persisted scope' do
+ let!(:feature_flag_scope) do
+ create_scope(feature_flag, params[:environment_scope], active, strategies)
+ end
+
+ let(:active) { true }
+
+ context 'when the persisted scope does not have the specified strategy yet' do
+ let(:strategies) { [{ name: 'userWithId', parameters: { 'userIds': 'abc' } }] }
+
+ it 'adds the specified strategy to the scope' do
+ subject
+
+ feature_flag_scope.reload
+ expect(feature_flag_scope.strategies).to include(params[:strategy])
+ end
+
+ context 'when the persisted scope is inactive' do
+ let(:active) { false }
+
+ it 'reactivates the scope' do
+ expect { subject }
+ .to change { feature_flag_scope.reload.active }.from(false).to(true)
+ end
+ end
+ end
+
+ context 'when the persisted scope has the specified strategy already' do
+ let(:strategies) { [params[:strategy]] }
+
+ it 'does not add a duplicated strategy to the scope' do
+ expect { subject }
+ .not_to change { feature_flag_scope.reload.strategies.count }
+ end
+ end
+ end
+ end
+ end
+
+ context 'when strategy is not specified in params' do
+ let(:params) do
+ {
+ name: 'awesome',
+ environment_scope: 'prd'
+ }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include('Scopes strategies must be an array of strategy hashes')
+ end
+ end
+
+ context 'when environment scope is not specified in params' do
+ let(:params) do
+ {
+ name: 'awesome',
+ strategy: { name: 'default', parameters: {} }.stringify_keys
+ }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include("Scopes environment scope can't be blank")
+ end
+ end
+
+ context 'when name is not specified in params' do
+ let(:params) do
+ {
+ environment_scope: 'prd',
+ strategy: { name: 'default', parameters: {} }.stringify_keys
+ }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include("Name can't be blank")
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
new file mode 100644
index 00000000000..a982dd5166b
--- /dev/null
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -0,0 +1,250 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::UpdateService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let(:user) { developer }
+ let(:feature_flag) { create(:operations_feature_flag, project: project, active: true) }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#execute' do
+ subject { described_class.new(project, user, params).execute(feature_flag) }
+
+ let(:params) { { name: 'new_name' } }
+ let(:audit_event_message) do
+ AuditEvent.last.details[:custom_message]
+ end
+
+ it 'returns success status' do
+ expect(subject[:status]).to eq(:success)
+ end
+
+ it 'creates audit event with correct message' do
+ name_was = feature_flag.name
+
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ eq("Updated feature flag <strong>new_name</strong>. "\
+ "Updated name from <strong>\"#{name_was}\"</strong> "\
+ "to <strong>\"new_name\"</strong>.")
+ )
+ end
+
+ context 'with invalid params' do
+ let(:params) { { name: nil } }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+
+ it 'returns error messages' do
+ expect(subject[:message]).to include("Name can't be blank")
+ end
+
+ it 'does not create audit event' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Access Denied')
+ end
+ end
+
+ context 'when nothing is changed' do
+ let(:params) { {} }
+
+ it 'returns success status' do
+ expect(subject[:status]).to eq(:success)
+ end
+
+ it 'does not create audit event' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+
+ context 'description is being changed' do
+ let(:params) { { description: 'new description' } }
+
+ it 'creates audit event with changed description' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ include("Updated description from <strong>\"\"</strong>"\
+ " to <strong>\"new description\"</strong>.")
+ )
+ end
+ end
+
+ context 'when flag active state is changed' do
+ let(:params) do
+ {
+ active: false
+ }
+ end
+
+ it 'creates audit event about changing active state' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ include('Updated active from <strong>"true"</strong> to <strong>"false"</strong>.')
+ )
+ end
+ end
+
+ context 'when scope active state is changed' do
+ let(:params) do
+ {
+ scopes_attributes: [{ id: feature_flag.scopes.first.id, active: false }]
+ }
+ end
+
+ it 'creates audit event about changing active state' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ include("Updated rule <strong>*</strong> active state "\
+ "from <strong>true</strong> to <strong>false</strong>.")
+ )
+ end
+ end
+
+ context 'when scope is renamed' do
+ let(:changed_scope) { feature_flag.scopes.create!(environment_scope: 'review', active: true) }
+ let(:params) do
+ {
+ scopes_attributes: [{ id: changed_scope.id, environment_scope: 'staging' }]
+ }
+ end
+
+ it 'creates audit event with changed name' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ include("Updated rule <strong>staging</strong> environment scope "\
+ "from <strong>review</strong> to <strong>staging</strong>.")
+ )
+ end
+
+ context 'when scope can not be updated' do
+ let(:params) do
+ {
+ scopes_attributes: [{ id: changed_scope.id, environment_scope: '' }]
+ }
+ end
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ end
+
+ it 'returns error messages' do
+ expect(subject[:message]).to include("Scopes environment scope can't be blank")
+ end
+
+ it 'does not create audit event' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+ end
+
+ context 'when scope is deleted' do
+ let(:deleted_scope) { feature_flag.scopes.create!(environment_scope: 'review', active: true) }
+ let(:params) do
+ {
+ scopes_attributes: [{ id: deleted_scope.id, '_destroy': true }]
+ }
+ end
+
+ it 'creates audit event with deleted scope' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to include("Deleted rule <strong>review</strong>.")
+ end
+
+ context 'when scope can not be deleted' do
+ before do
+ allow(deleted_scope).to receive(:destroy).and_return(false)
+ end
+
+ it 'does not create audit event' do
+ expect do
+ subject
+ end.to not_change { AuditEvent.count }.and raise_error(ActiveRecord::RecordNotDestroyed)
+ end
+ end
+ end
+
+ context 'when new scope is being added' do
+ let(:new_environment_scope) { 'review' }
+ let(:params) do
+ {
+ scopes_attributes: [{ environment_scope: new_environment_scope, active: true }]
+ }
+ end
+
+ it 'creates audit event with new scope' do
+ expected = 'Created rule <strong>review</strong> and set it as <strong>active</strong> '\
+ 'with strategies <strong>[{"name"=>"default", "parameters"=>{}}]</strong>.'
+
+ subject
+
+ expect(audit_event_message).to include(expected)
+ end
+
+ context 'when scope can not be created' do
+ let(:new_environment_scope) { '' }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ end
+
+ it 'returns error messages' do
+ expect(subject[:message]).to include("Scopes environment scope can't be blank")
+ end
+
+ it 'does not create audit event' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+ end
+
+ context 'when the strategy is changed' do
+ let(:scope) do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag,
+ environment_scope: 'sandbox',
+ strategies: [{ name: "default", parameters: {} }])
+ end
+
+ let(:params) do
+ {
+ scopes_attributes: [{
+ id: scope.id,
+ environment_scope: 'sandbox',
+ strategies: [{
+ name: 'gradualRolloutUserId',
+ parameters: {
+ groupId: 'mygroup',
+ percentage: "40"
+ }
+ }]
+ }]
+ }
+ end
+
+ it 'creates an audit event' do
+ expected = %r{Updated rule <strong>sandbox</strong> strategies from <strong>.*</strong> to <strong>.*</strong>.}
+
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to match(expected)
+ end
+ end
+ end
+end
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index db25bb766c9..a5290f0be68 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -429,4 +429,26 @@ RSpec.describe Git::BranchHooksService do
end
end
end
+
+ describe 'Metrics dashboard sync' do
+ context 'with feature flag enabled' do
+ before do
+ Feature.enable(:metrics_dashboards_sync)
+ end
+
+ it 'imports metrics to database' do
+ expect(Metrics::Dashboard::SyncDashboardsWorker).to receive(:perform_async)
+
+ service.execute
+ end
+ end
+
+ context 'with feature flag disabled' do
+ it 'imports metrics to database' do
+ expect(Metrics::Dashboard::SyncDashboardsWorker).to receive(:perform_async)
+
+ service.execute
+ end
+ end
+ end
end
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index 816f20f0bc3..cd38f2e97fb 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -254,24 +254,6 @@ RSpec.describe Git::WikiPushService, services: true do
service.execute
end
end
-
- context 'the wiki_events_on_git_push feature is disabled' do
- before do
- stub_feature_flags(wiki_events_on_git_push: false)
- end
-
- it_behaves_like 'a no-op push'
-
- context 'but is enabled for a given container' do
- before do
- stub_feature_flags(wiki_events_on_git_push: wiki.container)
- end
-
- it 'creates events' do
- expect { process_changes { write_new_page } }.to change(Event, :count).by(1)
- end
- end
- end
end
end
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index fc877f45a39..4f5bc3a3d5a 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -45,6 +45,15 @@ RSpec.describe Groups::CreateService, '#execute' do
end
end
+ context 'creating a group with `allow_mfa_for_subgroups` attribute' do
+ let(:params) { group_params.merge(allow_mfa_for_subgroups: false) }
+ let(:service) { described_class.new(user, params) }
+
+ it 'creates group without error' do
+ expect(service.execute).to be_persisted
+ end
+ end
+
describe 'creating a top level group' do
let(:service) { described_class.new(user, group_params) }
@@ -138,4 +147,91 @@ RSpec.describe Groups::CreateService, '#execute' do
expect(group.namespace_settings).to be_persisted
end
end
+
+ describe 'create service for the group' do
+ let(:service) { described_class.new(user, group_params) }
+ let(:created_group) { service.execute }
+
+ context 'with an active instance-level integration' do
+ let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+
+ it 'creates a service from the instance-level integration' do
+ expect(created_group.services.count).to eq(1)
+ expect(created_group.services.first.api_url).to eq(instance_integration.api_url)
+ expect(created_group.services.first.inherit_from_id).to eq(instance_integration.id)
+ end
+
+ context 'with an active group-level integration' do
+ let(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
+ let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user)
+ end
+ end
+
+ it 'creates a service from the group-level integration' do
+ expect(created_group.services.count).to eq(1)
+ expect(created_group.services.first.api_url).to eq(group_integration.api_url)
+ expect(created_group.services.first.inherit_from_id).to eq(group_integration.id)
+ end
+
+ context 'with an active subgroup' do
+ let(:service) { described_class.new(user, group_params.merge(parent_id: subgroup.id)) }
+ let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let(:subgroup) do
+ create(:group, parent: group).tap do |subgroup|
+ subgroup.add_owner(user)
+ end
+ end
+
+ it 'creates a service from the subgroup-level integration' do
+ expect(created_group.services.count).to eq(1)
+ expect(created_group.services.first.api_url).to eq(subgroup_integration.api_url)
+ expect(created_group.services.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
+ end
+ end
+ end
+ end
+
+ context 'shared runners configuration' do
+ context 'parent group present' do
+ using RSpec::Parameterized::TableSyntax
+
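+ # Each row pairs the parent's shared_runners_enabled value with its allow_descendants_override_disabled_shared_runners setting.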
+ where(:shared_runners_config, :descendants_override_disabled_shared_runners_config) do
+ true | false
+ false | false
+ # true | true # invalid at the group level, leaving as comment to make explicit
+ false | true
+ end
+
+ with_them do
+ let!(:group) { create(:group, shared_runners_enabled: shared_runners_config, allow_descendants_override_disabled_shared_runners: descendants_override_disabled_shared_runners_config) }
+ let!(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it 'creates group following the parent config' do
+ new_group = service.execute
+
+ expect(new_group.shared_runners_enabled).to eq(shared_runners_config)
+ expect(new_group.allow_descendants_override_disabled_shared_runners).to eq(descendants_override_disabled_shared_runners_config)
+ end
+ end
+ end
+
+ context 'root group' do
+ let!(:service) { described_class.new(user) }
+
+ it 'follows default config' do
+ new_group = service.execute
+
+ expect(new_group.shared_runners_enabled).to eq(true)
+ expect(new_group.allow_descendants_override_disabled_shared_runners).to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index 4aac602a6da..f284225e23a 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -10,6 +10,15 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when the job can be successfully scheduled' do
subject(:import_service) { described_class.new(group: group, user: user) }
+ it 'creates group import state' do
+ import_service.async_execute
+
+ import_state = group.import_state
+
+ expect(import_state.user).to eq(user)
+ expect(import_state.group).to eq(group)
+ end
+
it 'enqueues an import job' do
expect(GroupImportWorker).to receive(:perform_async).with(user.id, group.id)
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 89e4d091ff7..ae04eca3a9f 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -285,6 +285,44 @@ RSpec.describe Groups::TransferService do
end
end
+ context 'shared runners configuration' do
+ before do
+ create(:group_member, :owner, group: new_parent_group, user: user)
+ end
+
+ context 'if parent group has disabled shared runners but allows overrides' do
+ let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true) }
+
+ it 'calls update service' do
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: 'disabled_with_override' }).and_call_original
+
+ transfer_service.execute(new_parent_group)
+ end
+ end
+
+ context 'if parent group does not allow shared runners' do
+ let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false) }
+
+ it 'calls update service' do
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: 'disabled_and_unoverridable' }).and_call_original
+
+ transfer_service.execute(new_parent_group)
+ end
+ end
+
+ context 'if parent group allows shared runners' do
+ let(:group) { create(:group, :public, :nested, shared_runners_enabled: false) }
+ let(:new_parent_group) { create(:group, shared_runners_enabled: true) }
+
+ it 'does not call update service and keeps them disabled on the group' do
+ expect(Groups::UpdateSharedRunnersService).not_to receive(:new)
+
+ transfer_service.execute(new_parent_group)
+ expect(group.reload.shared_runners_enabled).to be_falsy
+ end
+ end
+ end
+
context 'when a group is transferred to its subgroup' do
let(:new_parent_group) { create(:group, parent: group) }
@@ -529,6 +567,39 @@ RSpec.describe Groups::TransferService do
end
end
+ context 'when transferring a group with two factor authentication switched on' do
+ before do
+ TestEnv.clean_test_path
+ create(:group_member, :owner, group: new_parent_group, user: user)
+ create(:group, :private, parent: group, require_two_factor_authentication: true)
+ group.update!(require_two_factor_authentication: true)
+ end
+
+ it 'does not update group two factor authentication setting' do
+ transfer_service.execute(new_parent_group)
+
+ expect(group.require_two_factor_authentication).to eq(true)
+ end
+
+ context 'when new parent disallows two factor authentication switched on for descendants' do
+ before do
+ new_parent_group.namespace_settings.update!(allow_mfa_for_subgroups: false)
+ end
+
+ it 'updates group two factor authentication setting' do
+ transfer_service.execute(new_parent_group)
+
+ expect(group.require_two_factor_authentication).to eq(false)
+ end
+
+ it 'schedules update of group two factor authentication setting for descendants' do
+ expect(DisallowTwoFactorForSubgroupsWorker).to receive(:perform_async).with(group.id)
+
+ transfer_service.execute(new_parent_group)
+ end
+ end
+ end
+
context 'when updating the group goes wrong' do
let!(:subgroup1) { create(:group, :public, parent: group) }
let!(:subgroup2) { create(:group, :public, parent: group) }
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 1e6a8d53354..bc7c066fa04 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -283,6 +283,50 @@ RSpec.describe Groups::UpdateService do
end
end
+ context 'change shared Runners config' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, shared_runners_enabled: true, group: group) }
+
+ subject { described_class.new(group, user, shared_runners_setting: 'disabled_and_unoverridable').execute }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it 'calls the shared runners update service' do
+ expect_any_instance_of(::Groups::UpdateSharedRunnersService).to receive(:execute).and_return({ status: :success })
+
+ expect(subject).to be_truthy
+ end
+
+ it 'handles errors in the shared runners update service' do
+ expect_any_instance_of(::Groups::UpdateSharedRunnersService).to receive(:execute).and_return({ status: :error, message: 'something happened' })
+
+ expect(subject).to be_falsy
+
+ expect(group.errors[:update_shared_runners].first).to eq('something happened')
+ end
+ end
+
+ context 'changes allowing subgroups to establish own 2FA' do
+ let(:group) { create(:group) }
+ let(:params) { { allow_mfa_for_subgroups: false } }
+
+ subject { described_class.new(group, user, params).execute }
+
+ it 'changes settings' do
+ subject
+
+ expect(group.namespace_settings.reload.allow_mfa_for_subgroups).to eq(false)
+ end
+
+ it 'enqueues an update of subgroups and their members' do
+ expect(DisallowTwoFactorForSubgroupsWorker).to receive(:perform_async).with(group.id)
+
+ subject
+ end
+ end
+
def update_group(group, user, opts)
Groups::UpdateService.new(group, user, opts).execute
end
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index 9fd8477a455..e2838c4ce0b 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -13,17 +13,14 @@ RSpec.describe Groups::UpdateSharedRunnersService do
context 'when current_user is not the group owner' do
let_it_be(:group) { create(:group) }
- let(:params) { { shared_runners_enabled: '0' } }
+ let(:params) { { shared_runners_setting: 'enabled' } }
before do
group.add_maintainer(user)
end
it 'results error and does not call any method' do
- expect(group).not_to receive(:enable_shared_runners!)
- expect(group).not_to receive(:disable_shared_runners!)
- expect(group).not_to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:disallow_descendants_override_disabled_shared_runners!)
+ expect(group).not_to receive(:update_shared_runners_setting!)
expect(subject[:status]).to eq(:error)
expect(subject[:message]).to eq('Operation not allowed')
@@ -37,191 +34,60 @@ RSpec.describe Groups::UpdateSharedRunnersService do
end
context 'enable shared Runners' do
- where(:desired_params) do
- ['1', true]
- end
-
- with_them do
- let(:params) { { shared_runners_enabled: desired_params } }
-
- context 'group that its ancestors have shared runners disabled' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
-
- it 'results error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq('Shared Runners disabled for the parent group')
- end
- end
+ let(:params) { { shared_runners_setting: 'enabled' } }
- context 'root group with shared runners disabled' do
- let_it_be(:group) { create(:group, :shared_runners_disabled) }
+ context 'group that its ancestors have shared runners disabled' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
- it 'receives correct method and succeeds' do
- expect(group).to receive(:enable_shared_runners!)
- expect(group).not_to receive(:disable_shared_runners!)
- expect(group).not_to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:disallow_descendants_override_disabled_shared_runners!)
-
- expect(subject[:status]).to eq(:success)
- end
+ it 'returns an error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Validation failed: Shared runners enabled cannot be enabled because parent group has shared Runners disabled')
end
end
- end
-
- context 'disable shared Runners' do
- let_it_be(:group) { create(:group) }
-
- where(:desired_params) do
- ['0', false]
- end
- with_them do
- let(:params) { { shared_runners_enabled: desired_params } }
+ context 'root group with shared runners disabled' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
it 'receives correct method and succeeds' do
- expect(group).to receive(:disable_shared_runners!)
- expect(group).not_to receive(:enable_shared_runners!)
- expect(group).not_to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:disallow_descendants_override_disabled_shared_runners!)
+ expect(group).to receive(:update_shared_runners_setting!).with('enabled')
expect(subject[:status]).to eq(:success)
end
end
end
- context 'allow descendants to override' do
- where(:desired_params) do
- ['1', true]
- end
-
- with_them do
- let(:params) { { allow_descendants_override_disabled_shared_runners: desired_params } }
-
- context 'top level group' do
- let_it_be(:group) { create(:group, :shared_runners_disabled) }
-
- it 'receives correct method and succeeds' do
- expect(group).to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:disallow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:enable_shared_runners!)
- expect(group).not_to receive(:disable_shared_runners!)
-
- expect(subject[:status]).to eq(:success)
- end
- end
+ context 'disable shared Runners' do
+ let_it_be(:group) { create(:group) }
+ let(:params) { { shared_runners_setting: 'disabled_and_unoverridable' } }
- context 'when parent does not allow' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
+ it 'receives correct method and succeeds' do
+ expect(group).to receive(:update_shared_runners_setting!).with('disabled_and_unoverridable')
- it 'results error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq('Group level shared Runners not allowed')
- end
- end
+ expect(subject[:status]).to eq(:success)
end
end
- context 'disallow descendants to override' do
- where(:desired_params) do
- ['0', false]
- end
-
- with_them do
- let(:params) { { allow_descendants_override_disabled_shared_runners: desired_params } }
-
- context 'top level group' do
- let_it_be(:group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners ) }
-
- it 'receives correct method and succeeds' do
- expect(group).to receive(:disallow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:enable_shared_runners!)
- expect(group).not_to receive(:disable_shared_runners!)
-
- expect(subject[:status]).to eq(:success)
- end
- end
-
- context 'top level group that has shared Runners enabled' do
- let_it_be(:group) { create(:group, shared_runners_enabled: true) }
-
- it 'results error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq('Shared Runners enabled')
- end
- end
- end
- end
+ context 'allow descendants to override' do
+ let(:params) { { shared_runners_setting: 'disabled_with_override' } }
- context 'both params are present' do
- context 'shared_runners_enabled: 1 and allow_descendants_override_disabled_shared_runners' do
+ context 'top level group' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
- where(:allow_descendants_override) do
- ['1', true, '0', false]
- end
+ it 'receives correct method and succeeds' do
+ expect(group).to receive(:update_shared_runners_setting!).with('disabled_with_override')
- with_them do
- let(:params) { { shared_runners_enabled: '1', allow_descendants_override_disabled_shared_runners: allow_descendants_override } }
-
- it 'results in an error because shared Runners are enabled' do
- expect { subject }
- .to not_change { group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq('Cannot set shared_runners_enabled to true and allow_descendants_override_disabled_shared_runners')
- end
+ expect(subject[:status]).to eq(:success)
end
end
- context 'shared_runners_enabled: 0 and allow_descendants_override_disabled_shared_runners: 0' do
- let_it_be(:group) { create(:group, :allow_descendants_override_disabled_shared_runners) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
- let_it_be(:sub_group_2) { create(:group, parent: group) }
- let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
- let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
-
- let(:params) { { shared_runners_enabled: '0', allow_descendants_override_disabled_shared_runners: '0' } }
-
- it 'disables shared Runners and disable allow_descendants_override_disabled_shared_runners' do
- expect { subject }
- .to change { group.reload.shared_runners_enabled }.from(true).to(false)
- .and change { group.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and change { sub_group.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
- .and change { sub_group_2.reload.shared_runners_enabled }.from(true).to(false)
- .and not_change { sub_group_2.reload.allow_descendants_override_disabled_shared_runners }
- .and change { project.reload.shared_runners_enabled }.from(true).to(false)
- .and change { project_2.reload.shared_runners_enabled }.from(true).to(false)
- end
- end
+ context 'when parent does not allow' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
- context 'shared_runners_enabled: 0 and allow_descendants_override_disabled_shared_runners: 1' do
- let_it_be(:group) { create(:group) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:sub_group_2) { create(:group, parent: group) }
- let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
- let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
-
- let(:params) { { shared_runners_enabled: '0', allow_descendants_override_disabled_shared_runners: '1' } }
-
- it 'disables shared Runners and enable allow_descendants_override_disabled_shared_runners only for itself' do
- expect { subject }
- .to change { group.reload.shared_runners_enabled }.from(true).to(false)
- .and change { group.reload.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and change { sub_group_2.reload.shared_runners_enabled }.from(true).to(false)
- .and not_change { sub_group_2.reload.allow_descendants_override_disabled_shared_runners }
- .and change { project.reload.shared_runners_enabled }.from(true).to(false)
- .and change { project_2.reload.shared_runners_enabled }.from(true).to(false)
+ it 'returns an error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
end
end
end
diff --git a/spec/services/incident_management/create_incident_label_service_spec.rb b/spec/services/incident_management/create_incident_label_service_spec.rb
index 4771dfc9e64..441cddf1d2e 100644
--- a/spec/services/incident_management/create_incident_label_service_spec.rb
+++ b/spec/services/incident_management/create_incident_label_service_spec.rb
@@ -3,65 +3,5 @@
require 'spec_helper'
RSpec.describe IncidentManagement::CreateIncidentLabelService do
- let_it_be(:project) { create(:project, :private) }
- let_it_be(:user) { User.alert_bot }
- let(:service) { described_class.new(project, user) }
-
- subject(:execute) { service.execute }
-
- describe 'execute' do
- let(:incident_label_attributes) { attributes_for(:label, :incident) }
- let(:title) { incident_label_attributes[:title] }
- let(:color) { incident_label_attributes[:color] }
- let(:description) { incident_label_attributes[:description] }
-
- shared_examples 'existing label' do
- it 'returns the existing label' do
- expect { execute }.not_to change(Label, :count)
-
- expect(execute).to be_success
- expect(execute.payload).to eq(label: label)
- end
- end
-
- shared_examples 'new label' do
- it 'creates a new label' do
- expect { execute }.to change(Label, :count).by(1)
-
- label = project.reload.labels.last
- expect(execute).to be_success
- expect(execute.payload).to eq(label: label)
- expect(label.title).to eq(title)
- expect(label.color).to eq(color)
- expect(label.description).to eq(description)
- end
- end
-
- context 'with predefined project label' do
- it_behaves_like 'existing label' do
- let!(:label) { create(:label, project: project, title: title) }
- end
- end
-
- context 'with predefined group label' do
- let(:project) { create(:project, group: group) }
- let(:group) { create(:group) }
-
- it_behaves_like 'existing label' do
- let!(:label) { create(:group_label, group: group, title: title) }
- end
- end
-
- context 'without label' do
- context 'when user has permissions to create labels' do
- it_behaves_like 'new label'
- end
-
- context 'when user has no permissions to create labels' do
- let_it_be(:user) { create(:user) }
-
- it_behaves_like 'new label'
- end
- end
- end
+ it_behaves_like 'incident management label service'
end
diff --git a/spec/services/incident_management/incidents/update_severity_service_spec.rb b/spec/services/incident_management/incidents/update_severity_service_spec.rb
new file mode 100644
index 00000000000..bc1abf82cf2
--- /dev/null
+++ b/spec/services/incident_management/incidents/update_severity_service_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::Incidents::UpdateSeverityService do
+ let_it_be(:user) { create(:user) }
+
+ describe '#execute' do
+ let(:severity) { 'low' }
+ let(:system_note_worker) { ::IncidentManagement::AddSeveritySystemNoteWorker }
+
+ subject(:update_severity) { described_class.new(issuable, user, severity).execute }
+
+ before do
+ allow(system_note_worker).to receive(:perform_async)
+ end
+
+ shared_examples 'adds a system note' do
+ it 'calls AddSeveritySystemNoteWorker' do
+ update_severity
+
+ expect(system_note_worker).to have_received(:perform_async).with(issuable.id, user.id)
+ end
+ end
+
+ context 'when issuable is not an incident' do
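+ # Severity only applies to incidents; other issuable types are expected to be left untouched.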
+ %i(issue merge_request).each do |issuable_type|
+ let(:issuable) { build_stubbed(issuable_type) }
+
+ it { is_expected.to be_nil }
+
+ it 'does not set severity' do
+ expect { update_severity }.not_to change(IssuableSeverity, :count)
+ end
+
+ it 'does not add a system note' do
+ update_severity
+
+ expect(system_note_worker).not_to have_received(:perform_async)
+ end
+ end
+ end
+
+ context 'when issuable is an incident' do
+ let!(:issuable) { create(:incident) }
+
+ context 'when issuable does not have issuable severity yet' do
+ it 'creates new record' do
+ expect { update_severity }.to change { IssuableSeverity.where(issue: issuable).count }.to(1)
+ end
+
+ it 'sets severity to specified value' do
+ expect { update_severity }.to change { issuable.severity }.to('low')
+ end
+
+ it_behaves_like 'adds a system note'
+ end
+
+ context 'when issuable has an issuable severity' do
+ let!(:issuable_severity) { create(:issuable_severity, issue: issuable, severity: 'medium') }
+
+ it 'does not create new record' do
+ expect { update_severity }.not_to change(IssuableSeverity, :count)
+ end
+
+ it 'updates existing issuable severity' do
+ expect { update_severity }.to change { issuable_severity.severity }.to(severity)
+ end
+
+ it_behaves_like 'adds a system note'
+ end
+
+ context 'when severity value is unsupported' do
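+ # Unsupported values fall back to IssuableSeverity::DEFAULT.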
+ let(:severity) { 'unsupported-severity' }
+
+ it 'sets the severity to default value' do
+ update_severity
+
+ expect(issuable.issuable_severity.severity).to eq(IssuableSeverity::DEFAULT)
+ end
+
+ it_behaves_like 'adds a system note'
+ end
+ end
+ end
+end
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index 168a80a97c0..f2bc4f717af 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -254,7 +254,7 @@ RSpec.describe Issuable::BulkUpdateService do
describe 'unsubscribe from issues' do
let(:issues) do
create_list(:closed_issue, 2, project: project) do |issue|
- issue.subscriptions.create(user: user, project: project, subscribed: true)
+ issue.subscriptions.create!(user: user, project: project, subscribed: true)
end
end
diff --git a/spec/services/issuable/clone/attributes_rewriter_spec.rb b/spec/services/issuable/clone/attributes_rewriter_spec.rb
index 372e6d480e3..7f434b8b246 100644
--- a/spec/services/issuable/clone/attributes_rewriter_spec.rb
+++ b/spec/services/issuable/clone/attributes_rewriter_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
group_label = create(:group_label, title: 'group_label', group: group)
create(:label, title: 'label3', project: project2)
- original_issue.update(labels: [project1_label_1, project1_label_2, group_label])
+ original_issue.update!(labels: [project1_label_1, project1_label_2, group_label])
subject.execute
@@ -48,7 +48,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
it 'sets milestone to nil when old issue milestone is not in the new project' do
milestone = create(:milestone, title: 'milestone', project: project1)
- original_issue.update(milestone: milestone)
+ original_issue.update!(milestone: milestone)
subject.execute
@@ -59,7 +59,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
milestone_project1 = create(:milestone, title: 'milestone', project: project1)
milestone_project2 = create(:milestone, title: 'milestone', project: project2)
- original_issue.update(milestone: milestone_project1)
+ original_issue.update!(milestone: milestone_project1)
subject.execute
@@ -69,7 +69,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
it 'copies the milestone when old issue milestone is a group milestone' do
milestone = create(:milestone, title: 'milestone', group: group)
- original_issue.update(milestone: milestone)
+ original_issue.update!(milestone: milestone)
subject.execute
@@ -85,7 +85,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
let!(:milestone2_project2) { create(:milestone, title: 'milestone2', project: project2) }
before do
- original_issue.update(milestone: milestone2_project1)
+ original_issue.update!(milestone: milestone2_project1)
create_event(milestone1_project1)
create_event(milestone2_project1)
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index 217550542bb..fc01ee8f672 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
before do
issuable.labels << label
- issuable.save
+ issuable.save!
end
it 'creates a resource label event' do
@@ -69,7 +69,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
subject { described_class.new(project, user).execute(issuable, old_labels: [], is_update: false) }
it 'does not create system note for title and description' do
- issuable.save
+ issuable.save!
expect { subject }.not_to change { issuable.notes.count }
end
@@ -78,7 +78,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
label = create(:label, project: project)
issuable.labels << label
- issuable.save
+ issuable.save!
expect { subject }.to change { issuable.resource_label_events.count }.from(0).to(1)
@@ -104,7 +104,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
it 'creates a system note for due_date set' do
issuable.due_date = Date.today
- issuable.save
+ issuable.save!
expect { subject }.to change { issuable.notes.count }.from(0).to(1)
expect(issuable.notes.last.note).to match('changed due date')
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index 93eef8a2732..16433d49ca1 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -3,11 +3,14 @@
require 'spec_helper.rb'
RSpec.describe Issues::BuildService do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
-
- before do
- project.add_developer(user)
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let(:user) { developer }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_guest(guest)
end
def build_issue(issue_params = {})
@@ -134,31 +137,56 @@ RSpec.describe Issues::BuildService do
end
describe '#execute' do
- it 'builds a new issues with given params' do
- milestone = create(:milestone, project: project)
- issue = build_issue(milestone_id: milestone.id)
+ context 'as developer' do
+ it 'builds a new issue with given params' do
+ milestone = create(:milestone, project: project)
+ issue = build_issue(milestone_id: milestone.id)
- expect(issue.milestone).to eq(milestone)
- end
+ expect(issue.milestone).to eq(milestone)
+ end
- it 'sets milestone to nil if it is not available for the project' do
- milestone = create(:milestone, project: create(:project))
- issue = build_issue(milestone_id: milestone.id)
+ it 'sets milestone to nil if it is not available for the project' do
+ milestone = create(:milestone, project: create(:project))
+ issue = build_issue(milestone_id: milestone.id)
- expect(issue.milestone).to be_nil
+ expect(issue.milestone).to be_nil
+ end
end
- context 'setting issue type' do
- it 'sets the issue_type on the issue' do
- issue = build_issue(issue_type: 'incident')
+ context 'as guest' do
+ let(:user) { guest }
- expect(issue.issue_type).to eq('incident')
+ it 'cannot set milestone' do
+ milestone = create(:milestone, project: project)
+ issue = build_issue(milestone_id: milestone.id)
+
+ expect(issue.milestone).to be_nil
end
- it 'defaults to issue if issue_type not given' do
- issue = build_issue
+ context 'setting issue type' do
+ it 'defaults to issue if issue_type not given' do
+ issue = build_issue
+
+ expect(issue).to be_issue
+ end
+
+ it 'sets issue' do
+ issue = build_issue(issue_type: 'issue')
+
+ expect(issue).to be_issue
+ end
+
+ it 'sets incident' do
+ issue = build_issue(issue_type: 'incident')
- expect(issue.issue_type).to eq('issue')
+ expect(issue).to be_incident
+ end
+
+ it 'cannot set invalid type' do
+ expect do
+ build_issue(issue_type: 'invalid type')
+ end.to raise_error(ArgumentError, "'invalid type' is not a valid issue_type")
+ end
end
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 4db6e5cac12..9076fb11c9b 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -233,26 +233,11 @@ RSpec.describe Issues::CloseService do
expect(email.subject).to include(issue.title)
end
- context 'when resource state events are disabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
-
- it 'creates system note about the issue being closed' do
- close_issue
-
- note = issue.notes.last
- expect(note.note).to include "closed"
- end
- end
-
- context 'when resource state events are enabled' do
- it 'creates resource state event about the issue being closed' do
- close_issue
+ it 'creates resource state event about the issue being closed' do
+ close_issue
- event = issue.resource_state_events.last
- expect(event.state).to eq('closed')
- end
+ event = issue.resource_state_events.last
+ expect(event.state).to eq('closed')
end
it 'marks todos as done' do
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index c2989dc86cf..ae1454ce9bb 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Issues::MoveService do
+ include DesignManagementTestHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:title) { 'Some issue' }
@@ -201,6 +203,47 @@ RSpec.describe Issues::MoveService do
expect(copied_notes.order('id ASC').pluck(:note)).to eq(notes.map(&:note))
end
end
+
+ context 'issue with a design', :clean_gitlab_redis_shared_state do
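+ # Design copying is delegated to DesignManagement::CopyDesignCollection::QueueService and runs asynchronously.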
+ let_it_be(:new_project) { create(:project) }
+ let!(:design) { create(:design, :with_lfs_file, issue: old_issue) }
+ let!(:note) { create(:diff_note_on_design, noteable: design, issue: old_issue, project: old_issue.project) }
+ let(:subject) { move_service.execute(old_issue, new_project) }
+
+ before do
+ enable_design_management
+ end
+
+ it 'calls CopyDesignCollection::QueueService' do
+ expect(DesignManagement::CopyDesignCollection::QueueService).to receive(:new)
+ .with(user, old_issue, kind_of(Issue))
+ .and_call_original
+
+ subject
+ end
+
+ it 'logs if QueueService returns an error', :aggregate_failures do
+ error_message = 'error'
+
+ expect_next_instance_of(DesignManagement::CopyDesignCollection::QueueService) do |service|
+ expect(service).to receive(:execute).and_return(
+ ServiceResponse.error(message: error_message)
+ )
+ end
+ expect(Gitlab::AppLogger).to receive(:error).with(error_message)
+
+ subject
+ end
+
+ # Perform a small integration test to ensure the services and worker
+ # can correctly create designs.
+ it 'copies the design and its notes', :sidekiq_inline, :aggregate_failures do
+ new_issue = subject
+
+ expect(new_issue.designs.size).to eq(1)
+ expect(new_issue.designs.first.notes.size).to eq(1)
+ end
+ end
end
describe 'move permissions' do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index b3e8fba4e9a..cfda27795c7 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -650,7 +650,7 @@ RSpec.describe Issues::UpdateService, :mailer do
context 'when the labels change' do
before do
- Timecop.freeze(1.minute.from_now) do
+ travel_to(1.minute.from_now) do
update_issue(label_ids: [label.id])
end
end
diff --git a/spec/services/jira/requests/projects/list_service_spec.rb b/spec/services/jira/requests/projects/list_service_spec.rb
index 415dd42c795..f7bcfa997df 100644
--- a/spec/services/jira/requests/projects/list_service_spec.rb
+++ b/spec/services/jira/requests/projects/list_service_spec.rb
@@ -45,6 +45,10 @@ RSpec.describe Jira::Requests::Projects::ListService do
end
it 'returns an error response' do
+ expect(Gitlab::ProjectServiceLogger).to receive(:error).with(
+ hash_including(
+ error: hash_including(:exception_class, :exception_message, :exception_backtrace)))
+ .and_call_original
expect(subject.error?).to be_truthy
expect(subject.message).to eq('Jira request error: Timeout::Error')
end
diff --git a/spec/services/keys/last_used_service_spec.rb b/spec/services/keys/last_used_service_spec.rb
index 82b6b05975b..a2cd5ffdd38 100644
--- a/spec/services/keys/last_used_service_spec.rb
+++ b/spec/services/keys/last_used_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Keys::LastUsedService do
key = create(:key, last_used_at: 1.year.ago)
time = Time.zone.now
- Timecop.freeze(time) { described_class.new(key).execute }
+ travel_to(time) { described_class.new(key).execute }
expect(key.reload.last_used_at).to be_like_time(time)
end
diff --git a/spec/services/lfs/push_service_spec.rb b/spec/services/lfs/push_service_spec.rb
index 8e5b98fdc9c..f67284ff48d 100644
--- a/spec/services/lfs/push_service_spec.rb
+++ b/spec/services/lfs/push_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Lfs::PushService do
stub_lfs_batch(lfs_object)
expect(lfs_client)
- .to receive(:upload)
+ .to receive(:upload!)
.with(lfs_object, upload_action_spec(lfs_object), authenticated: true)
expect(service.execute).to eq(status: :success)
@@ -28,7 +28,7 @@ RSpec.describe Lfs::PushService do
it 'does nothing if there are no LFS objects' do
lfs_object.destroy!
- expect(lfs_client).not_to receive(:upload)
+ expect(lfs_client).not_to receive(:upload!)
expect(service.execute).to eq(status: :success)
end
@@ -36,20 +36,39 @@ RSpec.describe Lfs::PushService do
it 'does not upload the object when upload is not requested' do
stub_lfs_batch(lfs_object, upload: false)
- expect(lfs_client).not_to receive(:upload)
+ expect(lfs_client).not_to receive(:upload!)
expect(service.execute).to eq(status: :success)
end
+ it 'verifies the upload if requested' do
+ stub_lfs_batch(lfs_object, verify: true)
+
+ expect(lfs_client).to receive(:upload!)
+ expect(lfs_client)
+ .to receive(:verify!)
+ .with(lfs_object, verify_action_spec(lfs_object), authenticated: true)
+
+ expect(service.execute).to eq(status: :success)
+ end
+
+ it 'skips verification if requested but upload fails' do
+ stub_lfs_batch(lfs_object, verify: true)
+
+ expect(lfs_client).to receive(:upload!) { raise 'failed' }
+ expect(lfs_client).not_to receive(:verify!)
+ expect(service.execute).to eq(status: :error, message: 'failed')
+ end
+
it 'returns a failure when submitting a batch fails' do
- expect(lfs_client).to receive(:batch) { raise 'failed' }
+ expect(lfs_client).to receive(:batch!) { raise 'failed' }
expect(service.execute).to eq(status: :error, message: 'failed')
end
it 'returns a failure when submitting an upload fails' do
stub_lfs_batch(lfs_object)
- expect(lfs_client).to receive(:upload) { raise 'failed' }
+ expect(lfs_client).to receive(:upload!) { raise 'failed' }
expect(service.execute).to eq(status: :error, message: 'failed')
end
@@ -71,23 +90,28 @@ RSpec.describe Lfs::PushService do
create(:lfs_objects_project, project: project, repository_type: type).lfs_object
end
- def stub_lfs_batch(*objects, upload: true)
+ def stub_lfs_batch(*objects, upload: true, verify: false)
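+ # Stubs the LFS batch API response; verify: true adds a 'verify' action alongside the upload action.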
expect(lfs_client)
- .to receive(:batch).with('upload', containing_exactly(*objects))
- .and_return('transfer' => 'basic', 'objects' => objects.map { |o| object_spec(o, upload: upload) })
+ .to receive(:batch!).with('upload', containing_exactly(*objects))
+ .and_return('transfer' => 'basic', 'objects' => objects.map { |o| object_spec(o, upload: upload, verify: verify) })
end
- def batch_spec(*objects, upload: true)
+ def batch_spec(*objects, upload: true, verify: false)
{ 'transfer' => 'basic', 'objects' => objects.map {|o| object_spec(o, upload: upload) } }
end
- def object_spec(object, upload: true)
- { 'oid' => object.oid, 'size' => object.size, 'authenticated' => true }.tap do |spec|
- spec['actions'] = { 'upload' => upload_action_spec(object) } if upload
+ def object_spec(object, upload: true, verify: false)
+ { 'oid' => object.oid, 'size' => object.size, 'authenticated' => true, 'actions' => {} }.tap do |spec|
+ spec['actions']['upload'] = upload_action_spec(object) if upload
+ spec['actions']['verify'] = verify_action_spec(object) if verify
end
end
def upload_action_spec(object)
{ 'href' => "https://example.com/#{object.oid}/#{object.size}", 'header' => { 'Key' => 'value' } }
end
+
+ def verify_action_spec(object)
+ { 'href' => "https://example.com/#{object.oid}/#{object.size}/verify", 'header' => { 'Key' => 'value' } }
+ end
end
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index 3b3f2f3b95a..4f731ad5852 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -29,15 +29,15 @@ RSpec.describe Members::DestroyService do
end
it 'destroys the member' do
- expect { described_class.new(current_user).execute(member, opts) }.to change { member.source.members_and_requesters.count }.by(-1)
+ expect { described_class.new(current_user).execute(member, **opts) }.to change { member.source.members_and_requesters.count }.by(-1)
end
it 'destroys member notification_settings' do
if member_user.notification_settings.any?
- expect { described_class.new(current_user).execute(member, opts) }
+ expect { described_class.new(current_user).execute(member, **opts) }
.to change { member_user.notification_settings.count }.by(-1)
else
- expect { described_class.new(current_user).execute(member, opts) }
+ expect { described_class.new(current_user).execute(member, **opts) }
.not_to change { member_user.notification_settings.count }
end
end
@@ -63,7 +63,7 @@ RSpec.describe Members::DestroyService do
expect(service).to receive(:enqueue_unassign_issuables).with(member)
end
- service.execute(member, opts)
+ service.execute(member, **opts)
expect(member_user.assigned_open_merge_requests_count).to be(0)
expect(member_user.assigned_open_issues_count).to be(0)
@@ -83,14 +83,14 @@ RSpec.describe Members::DestroyService do
it 'calls Member#after_decline_request' do
expect_any_instance_of(NotificationService).to receive(:decline_access_request).with(member)
- described_class.new(current_user).execute(member, opts)
+ described_class.new(current_user).execute(member, **opts)
end
context 'when current user is the member' do
it 'does not call Member#after_decline_request' do
expect_any_instance_of(NotificationService).not_to receive(:decline_access_request).with(member)
- described_class.new(member_user).execute(member, opts)
+ described_class.new(member_user).execute(member, **opts)
end
end
end
@@ -280,7 +280,6 @@ RSpec.describe Members::DestroyService do
context 'subresources' do
let(:user) { create(:user) }
let(:member_user) { create(:user) }
- let(:opts) { {} }
let(:group) { create(:group, :public) }
let(:subgroup) { create(:group, parent: group) }
@@ -303,7 +302,7 @@ RSpec.describe Members::DestroyService do
group_member = create(:group_member, :developer, group: group, user: member_user)
- described_class.new(user).execute(group_member, opts)
+ described_class.new(user).execute(group_member)
end
it 'removes the project membership' do
@@ -350,7 +349,6 @@ RSpec.describe Members::DestroyService do
context 'deletion of invitations created by deleted project member' do
let(:user) { project.owner }
let(:member_user) { create(:user) }
- let(:opts) { {} }
let(:project) { create(:project) }
@@ -359,7 +357,7 @@ RSpec.describe Members::DestroyService do
project_member = create(:project_member, :maintainer, user: member_user, project: project)
- described_class.new(user).execute(project_member, opts)
+ described_class.new(user).execute(project_member)
end
it 'removes project members invited by deleted user' do
diff --git a/spec/services/members/invitation_reminder_email_service_spec.rb b/spec/services/members/invitation_reminder_email_service_spec.rb
new file mode 100644
index 00000000000..88280869476
--- /dev/null
+++ b/spec/services/members/invitation_reminder_email_service_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::InvitationReminderEmailService do
+ describe 'sending invitation reminders' do
+ subject { described_class.new(invitation).execute }
+
+ let_it_be(:frozen_time) { Date.today.beginning_of_day }
+ let_it_be(:invitation) { build(:group_member, :invited, created_at: frozen_time) }
+
+ context 'when the experiment is disabled' do
+ before do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_attribute?).and_return(false)
+ invitation.expires_at = frozen_time + 2.days
+ end
+
+ it 'does not send an invitation' do
+ travel_to(frozen_time + 1.day) do
+ expect(invitation).not_to receive(:send_invitation_reminder)
+
+ subject
+ end
+ end
+ end
+
+ context 'when the experiment is enabled' do
+ before do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_attribute?).and_return(true)
+ invitation.expires_at = frozen_time + expires_at_days.days if expires_at_days
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
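+ # Maps the invitation's expiry (in days after creation) to the days on which a reminder should be sent.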
+ where(:expires_at_days, :send_reminder_at_days) do
+ 0 | []
+ 1 | []
+ 2 | [1]
+ 3 | [1, 2]
+ 4 | [1, 2, 3]
+ 5 | [1, 2, 4]
+ 6 | [1, 3, 5]
+ 7 | [1, 3, 5]
+ 8 | [2, 3, 6]
+ 9 | [2, 4, 7]
+ 10 | [2, 4, 8]
+ 11 | [2, 4, 8]
+ 12 | [2, 5, 9]
+ 13 | [2, 5, 10]
+ 14 | [2, 5, 10]
+ 15 | [2, 5, 10]
+ nil | [2, 5, 10]
+ end
+
+ with_them do
+ # Create an invitation today with an expiration date from 0 to 10 days in the future or without an expiration date
+ # We chose 10 days here, because we fetch invitations that were created at most 10 days ago.
+ (0..10).each do |day|
+ it 'sends an invitation reminder only on the expected days' do
+ next if day > (expires_at_days || 10) # We don't need to test after the invitation has already expired
+
+ # We are traveling in a loop from today to 10 days from now
+ travel_to(frozen_time + day.days) do
+ # Given an expiration date and the number of days after the creation of the invitation based on the current day in the loop, a reminder may be sent
+ if (reminder_index = send_reminder_at_days.index(day))
+ expect(invitation).to receive(:send_invitation_reminder).with(reminder_index)
+ else
+ expect(invitation).not_to receive(:send_invitation_reminder)
+ end
+
+ subject
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/cleanup_refs_service_spec.rb b/spec/services/merge_requests/cleanup_refs_service_spec.rb
index b38ccee4aa0..a051b3c9355 100644
--- a/spec/services/merge_requests/cleanup_refs_service_spec.rb
+++ b/spec/services/merge_requests/cleanup_refs_service_spec.rb
@@ -35,6 +35,17 @@ RSpec.describe MergeRequests::CleanupRefsService do
end
end
+ context 'when merge request has no head ref' do
+ before do
+ # Simulate a merge request with no head ref
+ merge_request.project.repository.delete_refs(merge_request.ref_path)
+ end
+
+ it 'does not fail' do
+ expect(result[:status]).to eq(:success)
+ end
+ end
+
context 'when merge request has merge ref' do
before do
MergeRequests::MergeToRefService
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index e7ac286f48b..67fb4eaade5 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -19,54 +19,45 @@ RSpec.describe MergeRequests::CloseService do
describe '#execute' do
it_behaves_like 'cache counters invalidator'
- [true, false].each do |state_tracking_enabled|
- context "valid params with state_tracking #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
- let(:service) { described_class.new(project, user, {}) }
+ context 'valid params' do
+ let(:service) { described_class.new(project, user, {}) }
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
-
- allow(service).to receive(:execute_hooks)
+ before do
+ allow(service).to receive(:execute_hooks)
- perform_enqueued_jobs do
- @merge_request = service.execute(merge_request)
- end
+ perform_enqueued_jobs do
+ @merge_request = service.execute(merge_request)
end
+ end
- it { expect(@merge_request).to be_valid }
- it { expect(@merge_request).to be_closed }
+ it { expect(@merge_request).to be_valid }
+ it { expect(@merge_request).to be_closed }
- it 'executes hooks with close action' do
- expect(service).to have_received(:execute_hooks)
- .with(@merge_request, 'close')
- end
+ it 'executes hooks with close action' do
+ expect(service).to have_received(:execute_hooks)
+ .with(@merge_request, 'close')
+ end
- it 'sends email to user2 about assign of new merge_request', :sidekiq_might_not_need_inline do
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
+ it 'sends email to user2 about assign of new merge_request', :sidekiq_might_not_need_inline do
+ email = ActionMailer::Base.deliveries.last
+ expect(email.to.first).to eq(user2.email)
+ expect(email.subject).to include(merge_request.title)
+ end
- it 'creates system note about merge_request reassign' do
- if state_tracking_enabled
- event = @merge_request.resource_state_events.last
- expect(event.state).to eq('closed')
- else
- note = @merge_request.notes.last
- expect(note.note).to include 'closed'
- end
- end
+ it 'creates a resource event' do
+ event = @merge_request.resource_state_events.last
+ expect(event.state).to eq('closed')
+ end
- it 'marks todos as done' do
- expect(todo.reload).to be_done
- end
+ it 'marks todos as done' do
+ expect(todo.reload).to be_done
+ end
- context 'when auto merge is enabled' do
- let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
+ context 'when auto merge is enabled' do
+ let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
- it 'cancels the auto merge' do
- expect(@merge_request).not_to be_auto_merge_enabled
- end
+ it 'cancels the auto merge' do
+ expect(@merge_request).not_to be_auto_merge_enabled
end
end
end
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index fa70ad8c559..86e49fe601c 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -154,7 +154,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
result = service.execute
- expect(result[:merge_request].label_ids).to eq(label_ids)
+ expect(result[:merge_request].label_ids).to match_array(label_ids)
end
it "inherits milestones" do
diff --git a/spec/services/merge_requests/export_csv_service_spec.rb b/spec/services/merge_requests/export_csv_service_spec.rb
new file mode 100644
index 00000000000..ecb17b3fe77
--- /dev/null
+++ b/spec/services/merge_requests/export_csv_service_spec.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ExportCsvService do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let(:csv) { CSV.parse(subject.csv_data, headers: true).first }
+
+ subject { described_class.new(MergeRequest.where(id: merge_request.id), merge_request.project) }
+
+ describe 'csv_data' do
+ it 'contains the correct information', :aggregate_failures do
+ expect(csv['MR IID']).to eq(merge_request.iid.to_s)
+ expect(csv['Title']).to eq(merge_request.title)
+ expect(csv['State']).to eq(merge_request.state)
+ expect(csv['Description']).to eq(merge_request.description)
+ expect(csv['Source Branch']).to eq(merge_request.source_branch)
+ expect(csv['Target Branch']).to eq(merge_request.target_branch)
+ expect(csv['Source Project ID']).to eq(merge_request.source_project_id.to_s)
+ expect(csv['Target Project ID']).to eq(merge_request.target_project_id.to_s)
+ expect(csv['Author']).to eq(merge_request.author.name)
+ expect(csv['Author Username']).to eq(merge_request.author.username)
+ end
+
+ describe 'assignees' do
+ context 'when assigned' do
+ let_it_be(:merge_request) { create(:merge_request, assignees: create_list(:user, 2)) }
+
+ it 'contains the names of assignees' do
+ expect(csv['Assignees']).to eq(merge_request.assignees.map(&:name).join(', '))
+ end
+
+ it 'contains the usernames of assignees' do
+ expect(csv['Assignee Usernames']).to eq(merge_request.assignees.map(&:username).join(', '))
+ end
+ end
+
+ context 'when not assigned' do
+ it 'returns empty strings' do
+ expect(csv['Assignees']).to eq('')
+ expect(csv['Assignee Usernames']).to eq('')
+ end
+ end
+ end
+
+ describe 'approvers' do
+ context 'when approved' do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let(:approvers) { create_list(:user, 2) }
+
+ before do
+ merge_request.approved_by_users = approvers
+ end
+
+ it 'contains the names of approvers separated by a comma' do
+ expect(csv['Approvers'].split(', ')).to contain_exactly(approvers[0].name, approvers[1].name)
+ end
+
+ it 'contains the usernames of approvers separated by a comma' do
+ expect(csv['Approver Usernames'].split(', ')).to contain_exactly(approvers[0].username, approvers[1].username)
+ end
+ end
+
+ context 'when not approved' do
+ it 'returns empty strings' do
+ expect(csv['Approvers']).to eq('')
+ expect(csv['Approver Usernames']).to eq('')
+ end
+ end
+ end
+
+ describe 'merged user' do
+ context 'MR is merged' do
+ let_it_be(:merge_request) { create(:merge_request, :merged, :with_merged_metrics) }
+
+ it 'is merged' do
+ expect(csv['State']).to eq('merged')
+ end
+
+ it 'has a merged user' do
+ expect(csv['Merged User']).to eq(merge_request.metrics.merged_by.name)
+ expect(csv['Merged Username']).to eq(merge_request.metrics.merged_by.username)
+ end
+ end
+
+ context 'MR is not merged' do
+ it 'returns empty strings' do
+ expect(csv['Merged User']).to eq('')
+ expect(csv['Merged Username']).to eq('')
+ end
+ end
+ end
+
+ describe 'milestone' do
+ context 'milestone is assigned' do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:milestone) { create(:milestone, :active, project: merge_request.project) }
+
+ before do
+ merge_request.update!(milestone_id: milestone.id)
+ end
+
+ it 'contains the milestone ID' do
+ expect(csv['Milestone ID']).to eq(merge_request.milestone.id.to_s)
+ end
+ end
+
+ context 'no milestone is assigned' do
+ it 'returns an empty string' do
+ expect(csv['Milestone ID']).to eq('')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index 5c44af87470..aec5a3b3fa3 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -22,74 +22,72 @@ RSpec.describe MergeRequests::FfMergeService do
end
describe '#execute' do
- [true, false].each do |state_tracking_enabled|
- context "valid params with state_tracking #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
- let(:service) { described_class.new(project, user, valid_merge_params) }
-
- def execute_ff_merge
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
+ context 'valid params' do
+ let(:service) { described_class.new(project, user, valid_merge_params) }
+
+ def execute_ff_merge
+ perform_enqueued_jobs do
+ service.execute(merge_request)
end
+ end
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
+ before do
+ allow(service).to receive(:execute_hooks)
+ end
- allow(service).to receive(:execute_hooks)
- end
+ it "does not create merge commit" do
+ execute_ff_merge
- it "does not create merge commit" do
- execute_ff_merge
+ source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
+ target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
- source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
- target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
+ expect(source_branch_sha).to eq(target_branch_sha)
+ end
- expect(source_branch_sha).to eq(target_branch_sha)
- end
+ it 'keeps the merge request valid' do
+ expect { execute_ff_merge }
+ .not_to change { merge_request.valid? }
+ end
- it 'keeps the merge request valid' do
- expect { execute_ff_merge }
- .not_to change { merge_request.valid? }
- end
+ it 'updates the merge request to merged' do
+ expect { execute_ff_merge }
+ .to change { merge_request.merged? }
+ .from(false)
+ .to(true)
+ end
- it 'updates the merge request to merged' do
- expect { execute_ff_merge }
- .to change { merge_request.merged? }
- .from(false)
- .to(true)
- end
+ it 'sends email to user2 about merge of new merge_request' do
+ execute_ff_merge
- it 'sends email to user2 about merge of new merge_request' do
- execute_ff_merge
+ email = ActionMailer::Base.deliveries.last
+ expect(email.to.first).to eq(user2.email)
+ expect(email.subject).to include(merge_request.title)
+ end
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
+ it 'creates resource event about merge_request merge' do
+ execute_ff_merge
- it 'creates system note about merge_request merge' do
- execute_ff_merge
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('merged')
+ end
- if state_tracking_enabled
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
- else
- note = merge_request.notes.last
- expect(note.note).to include 'merged'
- end
- end
+ it 'does not update squash_commit_sha if it is not a squash' do
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- it 'does not update squash_commit_sha if it is not a squash' do
- expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
- end
+ expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
- it 'updates squash_commit_sha if it is a squash' do
- merge_request.update!(squash: true)
+ it 'updates squash_commit_sha if it is a squash' do
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- expect { execute_ff_merge }
- .to change { merge_request.squash_commit_sha }
- .from(nil)
- end
+ merge_request.update!(squash: true)
+
+ expect { execute_ff_merge }
+ .to change { merge_request.squash_commit_sha }
+ .from(nil)
+
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 8328f461029..d0e3102f157 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -20,12 +20,9 @@ RSpec.describe MergeRequests::MergeService do
end
context 'valid params' do
- let(:state_tracking) { true }
-
before do
- stub_feature_flags(track_resource_state_change_events: state_tracking)
-
allow(service).to receive(:execute_hooks)
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
perform_enqueued_jobs do
service.execute(merge_request)
@@ -47,20 +44,9 @@ RSpec.describe MergeRequests::MergeService do
end
context 'note creation' do
- context 'when resource state event tracking is disabled' do
- let(:state_tracking) { false }
-
- it 'creates system note about merge_request merge' do
- note = merge_request.notes.last
- expect(note.note).to include 'merged'
- end
- end
-
- context 'when resource state event tracking is enabled' do
- it 'creates resource state event about merge_request merge' do
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
- end
+ it 'creates resource state event about merge_request merge' do
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('merged')
end
end
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index b482e8d6724..14ef5b0b772 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -252,5 +252,16 @@ RSpec.describe MergeRequests::MergeToRefService do
end
end
end
+
+ context 'allow conflicts to be merged in diff' do
+ let(:params) { { allow_conflicts: true } }
+
+ it 'calls merge_to_ref with allow_conflicts param' do
+ expect(project.repository).to receive(:merge_to_ref)
+ .with(anything, anything, anything, anything, anything, anything, true)
+
+ service.execute(merge_request)
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index 543da46f883..725fc16fa7c 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -41,16 +41,6 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
subject
end
- context 'when merge_ref_head_comments is disabled' do
- it 'does not update diff discussion positions' do
- stub_feature_flags(merge_ref_head_comments: false)
-
- expect(Discussions::CaptureDiffNotePositionsService).not_to receive(:new)
-
- subject
- end
- end
-
it 'updates the merge ref' do
expect { subject }.to change(merge_request, :merge_ref_head).from(nil)
end
@@ -221,11 +211,18 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
target_branch: 'conflict-start')
end
- it_behaves_like 'unmergeable merge request'
+ it 'still updates the merge ref HEAD' do
+ expect(merge_request.merge_ref_head).to be_nil
- it 'returns ServiceResponse.error' do
+ subject
+
+ expect(merge_request.reload.merge_ref_head).not_to be_nil
+ end
+
+ it 'returns ServiceResponse.error and keeps merge status as cannot_be_merged' do
result = subject
+ expect(merge_request.merge_status).to eq('cannot_be_merged')
expect(result).to be_a(ServiceResponse)
expect(result.error?).to be(true)
expect(result.message).to eq('Merge request is not mergeable')
@@ -383,5 +380,27 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
end
end
end
+
+ context 'merge with conflicts' do
+ it 'calls MergeToRefService with true allow_conflicts param' do
+ expect(MergeRequests::MergeToRefService).to receive(:new)
+ .with(project, merge_request.author, { allow_conflicts: true }).and_call_original
+
+ subject
+ end
+
+ context 'when display_merge_conflicts_in_diff is disabled' do
+ before do
+ stub_feature_flags(display_merge_conflicts_in_diff: false)
+ end
+
+ it 'calls MergeToRefService with false allow_conflicts param' do
+ expect(MergeRequests::MergeToRefService).to receive(:new)
+ .with(project, merge_request.author, { allow_conflicts: false }).and_call_original
+
+ subject
+ end
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index cace1e0bf09..d603cbb16aa 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -367,76 +367,58 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- [true, false].each do |state_tracking_enabled|
- context "push to origin repo target branch with state tracking #{state_tracking_enabled ? 'enabled' : 'disabled'}", :sidekiq_might_not_need_inline do
+ context 'push to origin repo target branch', :sidekiq_might_not_need_inline do
+ context 'when all MRs to the target branch had diffs' do
before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
+ service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ reload_mrs
end
- context 'when all MRs to the target branch had diffs' do
- before do
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
- reload_mrs
- end
+ it 'updates the merge state' do
+ expect(@merge_request).to be_merged
+ expect(@fork_merge_request).to be_merged
+ expect(@build_failed_todo).to be_done
+ expect(@fork_build_failed_todo).to be_done
- it 'updates the merge state' do
- expect(@merge_request).to be_merged
- expect(@fork_merge_request).to be_merged
- expect(@build_failed_todo).to be_done
- expect(@fork_build_failed_todo).to be_done
-
- if state_tracking_enabled
- expect(@merge_request.resource_state_events.last.state).to eq('merged')
- expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
- else
- expect(@merge_request.notes.last.note).to include('merged')
- expect(@fork_merge_request.notes.last.note).to include('merged')
- end
- end
+ expect(@merge_request.resource_state_events.last.state).to eq('merged')
+ expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
end
+ end
- context 'when an MR to be closed was empty already' do
- let!(:empty_fork_merge_request) do
- create(:merge_request,
- source_project: @fork_project,
- source_branch: 'master',
- target_branch: 'master',
- target_project: @project)
- end
+ context 'when an MR to be closed was empty already' do
+ let!(:empty_fork_merge_request) do
+ create(:merge_request,
+ source_project: @fork_project,
+ source_branch: 'master',
+ target_branch: 'master',
+ target_project: @project)
+ end
- before do
- # This spec already has a fake push, so pretend that we were targeting
- # feature all along.
- empty_fork_merge_request.update_columns(target_branch: 'feature')
+ before do
+ # This spec already has a fake push, so pretend that we were targeting
+ # feature all along.
+ empty_fork_merge_request.update_columns(target_branch: 'feature')
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
- reload_mrs
- empty_fork_merge_request.reload
- end
+ service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ reload_mrs
+ empty_fork_merge_request.reload
+ end
- it 'only updates the non-empty MRs' do
- expect(@merge_request).to be_merged
- expect(@fork_merge_request).to be_merged
-
- expect(empty_fork_merge_request).to be_open
- expect(empty_fork_merge_request.merge_request_diff.state).to eq('empty')
- expect(empty_fork_merge_request.notes).to be_empty
-
- if state_tracking_enabled
- expect(@merge_request.resource_state_events.last.state).to eq('merged')
- expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
- else
- expect(@merge_request.notes.last.note).to include('merged')
- expect(@fork_merge_request.notes.last.note).to include('merged')
- end
- end
+ it 'only updates the non-empty MRs' do
+ expect(@merge_request).to be_merged
+ expect(@fork_merge_request).to be_merged
+
+ expect(empty_fork_merge_request).to be_open
+ expect(empty_fork_merge_request.merge_request_diff.state).to eq('empty')
+ expect(empty_fork_merge_request.notes).to be_empty
+
+ expect(@merge_request.resource_state_events.last.state).to eq('merged')
+ expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
end
end
- context "manual merge of source branch #{state_tracking_enabled ? 'enabled' : 'disabled'}", :sidekiq_might_not_need_inline do
+ context 'manual merge of source branch', :sidekiq_might_not_need_inline do
before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
-
# Merge master -> feature branch
@project.repository.merge(@user, @merge_request.diff_head_sha, @merge_request, 'Test message')
commit = @project.repository.commit('feature')
@@ -445,13 +427,8 @@ RSpec.describe MergeRequests::RefreshService do
end
it 'updates the merge state' do
- if state_tracking_enabled
- expect(@merge_request.resource_state_events.last.state).to eq('merged')
- expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
- else
- expect(@merge_request.notes.last.note).to include('merged')
- expect(@fork_merge_request.notes.last.note).to include('merged')
- end
+ expect(@merge_request.resource_state_events.last.state).to eq('merged')
+ expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
expect(@merge_request).to be_merged
expect(@merge_request.diffs.size).to be > 0
@@ -616,29 +593,21 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- [true, false].each do |state_tracking_enabled|
- context "push to origin repo target branch after fork project was removed #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
-
- @fork_project.destroy!
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
- reload_mrs
- end
+ context 'push to origin repo target branch after fork project was removed' do
+ before do
+ @fork_project.destroy!
+ service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ reload_mrs
+ end
- it 'updates the merge request state' do
- if state_tracking_enabled
- expect(@merge_request.resource_state_events.last.state).to eq('merged')
- else
- expect(@merge_request.notes.last.note).to include('merged')
- end
+ it 'updates the merge request state' do
+ expect(@merge_request.resource_state_events.last.state).to eq('merged')
- expect(@merge_request).to be_merged
- expect(@fork_merge_request).to be_open
- expect(@fork_merge_request.notes).to be_empty
- expect(@build_failed_todo).to be_done
- expect(@fork_build_failed_todo).to be_done
- end
+ expect(@merge_request).to be_merged
+ expect(@fork_merge_request).to be_open
+ expect(@fork_merge_request.notes).to be_empty
+ expect(@build_failed_todo).to be_done
+ expect(@fork_build_failed_todo).to be_done
end
end
@@ -827,10 +796,6 @@ RSpec.describe MergeRequests::RefreshService do
subject { service.execute(oldrev, newrev, 'refs/heads/merge-commit-analyze-before') }
context 'feature enabled' do
- before do
- stub_feature_flags(branch_push_merge_commit_analyze: true)
- end
-
it "updates merge requests' merge_commits" do
expect(Gitlab::BranchPushMergeCommitAnalyzer).to receive(:new).and_wrap_original do |original_method, commits|
expect(commits.map(&:id)).to eq(%w{646ece5cfed840eca0a4feb21bcd6a81bb19bda3 29284d9bcc350bcae005872d0be6edd016e2efb5 5f82584f0a907f3b30cfce5bb8df371454a90051 8a994512e8c8f0dfcf22bb16df6e876be7a61036 689600b91aabec706e657e38ea706ece1ee8268f db46a1c5a5e474aa169b6cdb7a522d891bc4c5f9})
@@ -847,24 +812,6 @@ RSpec.describe MergeRequests::RefreshService do
expect(merge_request_side_branch.merge_commit.id).to eq('29284d9bcc350bcae005872d0be6edd016e2efb5')
end
end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(branch_push_merge_commit_analyze: false)
- end
-
- it "does not trigger analysis" do
- expect(Gitlab::BranchPushMergeCommitAnalyzer).not_to receive(:new)
-
- subject
-
- merge_request.reload
- merge_request_side_branch.reload
-
- expect(merge_request.merge_commit).to eq(nil)
- expect(merge_request_side_branch.merge_commit).to eq(nil)
- end
- end
end
describe '#abort_ff_merge_requests_with_when_pipeline_succeeds' do
diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb
index 0066834180e..ffc2ebb344c 100644
--- a/spec/services/merge_requests/reopen_service_spec.rb
+++ b/spec/services/merge_requests/reopen_service_spec.rb
@@ -20,11 +20,8 @@ RSpec.describe MergeRequests::ReopenService do
context 'valid params' do
let(:service) { described_class.new(project, user, {}) }
- let(:state_tracking) { true }
before do
- stub_feature_flags(track_resource_state_change_events: state_tracking)
-
allow(service).to receive(:execute_hooks)
perform_enqueued_jobs do
@@ -47,20 +44,9 @@ RSpec.describe MergeRequests::ReopenService do
end
context 'note creation' do
- context 'when state event tracking is disabled' do
- let(:state_tracking) { false }
-
- it 'creates system note about merge_request reopen' do
- note = merge_request.notes.last
- expect(note.note).to include 'reopened'
- end
- end
-
- context 'when state event tracking is enabled' do
- it 'creates resource state event about merge_request reopen' do
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('reopened')
- end
+ it 'creates resource state event about merge_request reopen' do
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('reopened')
end
end
end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 3c3e10495d3..ed8872b71f7 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
title: 'New title',
description: 'Also please fix',
assignee_ids: [user.id],
- reviewer_ids: [user.id],
+ reviewer_ids: [],
state_event: 'close',
label_ids: [label.id],
target_branch: 'target',
@@ -78,7 +78,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(@merge_request).to be_valid
expect(@merge_request.title).to eq('New title')
expect(@merge_request.assignees).to match_array([user])
- expect(@merge_request.reviewers).to match_array([user])
+ expect(@merge_request.reviewers).to match_array([])
expect(@merge_request).to be_closed
expect(@merge_request.labels.count).to eq(1)
expect(@merge_request.labels.first.title).to eq(label.name)
@@ -116,6 +116,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
labels: [],
mentioned_users: [user2],
assignees: [user3],
+ reviewers: [],
milestone: nil,
total_time_spent: 0,
description: "FYI #{user2.to_reference}"
@@ -138,6 +139,35 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(note.note).to include "assigned to #{user.to_reference} and unassigned #{user3.to_reference}"
end
+ context 'with reviewers' do
+ let(:opts) { { reviewer_ids: [user2.id] } }
+
+ context 'when merge_request_reviewers feature is disabled' do
+ before(:context) do
+ stub_feature_flags(merge_request_reviewers: false)
+ end
+
+ it 'does not create a system note about merge_request review request' do
+ note = find_note('review requested from')
+
+ expect(note).to be_nil
+ end
+ end
+
+ context 'when merge_request_reviewers feature is enabled' do
+ before(:context) do
+ stub_feature_flags(merge_request_reviewers: true)
+ end
+
+ it 'creates system note about merge_request review request' do
+ note = find_note('requested review from')
+
+ expect(note).not_to be_nil
+ expect(note.note).to include "requested review from #{user2.to_reference}"
+ end
+ end
+ end
+
it 'creates a resource label event' do
event = merge_request.resource_label_events.last
@@ -467,15 +497,15 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
context 'when reviewers gets changed' do
- before do
+ it 'marks pending todo as done' do
update_merge_request({ reviewer_ids: [user2.id] })
- end
- it 'marks pending todo as done' do
expect(pending_todo.reload).to be_done
end
it 'creates a pending todo for new review request' do
+ update_merge_request({ reviewer_ids: [user2.id] })
+
attributes = {
project: project,
author: user,
@@ -488,6 +518,17 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(Todo.where(attributes).count).to eq 1
end
+
+ it 'sends email reviewer change notifications to old and new reviewers', :sidekiq_might_not_need_inline do
+ merge_request.reviewers = [user2]
+
+ perform_enqueued_jobs do
+ update_merge_request({ reviewer_ids: [user3.id] })
+ end
+
+ should_email(user2)
+ should_email(user3)
+ end
end
context 'when the milestone is removed' do
@@ -542,7 +583,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
context 'when the labels change' do
before do
- Timecop.freeze(1.minute.from_now) do
+ travel_to(1.minute.from_now) do
update_merge_request({ label_ids: [label.id] })
end
end
diff --git a/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb b/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
index aea9c25d104..5dc30c156ac 100644
--- a/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
@@ -67,6 +67,23 @@ RSpec.describe Metrics::Dashboard::CustomDashboardService, :use_clean_rails_memo
.at_least(:once)
end
+ context 'with metric in database' do
+ let!(:prometheus_metric) do
+ create(:prometheus_metric, project: project, identifier: 'metric_a1', group: 'custom')
+ end
+
+ it 'includes metric_id' do
+ dashboard = described_class.new(*service_params).get_dashboard
+
+ metric_id = dashboard[:dashboard][:panel_groups].find { |panel_group| panel_group[:group] == 'Group A' }
+ .fetch(:panels).find { |panel| panel[:title] == 'Super Chart A1' }
+ .fetch(:metrics).find { |metric| metric[:id] == 'metric_a1' }
+ .fetch(:metric_id)
+
+ expect(metric_id).to eq(prometheus_metric.id)
+ end
+ end
+
context 'and the dashboard is then deleted' do
it 'does not return the previously cached dashboard' do
described_class.new(*service_params).get_dashboard
diff --git a/spec/services/milestones/destroy_service_spec.rb b/spec/services/milestones/destroy_service_spec.rb
index 66c5c504c64..dd68471d927 100644
--- a/spec/services/milestones/destroy_service_spec.rb
+++ b/spec/services/milestones/destroy_service_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Milestones::DestroyService do
let(:group_milestone) { create(:milestone, group: group) }
before do
- project.update(namespace: group)
+ project.update!(namespace: group)
group.add_developer(user)
end
diff --git a/spec/services/milestones/promote_service_spec.rb b/spec/services/milestones/promote_service_spec.rb
index f0a34241c74..8f4201d8d94 100644
--- a/spec/services/milestones/promote_service_spec.rb
+++ b/spec/services/milestones/promote_service_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Milestones::PromoteService do
end
it 'raises error if project does not belong to a group' do
- project.update(namespace: user.namespace)
+ project.update!(namespace: user.namespace)
expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError)
end
diff --git a/spec/services/milestones/transfer_service_spec.rb b/spec/services/milestones/transfer_service_spec.rb
index 4a626fe688a..6f4f55b2bd0 100644
--- a/spec/services/milestones/transfer_service_spec.rb
+++ b/spec/services/milestones/transfer_service_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Milestones::TransferService do
new_group.add_maintainer(user)
project.add_maintainer(user)
# simulate project transfer
- project.update(group: new_group)
+ project.update!(group: new_group)
end
context 'without existing milestone at the new group level' do
diff --git a/spec/services/namespace_settings/update_service_spec.rb b/spec/services/namespace_settings/update_service_spec.rb
new file mode 100644
index 00000000000..b588bf2034d
--- /dev/null
+++ b/spec/services/namespace_settings/update_service_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NamespaceSettings::UpdateService do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:settings) { {} }
+
+ subject(:service) { described_class.new(user, group, settings) }
+
+ describe "#execute" do
+ context "group has no namespace_settings" do
+ before do
+ group.namespace_settings.destroy!
+ end
+
+ it "builds out a new namespace_settings record" do
+ expect do
+ service.execute
+ end.to change { NamespaceSetting.count }.by(1)
+ end
+ end
+
+ context "group has a namespace_settings" do
+ before do
+ service.execute
+ end
+
+ it "doesn't create a new namespace_setting record" do
+ expect do
+ service.execute
+ end.not_to change { NamespaceSetting.count }
+ end
+ end
+
+ context "updating :default_branch_name" do
+ let(:example_branch_name) { "example_branch_name" }
+ let(:settings) { { default_branch_name: example_branch_name } }
+
+ it "changes settings" do
+ expect { service.execute }
+ .to change { group.namespace_settings.default_branch_name }
+ .from(nil).to(example_branch_name)
+ end
+ end
+ end
+end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 7c0d4b756bd..1e5536a2d0b 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -163,14 +163,6 @@ RSpec.describe Notes::CreateService do
expect(note.note_diff_file).to be_present
expect(note.diff_note_positions).to be_present
end
-
- it 'does not create diff positions merge_ref_head_comments is disabled' do
- stub_feature_flags(merge_ref_head_comments: false)
-
- expect(Discussions::CaptureDiffNotePositionService).not_to receive(:new)
-
- described_class.new(project_with_repo, user, new_opts).execute
- end
end
context 'when DiffNote is a reply' do
@@ -437,7 +429,7 @@ RSpec.describe Notes::CreateService do
expect do
existing_note
- Timecop.freeze(Time.current + 1.minute) { subject }
+ travel_to(Time.current + 1.minute) { subject }
existing_note.reload
end.to change { existing_note.type }.from(nil).to('DiscussionNote')
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 47b8ba0cd72..66efdf8abe7 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Notes::UpdateService do
end
it 'does not update the note when params is blank' do
- Timecop.freeze(1.day.from_now) do
+ travel_to(1.day.from_now) do
expect { update_note({}) }.not_to change { note.reload.updated_at }
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 03e24524f9f..caa9961424e 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -150,6 +150,16 @@ RSpec.describe NotificationService, :mailer do
end
end
+ shared_examples 'participating by reviewer notification' do
+ it 'emails the participant' do
+ issuable.reviewers << participant
+
+ notification_trigger
+
+ should_email(participant)
+ end
+ end
+
shared_examples_for 'participating notifications' do
it_behaves_like 'participating by note notification'
it_behaves_like 'participating by author notification'
@@ -1778,6 +1788,60 @@ RSpec.describe NotificationService, :mailer do
end
end
+ describe '#changed_reviewer_of_merge_request' do
+ let(:merge_request) { create(:merge_request, author: author, source_project: project, reviewers: [reviewer], description: 'cc @participant') }
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:reviewer) { create(:user) }
+
+ before do
+ update_custom_notification(:change_reviewer_merge_request, @u_guest_custom, resource: project)
+ update_custom_notification(:change_reviewer_merge_request, @u_custom_global)
+ end
+
+ it 'sends emails to relevant users only', :aggregate_failures do
+ notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer])
+
+ merge_request.reviewers.each { |reviewer| should_email(reviewer) }
+ should_email(merge_request.author)
+ should_email(@u_watcher)
+ should_email(@u_participant_mentioned)
+ should_email(@subscriber)
+ should_email(@watcher_and_subscriber)
+ should_email(@u_guest_watcher)
+ should_email(@u_guest_custom)
+ should_email(@u_custom_global)
+ should_not_email(@unsubscriber)
+ should_not_email(@u_participating)
+ should_not_email(@u_disabled)
+ should_not_email(@u_lazy_participant)
+ end
+
+ it 'adds "review requested" reason for new reviewer' do
+ notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer])
+
+ merge_request.reviewers.each do |assignee|
+ email = find_email_for(assignee)
+
+ expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::REVIEW_REQUESTED)
+ end
+ end
+
+ context 'participating notifications with reviewers' do
+ let(:participant) { create(:user, username: 'user-participant') }
+ let(:issuable) { merge_request }
+ let(:notification_trigger) { notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer]) }
+
+ it_behaves_like 'participating notifications'
+ it_behaves_like 'participating by reviewer notification'
+ end
+
+ it_behaves_like 'project emails are disabled' do
+ let(:notification_target) { merge_request }
+ let(:notification_trigger) { notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer]) }
+ end
+ end
+
describe '#push_to_merge_request' do
before do
update_custom_notification(:push_to_merge_request, @u_guest_custom, resource: project)
@@ -2229,6 +2293,25 @@ RSpec.describe NotificationService, :mailer do
end
end
+ describe '#invite_member_reminder' do
+ let_it_be(:group_member) { create(:group_member) }
+
+ subject { notification.invite_member_reminder(group_member, 'token', 0) }
+
+ it 'calls the Notify.invite_member_reminder method with the right params' do
+ expect(Notify).to receive(:member_invited_reminder_email).with('Group', group_member.id, 'token', 0).at_least(:once).and_call_original
+
+ subject
+ end
+
+ it 'sends exactly one email' do
+ subject
+
+ expect_delivery_jobs_count(1)
+ expect_enqueud_email('Group', group_member.id, 'token', 0, mail: 'member_invited_reminder_email')
+ end
+ end
+
describe 'GroupMember', :deliver_mails_inline do
let(:added_user) { create(:user) }
@@ -3018,32 +3101,25 @@ RSpec.describe NotificationService, :mailer do
describe '#prometheus_alerts_fired' do
let!(:project) { create(:project) }
- let!(:prometheus_alert) { create(:prometheus_alert, project: project) }
let!(:master) { create(:user) }
let!(:developer) { create(:user) }
+ let(:alert_attributes) { build(:alert_management_alert, project: project).attributes }
before do
project.add_maintainer(master)
end
it 'sends the email to owners and masters' do
- expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, master.id, prometheus_alert).and_call_original
- expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, project.owner.id, prometheus_alert).and_call_original
- expect(Notify).not_to receive(:prometheus_alert_fired_email).with(project.id, developer.id, prometheus_alert)
+ expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, master.id, alert_attributes).and_call_original
+ expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, project.owner.id, alert_attributes).and_call_original
+ expect(Notify).not_to receive(:prometheus_alert_fired_email).with(project.id, developer.id, alert_attributes)
- subject.prometheus_alerts_fired(prometheus_alert.project, [prometheus_alert])
+ subject.prometheus_alerts_fired(project, [alert_attributes])
end
it_behaves_like 'project emails are disabled' do
- before do
- allow_next_instance_of(::Gitlab::Alerting::Alert) do |instance|
- allow(instance).to receive(:valid?).and_return(true)
- end
- end
-
- let(:alert_params) { { 'labels' => { 'gitlab_alert_id' => 'unknown' } } }
- let(:notification_target) { prometheus_alert.project }
- let(:notification_trigger) { subject.prometheus_alerts_fired(prometheus_alert.project, [alert_params]) }
+ let(:notification_target) { project }
+ let(:notification_trigger) { subject.prometheus_alerts_fired(project, [alert_attributes]) }
around do |example|
perform_enqueued_jobs { example.run }
diff --git a/spec/services/packages/composer/composer_json_service_spec.rb b/spec/services/packages/composer/composer_json_service_spec.rb
index 3996fcea679..378016a6ffb 100644
--- a/spec/services/packages/composer/composer_json_service_spec.rb
+++ b/spec/services/packages/composer/composer_json_service_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Packages::Composer::ComposerJsonService do
let(:json) { '{ name": "package-name"}' }
it 'raises an error' do
- expect { subject }.to raise_error(/Invalid/)
+ expect { subject }.to raise_error(described_class::InvalidJson, /Invalid/)
end
end
end
@@ -32,7 +32,7 @@ RSpec.describe Packages::Composer::ComposerJsonService do
let(:project) { create(:project, :repository) }
it 'raises an error' do
- expect { subject }.to raise_error(/not found/)
+ expect { subject }.to raise_error(described_class::InvalidJson, /not found/)
end
end
end
diff --git a/spec/services/packages/create_event_service_spec.rb b/spec/services/packages/create_event_service_spec.rb
new file mode 100644
index 00000000000..7e66b430a8c
--- /dev/null
+++ b/spec/services/packages/create_event_service_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::CreateEventService do
+ let(:scope) { 'container' }
+ let(:event_name) { 'push_package' }
+
+ let(:params) do
+ {
+ scope: scope,
+ event_name: event_name
+ }
+ end
+
+ subject { described_class.new(nil, user, params).execute }
+
+ describe '#execute' do
+ shared_examples 'package event creation' do |originator_type, expected_scope|
+ it 'creates the event' do
+ expect { subject }.to change { Packages::Event.count }.by(1)
+
+ expect(subject.originator_type).to eq(originator_type)
+ expect(subject.originator).to eq(user&.id)
+ expect(subject.event_scope).to eq(expected_scope)
+ expect(subject.event_type).to eq(event_name)
+ end
+ end
+
+ context 'with a user' do
+ let(:user) { create(:user) }
+
+ it_behaves_like 'package event creation', 'user', 'container'
+ end
+
+ context 'with a deploy token' do
+ let(:user) { create(:deploy_token) }
+
+ it_behaves_like 'package event creation', 'deploy_token', 'container'
+ end
+
+ context 'with no user' do
+ let(:user) { nil }
+
+ it_behaves_like 'package event creation', 'guest', 'container'
+ end
+
+ context 'with a package as scope' do
+ let(:user) { nil }
+ let(:scope) { create(:npm_package) }
+
+ it_behaves_like 'package event creation', 'guest', 'npm'
+ end
+ end
+end
diff --git a/spec/services/packages/generic/create_package_file_service_spec.rb b/spec/services/packages/generic/create_package_file_service_spec.rb
new file mode 100644
index 00000000000..0ae109ef996
--- /dev/null
+++ b/spec/services/packages/generic/create_package_file_service_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Generic::CreatePackageFileService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ describe '#execute' do
+ let(:sha256) { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ let(:temp_file) { Tempfile.new("test") }
+ let(:file) { UploadedFile.new(temp_file.path, sha256: sha256) }
+ let(:package) { create(:generic_package, project: project) }
+ let(:params) do
+ {
+ package_name: 'mypackage',
+ package_version: '0.0.1',
+ file: file,
+ file_name: 'myfile.tar.gz.1'
+ }
+ end
+
+ before do
+ FileUtils.touch(temp_file)
+ end
+
+ after do
+ FileUtils.rm_f(temp_file)
+ end
+
+ it 'creates package file' do
+ package_service = double
+ package_params = {
+ name: params[:package_name],
+ version: params[:package_version],
+ build: params[:build]
+ }
+ expect(::Packages::Generic::FindOrCreatePackageService).to receive(:new).with(project, user, package_params).and_return(package_service)
+ expect(package_service).to receive(:execute).and_return(package)
+
+ service = described_class.new(project, user, params)
+
+ expect { service.execute }.to change { package.package_files.count }.by(1)
+
+ package_file = package.package_files.last
+ aggregate_failures do
+ expect(package_file.package).to eq(package)
+ expect(package_file.file_name).to eq('myfile.tar.gz.1')
+ expect(package_file.size).to eq(file.size)
+ expect(package_file.file_sha256).to eq(sha256)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/generic/find_or_create_package_service_spec.rb b/spec/services/packages/generic/find_or_create_package_service_spec.rb
new file mode 100644
index 00000000000..5a9b8b03279
--- /dev/null
+++ b/spec/services/packages/generic/find_or_create_package_service_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Generic::FindOrCreatePackageService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:ci_build) { create(:ci_build, :running, user: user) }
+
+ let(:params) do
+ {
+ name: 'mypackage',
+ version: '0.0.1'
+ }
+ end
+
+ describe '#execute' do
+ context 'when package does not exist yet' do
+ it 'creates package' do
+ service = described_class.new(project, user, params)
+
+ expect { service.execute }.to change { project.packages.generic.count }.by(1)
+
+ package = project.packages.generic.last
+
+ aggregate_failures do
+ expect(package.creator).to eq(user)
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_info).to be_nil
+ end
+ end
+
+ it 'creates package and package build info when build is provided' do
+ service = described_class.new(project, user, params.merge(build: ci_build))
+
+ expect { service.execute }.to change { project.packages.generic.count }.by(1)
+
+ package = project.packages.generic.last
+
+ aggregate_failures do
+ expect(package.creator).to eq(user)
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_info.pipeline).to eq(ci_build.pipeline)
+ end
+ end
+ end
+
+ context 'when package already exists' do
+ let!(:package) { project.packages.generic.create!(params) }
+
+ context 'when package was created manually' do
+ it 'finds the package and does not create package build info even if build is provided' do
+ service = described_class.new(project, user, params.merge(build: ci_build))
+
+ expect do
+ found_package = service.execute
+
+ expect(found_package).to eq(package)
+ end.not_to change { project.packages.generic.count }
+
+ expect(package.reload.build_info).to be_nil
+ end
+ end
+
+ context 'when package was created by pipeline' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ before do
+ package.create_build_info!(pipeline: pipeline)
+ end
+
+ it 'finds the package and does not change package build info even if build is provided' do
+ service = described_class.new(project, user, params.merge(build: ci_build))
+
+ expect do
+ found_package = service.execute
+
+ expect(found_package).to eq(package)
+ end.not_to change { project.packages.generic.count }
+
+ expect(package.reload.build_info.pipeline).to eq(pipeline)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index f03e1ed0e22..a8db87e48d0 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -243,7 +243,7 @@ RSpec.describe Projects::AfterRenameService do
def service_execute
# AfterRenameService is called by UpdateService after a successful model.update
# the initialization will include before and after paths values
- project.update(path: path_after_rename)
+ project.update!(path: path_after_rename)
described_class.new(project, path_before: path_before_rename, full_path_before: full_path_before_rename).execute
end
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index 77a0e330109..809b12910a1 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Projects::Alerting::NotifyService do
title: payload_raw.fetch(:title),
started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
severity: payload_raw.fetch(:severity),
- status: AlertManagement::Alert::STATUSES[:triggered],
+ status: AlertManagement::Alert.status_value(:triggered),
events: 1,
hosts: payload_raw.fetch(:hosts),
payload: payload_raw.with_indifferent_access,
@@ -89,6 +89,7 @@ RSpec.describe Projects::Alerting::NotifyService do
it 'creates a system note corresponding to alert creation' do
expect { subject }.to change(Note, :count).by(1)
+ expect(Note.last.note).to include(payload_raw.fetch(:monitoring_tool))
end
context 'existing alert with same fingerprint' do
@@ -127,23 +128,8 @@ RSpec.describe Projects::Alerting::NotifyService do
let(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint_sha) }
let(:issue) { alert.issue }
- context 'state_tracking is enabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: true)
- end
-
- it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') }
- it { expect { subject }.to change(ResourceStateEvent, :count).by(1) }
- end
-
- context 'state_tracking is disabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
-
- it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') }
- it { expect { subject }.to change(Note, :count).by(1) }
- end
+ it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') }
+ it { expect { subject }.to change(ResourceStateEvent, :count).by(1) }
end
end
end
@@ -194,7 +180,7 @@ RSpec.describe Projects::Alerting::NotifyService do
title: payload_raw.fetch(:title),
started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
severity: 'critical',
- status: AlertManagement::Alert::STATUSES[:triggered],
+ status: AlertManagement::Alert.status_value(:triggered),
events: 1,
hosts: [],
payload: payload_raw.with_indifferent_access,
@@ -208,15 +194,19 @@ RSpec.describe Projects::Alerting::NotifyService do
environment_id: nil
)
end
+
+ it 'creates a system note corresponding to alert creation' do
+ expect { subject }.to change(Note, :count).by(1)
+ expect(Note.last.note).to include('Generic Alert Endpoint')
+ end
end
end
context 'with overlong payload' do
- let(:payload_raw) do
- {
- title: 'a' * Gitlab::Utils::DeepSize::DEFAULT_MAX_SIZE,
- start_time: starts_at.rfc3339
- }
+ let(:deep_size_object) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
+
+ before do
+ allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
end
it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
@@ -230,17 +220,6 @@ RSpec.describe Projects::Alerting::NotifyService do
it_behaves_like 'processes incident issues'
- context 'with an invalid payload' do
- before do
- allow(Gitlab::Alerting::NotificationPayloadParser)
- .to receive(:call)
- .and_raise(Gitlab::Alerting::NotificationPayloadParser::BadPayloadError)
- end
-
- it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
- it_behaves_like 'does not an create alert management alert'
- end
-
context 'when alert already exists' do
let(:fingerprint_sha) { Digest::SHA1.hexdigest(fingerprint) }
let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint_sha) }
diff --git a/spec/services/projects/autocomplete_service_spec.rb b/spec/services/projects/autocomplete_service_spec.rb
index 336aa37096a..aff1aa41091 100644
--- a/spec/services/projects/autocomplete_service_spec.rb
+++ b/spec/services/projects/autocomplete_service_spec.rb
@@ -123,7 +123,7 @@ RSpec.describe Projects::AutocompleteService do
let!(:subgroup_milestone) { create(:milestone, group: subgroup) }
before do
- project.update(namespace: subgroup)
+ project.update!(namespace: subgroup)
end
it 'includes project milestones and all ancestors milestones' do
@@ -138,7 +138,7 @@ RSpec.describe Projects::AutocompleteService do
def expect_labels_to_equal(labels, expected_labels)
expect(labels.size).to eq(expected_labels.size)
extract_title = lambda { |label| label['title'] }
- expect(labels.map(&extract_title)).to eq(expected_labels.map(&extract_title))
+ expect(labels.map(&extract_title)).to match_array(expected_labels.map(&extract_title))
end
let(:user) { create(:user) }
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 2f2474f2681..8ddcb8ce660 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
before do
project.add_maintainer(user)
- stub_feature_flags(container_registry_cleanup: true)
-
stub_container_registry_config(enabled: true)
stub_container_registry_tags(
diff --git a/spec/services/projects/container_repository/delete_tags_service_spec.rb b/spec/services/projects/container_repository/delete_tags_service_spec.rb
index 54375193067..c3ae26b1f05 100644
--- a/spec/services/projects/container_repository/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/delete_tags_service_spec.rb
@@ -87,59 +87,35 @@ RSpec.describe Projects::ContainerRepository::DeleteTagsService do
RSpec.shared_examples 'supporting fast delete' do
context 'when the registry supports fast delete' do
- context 'and the feature is enabled' do
- before do
- allow(repository.client).to receive(:supports_tag_delete?).and_return(true)
- end
-
- it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::Gitlab::DeleteTagsService
-
- it_behaves_like 'handling invalid params'
+ before do
+ allow(repository.client).to receive(:supports_tag_delete?).and_return(true)
+ end
- context 'with the real service' do
- before do
- stub_delete_reference_requests(tags)
- expect_delete_tag_by_names(tags)
- end
+ it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::Gitlab::DeleteTagsService
- it { is_expected.to include(status: :success) }
+ it_behaves_like 'handling invalid params'
- it_behaves_like 'logging a success response'
+ context 'with the real service' do
+ before do
+ stub_delete_reference_requests(tags)
+ expect_delete_tag_by_names(tags)
end
- context 'with a timeout error' do
- before do
- expect_next_instance_of(::Projects::ContainerRepository::Gitlab::DeleteTagsService) do |delete_service|
- expect(delete_service).to receive(:delete_tags).and_raise(::Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError)
- end
- end
-
- it { is_expected.to include(status: :error, message: 'timeout while deleting tags') }
+ it { is_expected.to include(status: :success) }
- it_behaves_like 'logging an error response', message: 'timeout while deleting tags'
- end
+ it_behaves_like 'logging a success response'
end
- context 'and the feature is disabled' do
+ context 'with a timeout error' do
before do
- stub_feature_flags(container_registry_fast_tag_delete: false)
- end
-
- it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::ThirdParty::DeleteTagsService
-
- it_behaves_like 'handling invalid params'
-
- context 'with the real service' do
- before do
- stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
- tags.each { |tag| stub_put_manifest_request(tag) }
- expect_delete_tag_by_digest('sha256:dummy')
+ expect_next_instance_of(::Projects::ContainerRepository::Gitlab::DeleteTagsService) do |delete_service|
+ expect(delete_service).to receive(:delete_tags).and_raise(::Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError)
end
+ end
- it { is_expected.to include(status: :success) }
+ it { is_expected.to include(status: :error, message: 'timeout while deleting tags') }
- it_behaves_like 'logging a success response'
- end
+ it_behaves_like 'logging an error response', message: 'timeout while deleting tags'
end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index e1df8700795..d959cc87901 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -14,15 +14,30 @@ RSpec.describe Projects::CreateService, '#execute' do
}
end
- it 'creates labels on Project creation if there are templates' do
- Label.create(title: "bug", template: true)
- project = create_project(user, opts)
+ context 'with labels' do
+ subject(:project) { create_project(user, opts) }
+
+ before_all do
+ Label.create!(title: 'bug', template: true)
+ end
+
+ it 'creates labels on project creation' do
+ created_label = project.labels.last
- created_label = project.reload.labels.last
+ expect(created_label.type).to eq('ProjectLabel')
+ expect(created_label.project_id).to eq(project.id)
+ expect(created_label.title).to eq('bug')
+ end
+
+ context 'using gitlab project import' do
+ before do
+ opts[:import_type] = 'gitlab_project'
+ end
- expect(created_label.type).to eq('ProjectLabel')
- expect(created_label.project_id).to eq(project.id)
- expect(created_label.title).to eq('bug')
+ it 'does not create labels on project creation' do
+ expect(project.labels.size).to eq(0)
+ end
+ end
end
context 'user namespace' do
@@ -59,10 +74,6 @@ RSpec.describe Projects::CreateService, '#execute' do
context "admin creates project with other user's namespace_id" do
it 'sets the correct permissions' do
admin = create(:admin)
- opts = {
- name: 'GitLab',
- namespace_id: user.namespace.id
- }
project = create_project(admin, opts)
expect(project).to be_persisted
@@ -487,18 +498,7 @@ RSpec.describe Projects::CreateService, '#execute' do
describe 'create service for the project' do
subject(:project) { create_project(user, opts) }
- context 'when there is an active instance-level and an active template integration' do
- let!(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
- let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
-
- it 'creates a service from the instance-level integration' do
- expect(project.services.count).to eq(1)
- expect(project.services.first.api_url).to eq(instance_integration.api_url)
- expect(project.services.first.inherit_from_id).to eq(instance_integration.id)
- end
- end
-
- context 'when there is an active service template' do
+ context 'with an active service template' do
let!(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
it 'creates a service from the template' do
@@ -506,6 +506,60 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.services.first.api_url).to eq(template_integration.api_url)
expect(project.services.first.inherit_from_id).to be_nil
end
+
+ context 'with an active instance-level integration' do
+ let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+
+ it 'creates a service from the instance-level integration' do
+ expect(project.services.count).to eq(1)
+ expect(project.services.first.api_url).to eq(instance_integration.api_url)
+ expect(project.services.first.inherit_from_id).to eq(instance_integration.id)
+ end
+
+ context 'with an active group-level integration' do
+ let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let!(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user)
+ end
+ end
+
+ let(:opts) do
+ {
+ name: 'GitLab',
+ namespace_id: group.id
+ }
+ end
+
+ it 'creates a service from the group-level integration' do
+ expect(project.services.count).to eq(1)
+ expect(project.services.first.api_url).to eq(group_integration.api_url)
+ expect(project.services.first.inherit_from_id).to eq(group_integration.id)
+ end
+
+ context 'with an active subgroup' do
+ let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup) do
+ create(:group, parent: group).tap do |subgroup|
+ subgroup.add_owner(user)
+ end
+ end
+
+ let(:opts) do
+ {
+ name: 'GitLab',
+ namespace_id: subgroup.id
+ }
+ end
+
+ it 'creates a service from the subgroup-level integration' do
+ expect(project.services.count).to eq(1)
+ expect(project.services.first.api_url).to eq(subgroup_integration.api_url)
+ expect(project.services.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
+ end
+ end
+ end
end
context 'when there is an invalid integration' do
@@ -739,4 +793,100 @@ RSpec.describe Projects::CreateService, '#execute' do
def create_project(user, opts)
Projects::CreateService.new(user, opts).execute
end
+
+ context 'shared Runners config' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create :user }
+
+ context 'when parent group is present' do
+ let_it_be(:group) do
+ create(:group) do |group|
+ group.add_owner(user)
+ end
+ end
+
+ before do
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
+ end
+
+ user.refresh_authorized_projects # Ensure cache is warm
+ end
+
+ context 'default value based on parent group setting' do
+ where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
+ 'enabled' | nil | true
+ 'disabled_with_override' | nil | false
+ 'disabled_and_unoverridable' | nil | false
+ end
+
+ with_them do
+ it 'creates project following the parent config' do
+ params = opts.merge(namespace_id: group.id)
+ params = params.merge(shared_runners_enabled: desired_config_for_new_project) unless desired_config_for_new_project.nil?
+ project = create_project(user, params)
+
+ expect(project).to be_valid
+ expect(project.shared_runners_enabled).to eq(expected_result_for_project)
+ end
+ end
+ end
+
+ context 'parent group is present and allows desired config' do
+ where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
+ 'enabled' | true | true
+ 'enabled' | false | false
+ 'disabled_with_override' | false | false
+ 'disabled_with_override' | true | true
+ 'disabled_and_unoverridable' | false | false
+ end
+
+ with_them do
+ it 'creates project following the parent config' do
+ params = opts.merge(namespace_id: group.id, shared_runners_enabled: desired_config_for_new_project)
+ project = create_project(user, params)
+
+ expect(project).to be_valid
+ expect(project.shared_runners_enabled).to eq(expected_result_for_project)
+ end
+ end
+ end
+
+ context 'parent group is present and disallows desired config' do
+ where(:shared_runners_setting, :desired_config_for_new_project) do
+ 'disabled_and_unoverridable' | true
+ end
+
+ with_them do
+ it 'does not create project' do
+ params = opts.merge(namespace_id: group.id, shared_runners_enabled: desired_config_for_new_project)
+ project = create_project(user, params)
+
+ expect(project.persisted?).to eq(false)
+ expect(project).to be_invalid
+ expect(project.errors[:shared_runners_enabled]).to include('cannot be enabled because parent group does not allow it')
+ end
+ end
+ end
+ end
+
+ context 'parent group is not present' do
+ where(:desired_config, :expected_result) do
+ true | true
+ false | false
+ nil | true
+ end
+
+ with_them do
+ it 'follows desired config' do
+ opts[:shared_runners_enabled] = desired_config unless desired_config.nil?
+ project = create_project(user, opts)
+
+ expect(project).to be_valid
+ expect(project.shared_runners_enabled).to eq(expected_result)
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index a3711c9e17f..f0f09218b06 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
context 'when project has remote mirrors' do
let!(:project) do
create(:project, :repository, namespace: user.namespace).tap do |project|
- project.remote_mirrors.create(url: 'http://test.com')
+ project.remote_mirrors.create!(url: 'http://test.com')
end
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index 166a2dae55b..555f2f5a5e5 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -179,7 +179,7 @@ RSpec.describe Projects::ForkService do
context "when origin has git depth specified" do
before do
- @from_project.update(ci_default_git_depth: 42)
+ @from_project.update!(ci_default_git_depth: 42)
end
it "inherits default_git_depth from the origin project" do
@@ -201,7 +201,7 @@ RSpec.describe Projects::ForkService do
context "when project has restricted visibility level" do
context "and only one visibility level is restricted" do
before do
- @from_project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ @from_project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
end
diff --git a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
index 969381b8748..86e3fb3820c 100644
--- a/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
+++ b/spec/services/projects/hashed_storage/base_attachment_service_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Projects::HashedStorage::BaseAttachmentService do
describe '#move_folder!' do
context 'when old_path is not a directory' do
it 'adds information to the logger and returns true' do
- Tempfile.create do |old_path|
+ Tempfile.create do |old_path| # rubocop:disable Rails/SaveBang
new_path = "#{old_path}-new"
expect(subject.send(:move_folder!, old_path, new_path)).to be_truthy
diff --git a/spec/services/projects/move_access_service_spec.rb b/spec/services/projects/move_access_service_spec.rb
index de3871414af..02f80988dd1 100644
--- a/spec/services/projects/move_access_service_spec.rb
+++ b/spec/services/projects/move_access_service_spec.rb
@@ -17,9 +17,9 @@ RSpec.describe Projects::MoveAccessService do
project_with_access.add_maintainer(maintainer_user)
project_with_access.add_developer(developer_user)
project_with_access.add_reporter(reporter_user)
- project_with_access.project_group_links.create(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
- project_with_access.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
- project_with_access.project_group_links.create(group: reporter_group, group_access: Gitlab::Access::REPORTER)
+ project_with_access.project_group_links.create!(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
+ project_with_access.project_group_links.create!(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
+ project_with_access.project_group_links.create!(group: reporter_group, group_access: Gitlab::Access::REPORTER)
end
subject { described_class.new(target_project, user) }
@@ -97,7 +97,7 @@ RSpec.describe Projects::MoveAccessService do
end
it 'does not remove remaining group links' do
- target_project.project_group_links.create(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
+ target_project.project_group_links.create!(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
subject.execute(project_with_access, options)
diff --git a/spec/services/projects/move_project_group_links_service_spec.rb b/spec/services/projects/move_project_group_links_service_spec.rb
index 196a8f2b339..6304eded8d3 100644
--- a/spec/services/projects/move_project_group_links_service_spec.rb
+++ b/spec/services/projects/move_project_group_links_service_spec.rb
@@ -14,9 +14,9 @@ RSpec.describe Projects::MoveProjectGroupLinksService do
describe '#execute' do
before do
- project_with_groups.project_group_links.create(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
- project_with_groups.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
- project_with_groups.project_group_links.create(group: reporter_group, group_access: Gitlab::Access::REPORTER)
+ project_with_groups.project_group_links.create!(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
+ project_with_groups.project_group_links.create!(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
+ project_with_groups.project_group_links.create!(group: reporter_group, group_access: Gitlab::Access::REPORTER)
end
it 'moves the group links from one project to another' do
@@ -30,8 +30,8 @@ RSpec.describe Projects::MoveProjectGroupLinksService do
end
it 'does not move existent group links in the current project' do
- target_project.project_group_links.create(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
- target_project.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
+ target_project.project_group_links.create!(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
+ target_project.project_group_links.create!(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
expect(project_with_groups.project_group_links.count).to eq 3
expect(target_project.project_group_links.count).to eq 2
@@ -55,8 +55,8 @@ RSpec.describe Projects::MoveProjectGroupLinksService do
let(:options) { { remove_remaining_elements: false } }
it 'does not remove remaining project group links' do
- target_project.project_group_links.create(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
- target_project.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
+ target_project.project_group_links.create!(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
+ target_project.project_group_links.create!(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
subject.execute(project_with_groups, options)
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 8a538bc67ed..018bfa8ef61 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::Operations::UpdateService do
+ let_it_be_with_refind(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- let_it_be(:project, refind: true) { create(:project) }
let(:result) { subject.execute }
@@ -12,7 +12,7 @@ RSpec.describe Projects::Operations::UpdateService do
describe '#execute' do
context 'alerting setting' do
- before do
+ before_all do
project.add_maintainer(user)
end
@@ -430,5 +430,93 @@ RSpec.describe Projects::Operations::UpdateService do
end
end
end
+
+ context 'tracing setting' do
+ context 'with valid params' do
+ let(:params) do
+ {
+ tracing_setting_attributes: {
+ external_url: 'http://some-url.com'
+ }
+ }
+ end
+
+ context 'with an existing setting' do
+ before do
+ create(:project_tracing_setting, project: project)
+ end
+
+ shared_examples 'setting deletion' do
+ let!(:original_params) { params.deep_dup }
+
+ it 'deletes the setting' do
+ expect(result[:status]).to eq(:success)
+ expect(project.reload.tracing_setting).to be_nil
+ end
+
+ it 'does not modify original params' do
+ subject.execute
+
+ expect(params).to eq(original_params)
+ end
+ end
+
+ it 'updates the setting' do
+ expect(project.tracing_setting).not_to be_nil
+
+ expect(result[:status]).to eq(:success)
+ expect(project.reload.tracing_setting.external_url)
+ .to eq('http://some-url.com')
+ end
+
+ context 'with missing external_url' do
+ before do
+ params[:tracing_setting_attributes].delete(:external_url)
+ end
+
+ it_behaves_like 'setting deletion'
+ end
+
+ context 'with empty external_url' do
+ before do
+ params[:tracing_setting_attributes][:external_url] = ''
+ end
+
+ it_behaves_like 'setting deletion'
+ end
+
+ context 'with blank external_url' do
+ before do
+ params[:tracing_setting_attributes][:external_url] = ' '
+ end
+
+ it_behaves_like 'setting deletion'
+ end
+ end
+
+ context 'without an existing setting' do
+ it 'creates a setting' do
+ expect(project.tracing_setting).to be_nil
+
+ expect(result[:status]).to eq(:success)
+ expect(project.reload.tracing_setting.external_url)
+ .to eq('http://some-url.com')
+ end
+ end
+ end
+
+ context 'with empty params' do
+ let(:params) { {} }
+
+ let!(:tracing_setting) do
+ create(:project_tracing_setting, project: project)
+ end
+
+ it 'does nothing' do
+ expect(result[:status]).to eq(:success)
+ expect(project.reload.tracing_setting).to eq(tracing_setting)
+ end
+ end
+ end
end
end
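`let_it_be_with_refind` and `before_all`, introduced in the hunk above, come from the test-prof gem's let_it_be recipe; GitLab wires up the `_with_refind`/`_with_reload` aliases in its spec support files. A hedged sketch of that wiring, based on test-prof's documented `alias_to` configuration (the suite's actual support file and FactoryBot factories are assumed here and may differ):

    # Rough sketch, not the GitLab support file itself.
    require 'test_prof/recipes/rspec/let_it_be'

    TestProf::LetItBe.configure do |config|
      # let_it_be_with_refind(:x) { ... } == let_it_be(:x, refind: true) { ... }
      config.alias_to :let_it_be_with_refind, refind: true
      # let_it_be_with_reload(:x) { ... } == let_it_be(:x, reload: true) { ... }
      config.alias_to :let_it_be_with_reload, reload: true
    end

    RSpec.describe 'sharing one expensive record across examples' do
      # Created once for the whole group; :refind re-fetches the record from
      # the database per example so mutations do not leak between examples.
      let_it_be_with_refind(:project) { create(:project) }

      # Runs once before all examples in the group, unlike `before`, which
      # runs before each example.
      before_all do
        project.add_maintainer(create(:user))
      end
    end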
diff --git a/spec/services/projects/overwrite_project_service_spec.rb b/spec/services/projects/overwrite_project_service_spec.rb
index a03746d0271..cc6a863a11d 100644
--- a/spec/services/projects/overwrite_project_service_spec.rb
+++ b/spec/services/projects/overwrite_project_service_spec.rb
@@ -111,9 +111,9 @@ RSpec.describe Projects::OverwriteProjectService do
create_list(:deploy_keys_project, 2, project: project_from)
create_list(:notification_setting, 2, source: project_from)
create_list(:users_star_project, 2, project: project_from)
- project_from.project_group_links.create(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
- project_from.project_group_links.create(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
- project_from.project_group_links.create(group: reporter_group, group_access: Gitlab::Access::REPORTER)
+ project_from.project_group_links.create!(group: maintainer_group, group_access: Gitlab::Access::MAINTAINER)
+ project_from.project_group_links.create!(group: developer_group, group_access: Gitlab::Access::DEVELOPER)
+ project_from.project_group_links.create!(group: reporter_group, group_access: Gitlab::Access::REPORTER)
project_from.add_maintainer(maintainer_user)
project_from.add_developer(developer_user)
project_from.add_reporter(reporter_user)
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index a0e83fb4a21..3ae96d7a5ab 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -314,6 +314,37 @@ RSpec.describe Projects::TransferService do
end
end
+ context 'shared Runners group level configurations' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_shared_runners_enabled, :shared_runners_setting, :expected_shared_runners_enabled) do
+ true | 'disabled_and_unoverridable' | false
+ false | 'disabled_and_unoverridable' | false
+ true | 'disabled_with_override' | true
+ false | 'disabled_with_override' | false
+ true | 'enabled' | true
+ false | 'enabled' | false
+ end
+
+ with_them do
+ let(:project) { create(:project, :public, :repository, namespace: user.namespace, shared_runners_enabled: project_shared_runners_enabled) }
+ let(:group) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ expect_next_found_instance_of(Group) do |group|
+ expect(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
+ end
+
+ execute_transfer
+ end
+
+ it 'updates shared runners based on the parent group' do
+ expect(project.shared_runners_enabled).to eq(expected_shared_runners_enabled)
+ end
+ end
+ end
+
context 'missing group labels applied to issues or merge requests' do
it 'delegates transfer to Labels::TransferService' do
group.add_owner(user)
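The shared-runners table added above relies on RSpec::Parameterized::TableSyntax from the rspec-parameterized gem: every row of the `where` table becomes its own generated context, and `with_them` defines the examples evaluated against that row's values. A minimal, self-contained illustration that is independent of the GitLab suite (the values are invented):

    require 'rspec-parameterized'

    RSpec.describe 'table syntax expansion' do
      using RSpec::Parameterized::TableSyntax

      where(:runners_enabled, :group_setting, :expected) do
        true  | 'enabled'                    | true
        true  | 'disabled_and_unoverridable' | false
        false | 'disabled_with_override'     | false
      end

      with_them do
        # One example per row; the row values are exposed as plain methods.
        it 'evaluates the row' do
          effective = runners_enabled && group_setting != 'disabled_and_unoverridable'

          expect(effective).to eq(expected)
        end
      end
    end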
diff --git a/spec/services/projects/unlink_fork_service_spec.rb b/spec/services/projects/unlink_fork_service_spec.rb
index 073e2e09397..2a8965e62ce 100644
--- a/spec/services/projects/unlink_fork_service_spec.rb
+++ b/spec/services/projects/unlink_fork_service_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Projects::UnlinkForkService, :use_clean_rails_memory_store_cachin
context 'when the original project was deleted' do
it 'does not fail when the original project is deleted' do
source = forked_project.forked_from_project
- source.destroy
+ source.destroy!
forked_project.reload
expect { subject.execute }.not_to raise_error
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index bfb3cbb0131..d3eb84a3137 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -16,8 +16,6 @@ RSpec.describe Projects::UpdatePagesService do
subject { described_class.new(project, build) }
before do
- stub_feature_flags(safezip_use_rubyzip: true)
-
project.remove_pages
end
@@ -59,6 +57,28 @@ RSpec.describe Projects::UpdatePagesService do
end
end
+ it 'creates pages_deployment and saves it in the metadata' do
+ expect do
+ expect(execute).to eq(:success)
+ end.to change { project.pages_deployments.count }.by(1)
+
+ deployment = project.pages_deployments.last
+
+ expect(deployment.size).to eq(file.size)
+ expect(deployment.file).to be
+ expect(project.pages_metadatum.reload.pages_deployment_id).to eq(deployment.id)
+ end
+
+ it 'does not create deployment when zip_pages_deployments feature flag is disabled' do
+ stub_feature_flags(zip_pages_deployments: false)
+
+ expect do
+ expect(execute).to eq(:success)
+ end.not_to change { project.pages_deployments.count }
+
+ expect(project.pages_metadatum.reload.pages_deployment_id).to be_nil
+ end
+
it 'limits pages size' do
stub_application_setting(max_pages_size: 1)
expect(execute).not_to eq(:success)
@@ -75,14 +95,14 @@ RSpec.describe Projects::UpdatePagesService do
expect(project.pages_deployed?).to be_truthy
expect(Dir.exist?(File.join(project.pages_path))).to be_truthy
- project.destroy
+ project.destroy!
expect(Dir.exist?(File.join(project.pages_path))).to be_falsey
expect(ProjectPagesMetadatum.find_by_project_id(project)).to be_nil
end
it 'fails if sha on branch is not latest' do
- build.update(ref: 'feature')
+ build.update!(ref: 'feature')
expect(execute).not_to eq(:success)
expect(project.pages_metadatum).not_to be_deployed
@@ -104,10 +124,6 @@ RSpec.describe Projects::UpdatePagesService do
let(:file) { fixture_file_upload("spec/fixtures/pages_non_writeable.zip") }
context 'when using RubyZip' do
- before do
- stub_feature_flags(safezip_use_rubyzip: true)
- end
-
it 'succeeds to extract' do
expect(execute).to eq(:success)
expect(project.pages_metadatum).to be_deployed
@@ -175,7 +191,7 @@ RSpec.describe Projects::UpdatePagesService do
it 'fails to remove project pages when no pages is deployed' do
expect(PagesWorker).not_to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
- project.destroy
+ project.destroy!
end
it 'fails if no artifacts' do
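The deleted `stub_feature_flags(safezip_use_rubyzip: true)` lines were redundant: the GitLab test suite enables feature flags by default, so only the disabled path needs an explicit stub, as the new `zip_pages_deployments: false` example shows. A hedged sketch of that pattern, assuming GitLab's `Feature.enabled?` API and the `stub_feature_flags` spec helper; the service class below is hypothetical:

    # Hypothetical class used only to illustrate the flag guard; Feature,
    # stub_feature_flags and create(:project) come from the GitLab suite.
    class ZipDeploymentStep
      def execute(project)
        return :skipped unless Feature.enabled?(:zip_pages_deployments, project)

        :created
      end
    end

    RSpec.describe ZipDeploymentStep do
      let(:project) { create(:project) }

      it 'creates a deployment by default, because specs enable flags' do
        expect(subject.execute(project)).to eq(:created)
      end

      it 'skips the deployment when the flag is stubbed off' do
        stub_feature_flags(zip_pages_deployments: false)

        expect(subject.execute(project)).to eq(:skipped)
      end
    end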
diff --git a/spec/services/projects/update_remote_mirror_service_spec.rb b/spec/services/projects/update_remote_mirror_service_spec.rb
index 1de04888e0a..30530da8013 100644
--- a/spec/services/projects/update_remote_mirror_service_spec.rb
+++ b/spec/services/projects/update_remote_mirror_service_spec.rb
@@ -68,25 +68,12 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
end
context "when given URLs containing escaped elements" do
- using RSpec::Parameterized::TableSyntax
+ it_behaves_like "URLs containing escaped elements return expected status" do
+ let(:result) { execute! }
- where(:url, :result_status) do
- "https://user:0a%23@test.example.com/project.git" | :success
- "https://git.example.com:1%2F%2F@source.developers.google.com/project.git" | :success
- CGI.escape("git://localhost:1234/some-path?some-query=some-val\#@example.com/") | :error
- CGI.escape(CGI.escape("https://user:0a%23@test.example.com/project.git")) | :error
- end
-
- with_them do
before do
allow(remote_mirror).to receive(:url).and_return(url)
end
-
- it "returns expected status" do
- result = execute!
-
- expect(result[:status]).to eq(result_status)
- end
end
end
@@ -136,54 +123,36 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
stub_lfs_setting(enabled: true)
end
- context 'feature flag enabled' do
- before do
- stub_feature_flags(push_mirror_syncs_lfs: true)
- end
-
- it 'pushes LFS objects to a HTTP repository' do
- expect_next_instance_of(Lfs::PushService) do |service|
- expect(service).to receive(:execute)
- end
-
- execute!
+ it 'pushes LFS objects to a HTTP repository' do
+ expect_next_instance_of(Lfs::PushService) do |service|
+ expect(service).to receive(:execute)
end
- it 'does nothing to an SSH repository' do
- remote_mirror.update!(url: 'ssh://example.com')
-
- expect_any_instance_of(Lfs::PushService).not_to receive(:execute)
-
- execute!
- end
+ execute!
+ end
- it 'does nothing if LFS is disabled' do
- expect(project).to receive(:lfs_enabled?) { false }
+ it 'does nothing to an SSH repository' do
+ remote_mirror.update!(url: 'ssh://example.com')
- expect_any_instance_of(Lfs::PushService).not_to receive(:execute)
+ expect_any_instance_of(Lfs::PushService).not_to receive(:execute)
- execute!
- end
+ execute!
+ end
- it 'does nothing if non-password auth is specified' do
- remote_mirror.update!(auth_method: 'ssh_public_key')
+ it 'does nothing if LFS is disabled' do
+ expect(project).to receive(:lfs_enabled?) { false }
- expect_any_instance_of(Lfs::PushService).not_to receive(:execute)
+ expect_any_instance_of(Lfs::PushService).not_to receive(:execute)
- execute!
- end
+ execute!
end
- context 'feature flag disabled' do
- before do
- stub_feature_flags(push_mirror_syncs_lfs: false)
- end
+ it 'does nothing if non-password auth is specified' do
+ remote_mirror.update!(auth_method: 'ssh_public_key')
- it 'does nothing' do
- expect_any_instance_of(Lfs::PushService).not_to receive(:execute)
+ expect_any_instance_of(Lfs::PushService).not_to receive(:execute)
- execute!
- end
+ execute!
end
end
end
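The escaped-URL table removed above now lives behind `it_behaves_like "URLs containing escaped elements return expected status"`, with the caller supplying `result` and stubbing the mirror URL. A hedged reconstruction of what that shared example plausibly looks like, pieced together from the deleted lines; the real definition under spec/support may differ:

    # Reconstruction for illustration only; callers provide `result` and make
    # `url` take effect, exactly as the remaining spec code above does.
    require 'cgi'

    RSpec.shared_examples 'URLs containing escaped elements return expected status' do
      using RSpec::Parameterized::TableSyntax

      where(:url, :result_status) do
        "https://user:0a%23@test.example.com/project.git"                               | :success
        "https://git.example.com:1%2F%2F@source.developers.google.com/project.git"      | :success
        CGI.escape("git://localhost:1234/some-path?some-query=some-val\#@example.com/") | :error
        CGI.escape(CGI.escape("https://user:0a%23@test.example.com/project.git"))       | :error
      end

      with_them do
        it 'returns expected status' do
          expect(result[:status]).to eq(result_status)
        end
      end
    end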
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 7832d727220..989426fde8b 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -141,7 +141,7 @@ RSpec.describe Projects::UpdateService do
let(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::INTERNAL) }
before do
- project.update(namespace: group, visibility_level: group.visibility_level)
+ project.update!(namespace: group, visibility_level: group.visibility_level)
end
it 'does not update project visibility level' do
@@ -151,6 +151,32 @@ RSpec.describe Projects::UpdateService do
expect(project.reload).to be_internal
end
end
+
+ context 'when updating shared runners' do
+ context 'can enable shared runners' do
+ let(:group) { create(:group, shared_runners_enabled: true) }
+ let(:project) { create(:project, namespace: group, shared_runners_enabled: false) }
+
+ it 'enables shared runners' do
+ result = update_project(project, user, shared_runners_enabled: true)
+
+ expect(result).to eq({ status: :success })
+ expect(project.reload.shared_runners_enabled).to be_truthy
+ end
+ end
+
+ context 'cannot enable shared runners' do
+ let(:group) { create(:group, :shared_runners_disabled) }
+ let(:project) { create(:project, namespace: group, shared_runners_enabled: false) }
+
+ it 'does not enable shared runners' do
+ result = update_project(project, user, shared_runners_enabled: true)
+
+ expect(result).to eq({ status: :error, message: 'Shared runners enabled cannot be enabled because parent group does not allow it' })
+ expect(project.reload.shared_runners_enabled).to be_falsey
+ end
+ end
+ end
end
describe 'when updating project that has forks' do
@@ -230,7 +256,7 @@ RSpec.describe Projects::UpdateService do
end
it 'handles empty project feature attributes' do
- project.project_feature.update(wiki_access_level: ProjectFeature::DISABLED)
+ project.project_feature.update!(wiki_access_level: ProjectFeature::DISABLED)
result = update_project(project, user, { name: 'test1' })
@@ -241,7 +267,7 @@ RSpec.describe Projects::UpdateService do
context 'when enabling a wiki' do
it 'creates a wiki' do
- project.project_feature.update(wiki_access_level: ProjectFeature::DISABLED)
+ project.project_feature.update!(wiki_access_level: ProjectFeature::DISABLED)
TestEnv.rm_storage_dir(project.repository_storage, project.wiki.path)
result = update_project(project, user, project_feature_attributes: { wiki_access_level: ProjectFeature::ENABLED })
@@ -252,7 +278,7 @@ RSpec.describe Projects::UpdateService do
end
it 'logs an error and creates a metric when wiki can not be created' do
- project.project_feature.update(wiki_access_level: ProjectFeature::DISABLED)
+ project.project_feature.update!(wiki_access_level: ProjectFeature::DISABLED)
expect_any_instance_of(ProjectWiki).to receive(:wiki).and_raise(Wiki::CouldNotCreateWikiError)
expect_any_instance_of(described_class).to receive(:log_error).with("Could not create wiki for #{project.full_name}")
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index b970a48051f..6f3814095f9 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -3,23 +3,27 @@
require 'spec_helper'
RSpec.describe QuickActions::InterpretService do
- let(:project) { create(:project, :public) }
- let(:developer) { create(:user) }
- let(:developer2) { create(:user) }
- let(:issue) { create(:issue, project: project) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:repository_project) { create(:project, :repository) }
+ let_it_be(:project) { public_project }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:developer2) { create(:user) }
+ let_it_be_with_reload(:issue) { create(:issue, project: project) }
let(:milestone) { create(:milestone, project: project, title: '9.10') }
let(:commit) { create(:commit, project: project) }
- let(:inprogress) { create(:label, project: project, title: 'In Progress') }
- let(:helmchart) { create(:label, project: project, title: 'Helm Chart Registry') }
- let(:bug) { create(:label, project: project, title: 'Bug') }
- let(:note) { build(:note, commit_id: merge_request.diff_head_sha) }
+ let_it_be(:inprogress) { create(:label, project: project, title: 'In Progress') }
+ let_it_be(:helmchart) { create(:label, project: project, title: 'Helm Chart Registry') }
+ let_it_be(:bug) { create(:label, project: project, title: 'Bug') }
let(:service) { described_class.new(project, developer) }
+ before_all do
+ public_project.add_developer(developer)
+ repository_project.add_developer(developer)
+ end
+
before do
stub_licensed_features(multiple_issue_assignees: false,
multiple_merge_request_assignees: false)
-
- project.add_developer(developer)
end
describe '#execute' do
@@ -146,7 +150,6 @@ RSpec.describe QuickActions::InterpretService do
shared_examples 'multiword label name starting without ~' do
it 'fetches label ids and populates add_label_ids if content contains /label' do
- helmchart # populate the label
_, updates = service.execute(content, issuable)
expect(updates).to eq(add_label_ids: [helmchart.id])
@@ -155,7 +158,6 @@ RSpec.describe QuickActions::InterpretService do
shared_examples 'label name is included in the middle of another label name' do
it 'ignores the sublabel when the content contains the includer label name' do
- helmchart # populate the label
create(:label, project: project, title: 'Chart')
_, updates = service.execute(content, issuable)
@@ -226,7 +228,7 @@ RSpec.describe QuickActions::InterpretService do
it 'returns the todo message' do
_, _, message = service.execute(content, issuable)
- expect(message).to eq('Added a To Do.')
+ expect(message).to eq('Added a to do.')
end
end
@@ -242,7 +244,7 @@ RSpec.describe QuickActions::InterpretService do
TodoService.new.mark_todo(issuable, developer)
_, _, message = service.execute(content, issuable)
- expect(message).to eq('Marked To Do as done.')
+ expect(message).to eq('Marked to do as done.')
end
end
@@ -493,7 +495,7 @@ RSpec.describe QuickActions::InterpretService do
end
shared_examples 'merge immediately command' do
- let(:project) { create(:project, :repository) }
+ let(:project) { repository_project }
it 'runs merge command if content contains /merge' do
_, updates, _ = service.execute(content, issuable)
@@ -509,7 +511,7 @@ RSpec.describe QuickActions::InterpretService do
end
shared_examples 'merge automatically command' do
- let(:project) { create(:project, :repository) }
+ let(:project) { repository_project }
it 'runs merge command if content contains /merge and returns merge message' do
_, updates, message = service.execute(content, issuable)
@@ -600,7 +602,7 @@ RSpec.describe QuickActions::InterpretService do
context 'when issuable is already confidential' do
before do
- issuable.update(confidential: true)
+ issuable.update!(confidential: true)
end
it 'does not return the success message' do
@@ -722,7 +724,7 @@ RSpec.describe QuickActions::InterpretService do
end
context 'when sha is missing' do
- let(:project) { create(:project, :repository) }
+ let(:project) { repository_project }
let(:service) { described_class.new(project, developer, {}) }
it 'precheck passes and returns merge command' do
@@ -844,7 +846,7 @@ RSpec.describe QuickActions::InterpretService do
end
it 'returns the unassign message for all the assignee if content contains /unassign' do
- issue.update(assignee_ids: [developer.id, developer2.id])
+ issue.update!(assignee_ids: [developer.id, developer2.id])
_, _, message = service.execute(content, issue)
expect(message).to eq("Removed assignees #{developer.to_reference} and #{developer2.to_reference}.")
@@ -860,7 +862,7 @@ RSpec.describe QuickActions::InterpretService do
end
it 'returns the unassign message for all the assignee if content contains /unassign' do
- merge_request.update(assignee_ids: [developer.id, developer2.id])
+ merge_request.update!(assignee_ids: [developer.id, developer2.id])
_, _, message = service.execute(content, merge_request)
expect(message).to eq("Removed assignees #{developer.to_reference} and #{developer2.to_reference}.")
@@ -879,10 +881,14 @@ RSpec.describe QuickActions::InterpretService do
end
context 'only group milestones available' do
- let(:ancestor_group) { create(:group) }
- let(:group) { create(:group, parent: ancestor_group) }
- let(:project) { create(:project, :public, namespace: group) }
- let(:milestone) { create(:milestone, group: ancestor_group, title: '10.0') }
+ let_it_be(:ancestor_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: ancestor_group) }
+ let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:milestone) { create(:milestone, group: ancestor_group, title: '10.0') }
+
+ before_all do
+ project.add_developer(developer)
+ end
it_behaves_like 'milestone command' do
let(:content) { "/milestone %#{milestone.title}" }
@@ -1457,14 +1463,14 @@ RSpec.describe QuickActions::InterpretService do
end
context '/board_move command' do
- let(:todo) { create(:label, project: project, title: 'To Do') }
- let(:inreview) { create(:label, project: project, title: 'In Review') }
+ let_it_be(:todo) { create(:label, project: project, title: 'To Do') }
+ let_it_be(:inreview) { create(:label, project: project, title: 'In Review') }
let(:content) { %{/board_move ~"#{inreview.title}"} }
- let!(:board) { create(:board, project: project) }
- let!(:todo_list) { create(:list, board: board, label: todo) }
- let!(:inreview_list) { create(:list, board: board, label: inreview) }
- let!(:inprogress_list) { create(:list, board: board, label: inprogress) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:todo_list) { create(:list, board: board, label: todo) }
+ let_it_be(:inreview_list) { create(:list, board: board, label: inreview) }
+ let_it_be(:inprogress_list) { create(:list, board: board, label: inprogress) }
it 'populates remove_label_ids for all current board columns' do
issue.update!(label_ids: [todo.id, inprogress.id])
@@ -1599,6 +1605,10 @@ RSpec.describe QuickActions::InterpretService do
context "when logged user cannot create_merge_requests in the project" do
let(:project) { create(:project, :archived) }
+ before do
+ project.add_developer(developer)
+ end
+
it_behaves_like 'empty command'
end
@@ -1844,8 +1854,7 @@ RSpec.describe QuickActions::InterpretService do
end
describe 'relabel command' do
- let(:content) { '/relabel Bug' }
- let!(:bug) { create(:label, project: project, title: 'Bug') }
+ let(:content) { "/relabel #{bug.title}" }
let(:feature) { create(:label, project: project, title: 'Feature') }
it 'includes label name' do
@@ -1938,8 +1947,7 @@ RSpec.describe QuickActions::InterpretService do
end
describe 'board move command' do
- let(:content) { '/board_move ~bug' }
- let!(:bug) { create(:label, project: project, title: 'bug') }
+ let(:content) { "/board_move ~#{bug.title}" }
let!(:board) { create(:board, project: project) }
it 'includes the label name' do
diff --git a/spec/services/repositories/destroy_service_spec.rb b/spec/services/repositories/destroy_service_spec.rb
index 30ec84b44e7..81bda2130a6 100644
--- a/spec/services/repositories/destroy_service_spec.rb
+++ b/spec/services/repositories/destroy_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Repositories::DestroyService do
let(:path) { repository.disk_path }
let(:remove_path) { "#{path}+#{project.id}#{described_class::DELETED_FLAG}" }
- subject { described_class.new(project.repository).execute }
+ subject { described_class.new(repository).execute }
it 'moves the repository to a +deleted folder' do
expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy
@@ -92,4 +92,22 @@ RSpec.describe Repositories::DestroyService do
service.execute
end
end
+
+ context 'with a project wiki repository' do
+ let(:project) { create(:project, :wiki_repo) }
+ let(:repository) { project.wiki.repository }
+
+ it 'schedules the repository deletion' do
+ subject
+
+ expect(Repositories::ShellDestroyService).to receive(:new).with(repository).and_call_original
+
+ expect(GitlabShellWorker).to receive(:perform_in)
+ .with(Repositories::ShellDestroyService::REPO_REMOVAL_DELAY, :remove_repository, project.repository_storage, remove_path)
+
+ # Because GitlabShellWorker is inside a run_after_commit callback, we need to
+ # trigger the callback
+ project.touch
+ end
+ end
end
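The `project.touch` at the end of the new wiki-repository example is needed because, as the comment says, the worker is enqueued from a `run_after_commit` block, and after-commit hooks only fire once a write on the record actually commits. A standalone plain-ActiveRecord illustration of that mechanic (not GitLab's AfterCommitQueue, whose exact shape may differ), assuming the activerecord and sqlite3 gems:

    # Plain ActiveRecord, for illustration: work registered for "after commit"
    # does not run until a transaction on the record commits.
    require 'active_record'

    ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: ':memory:')
    ActiveRecord::Schema.define do
      create_table(:projects) { |t| t.string :name; t.timestamps }
    end

    class Project < ActiveRecord::Base
      after_commit :schedule_repository_removal

      def schedule_repository_removal
        puts 'worker enqueued' # stands in for GitlabShellWorker.perform_in(...)
      end
    end

    project = nil
    Project.transaction do
      project = Project.create!(name: 'demo')
      puts 'inside the transaction: nothing enqueued yet'
    end
    # 'worker enqueued' is printed only here, once the transaction commits.

    project.touch # another write + commit, so the hook fires again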
diff --git a/spec/services/repository_archive_clean_up_service_spec.rb b/spec/services/repository_archive_clean_up_service_spec.rb
index c6d673fb1b5..8db1a6858fa 100644
--- a/spec/services/repository_archive_clean_up_service_spec.rb
+++ b/spec/services/repository_archive_clean_up_service_spec.rb
@@ -18,6 +18,16 @@ RSpec.describe RepositoryArchiveCleanUpService do
end
end
+ it 'removes outdated archives and directories in a versioned path' do
+ in_directory_with_files("project-#{non_existing_record_id}/#{sha}/@v2", %w[tar tar.bz2 tar.gz zip], 3.hours) do |dirname, files|
+ service.execute
+
+ files.each { |filename| expect(File.exist?(filename)).to be_falsy }
+ expect(File.directory?(dirname)).to be_falsy
+ expect(File.directory?(File.dirname(dirname))).to be_falsy
+ end
+ end
+
it 'does not remove directories when they contain outdated non-archives' do
in_directory_with_files("project-#{non_existing_record_id}/#{sha}", %w[tar conf rb], 2.hours) do |dirname, files|
service.execute
@@ -64,7 +74,9 @@ RSpec.describe RepositoryArchiveCleanUpService do
end
it 'removes files older than 2 hours that matches valid archive extensions' do
- in_directory_with_files('sample.git', %w[tar tar.bz2 tar.gz zip], 2.hours) do |dir, files|
+ # In macOS, the `mmin` parameter for `find` rounds up, so add a full
+ # minute to ensure these files are deemed old.
+ in_directory_with_files('sample.git', %w[tar tar.bz2 tar.gz zip], 121.minutes) do |dir, files|
service.execute
files.each { |file| expect(File.exist?(file)).to eq false }
@@ -73,11 +85,11 @@ RSpec.describe RepositoryArchiveCleanUpService do
end
context 'with files older than 2 hours that do not match valid archive extensions' do
- it_behaves_like 'invalid archive files', 'sample.git', %w[conf rb], 2.hours
+ it_behaves_like 'invalid archive files', 'sample.git', %w[conf rb], 121.minutes
end
context 'with files older than 2 hours inside invalid directories' do
- it_behaves_like 'invalid archive files', 'john/doe/sample.git', %w[conf rb tar tar.gz], 2.hours
+ it_behaves_like 'invalid archive files', 'john/t/doe/sample.git', %w[conf rb tar tar.gz], 121.minutes
end
context 'with files newer than 2 hours that matches valid archive extensions' do
@@ -110,8 +122,6 @@ RSpec.describe RepositoryArchiveCleanUpService do
def create_temporary_files(dir, extensions, mtime)
FileUtils.mkdir_p(dir)
- # rubocop: disable Rails/TimeZone
- FileUtils.touch(extensions.map { |ext| File.join(dir, "sample.#{ext}") }, mtime: Time.now - mtime)
- # rubocop: enable Rails/TimeZone
+ FileUtils.touch(extensions.map { |ext| File.join(dir, "sample.#{ext}") }, mtime: Time.now.utc - mtime)
end
end
diff --git a/spec/services/resource_access_tokens/create_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index 7dbd55a6909..d8b12cda632 100644
--- a/spec/services/resource_access_tokens/create_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -24,26 +24,6 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
end
- shared_examples 'fails when flag is disabled' do
- before do
- stub_feature_flags(resource_access_token: false)
- end
-
- it 'returns nil' do
- expect(subject).to be nil
- end
- end
-
- shared_examples 'fails on gitlab.com' do
- before do
- allow(Gitlab).to receive(:com?) { true }
- end
-
- it 'returns nil' do
- expect(subject).to be nil
- end
- end
-
shared_examples 'allows creation of bot with valid params' do
it { expect { subject }.to change { User.count }.by(1) }
@@ -53,6 +33,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
access_token = response.payload[:access_token]
expect(access_token.user.reload.user_type).to eq("#{resource_type}_bot")
+ expect(access_token.user.created_by_id).to eq(user.id)
end
context 'email confirmation status' do
@@ -77,8 +58,8 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
context 'bot name' do
- context 'when no value is passed' do
- it 'uses default value' do
+ context 'when no name is passed' do
+ it 'uses default name' do
response = subject
access_token = response.payload[:access_token]
@@ -86,10 +67,10 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
end
- context 'when user provides value' do
+ context 'when user provides name' do
let_it_be(:params) { { name: 'Random bot' } }
- it 'overrides the default value' do
+ it 'overrides the default name value' do
response = subject
access_token = response.payload[:access_token]
@@ -121,7 +102,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
context 'when user provides scope explicitly' do
let_it_be(:params) { { scopes: Gitlab::Auth::REPOSITORY_SCOPES } }
- it 'overrides the default value' do
+ it 'overrides the default scope value' do
response = subject
access_token = response.payload[:access_token]
@@ -130,24 +111,44 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
context 'expires_at' do
- context 'when no value is passed' do
- it 'uses default value' do
+ context 'when no expiration value is passed' do
+ it 'uses nil expiration value' do
response = subject
access_token = response.payload[:access_token]
expect(access_token.expires_at).to eq(nil)
end
+
+ context 'expiry of the project bot member' do
+ it 'project bot membership does not expire' do
+ response = subject
+ access_token = response.payload[:access_token]
+ project_bot = access_token.user
+
+ expect(project.members.find_by(user_id: project_bot.id).expires_at).to eq(nil)
+ end
+ end
end
- context 'when user provides value' do
+ context 'when user provides expiration value' do
let_it_be(:params) { { expires_at: Date.today + 1.month } }
- it 'overrides the default value' do
+ it 'overrides the default expiration value' do
response = subject
access_token = response.payload[:access_token]
expect(access_token.expires_at).to eq(params[:expires_at])
end
+
+ context 'expiry of the project bot member' do
+ it 'sets the project bot to expire on the same day as the token' do
+ response = subject
+ access_token = response.payload[:access_token]
+ project_bot = access_token.user
+
+ expect(project.members.find_by(user_id: project_bot.id).expires_at).to eq(params[:expires_at])
+ end
+ end
end
context 'when invalid scope is passed' do
@@ -164,7 +165,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
context 'when access provisioning fails' do
before do
- allow(resource).to receive(:add_maintainer).and_return(nil)
+ allow(resource).to receive(:add_user).and_return(nil)
end
it 'returns error' do
@@ -180,8 +181,6 @@ RSpec.describe ResourceAccessTokens::CreateService do
let_it_be(:resource) { project }
it_behaves_like 'fails when user does not have the permission to create a Resource Bot'
- it_behaves_like 'fails when flag is disabled'
- it_behaves_like 'fails on gitlab.com'
context 'user with valid permission' do
before_all do
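The new membership-expiry examples, together with the switch from stubbing `add_maintainer` to stubbing `add_user`, imply that the service provisions the bot member with the same `expires_at` it sets on the token. A hedged sketch of what that provisioning step plausibly looks like; the names and error handling are inferred from the spec changes, not copied from ResourceAccessTokens::CreateService:

    # Illustrative only: stubbing resource.add_user to return nil is exactly
    # what the "when access provisioning fails" context above simulates.
    def provision_resource_bot(resource, bot_user, params)
      member = resource.add_user(bot_user, :maintainer, expires_at: params[:expires_at])
      raise 'failed to provision the resource bot' unless member

      member
    end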
diff --git a/spec/services/resource_access_tokens/revoke_service_spec.rb b/spec/services/resource_access_tokens/revoke_service_spec.rb
index ffc06d770f8..af29ee2a721 100644
--- a/spec/services/resource_access_tokens/revoke_service_spec.rb
+++ b/spec/services/resource_access_tokens/revoke_service_spec.rb
@@ -8,17 +8,17 @@ RSpec.describe ResourceAccessTokens::RevokeService do
let_it_be(:user) { create(:user) }
let(:access_token) { create(:personal_access_token, user: resource_bot) }
- describe '#execute' do
+ describe '#execute', :sidekiq_inline do
# Created shared_examples as it will be easy to include specs for group bots in https://gitlab.com/gitlab-org/gitlab/-/issues/214046
shared_examples 'revokes access token' do
it { expect(subject.success?).to be true }
- it { expect(subject.message).to eq("Revoked access token: #{access_token.name}") }
+ it { expect(subject.message).to eq("Access token #{access_token.name} has been revoked and the bot user has been scheduled for deletion.") }
- it 'revokes token access' do
- subject
+ it 'calls delete user worker' do
+ expect(DeleteUserWorker).to receive(:perform_async).with(user.id, resource_bot.id, skip_authorization: true)
- expect(access_token.reload.revoked?).to be true
+ subject
end
it 'removes membership of bot user' do
@@ -34,6 +34,12 @@ RSpec.describe ResourceAccessTokens::RevokeService do
expect(issue.reload.author.ghost?).to be true
end
+
+ it 'deletes project bot user' do
+ subject
+
+ expect(User.exists?(resource_bot.id)).to be_falsy
+ end
end
shared_examples 'rollback revoke steps' do
@@ -56,49 +62,71 @@ RSpec.describe ResourceAccessTokens::RevokeService do
expect(issue.reload.author.ghost?).to be false
end
+
+ it 'does not destroy project bot user' do
+ subject
+
+ expect(User.exists?(resource_bot.id)).to be_truthy
+ end
end
context 'when resource is a project' do
let_it_be(:resource) { create(:project, :private) }
- let_it_be(:resource_bot) { create(:user, :project_bot) }
+ let(:resource_bot) { create(:user, :project_bot) }
- before_all do
+ before do
resource.add_maintainer(user)
resource.add_maintainer(resource_bot)
end
it_behaves_like 'revokes access token'
- context 'when revoke fails' do
- context 'invalid resource type' do
- subject { described_class.new(user, resource, access_token).execute }
+ context 'revoke fails' do
+ let_it_be(:other_user) { create(:user) }
- let_it_be(:resource) { double }
- let_it_be(:resource_bot) { create(:user, :project_bot) }
+ context 'when access token does not belong to this project' do
+ it 'does not find the bot' do
+ other_access_token = create(:personal_access_token, user: other_user)
- it 'returns error response' do
- response = subject
+ response = described_class.new(user, resource, other_access_token).execute
expect(response.success?).to be false
expect(response.message).to eq("Failed to find bot user")
+ expect(access_token.reload.revoked?).to be false
end
-
- it { expect { subject }.not_to change(access_token.reload, :revoked) }
end
- context 'when migration to ghost user fails' do
- before do
- allow_next_instance_of(::Members::DestroyService) do |service|
- allow(service).to receive(:execute).and_return(false)
+ context 'when user does not have permission to destroy bot' do
+ context 'when non-project member tries to delete project bot' do
+ it 'does not allow other user to delete bot' do
+ response = described_class.new(other_user, resource, access_token).execute
+
+ expect(response.success?).to be false
+ expect(response.message).to eq("#{other_user.name} cannot delete #{access_token.user.name}")
+ expect(access_token.reload.revoked?).to be false
end
end
- it_behaves_like 'rollback revoke steps'
+ context 'when non-maintainer project member tries to delete project bot' do
+ let(:developer) { create(:user) }
+
+ before do
+ resource.add_developer(developer)
+ end
+
+ it 'does not allow developer to delete bot' do
+ response = described_class.new(developer, resource, access_token).execute
+
+ expect(response.success?).to be false
+ expect(response.message).to eq("#{developer.name} cannot delete #{access_token.user.name}")
+ expect(access_token.reload.revoked?).to be false
+ end
+ end
end
- context 'when migration to ghost user fails' do
+ context 'when deletion of bot user fails' do
before do
- allow_next_instance_of(::Users::MigrateToGhostUserService) do |service|
+ allow_next_instance_of(::ResourceAccessTokens::RevokeService) do |service|
allow(service).to receive(:execute).and_return(false)
end
end
diff --git a/spec/services/search/global_service_spec.rb b/spec/services/search/global_service_spec.rb
index 90ad18e5571..7b914a4d3d6 100644
--- a/spec/services/search/global_service_spec.rb
+++ b/spec/services/search/global_service_spec.rb
@@ -52,4 +52,34 @@ RSpec.describe Search::GlobalService do
end
end
end
+
+ context 'issues' do
+ let(:scope) { 'issues' }
+
+ context 'sort by created_at' do
+ let!(:project) { create(:project, :public) }
+ let!(:old_result) { create(:issue, project: project, title: 'sorted old', created_at: 1.month.ago) }
+ let!(:new_result) { create(:issue, project: project, title: 'sorted recent', created_at: 1.day.ago) }
+ let!(:very_old_result) { create(:issue, project: project, title: 'sorted very old', created_at: 1.year.ago) }
+
+ include_examples 'search results sorted' do
+ let(:results) { described_class.new(nil, search: 'sorted', sort: sort).execute }
+ end
+ end
+ end
+
+ context 'merge_request' do
+ let(:scope) { 'merge_requests' }
+
+ context 'sort by created_at' do
+ let!(:project) { create(:project, :public) }
+ let!(:old_result) { create(:merge_request, :opened, source_project: project, source_branch: 'old-1', title: 'sorted old', created_at: 1.month.ago) }
+ let!(:new_result) { create(:merge_request, :opened, source_project: project, source_branch: 'new-1', title: 'sorted recent', created_at: 1.day.ago) }
+ let!(:very_old_result) { create(:merge_request, :opened, source_project: project, source_branch: 'very-old-1', title: 'sorted very old', created_at: 1.year.ago) }
+
+ include_examples 'search results sorted' do
+ let(:results) { described_class.new(nil, search: 'sorted', sort: sort).execute }
+ end
+ end
+ end
end
diff --git a/spec/services/search/group_service_spec.rb b/spec/services/search/group_service_spec.rb
index d3026d158d4..2bfe714f393 100644
--- a/spec/services/search/group_service_spec.rb
+++ b/spec/services/search/group_service_spec.rb
@@ -40,4 +40,36 @@ RSpec.describe Search::GroupService do
describe 'basic search' do
include_examples 'group search'
end
+
+ context 'issues' do
+ let(:scope) { 'issues' }
+
+ context 'sort by created_at' do
+ let!(:group) { create(:group) }
+ let!(:project) { create(:project, :public, group: group) }
+ let!(:old_result) { create(:issue, project: project, title: 'sorted old', created_at: 1.month.ago) }
+ let!(:new_result) { create(:issue, project: project, title: 'sorted recent', created_at: 1.day.ago) }
+ let!(:very_old_result) { create(:issue, project: project, title: 'sorted very old', created_at: 1.year.ago) }
+
+ include_examples 'search results sorted' do
+ let(:results) { described_class.new(nil, group, search: 'sorted', sort: sort).execute }
+ end
+ end
+ end
+
+ context 'merge requests' do
+ let(:scope) { 'merge_requests' }
+
+ context 'sort by created_at' do
+ let!(:group) { create(:group) }
+ let!(:project) { create(:project, :public, group: group) }
+ let!(:old_result) { create(:merge_request, :opened, source_project: project, source_branch: 'old-1', title: 'sorted old', created_at: 1.month.ago) }
+ let!(:new_result) { create(:merge_request, :opened, source_project: project, source_branch: 'new-1', title: 'sorted recent', created_at: 1.day.ago) }
+ let!(:very_old_result) { create(:merge_request, :opened, source_project: project, source_branch: 'very-old-1', title: 'sorted very old', created_at: 1.year.ago) }
+
+ include_examples 'search results sorted' do
+ let(:results) { described_class.new(nil, group, search: 'sorted', sort: sort).execute }
+ end
+ end
+ end
end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index f6bb7acee57..fc613a6224a 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -444,7 +444,7 @@ RSpec.describe SearchService do
context 'with :with_api_entity_associations' do
let(:unredacted_results) { ar_relation(MergeRequest.with_api_entity_associations, readable, unreadable) }
- it_behaves_like "redaction limits N+1 queries", limit: 7
+ it_behaves_like "redaction limits N+1 queries", limit: 8
end
end
@@ -481,7 +481,7 @@ RSpec.describe SearchService do
end
context 'with :with_api_entity_associations' do
- it_behaves_like "redaction limits N+1 queries", limit: 12
+ it_behaves_like "redaction limits N+1 queries", limit: 13
end
end
@@ -496,7 +496,7 @@ RSpec.describe SearchService do
end
context 'with :with_api_entity_associations' do
- it_behaves_like "redaction limits N+1 queries", limit: 3
+ it_behaves_like "redaction limits N+1 queries", limit: 4
end
end
diff --git a/spec/services/snippets/repository_validation_service_spec.rb b/spec/services/snippets/repository_validation_service_spec.rb
index e2a0d0faa18..8166ce144e1 100644
--- a/spec/services/snippets/repository_validation_service_spec.rb
+++ b/spec/services/snippets/repository_validation_service_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Snippets::RepositoryValidationService do
end
it 'returns error when the repository has more files than the limit' do
- limit = Snippet.max_file_limit(user) + 1
+ limit = Snippet.max_file_limit + 1
files = Array.new(limit) { FFaker::Filesystem.file_name }
allow(repository).to receive(:ls_files).and_return(files)
@@ -56,7 +56,9 @@ RSpec.describe Snippets::RepositoryValidationService do
end
it 'returns error when the repository size is over the limit' do
- expect_any_instance_of(Gitlab::RepositorySizeChecker).to receive(:above_size_limit?).and_return(true)
+ expect_next_instance_of(Gitlab::RepositorySizeChecker) do |checker|
+ expect(checker).to receive(:above_size_limit?).and_return(true)
+ end
expect(subject).to be_error
expect(subject.message).to match /Repository size is above the limit/
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index 641fc56294a..406ece30bd7 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Snippets::UpdateService do
- describe '#execute' do
+ describe '#execute', :aggregate_failures do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create :user, admin: true }
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
@@ -97,40 +97,81 @@ RSpec.describe Snippets::UpdateService do
end
shared_examples 'creates repository and creates file' do
- it 'creates repository' do
- expect(snippet.repository).not_to exist
+ context 'when file_name and content params are used' do
+ it 'creates repository' do
+ expect(snippet.repository).not_to exist
- subject
+ subject
- expect(snippet.repository).to exist
- end
+ expect(snippet.repository).to exist
+ end
- it 'commits the files to the repository' do
- subject
+ it 'commits the files to the repository' do
+ subject
- expect(snippet.blobs.count).to eq 1
+ expect(snippet.blobs.count).to eq 1
- blob = snippet.repository.blob_at('master', options[:file_name])
+ blob = snippet.repository.blob_at('master', options[:file_name])
- expect(blob.data).to eq options[:content]
+ expect(blob.data).to eq options[:content]
+ end
+
+ context 'when the repository creation fails' do
+ before do
+ allow(snippet).to receive(:repository_exists?).and_return(false)
+ end
+
+ it 'raises an error' do
+ expect(subject).to be_error
+ expect(subject.payload[:snippet].errors[:repository].to_sentence).to eq 'Error updating the snippet - Repository could not be created'
+ end
+
+ it 'does not try to commit file' do
+ expect(service).not_to receive(:create_commit)
+
+ subject
+ end
+ end
end
- context 'when the repository creation fails' do
- before do
- allow(snippet).to receive(:repository_exists?).and_return(false)
+ context 'when snippet_actions param is used' do
+ let(:file_path) { 'CHANGELOG' }
+ let(:created_file_path) { 'New file' }
+ let(:content) { 'foobar' }
+ let(:snippet_actions) { [{ action: :move, previous_path: snippet.file_name, file_path: file_path }, { action: :create, file_path: created_file_path, content: content }] }
+ let(:base_opts) do
+ {
+ snippet_actions: snippet_actions
+ }
end
- it 'raise an error' do
- response = subject
+ it 'performs operation without raising errors' do
+ db_content = snippet.content
- expect(response).to be_error
- expect(response.payload[:snippet].errors[:repository].to_sentence).to eq 'Error updating the snippet - Repository could not be created'
+ expect(subject).to be_success
+
+ new_blob = snippet.repository.blob_at('master', file_path)
+ created_file = snippet.repository.blob_at('master', created_file_path)
+
+ expect(new_blob.data).to eq db_content
+ expect(created_file.data).to eq content
end
- it 'does not try to commit file' do
- expect(service).not_to receive(:create_commit)
+ context 'when the repository is not created' do
+ it 'keeps snippet database data' do
+ old_file_name = snippet.file_name
+ old_file_content = snippet.content
- subject
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:create_repository_for).and_raise(StandardError)
+ end
+
+ snippet = subject.payload[:snippet]
+
+ expect(subject).to be_error
+ expect(snippet.file_name).to eq(old_file_name)
+ expect(snippet.content).to eq(old_file_content)
+ end
end
end
end
@@ -366,10 +407,9 @@ RSpec.describe Snippets::UpdateService do
let(:snippet_actions) { [{ action: 'invalid_action' }] }
it 'raises a validation error' do
- response = subject
- snippet = response.payload[:snippet]
+ snippet = subject.payload[:snippet]
- expect(response).to be_error
+ expect(subject).to be_error
expect(snippet.errors.full_messages_for(:snippet_actions)).to eq ['Snippet actions have invalid data']
end
end
@@ -377,13 +417,12 @@ RSpec.describe Snippets::UpdateService do
context 'when an error is raised committing the file' do
it 'keeps any snippet modifications' do
expect_next_instance_of(described_class) do |instance|
- expect(instance).to receive(:create_repository_for).and_raise(StandardError)
+ expect(instance).to receive(:create_commit).and_raise(StandardError)
end
- response = subject
- snippet = response.payload[:snippet]
+ snippet = subject.payload[:snippet]
- expect(response).to be_error
+ expect(subject).to be_error
expect(snippet.title).to eq(new_title)
expect(snippet.file_name).to eq(file_path)
expect(snippet.content).to eq(content)
diff --git a/spec/services/static_site_editor/config_service_spec.rb b/spec/services/static_site_editor/config_service_spec.rb
index 5fff4e0af53..fed373828a1 100644
--- a/spec/services/static_site_editor/config_service_spec.rb
+++ b/spec/services/static_site_editor/config_service_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe StaticSiteEditor::ConfigService do
let_it_be(:user) { create(:user) }
# params
- let(:ref) { double(:ref) }
- let(:path) { double(:path) }
+ let(:ref) { 'master' }
+ let(:path) { 'README.md' }
let(:return_url) { double(:return_url) }
# stub data
@@ -42,22 +42,84 @@ RSpec.describe StaticSiteEditor::ConfigService do
allow_next_instance_of(Gitlab::StaticSiteEditor::Config::GeneratedConfig) do |config|
allow(config).to receive(:data) { generated_data }
end
+ end
+
+ context 'when reading file from repo fails with an unexpected error' do
+ let(:unexpected_error) { RuntimeError.new('some unexpected error') }
- allow_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig) do |config|
- allow(config).to receive(:data) { file_data }
+ before do
+ allow(project.repository).to receive(:blob_data_at).and_raise(unexpected_error)
+ end
+
+ it 'returns an error response' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_exception).with(unexpected_error).and_call_original
+ expect { execute }.to raise_error(unexpected_error)
end
end
- it 'returns merged generated data and config file data' do
- expect(execute).to be_success
- expect(execute.payload).to eq(generated: true, file: true)
+ context 'when file is missing' do
+ before do
+ allow(project.repository).to receive(:blob_data_at).and_raise(GRPC::NotFound)
+ expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, '{}') do |config|
+ allow(config).to receive(:valid?) { true }
+ allow(config).to receive(:to_hash_with_defaults) { file_data }
+ end
+ end
+
+ it 'returns default config' do
+ expect(execute).to be_success
+ expect(execute.payload).to eq(generated: true, file: true)
+ end
end
- it 'returns an error if any keys would be overwritten by the merge' do
- generated_data[:duplicate_key] = true
- file_data[:duplicate_key] = true
- expect(execute).to be_error
- expect(execute.message).to match(/duplicate key.*duplicate_key.*found/i)
+ context 'when file is present' do
+ before do
+ allow(project.repository).to receive(:blob_data_at).with(ref, anything) do
+ config_content
+ end
+ end
+
+ context 'and configuration is not valid' do
+ let(:config_content) { 'invalid content' }
+
+ before do
+ expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, config_content) do |config|
+ error = 'error'
+ allow(config).to receive_message_chain('errors.first') { error }
+ allow(config).to receive(:valid?) { false }
+ end
+ end
+
+ it 'returns an error' do
+ expect(execute).to be_error
+ expect(execute.message).to eq('Invalid configuration format')
+ end
+ end
+
+ context 'and configuration is valid' do
+ # NOTE: This has to be a valid config, even though it is mocked, because
+ # `expect_next_instance_of` executes the constructor logic.
+ let(:config_content) { 'static_site_generator: middleman' }
+
+ before do
+ expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, config_content) do |config|
+ allow(config).to receive(:valid?) { true }
+ allow(config).to receive(:to_hash_with_defaults) { file_data }
+ end
+ end
+
+ it 'returns merged generated data and config file data' do
+ expect(execute).to be_success
+ expect(execute.payload).to eq(generated: true, file: true)
+ end
+
+ it 'returns an error if any keys would be overwritten by the merge' do
+ generated_data[:duplicate_key] = true
+ file_data[:duplicate_key] = true
+ expect(execute).to be_error
+ expect(execute.message).to match(/duplicate key.*duplicate_key.*found/i)
+ end
+ end
end
end
end
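The NOTE in the hunk above captures the key behavioral detail of GitLab's `expect_next_instance_of` helper: unlike `expect_any_instance_of`, it intercepts the next `Klass.new(*args)` call, lets the real constructor run, and then yields the resulting instance so expectations can be attached. A small sketch of that usage with a hypothetical collaborator class (the helper itself is assumed from the suite's spec support):

    RSpec.describe 'mocking the next instance without skipping #initialize' do
      # Hypothetical collaborator; its constructor validates its argument for
      # real, which is why the spec above must pass a syntactically valid
      # config even though the instance is mocked afterwards.
      let(:config_class) do
        Class.new do
          def initialize(content)
            raise ArgumentError, 'content required' if content.to_s.empty?

            @content = content
          end

          def valid?
            true
          end
        end
      end

      it 'yields the instance built by the real constructor' do
        expect_next_instance_of(config_class, 'static_site_generator: middleman') do |config|
          expect(config).to receive(:valid?).and_return(false)
        end

        instance = config_class.new('static_site_generator: middleman')

        expect(instance.valid?).to be(false)
      end
    end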
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 47b8621b5c9..42e48b9ad81 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -64,6 +64,18 @@ RSpec.describe SystemNoteService do
end
end
+ describe '.change_issuable_reviewers' do
+ let(:reviewers) { [double, double] }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:change_issuable_reviewers).with(reviewers)
+ end
+
+ described_class.change_issuable_reviewers(noteable, project, author, reviewers)
+ end
+ end
+
describe '.close_after_error_tracking_resolve' do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
@@ -741,4 +753,16 @@ RSpec.describe SystemNoteService do
described_class.create_new_alert(alert, monitoring_tool)
end
end
+
+ describe '.change_incident_severity' do
+ let(:incident) { build(:incident) }
+
+ it 'calls IncidentService' do
+ expect_next_instance_of(SystemNotes::IncidentService) do |service|
+ expect(service).to receive(:change_incident_severity)
+ end
+
+ described_class.change_incident_severity(incident, author)
+ end
+ end
end
diff --git a/spec/services/system_notes/incident_service_spec.rb b/spec/services/system_notes/incident_service_spec.rb
new file mode 100644
index 00000000000..ab9b9eb2bd4
--- /dev/null
+++ b/spec/services/system_notes/incident_service_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::SystemNotes::IncidentService do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:noteable) { create(:incident, project: project) }
+ let_it_be(:issuable_severity) { create(:issuable_severity, issue: noteable, severity: :medium) }
+
+ describe '#change_incident_severity' do
+ subject(:change_severity) { described_class.new(noteable: noteable, project: project, author: author).change_incident_severity }
+
+ before do
+ allow(Gitlab::AppLogger).to receive(:error).and_call_original
+ end
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'severity' }
+ end
+
+ IssuableSeverity.severities.keys.each do |severity|
+ context "with #{severity} severity" do
+ before do
+ issuable_severity.update!(severity: severity)
+ end
+
+ it 'has the appropriate message' do
+ severity_label = IssuableSeverity::SEVERITY_LABELS.fetch(severity.to_sym)
+
+ expect(change_severity.note).to eq("changed the severity to **#{severity_label}**")
+ end
+ end
+ end
+
+ context 'when severity is invalid' do
+ let(:invalid_severity) { 'invalid-severity' }
+
+ before do
+ allow(noteable).to receive(:severity).and_return(invalid_severity)
+ end
+
+ it 'does not create system note' do
+ expect { change_severity }.not_to change { noteable.notes.count }
+ end
+
+ it 'writes error to logs' do
+ change_severity
+
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ message: 'Cannot create a system note for severity change',
+ noteable_class: noteable.class.to_s,
+ noteable_id: noteable.id,
+ severity: invalid_severity
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index fec2a711dc2..e78b00fb67a 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -128,49 +128,76 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
end
- describe '#change_status' do
- subject { service.change_status(status, source) }
+ describe '#change_issuable_reviewers' do
+ subject { service.change_issuable_reviewers([reviewer]) }
- context 'when resource state event tracking is enabled' do
- let(:status) { 'reopened' }
- let(:source) { nil }
+ let_it_be(:noteable) { create(:merge_request, :simple, source_project: project) }
+ let_it_be(:reviewer) { create(:user) }
+ let_it_be(:reviewer1) { create(:user) }
+ let_it_be(:reviewer2) { create(:user) }
+ let_it_be(:reviewer3) { create(:user) }
- it 'does not change note count' do
- expect { subject }.not_to change { Note.count }
- end
+ it_behaves_like 'a system note' do
+ let(:action) { 'reviewer' }
end
- context 'with status reopened' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
+ def build_note(old_reviewers, new_reviewers)
+ noteable.reviewers = new_reviewers
+ service.change_issuable_reviewers(old_reviewers).note
+ end
- let(:status) { 'reopened' }
- let(:source) { nil }
+ it 'builds a correct phrase when a reviewer is added to a non-assigned merge request' do
+ expect(build_note([], [reviewer1])).to eq "requested review from @#{reviewer1.username}"
+ end
- it_behaves_like 'a note with overridable created_at'
+ it 'builds a correct phrase when reviewer is removed' do
+ expect(build_note([reviewer], [])).to eq "removed review request for @#{reviewer.username}"
+ end
- it_behaves_like 'a system note' do
- let(:action) { 'opened' }
- end
+ it 'builds a correct phrase when reviewers changed' do
+ expect(build_note([reviewer1], [reviewer2])).to(
+ eq("requested review from @#{reviewer2.username} and removed review request for @#{reviewer1.username}")
+ )
end
- context 'with a source' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
+ it 'builds a correct phrase when three reviewers removed and one added' do
+ expect(build_note([reviewer, reviewer1, reviewer2], [reviewer3])).to(
+ eq("requested review from @#{reviewer3.username} and removed review request for @#{reviewer.username}, @#{reviewer1.username}, and @#{reviewer2.username}")
+ )
+ end
- let(:status) { 'opened' }
- let(:source) { double('commit', gfm_reference: 'commit 123456') }
+ it 'builds a correct phrase when one reviewer is changed from a set' do
+ expect(build_note([reviewer, reviewer1], [reviewer, reviewer2])).to(
+ eq("requested review from @#{reviewer2.username} and removed review request for @#{reviewer1.username}")
+ )
+ end
- it_behaves_like 'a note with overridable created_at'
+ it 'builds a correct phrase when one reviewer removed from a set' do
+ expect(build_note([reviewer, reviewer1, reviewer2], [reviewer, reviewer1])).to(
+ eq( "removed review request for @#{reviewer2.username}")
+ )
+ end
- it 'sets the note text' do
- expect(subject.note).to eq "#{status} via commit 123456"
+ it 'builds a correct phrase when the locale is different' do
+ Gitlab::I18n.with_locale('pt-BR') do
+ expect(build_note([reviewer, reviewer1, reviewer2], [reviewer3])).to(
+ eq("requested review from @#{reviewer3.username} and removed review request for @#{reviewer.username}, @#{reviewer1.username}, and @#{reviewer2.username}")
+ )
end
end
end
+ describe '#change_status' do
+ subject { service.change_status(status, source) }
+
+ let(:status) { 'reopened' }
+ let(:source) { nil }
+
+ it 'creates a resource state event' do
+ expect { subject }.to change { ResourceStateEvent.count }.by(1)
+ end
+ end
+
describe '#change_title' do
let(:noteable) { create(:issue, project: project, title: 'Lorem ipsum') }
@@ -636,67 +663,26 @@ RSpec.describe ::SystemNotes::IssuablesService do
describe '#close_after_error_tracking_resolve' do
subject { service.close_after_error_tracking_resolve }
- context 'when state tracking is enabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: true)
- end
-
- it 'creates the expected state event' do
- subject
-
- event = ResourceStateEvent.last
-
- expect(event.close_after_error_tracking_resolve).to eq(true)
- expect(event.state).to eq('closed')
- end
- end
+ it 'creates the expected state event' do
+ subject
- context 'when state tracking is disabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
+ event = ResourceStateEvent.last
- it_behaves_like 'a system note' do
- let(:action) { 'closed' }
- end
-
- it 'creates the expected system note' do
- expect(subject.note)
- .to eq('resolved the corresponding error and closed the issue.')
- end
+ expect(event.close_after_error_tracking_resolve).to eq(true)
+ expect(event.state).to eq('closed')
end
end
describe '#auto_resolve_prometheus_alert' do
subject { service.auto_resolve_prometheus_alert }
- context 'when state tracking is enabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: true)
- end
+ it 'creates the expected state event' do
+ subject
- it 'creates the expected state event' do
- subject
+ event = ResourceStateEvent.last
- event = ResourceStateEvent.last
-
- expect(event.close_auto_resolve_prometheus_alert).to eq(true)
- expect(event.state).to eq('closed')
- end
- end
-
- context 'when state tracking is disabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
-
- it_behaves_like 'a system note' do
- let(:action) { 'closed' }
- end
-
- it 'creates the expected system note' do
- expect(subject.note).to eq('automatically closed this issue because the alert resolved.')
- end
+ expect(event.close_auto_resolve_prometheus_alert).to eq(true)
+ expect(event.state).to eq('closed')
end
end
end
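
As a plain-Ruby illustration of the phrasing asserted in the `#change_issuable_reviewers` examples above, here is a sketch that diffs old and new reviewer sets and joins the handles into a sentence. It is an assumption about the shape of the logic, not the actual SystemNotes::IssuablesService code.

# Sketch only: builds the "requested review from ... and removed review request for ..." phrase.
def reviewers_note(old_usernames, new_usernames)
  added   = new_usernames - old_usernames
  removed = old_usernames - new_usernames

  to_sentence = lambda do |names|
    handles = names.map { |n| "@#{n}" }
    case handles.size
    when 1 then handles.first
    when 2 then handles.join(' and ')
    else "#{handles[0..-2].join(', ')}, and #{handles.last}"
    end
  end

  parts = []
  parts << "requested review from #{to_sentence.call(added)}" if added.any?
  parts << "removed review request for #{to_sentence.call(removed)}" if removed.any?
  parts.join(' and ')
end

puts reviewers_note(%w[alice], %w[bob])
# => requested review from @bob and removed review request for @alice
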
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index f671e66cdcd..ec126cb5447 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -6,124 +6,181 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
- let(:noteable) { create(:issue, project: project) }
-
describe '#change_due_date' do
subject { described_class.new(noteable: noteable, project: project, author: author).change_due_date(due_date) }
let(:due_date) { Date.today }
- it_behaves_like 'a note with overridable created_at'
+ context 'when noteable is an issue' do
+ let_it_be(:noteable) { create(:issue, project: project) }
- it_behaves_like 'a system note' do
- let(:action) { 'due_date' }
- end
+ it_behaves_like 'a note with overridable created_at'
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'due_date' }
+ end
- context 'when due date added' do
- it 'sets the note text' do
- expect(subject.note).to eq "changed due date to #{due_date.to_s(:long)}"
+ context 'when due date added' do
+ it 'sets the note text' do
+ expect(subject.note).to eq "changed due date to #{due_date.to_s(:long)}"
+ end
+ end
+
+ context 'when due date removed' do
+ let(:due_date) { nil }
+
+ it 'sets the note text' do
+ expect(subject.note).to eq 'removed due date'
+ end
+ end
+
+ it 'tracks the issue event in usage ping' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_due_date_changed_action).with(author: author)
+
+ subject
end
end
- context 'when due date removed' do
- let(:due_date) { nil }
+ context 'when noteable is a merge request' do
+ let_it_be(:noteable) { create(:merge_request, source_project: project) }
+
+ it 'does not track the issue event in usage ping' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_due_date_changed_action).with(author: author)
- it 'sets the note text' do
- expect(subject.note).to eq 'removed due date'
+ subject
end
end
end
- describe '.change_time_estimate' do
+ describe '#change_time_estimate' do
subject { described_class.new(noteable: noteable, project: project, author: author).change_time_estimate }
- it_behaves_like 'a system note' do
- let(:action) { 'time_tracking' }
- end
-
- context 'with a time estimate' do
- it 'sets the note text' do
- noteable.update_attribute(:time_estimate, 277200)
+ context 'when noteable is an issue' do
+ let_it_be(:noteable, reload: true) { create(:issue, project: project) }
- expect(subject.note).to eq "changed time estimate to 1w 4d 5h"
+ it_behaves_like 'a system note' do
+ let(:action) { 'time_tracking' }
end
- context 'when time_tracking_limit_to_hours setting is true' do
- before do
- stub_application_setting(time_tracking_limit_to_hours: true)
- end
-
+ context 'with a time estimate' do
it 'sets the note text' do
noteable.update_attribute(:time_estimate, 277200)
- expect(subject.note).to eq "changed time estimate to 77h"
+ expect(subject.note).to eq "changed time estimate to 1w 4d 5h"
+ end
+
+ context 'when time_tracking_limit_to_hours setting is true' do
+ before do
+ stub_application_setting(time_tracking_limit_to_hours: true)
+ end
+
+ it 'sets the note text' do
+ noteable.update_attribute(:time_estimate, 277200)
+
+ expect(subject.note).to eq "changed time estimate to 77h"
+ end
end
end
- end
- context 'without a time estimate' do
- it 'sets the note text' do
- expect(subject.note).to eq "removed time estimate"
+ context 'without a time estimate' do
+ it 'sets the note text' do
+ expect(subject.note).to eq "removed time estimate"
+ end
end
- end
- end
- describe '.change_time_spent' do
- # We need a custom noteable in order to the shared examples to be green.
- let(:noteable) do
- mr = create(:merge_request, source_project: project)
- mr.spend_time(duration: 360000, user_id: author.id)
- mr.save!
- mr
- end
+ it 'tracks the issue event in usage ping' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_time_estimate_changed_action).with(author: author)
- subject do
- described_class.new(noteable: noteable, project: project, author: author).change_time_spent
+ subject
+ end
end
- it_behaves_like 'a system note' do
- let(:action) { 'time_tracking' }
- end
+ context 'when noteable is a merge request' do
+ let_it_be(:noteable) { create(:merge_request, source_project: project) }
- context 'when time was added' do
- it 'sets the note text' do
- spend_time!(277200)
+ it 'does not track the issue event in usage ping' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_time_estimate_changed_action).with(author: author)
- expect(subject.note).to eq "added 1w 4d 5h of time spent"
+ subject
end
end
+ end
- context 'when time was subtracted' do
- it 'sets the note text' do
- spend_time!(-277200)
+ describe '#change_time_spent' do
+ subject { described_class.new(noteable: noteable, project: project, author: author).change_time_spent }
- expect(subject.note).to eq "subtracted 1w 4d 5h of time spent"
- end
- end
+ context 'when noteable is an issue' do
+ let_it_be(:noteable, reload: true) { create(:issue, project: project) }
- context 'when time was removed' do
- it 'sets the note text' do
- spend_time!(:reset)
+ it_behaves_like 'a system note' do
+ let(:action) { 'time_tracking' }
- expect(subject.note).to eq "removed time spent"
+ before do
+ spend_time!(277200)
+ end
end
- end
- context 'when time_tracking_limit_to_hours setting is true' do
- before do
- stub_application_setting(time_tracking_limit_to_hours: true)
+ context 'when time was added' do
+ it 'sets the note text' do
+ spend_time!(277200)
+
+ expect(subject.note).to eq "added 1w 4d 5h of time spent"
+ end
+
+ context 'when time was subtracted' do
+ it 'sets the note text' do
+ spend_time!(360000)
+ spend_time!(-277200)
+
+ expect(subject.note).to eq "subtracted 1w 4d 5h of time spent"
+ end
+ end
+
+ context 'when time was removed' do
+ it 'sets the note text' do
+ spend_time!(:reset)
+
+ expect(subject.note).to eq "removed time spent"
+ end
+ end
+
+ context 'when time_tracking_limit_to_hours setting is true' do
+ before do
+ stub_application_setting(time_tracking_limit_to_hours: true)
+ end
+
+ it 'sets the note text' do
+ spend_time!(277200)
+
+ expect(subject.note).to eq "added 77h of time spent"
+ end
+ end
+
+ it 'tracks the issue event in usage ping' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_time_spent_changed_action).with(author: author)
+
+ spend_time!(277200)
+
+ subject
+ end
end
- it 'sets the note text' do
- spend_time!(277200)
+ context 'when noteable is a merge request' do
+ let_it_be(:noteable) { create(:merge_request, source_project: project) }
+
+ it 'does not track the issue event in usage ping' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_time_spent_changed_action).with(author: author)
+
+ spend_time!(277200)
- expect(subject.note).to eq "added 77h of time spent"
+ subject
+ end
end
- end
- def spend_time!(seconds)
- noteable.spend_time(duration: seconds, user_id: author.id)
- noteable.save!
+ def spend_time!(seconds)
+ noteable.spend_time(duration: seconds, user_id: author.id)
+ noteable.save!
+ end
end
end
end
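
The issue/merge-request split introduced above follows one simple rule: usage-ping counters are incremented only when the noteable is an issue. A standalone sketch of that pattern, using stand-in classes rather than GitLab models:

# Sketch only: merge requests are skipped when recording usage-ping events.
Issue = Struct.new(:title)
MergeRequest = Struct.new(:title)

module Counter
  def self.track_issue_time_spent_changed_action(author:)
    puts "tracked time spent change by #{author}"
  end
end

def track_time_spent(noteable, author)
  return unless noteable.is_a?(Issue) # merge requests are not counted in usage ping

  Counter.track_issue_time_spent_changed_action(author: author)
end

track_time_spent(Issue.new('bug'), 'alice')        # tracked
track_time_spent(MergeRequest.new('fix'), 'alice') # not tracked
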
diff --git a/spec/services/todos/destroy/entity_leave_service_spec.rb b/spec/services/todos/destroy/entity_leave_service_spec.rb
index 921037bd5db..4126eb88b0b 100644
--- a/spec/services/todos/destroy/entity_leave_service_spec.rb
+++ b/spec/services/todos/destroy/entity_leave_service_spec.rb
@@ -19,20 +19,14 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
let!(:todo_issue_c_user) { create(:todo, user: user, target: issue_c, project: project) }
let!(:todo_issue_c_user2) { create(:todo, user: user2, target: issue_c, project: project) }
- shared_examples 'using different access permissions' do |access_table|
- using RSpec::Parameterized::TableSyntax
-
- where(:group_access, :project_access, :c_todos, :mr_todos, :method, &access_table)
-
- with_them do
- before do
- set_access(project, user, project_access) if project_access
- set_access(group, user, group_access) if group_access
- end
+ shared_examples 'using different access permissions' do
+ before do
+ set_access(project, user, project_access) if project_access
+ set_access(group, user, group_access) if group_access
+ end
- it "#{params[:method].to_s.humanize(capitalize: false)}" do
- send(method)
- end
+ it "#{params[:method].to_s.humanize(capitalize: false)}" do
+ send(method_name)
end
end
@@ -84,22 +78,20 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
end
context 'access permissions' do
- # rubocop:disable RSpec/LeakyConstantDeclaration
- PRIVATE_PROJECT_PRIVATE_GROUP_ACCESS_TABLE =
- lambda do |_|
- [
- # :group_access, :project_access, :c_todos, :mr_todos, :method
- [nil, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [nil, :guest, :delete, :delete, :removes_confidential_issues_and_merge_request_todos],
- [:reporter, nil, :keep, :keep, :does_not_remove_any_todos],
- [:guest, nil, :delete, :delete, :removes_confidential_issues_and_merge_request_todos],
- [:guest, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [:guest, :guest, :delete, :delete, :removes_confidential_issues_and_merge_request_todos]
- ]
- end
- # rubocop:enable RSpec/LeakyConstantDeclaration
+ where(:group_access, :project_access, :method_name) do
+ [
+ [nil, :reporter, :does_not_remove_any_todos],
+ [nil, :guest, :removes_confidential_issues_and_merge_request_todos],
+ [:reporter, nil, :does_not_remove_any_todos],
+ [:guest, nil, :removes_confidential_issues_and_merge_request_todos],
+ [:guest, :reporter, :does_not_remove_any_todos],
+ [:guest, :guest, :removes_confidential_issues_and_merge_request_todos]
+ ]
+ end
- it_behaves_like 'using different access permissions', PRIVATE_PROJECT_PRIVATE_GROUP_ACCESS_TABLE
+ with_them do
+ it_behaves_like 'using different access permissions'
+ end
end
end
@@ -117,22 +109,20 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
end
context 'access permissions' do
- # rubocop:disable RSpec/LeakyConstantDeclaration
- PRIVATE_PROJECT_INTERNAL_GROUP_ACCESS_TABLE =
- lambda do |_|
- [
- # :group_access, :project_access, :c_todos, :mr_todos, :method
- [nil, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [nil, :guest, :delete, :delete, :removes_confidential_issues_and_merge_request_todos],
- [:reporter, nil, :keep, :keep, :does_not_remove_any_todos],
- [:guest, nil, :delete, :delete, :removes_confidential_issues_and_merge_request_todos],
- [:guest, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [:guest, :guest, :delete, :delete, :removes_confidential_issues_and_merge_request_todos]
- ]
- end
- # rubocop:enable RSpec/LeakyConstantDeclaration
+ where(:group_access, :project_access, :method_name) do
+ [
+ [nil, :reporter, :does_not_remove_any_todos],
+ [nil, :guest, :removes_confidential_issues_and_merge_request_todos],
+ [:reporter, nil, :does_not_remove_any_todos],
+ [:guest, nil, :removes_confidential_issues_and_merge_request_todos],
+ [:guest, :reporter, :does_not_remove_any_todos],
+ [:guest, :guest, :removes_confidential_issues_and_merge_request_todos]
+ ]
+ end
- it_behaves_like 'using different access permissions', PRIVATE_PROJECT_INTERNAL_GROUP_ACCESS_TABLE
+ with_them do
+ it_behaves_like 'using different access permissions'
+ end
end
end
@@ -172,22 +162,20 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
end
context 'access permissions' do
- # rubocop:disable RSpec/LeakyConstantDeclaration
- INTERNAL_PROJECT_INTERNAL_GROUP_ACCESS_TABLE =
- lambda do |_|
- [
- # :group_access, :project_access, :c_todos, :mr_todos, :method
- [nil, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [nil, :guest, :delete, :keep, :removes_only_confidential_issues_todos],
- [:reporter, nil, :keep, :keep, :does_not_remove_any_todos],
- [:guest, nil, :delete, :keep, :removes_only_confidential_issues_todos],
- [:guest, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [:guest, :guest, :delete, :keep, :removes_only_confidential_issues_todos]
- ]
- end
- # rubocop:enable RSpec/LeakyConstantDeclaration
-
- it_behaves_like 'using different access permissions', INTERNAL_PROJECT_INTERNAL_GROUP_ACCESS_TABLE
+ where(:group_access, :project_access, :method_name) do
+ [
+ [nil, :reporter, :does_not_remove_any_todos],
+ [nil, :guest, :removes_only_confidential_issues_todos],
+ [:reporter, nil, :does_not_remove_any_todos],
+ [:guest, nil, :removes_only_confidential_issues_todos],
+ [:guest, :reporter, :does_not_remove_any_todos],
+ [:guest, :guest, :removes_only_confidential_issues_todos]
+ ]
+ end
+
+ with_them do
+ it_behaves_like 'using different access permissions'
+ end
end
end
@@ -219,22 +207,20 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
end
context 'access permissions' do
- # rubocop:disable RSpec/LeakyConstantDeclaration
- PRIVATE_GROUP_PRIVATE_PROJECT_ACCESS_TABLE =
- lambda do |_|
- [
- # :group_access, :project_access, :c_todos, :mr_todos, :method
- [nil, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [nil, :guest, :delete, :delete, :removes_confidential_issues_and_merge_request_todos],
- [:reporter, nil, :keep, :keep, :does_not_remove_any_todos],
- [:guest, nil, :delete, :delete, :removes_confidential_issues_and_merge_request_todos],
- [:guest, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [:guest, :guest, :delete, :delete, :removes_confidential_issues_and_merge_request_todos]
- ]
- end
- # rubocop:enable RSpec/LeakyConstantDeclaration
+ where(:group_access, :project_access, :method_name) do
+ [
+ [nil, :reporter, :does_not_remove_any_todos],
+ [nil, :guest, :removes_confidential_issues_and_merge_request_todos],
+ [:reporter, nil, :does_not_remove_any_todos],
+ [:guest, nil, :removes_confidential_issues_and_merge_request_todos],
+ [:guest, :reporter, :does_not_remove_any_todos],
+ [:guest, :guest, :removes_confidential_issues_and_merge_request_todos]
+ ]
+ end
- it_behaves_like 'using different access permissions', PRIVATE_GROUP_PRIVATE_PROJECT_ACCESS_TABLE
+ with_them do
+ it_behaves_like 'using different access permissions'
+ end
end
context 'with nested groups' do
@@ -320,23 +306,21 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
end
context 'access permissions' do
- # rubocop:disable RSpec/LeakyConstantDeclaration
- INTERNAL_GROUP_INTERNAL_PROJECT_ACCESS_TABLE =
- lambda do |_|
- [
- # :group_access, :project_access, :c_todos, :mr_todos, :method
- [nil, nil, :delete, :keep, :removes_only_confidential_issues_todos],
- [nil, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [nil, :guest, :delete, :keep, :removes_only_confidential_issues_todos],
- [:reporter, nil, :keep, :keep, :does_not_remove_any_todos],
- [:guest, nil, :delete, :keep, :removes_only_confidential_issues_todos],
- [:guest, :reporter, :keep, :keep, :does_not_remove_any_todos],
- [:guest, :guest, :delete, :keep, :removes_only_confidential_issues_todos]
- ]
- end
- # rubocop:enable RSpec/LeakyConstantDeclaration
+ where(:group_access, :project_access, :method_name) do
+ [
+ [nil, nil, :removes_only_confidential_issues_todos],
+ [nil, :reporter, :does_not_remove_any_todos],
+ [nil, :guest, :removes_only_confidential_issues_todos],
+ [:reporter, nil, :does_not_remove_any_todos],
+ [:guest, nil, :removes_only_confidential_issues_todos],
+ [:guest, :reporter, :does_not_remove_any_todos],
+ [:guest, :guest, :removes_only_confidential_issues_todos]
+ ]
+ end
- it_behaves_like 'using different access permissions', INTERNAL_GROUP_INTERNAL_PROJECT_ACCESS_TABLE
+ with_them do
+ it_behaves_like 'using different access permissions'
+ end
end
end
end
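
The refactor above replaces the leaky lambda constants with inline `where` tables consumed by `with_them`. A generic sketch of that pattern, assuming the rspec-parameterized gem is available (as it is in GitLab's spec setup); the table contents are illustrative only.

# Sketch only: run with `rspec` and the rspec-parameterized gem installed.
require 'rspec-parameterized'

RSpec.describe 'access permission table (illustration only)' do
  where(:group_access, :project_access, :method_name) do
    [
      [nil,    :reporter, :does_not_remove_any_todos],
      [:guest, :guest,    :removes_confidential_issues_and_merge_request_todos]
    ]
  end

  with_them do
    it "#{params[:method_name]}" do
      # The real spec dispatches to a shared example here via `send(method_name)`.
      expect(method_name).to be_a(Symbol)
    end
  end
end
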
diff --git a/spec/services/users/approve_service_spec.rb b/spec/services/users/approve_service_spec.rb
new file mode 100644
index 00000000000..50f2b6b0827
--- /dev/null
+++ b/spec/services/users/approve_service_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::ApproveService do
+ let_it_be(:current_user) { create(:admin) }
+ let(:user) { create(:user, :blocked_pending_approval) }
+
+ subject(:execute) { described_class.new(current_user).execute(user) }
+
+ describe '#execute' do
+ context 'failures' do
+ context 'when the executor user is not allowed to approve users' do
+ let(:current_user) { create(:user) }
+
+ it 'returns error result' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to match(/You are not allowed to approve a user/)
+ end
+ end
+
+ context 'when user is not in pending approval state' do
+ let(:user) { create(:user, state: 'active') }
+
+ it 'returns error result' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message])
+ .to match(/The user you are trying to approve is not pending an approval/)
+ end
+ end
+
+ context 'when user cannot be activated' do
+ let(:user) do
+ build(:user, state: 'blocked_pending_approval', email: 'invalid email')
+ end
+
+ it 'returns error result' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to match(/Email is invalid/)
+ end
+
+ it 'does not change the state of the user' do
+ expect { subject }.not_to change { user.state }
+ end
+ end
+ end
+
+ context 'success' do
+ it 'activates the user' do
+ expect(subject[:status]).to eq(:success)
+ expect(user.reload).to be_active
+ end
+
+ context 'email confirmation status' do
+ context 'user is unconfirmed' do
+ let(:user) { create(:user, :blocked_pending_approval, :unconfirmed) }
+
+ it 'sends confirmation instructions' do
+ expect { subject }
+ .to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ end
+ end
+
+ context 'user is confirmed' do
+ it 'does not send a confirmation email' do
+ expect { subject }
+ .not_to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ end
+ end
+ end
+
+ context 'pending invitations' do
+ let!(:project_member_invite) { create(:project_member, :invited, invite_email: user.email) }
+ let!(:group_member_invite) { create(:group_member, :invited, invite_email: user.email) }
+
+ context 'user is unconfirmed' do
+ let(:user) { create(:user, :blocked_pending_approval, :unconfirmed) }
+
+ it 'does not accept pending invites of the user' do
+ expect(subject[:status]).to eq(:success)
+
+ group_member_invite.reload
+ project_member_invite.reload
+
+ expect(group_member_invite).to be_invite
+ expect(project_member_invite).to be_invite
+ end
+ end
+
+ context 'user is confirmed' do
+ it 'accepts pending invites of the user' do
+ expect(subject[:status]).to eq(:success)
+
+ group_member_invite.reload
+ project_member_invite.reload
+
+ expect(group_member_invite).not_to be_invite
+ expect(project_member_invite).not_to be_invite
+ expect(group_member_invite.user).to eq(user)
+ expect(project_member_invite.user).to eq(user)
+ end
+ end
+ end
+ end
+ end
+end
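
A plain-Ruby sketch of the result contract the new spec exercises: reject non-admin callers, reject users not pending approval, and surface activation errors. The struct and checks below are stand-ins, not the real Users::ApproveService.

# Sketch only: illustrates the {status:, message:} contract asserted above.
ApprovableUser = Struct.new(:state, :admin, :email_valid, keyword_init: true) do
  def activate!
    raise 'Email is invalid' unless email_valid

    self.state = 'active'
  end
end

def approve(current_user, user)
  return { status: :error, message: 'You are not allowed to approve a user' } unless current_user.admin

  unless user.state == 'blocked_pending_approval'
    return { status: :error, message: 'The user you are trying to approve is not pending an approval' }
  end

  user.activate!
  { status: :success }
rescue StandardError => e
  { status: :error, message: e.message }
end

admin   = ApprovableUser.new(state: 'active', admin: true, email_valid: true)
pending = ApprovableUser.new(state: 'blocked_pending_approval', admin: false, email_valid: true)

p approve(admin, pending) # => {:status=>:success}
p pending.state           # => "active"
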
diff --git a/spec/services/users/block_service_spec.rb b/spec/services/users/block_service_spec.rb
index e170a5494aa..45a5b1e5100 100644
--- a/spec/services/users/block_service_spec.rb
+++ b/spec/services/users/block_service_spec.rb
@@ -34,5 +34,15 @@ RSpec.describe Users::BlockService do
expect { operation }.not_to change { user.state }
end
end
+
+ context 'when internal user' do
+ let(:user) { create(:user, :bot) }
+
+ it 'returns error result' do
+ expect(operation[:status]).to eq(:error)
+ expect(operation[:message]).to eq('An internal user cannot be blocked')
+ expect(operation[:http_status]).to eq(403)
+ end
+ end
end
end
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index c14fdb35bfa..446741221b3 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -4,11 +4,11 @@ require 'spec_helper'
RSpec.describe Users::BuildService do
describe '#execute' do
- let(:params) do
- { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass' }
- end
+ let(:params) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
context 'with an admin user' do
+ let(:params) { build_stubbed(:user).slice(:name, :username, :email, :password) }
+
let(:admin_user) { create(:admin) }
let(:service) { described_class.new(admin_user, ActionController::Parameters.new(params).permit!) }
@@ -16,6 +16,10 @@ RSpec.describe Users::BuildService do
expect(service.execute).to be_valid
end
+ it 'sets the created_by_id' do
+ expect(service.execute.created_by_id).to eq(admin_user.id)
+ end
+
context 'calls the UpdateCanonicalEmailService' do
specify do
expect(Users::UpdateCanonicalEmailService).to receive(:new).and_call_original
@@ -128,6 +132,16 @@ RSpec.describe Users::BuildService do
it 'raises AccessDeniedError exception' do
expect { service.execute }.to raise_error Gitlab::Access::AccessDeniedError
end
+
+ context 'when authorization is skipped' do
+ subject(:built_user) { service.execute(skip_authorization: true) }
+
+ it { is_expected.to be_valid }
+
+ it 'sets the created_by_id' do
+ expect(built_user.created_by_id).to eq(user.id)
+ end
+ end
end
context 'with nil user' do
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index ff919257b3c..6de685dd89a 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -234,6 +234,14 @@ RSpec.describe Users::DestroyService do
expect(User.exists?(user.id)).to be(false)
end
+
+ it 'allows user to be deleted if skip_authorization: true' do
+ other_user = create(:user)
+
+ described_class.new(user).execute(other_user, skip_authorization: true)
+
+ expect(User.exists?(other_user.id)).to be(false)
+ end
end
context "migrating associated records" do
diff --git a/spec/services/users/validate_otp_service_spec.rb b/spec/services/users/validate_otp_service_spec.rb
new file mode 100644
index 00000000000..826755d6145
--- /dev/null
+++ b/spec/services/users/validate_otp_service_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::ValidateOtpService do
+ let_it_be(:user) { create(:user) }
+ let(:otp_code) { 42 }
+
+ subject(:validate) { described_class.new(user).execute(otp_code) }
+
+ context 'Devise' do
+ it 'calls Devise strategy' do
+ expect_next_instance_of(::Gitlab::Auth::Otp::Strategies::Devise) do |strategy|
+ expect(strategy).to receive(:validate).with(otp_code).once
+ end
+
+ validate
+ end
+ end
+
+ context 'FortiAuthenticator' do
+ before do
+ stub_feature_flags(forti_authenticator: true)
+ end
+
+ it 'calls FortiAuthenticator strategy' do
+ expect_next_instance_of(::Gitlab::Auth::Otp::Strategies::FortiAuthenticator) do |strategy|
+ expect(strategy).to receive(:validate).with(otp_code).once
+ end
+
+ validate
+ end
+ end
+end
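
The spec above checks that the service picks a validation strategy based on the `forti_authenticator` feature flag. A standalone sketch of that strategy-selection pattern; the classes below are stubs, not the real Gitlab::Auth::Otp strategies.

# Sketch only: choose the OTP strategy from a toggle, then delegate validation to it.
class DeviseStrategy
  def validate(code)
    { status: :success, strategy: :devise, code: code }
  end
end

class FortiAuthenticatorStrategy
  def validate(code)
    { status: :success, strategy: :forti_authenticator, code: code }
  end
end

def validate_otp(code, forti_authenticator_enabled:)
  strategy = forti_authenticator_enabled ? FortiAuthenticatorStrategy.new : DeviseStrategy.new
  strategy.validate(code)
end

p validate_otp(42, forti_authenticator_enabled: false) # Devise path (the spec default)
p validate_otp(42, forti_authenticator_enabled: true)  # FortiAuthenticator path
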
diff --git a/spec/services/web_hooks/destroy_service_spec.rb b/spec/services/web_hooks/destroy_service_spec.rb
new file mode 100644
index 00000000000..fda40eb01e2
--- /dev/null
+++ b/spec/services/web_hooks/destroy_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::DestroyService do
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(user) }
+
+ shared_examples 'batched destroys' do
+ it 'destroys all hooks in batches' do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ expect(subject).to receive(:delete_web_hook_logs_in_batches).exactly(4).times.and_call_original
+
+ expect do
+ status = subject.execute(hook)
+ expect(status[:async]).to be false
+ end
+ .to change { WebHook.count }.from(1).to(0)
+ .and change { WebHookLog.count }.from(3).to(0)
+ end
+
+ it 'returns an error if sync destroy fails' do
+ expect(hook).to receive(:destroy).and_return(false)
+
+ result = subject.sync_destroy(hook)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq("Unable to destroy #{hook.model_name.human}")
+ end
+
+ it 'schedules an async delete' do
+ stub_const('WebHooks::DestroyService::LOG_COUNT_THRESHOLD', 1)
+
+ expect(WebHooks::DestroyWorker).to receive(:perform_async).with(user.id, hook.id).and_call_original
+
+ status = subject.execute(hook)
+
+ expect(status[:async]).to be true
+ end
+ end
+
+ context 'with system hook' do
+ let_it_be(:hook) { create(:system_hook, url: "http://example.com") }
+ let_it_be(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+
+ it_behaves_like 'batched destroys'
+ end
+
+ context 'with project hook' do
+ let_it_be(:hook) { create(:project_hook) }
+ let_it_be(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+
+ it_behaves_like 'batched destroys'
+ end
+end
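
The spec above stubs LOG_COUNT_THRESHOLD and BATCH_SIZE to force both the synchronous and the asynchronous paths. A sketch of that decision, with placeholder constant values (the real values and deletion logic live in WebHooks::DestroyService):

# Sketch only: destroy inline for small log counts, schedule a worker otherwise.
LOG_COUNT_THRESHOLD = 10_000
BATCH_SIZE = 1_000

def destroy_hook(log_count:)
  if log_count > LOG_COUNT_THRESHOLD
    # The real service enqueues WebHooks::DestroyWorker here instead of deleting inline.
    { status: :success, async: true }
  else
    batches = (log_count.to_f / BATCH_SIZE).ceil
    batches.times { } # delete_web_hook_logs_in_batches would run once per batch
    { status: :success, async: false }
  end
end

p destroy_hook(log_count: 3)      # => {:status=>:success, :async=>false}
p destroy_hook(log_count: 50_000) # => {:status=>:success, :async=>true}
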
diff --git a/spec/simplecov_env.rb b/spec/simplecov_env.rb
index 17b76205d9e..617a45ae449 100644
--- a/spec/simplecov_env.rb
+++ b/spec/simplecov_env.rb
@@ -2,7 +2,6 @@
require 'simplecov'
require 'simplecov-cobertura'
-require 'active_support/core_ext/numeric/time'
require_relative '../lib/gitlab/utils'
module SimpleCovEnv
@@ -75,7 +74,7 @@ module SimpleCovEnv
add_group 'Libraries', %w[/lib /ee/lib]
add_group 'Tooling', %w[/haml_lint /rubocop /tooling]
- merge_timeout 365.days
+ merge_timeout 365 * 24 * 3600
end
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 11a83bd9501..11a45e005b8 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -66,7 +66,11 @@ RSpec.configure do |config|
config.display_try_failure_messages = true
config.infer_spec_type_from_file_location!
- config.full_backtrace = !!ENV['CI']
+
+ # Add :full_backtrace tag to an example if full_backtrace output is desired
+ config.before(:each, full_backtrace: true) do |example|
+ config.full_backtrace = true
+ end
unless ENV['CI']
# Re-run failures locally with `--only-failures`
@@ -124,6 +128,7 @@ RSpec.configure do |config|
config.include LoginHelpers, type: :feature
config.include SearchHelpers, type: :feature
config.include WaitHelpers, type: :feature
+ config.include WaitForRequests, type: :feature
config.include EmailHelpers, :mailer, type: :mailer
config.include Warden::Test::Helpers, type: :request
config.include Gitlab::Routing, type: :routing
@@ -133,7 +138,6 @@ RSpec.configure do |config|
config.include InputHelper, :js
config.include SelectionHelper, :js
config.include InspectRequests, :js
- config.include WaitForRequests, :js
config.include LiveDebugger, :js
config.include MigrationsHelpers, :migration
config.include RedisHelpers
@@ -208,6 +212,10 @@ RSpec.configure do |config|
# for now whilst we migrate as much as we can over the GraphQL
stub_feature_flags(merge_request_widget_graphql: false)
+ # Using FortiAuthenticator as the OTP provider is disabled by default in
+ # tests, until it is introduced in user settings
+ stub_feature_flags(forti_authenticator: false)
+
enable_rugged = example.metadata[:enable_rugged].present?
# Disable Rugged features by default
@@ -225,7 +233,7 @@ RSpec.configure do |config|
end
# Enable Marginalia feature for all specs in the test suite.
- allow(Gitlab::Marginalia).to receive(:cached_feature_enabled?).and_return(true)
+ Gitlab::Marginalia.enabled = true
# Stub these calls due to being expensive operations
# It can be reenabled for specific tests via:
@@ -233,6 +241,12 @@ RSpec.configure do |config|
# expect(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
allow(Gitlab::Git::KeepAround).to receive(:execute)
+ # Stub these calls due to being expensive operations
+ # It can be reenabled for specific tests via:
+ #
+ # expect(Gitlab::JobWaiter).to receive(:wait).and_call_original
+ allow_any_instance_of(Gitlab::JobWaiter).to receive(:wait)
+
Gitlab::ProcessMemoryCache.cache_backend.clear
Sidekiq::Worker.clear_all
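
Per the comment added in the spec_helper hunk above, the `:full_backtrace` tag is opt-in per example. A minimal usage example (the describe block and example name are illustrative):

RSpec.describe 'some flaky area' do
  it 'reproduces a failure with the full backtrace', :full_backtrace do
    expect(1 + 1).to eq(2)
  end
end
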
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 66fce4fddf1..ab55cf97ab4 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -50,6 +50,10 @@ Capybara.register_driver :chrome do |app|
)
options = Selenium::WebDriver::Chrome::Options.new
+
+ # Force the browser's scale factor to prevent inconsistencies on high-res devices
+ options.add_argument('--force-device-scale-factor=1')
+
options.add_argument("window-size=#{CAPYBARA_WINDOW_SIZE.join(',')}")
# Chrome won't work properly in a Docker container in sandbox mode
@@ -123,6 +127,10 @@ RSpec.configure do |config|
port: session.server.port,
protocol: 'http')
+ # CSRF protection is disabled by default. We only enable it for JS specs because some forms
+ # require JavaScript to set the CSRF token.
+ allow_any_instance_of(ActionController::Base).to receive(:protect_against_forgery?).and_return(true)
+
# reset window size between tests
unless session.current_window.size == CAPYBARA_WINDOW_SIZE
begin
diff --git a/spec/support/counter_attribute.rb b/spec/support/counter_attribute.rb
index ea71b25b4c0..8bd40b72dcf 100644
--- a/spec/support/counter_attribute.rb
+++ b/spec/support/counter_attribute.rb
@@ -9,6 +9,12 @@ RSpec.configure do |config|
counter_attribute :build_artifacts_size
counter_attribute :commit_count
+
+ attr_accessor :flushed
+
+ counter_attribute_after_flush do |subject|
+ subject.flushed = true
+ end
end
end
end
diff --git a/spec/support/factory_bot.rb b/spec/support/factory_bot.rb
index a86161bfded..c9d372993b5 100644
--- a/spec/support/factory_bot.rb
+++ b/spec/support/factory_bot.rb
@@ -3,7 +3,3 @@
FactoryBot::SyntaxRunner.class_eval do
include RSpec::Mocks::ExampleMethods
end
-
-# Use FactoryBot 4.x behavior:
-# https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#associations
-FactoryBot.use_parent_strategy = false
diff --git a/spec/support/google_api/cloud_platform_helpers.rb b/spec/support/google_api/cloud_platform_helpers.rb
index 38ffca8c5ae..840f948e377 100644
--- a/spec/support/google_api/cloud_platform_helpers.rb
+++ b/spec/support/google_api/cloud_platform_helpers.rb
@@ -22,7 +22,7 @@ module GoogleApi
.to_return(cloud_platform_response(cloud_platform_projects_billing_info_body(project_id, billing_enabled)))
end
- def stub_cloud_platform_get_zone_cluster(project_id, zone, cluster_id, **options)
+ def stub_cloud_platform_get_zone_cluster(project_id, zone, cluster_id, options = {})
WebMock.stub_request(:get, cloud_platform_get_zone_cluster_url(project_id, zone, cluster_id))
.to_return(cloud_platform_response(cloud_platform_cluster_body(options)))
end
@@ -32,7 +32,7 @@ module GoogleApi
.to_return(status: [500, "Internal Server Error"])
end
- def stub_cloud_platform_create_cluster(project_id, zone, **options)
+ def stub_cloud_platform_create_cluster(project_id, zone, options = {})
WebMock.stub_request(:post, cloud_platform_create_cluster_url(project_id, zone))
.to_return(cloud_platform_response(cloud_platform_operation_body(options)))
end
@@ -42,7 +42,7 @@ module GoogleApi
.to_return(status: [500, "Internal Server Error"])
end
- def stub_cloud_platform_get_zone_operation(project_id, zone, operation_id, **options)
+ def stub_cloud_platform_get_zone_operation(project_id, zone, operation_id, options = {})
WebMock.stub_request(:get, cloud_platform_get_zone_operation_url(project_id, zone, operation_id))
.to_return(cloud_platform_response(cloud_platform_operation_body(options)))
end
@@ -86,7 +86,7 @@ module GoogleApi
# https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.zones.clusters/create
# rubocop:disable Metrics/CyclomaticComplexity
# rubocop:disable Metrics/PerceivedComplexity
- def cloud_platform_cluster_body(**options)
+ def cloud_platform_cluster_body(options)
{
"name": options[:name] || 'string',
"description": options[:description] || 'string',
@@ -121,7 +121,7 @@ module GoogleApi
}
end
- def cloud_platform_operation_body(**options)
+ def cloud_platform_operation_body(options)
{
"name": options[:name] || 'operation-1234567891234-1234567',
"zone": options[:zone] || 'us-central1-a',
@@ -136,7 +136,7 @@ module GoogleApi
}
end
- def cloud_platform_projects_body(**options)
+ def cloud_platform_projects_body(options)
{
"projects": [
{
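
The stub signatures above move from a double-splat (`**options`) to a positional hash with a default (`options = {}`). The difference is plain Ruby: a double-splat parameter only accepts keyword arguments, while a positional hash can be passed through as a single object. A minimal illustration:

# Sketch only: keyword-splat vs positional-hash parameters.
def kw_only(**options)
  options
end

def positional(options = {})
  options
end

opts = { name: 'cluster-1' }
p positional(opts) # => {:name=>"cluster-1"}
p kw_only(**opts)  # => {:name=>"cluster-1"} (must be splatted explicitly)
# kw_only(opts) without the splat warns on Ruby 2.7 and fails on Ruby 3.
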
diff --git a/spec/support/helpers/api_internal_base_helpers.rb b/spec/support/helpers/api_internal_base_helpers.rb
new file mode 100644
index 00000000000..94996f7480e
--- /dev/null
+++ b/spec/support/helpers/api_internal_base_helpers.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+module APIInternalBaseHelpers
+ def gl_repository_for(container)
+ case container
+ when ProjectWiki
+ Gitlab::GlRepository::WIKI.identifier_for_container(container)
+ when Project
+ Gitlab::GlRepository::PROJECT.identifier_for_container(container)
+ when Snippet
+ Gitlab::GlRepository::SNIPPET.identifier_for_container(container)
+ else
+ nil
+ end
+ end
+
+ def full_path_for(container)
+ case container
+ when PersonalSnippet
+ "snippets/#{container.id}"
+ when ProjectSnippet
+ "#{container.project.full_path}/snippets/#{container.id}"
+ else
+ container.full_path
+ end
+ end
+
+ def pull(key, container, protocol = 'ssh')
+ post(
+ api("/internal/allowed"),
+ params: {
+ key_id: key.id,
+ project: full_path_for(container),
+ gl_repository: gl_repository_for(container),
+ action: 'git-upload-pack',
+ secret_token: secret_token,
+ protocol: protocol
+ }
+ )
+ end
+
+ def push(key, container, protocol = 'ssh', env: nil, changes: nil)
+ push_with_path(key,
+ full_path: full_path_for(container),
+ gl_repository: gl_repository_for(container),
+ protocol: protocol,
+ env: env,
+ changes: changes)
+ end
+
+ def push_with_path(key, full_path:, gl_repository: nil, protocol: 'ssh', env: nil, changes: nil)
+ changes ||= 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
+
+ params = {
+ changes: changes,
+ key_id: key.id,
+ project: full_path,
+ action: 'git-receive-pack',
+ secret_token: secret_token,
+ protocol: protocol,
+ env: env
+ }
+ params[:gl_repository] = gl_repository if gl_repository
+
+ post(
+ api("/internal/allowed"),
+ params: params
+ )
+ end
+
+ def archive(key, container)
+ post(
+ api("/internal/allowed"),
+ params: {
+ ref: 'master',
+ key_id: key.id,
+ project: full_path_for(container),
+ gl_repository: gl_repository_for(container),
+ action: 'git-upload-archive',
+ secret_token: secret_token,
+ protocol: 'ssh'
+ }
+ )
+ end
+end
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index f4343b8b783..6d3ac699a7c 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -126,17 +126,15 @@ module CycleAnalyticsHelpers
end
def mock_gitaly_multi_action_dates(repository, commit_time)
- allow(repository.raw).to receive(:multi_action).and_wrap_original do |m, *args|
+ allow(repository.raw).to receive(:multi_action).and_wrap_original do |m, user, kargs|
new_date = commit_time || Time.now
- branch_update = m.call(*args)
+ branch_update = m.call(user, **kargs)
if branch_update.newrev
- _, opts = args
-
commit = rugged_repo(repository).rev_parse(branch_update.newrev)
branch_update.newrev = commit.amend(
- update_ref: "#{Gitlab::Git::BRANCH_REF_PREFIX}#{opts[:branch_name]}",
+ update_ref: "#{Gitlab::Git::BRANCH_REF_PREFIX}#{kargs[:branch_name]}",
author: commit.author.merge(time: new_date),
committer: commit.committer.merge(time: new_date)
)
diff --git a/spec/support/helpers/drag_to_helper.rb b/spec/support/helpers/drag_to_helper.rb
index 2e9932f2e8a..692a4d2b30e 100644
--- a/spec/support/helpers/drag_to_helper.rb
+++ b/spec/support/helpers/drag_to_helper.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
module DragTo
- def drag_to(list_from_index: 0, from_index: 0, to_index: 0, list_to_index: 0, selector: '', scrollable: 'body', duration: 1000, perform_drop: true)
+ # rubocop:disable Metrics/ParameterLists
+ def drag_to(list_from_index: 0, from_index: 0, to_index: 0, list_to_index: 0, selector: '', scrollable: 'body', duration: 1000, perform_drop: true, extra_height: 0)
js = <<~JS
simulateDrag({
scrollable: document.querySelector('#{scrollable}'),
@@ -14,7 +15,8 @@ module DragTo
el: document.querySelectorAll('#{selector}')[#{list_to_index}],
index: #{to_index}
},
- performDrop: #{perform_drop}
+ performDrop: #{perform_drop},
+ extraHeight: #{extra_height}
});
JS
evaluate_script(js)
@@ -23,6 +25,7 @@ module DragTo
loop while drag_active?
end
end
+ # rubocop:enable Metrics/ParameterLists
def drag_active?
page.evaluate_script('window.SIMULATE_DRAG_ACTIVE').nonzero?
diff --git a/spec/support/helpers/features/blob_spec_helpers.rb b/spec/support/helpers/features/blob_spec_helpers.rb
new file mode 100644
index 00000000000..880a7249284
--- /dev/null
+++ b/spec/support/helpers/features/blob_spec_helpers.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+# These helpers help you interact with the blob page and the blob edit page (single-file editor).
+module BlobSpecHelpers
+ include ActionView::Helpers::JavaScriptHelper
+
+ def set_default_button(type)
+ evaluate_script("localStorage.setItem('gl-web-ide-button-selected', '#{type}')")
+ end
+
+ def unset_default_button
+ set_default_button('')
+ end
+
+ def editor_value
+ evaluate_script('monaco.editor.getModels()[0].getValue()')
+ end
+
+ def set_editor_value(value)
+ execute_script("monaco.editor.getModels()[0].setValue('#{value}')")
+ end
+end
diff --git a/spec/support/helpers/features/canonical_link_helpers.rb b/spec/support/helpers/features/canonical_link_helpers.rb
new file mode 100644
index 00000000000..da3a28f1cb2
--- /dev/null
+++ b/spec/support/helpers/features/canonical_link_helpers.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+# These helpers allow you to assert on canonical links in the page head.
+#
+# Usage:
+# describe "..." do
+# include Spec::Support::Helpers::Features::CanonicalLinkHelpers
+# ...
+#
+# expect(page).to have_canonical_link(url)
+#
+module Spec
+ module Support
+ module Helpers
+ module Features
+ module CanonicalLinkHelpers
+ def have_canonical_link(url)
+ have_xpath("//link[@rel=\"canonical\" and @href=\"#{url}\"]", visible: false)
+ end
+
+ def have_any_canonical_links
+ have_xpath('//link[@rel="canonical"]', visible: false)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/helpers/features/snippet_helpers.rb b/spec/support/helpers/features/snippet_helpers.rb
index c01d179770c..c26849a9680 100644
--- a/spec/support/helpers/features/snippet_helpers.rb
+++ b/spec/support/helpers/features/snippet_helpers.rb
@@ -10,39 +10,69 @@ module Spec
include ActionView::Helpers::JavaScriptHelper
include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ def snippet_description_locator
+ 'snippet-description'
+ end
+
+ def snippet_blob_path_locator
+ 'snippet_file_name'
+ end
+
+ def snippet_description_view_selector
+ '.snippet-header .snippet-description'
+ end
+
+ def snippet_description_field_collapsed
+ find('.js-description-input').find('input,textarea')
+ end
+
def snippet_get_first_blob_path
- page.find_field(snippet_blob_path_field, match: :first).value
+ page.find_field('snippet_file_name', match: :first).value
end
def snippet_get_first_blob_value
- page.find(snippet_blob_content_selector, match: :first)
+ page.find('.gl-editor-lite', match: :first)
end
def snippet_description_value
- page.find_field(snippet_description_field).value
+ page.find_field(snippet_description_locator).value
+ end
+
+ def snippet_fill_in_visibility(text)
+ page.find('#visibility-level-setting').choose(text)
end
- def snippet_fill_in_form(title:, content:, description: '')
- # fill_in snippet_title_field, with: title
- # editor_set_value(content)
- fill_in snippet_title_field, with: title
+ def snippet_fill_in_title(value)
+ fill_in 'snippet-title', with: value
+ end
- if description
- # Click placeholder first to expand full description field
- description_field.click
- fill_in snippet_description_field, with: description
- end
+ def snippet_fill_in_description(value)
+ # Click placeholder first to expand full description field
+ snippet_description_field_collapsed.click
+ fill_in snippet_description_locator, with: value
+ end
- page.within('.file-editor') do
+ def snippet_fill_in_content(value)
+ page.within('.gl-editor-lite') do
el = find('.inputarea')
- el.send_keys content
+ el.send_keys value
end
end
- private
+ def snippet_fill_in_file_name(value)
+ fill_in(snippet_blob_path_locator, match: :first, with: value)
+ end
+
+ def snippet_fill_in_form(title: nil, content: nil, file_name: nil, description: nil, visibility: nil)
+ snippet_fill_in_title(title) if title
- def description_field
- find('.js-description-input').find('input,textarea')
+ snippet_fill_in_description(description) if description
+
+ snippet_fill_in_file_name(file_name) if file_name
+
+ snippet_fill_in_content(content) if content
+
+ snippet_fill_in_visibility(visibility) if visibility
end
end
end
diff --git a/spec/support/helpers/git_http_helpers.rb b/spec/support/helpers/git_http_helpers.rb
index de8bb9ac8e3..c9c1c4dcfc9 100644
--- a/spec/support/helpers/git_http_helpers.rb
+++ b/spec/support/helpers/git_http_helpers.rb
@@ -5,45 +5,45 @@ require_relative 'workhorse_helpers'
module GitHttpHelpers
include WorkhorseHelpers
- def clone_get(project, options = {})
+ def clone_get(project, **options)
get "/#{project}/info/refs", params: { service: 'git-upload-pack' }, headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
- def clone_post(project, options = {})
+ def clone_post(project, **options)
post "/#{project}/git-upload-pack", headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
- def push_get(project, options = {})
+ def push_get(project, **options)
get "/#{project}/info/refs", params: { service: 'git-receive-pack' }, headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
- def push_post(project, options = {})
+ def push_post(project, **options)
post "/#{project}/git-receive-pack", headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
def download(project, user: nil, password: nil, spnego_request_token: nil)
- args = [project, { user: user, password: password, spnego_request_token: spnego_request_token }]
+ args = { user: user, password: password, spnego_request_token: spnego_request_token }
- clone_get(*args)
+ clone_get(project, **args)
yield response
- clone_post(*args)
+ clone_post(project, **args)
yield response
end
def upload(project, user: nil, password: nil, spnego_request_token: nil)
- args = [project, { user: user, password: password, spnego_request_token: spnego_request_token }]
+ args = { user: user, password: password, spnego_request_token: spnego_request_token }
- push_get(*args)
+ push_get(project, **args)
yield response
- push_post(*args)
+ push_post(project, **args)
yield response
end
- def download_or_upload(*args, &block)
- download(*args, &block)
- upload(*args, &block)
+ def download_or_upload(project, **args, &block)
+ download(project, **args, &block)
+ upload(project, **args, &block)
end
def auth_env(user, password, spnego_request_token)
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 5635ba3df05..db769041f1e 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -23,7 +23,7 @@ module GraphqlHelpers
return early_return unless ready
- resolver.resolve(args)
+ resolver.resolve(**args)
end
# Eagerly run a loader's named resolver
@@ -219,6 +219,7 @@ module GraphqlHelpers
def as_graphql_literal(value)
case value
when Array then "[#{value.map { |v| as_graphql_literal(v) }.join(',')}]"
+ when Hash then "{#{attributes_to_graphql(value)}}"
when Integer, Float then value.to_s
when String then "\"#{value.gsub(/"/, '\\"')}\""
when Symbol then value
@@ -234,7 +235,8 @@ module GraphqlHelpers
end
def post_graphql(query, current_user: nil, variables: nil, headers: {})
- post api('/', current_user, version: 'graphql'), params: { query: query, variables: variables }, headers: headers
+ params = { query: query, variables: variables&.to_json }
+ post api('/', current_user, version: 'graphql'), params: params, headers: headers
end
def post_graphql_mutation(mutation, current_user: nil)
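
The `as_graphql_literal` change above adds a Hash branch so nested argument objects can be rendered inline. A standalone sketch with `attributes_to_graphql` inlined and the fallback simplified; it is an illustration of the conversion, not the full helper.

# Sketch only: convert Ruby values to GraphQL literal syntax, including nested hashes.
def as_graphql_literal(value)
  case value
  when Array then "[#{value.map { |v| as_graphql_literal(v) }.join(',')}]"
  when Hash then "{#{value.map { |k, v| "#{k}: #{as_graphql_literal(v)}" }.join(', ')}}"
  when Integer, Float then value.to_s
  when String then "\"#{value.gsub(/"/, '\\"')}\""
  when Symbol then value.to_s
  else raise ArgumentError, "Cannot represent #{value.inspect} as a GraphQL literal"
  end
end

puts as_graphql_literal({ environment: { name: 'production', tier: 1 } })
# => {environment: {name: "production", tier: 1}}
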
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 4f11f8c6b24..2224af88ab9 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -39,6 +39,17 @@ module JavaScriptFixturesHelpers
Gitlab::Shell.new.remove_repository(project.repository_storage, project.disk_path)
end
+ # Public: Reads a GraphQL query from the filesystem as a string
+ #
+ # query_path - file path to the GraphQL query, relative to `app/assets/javascripts`
+ # fragment_paths - an optional array of file paths to any fragments the query uses,
+ # also relative to `app/assets/javascripts`
+ def get_graphql_query_as_string(query_path, fragment_paths = [])
+ [query_path, *fragment_paths].map do |path|
+ File.read(File.join(Rails.root, '/app/assets/javascripts', path))
+ end.join("\n")
+ end
+
private
# Private: Store a response object as fixture file
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 8882f31e2f4..113bb31e4be 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -33,6 +33,10 @@ module KubernetesHelpers
kube_response(kube_deployments_body)
end
+ def kube_ingresses_response
+ kube_response(kube_ingresses_body)
+ end
+
def stub_kubeclient_discover_base(api_url)
WebMock.stub_request(:get, api_url + '/api/v1').to_return(kube_response(kube_v1_discovery_body))
WebMock
@@ -63,6 +67,9 @@ module KubernetesHelpers
WebMock
.stub_request(:get, api_url + '/apis/serving.knative.dev/v1alpha1')
.to_return(kube_response(kube_v1alpha1_serving_knative_discovery_body))
+ WebMock
+ .stub_request(:get, api_url + '/apis/networking.k8s.io/v1')
+ .to_return(kube_response(kube_v1_networking_discovery_body))
end
def stub_kubeclient_discover_knative_not_found(api_url)
@@ -148,12 +155,20 @@ module KubernetesHelpers
WebMock.stub_request(:get, deployments_url).to_return(response || kube_deployments_response)
end
+ def stub_kubeclient_ingresses(namespace, status: nil)
+ stub_kubeclient_discover(service.api_url)
+ ingresses_url = service.api_url + "/apis/extensions/v1beta1/namespaces/#{namespace}/ingresses"
+ response = { status: status } if status
+
+ WebMock.stub_request(:get, ingresses_url).to_return(response || kube_ingresses_response)
+ end
+
def stub_kubeclient_knative_services(options = {})
namespace_path = options[:namespace].present? ? "namespaces/#{options[:namespace]}/" : ""
options[:name] ||= "kubetest"
options[:domain] ||= "example.com"
- options[:response] ||= kube_response(kube_knative_services_body(options))
+ options[:response] ||= kube_response(kube_knative_services_body(**options))
stub_kubeclient_discover(service.api_url)
@@ -265,7 +280,7 @@ module KubernetesHelpers
.to_return(kube_response({}))
end
- def kube_v1_secret_body(**options)
+ def kube_v1_secret_body(options)
{
"kind" => "SecretList",
"apiVersion": "v1",
@@ -304,6 +319,14 @@ module KubernetesHelpers
}
end
+ # From Kubernetes 1.22 onwards, Ingresses are no longer served from apis/extensions
+ def kube_1_22_extensions_v1beta1_discovery_body
+ {
+ "kind" => "APIResourceList",
+ "resources" => []
+ }
+ end
+
def kube_knative_discovery_body
{
"kind" => "APIResourceList",
@@ -416,6 +439,17 @@ module KubernetesHelpers
}
end
+ def kube_v1_networking_discovery_body
+ {
+ "kind" => "APIResourceList",
+ "apiVersion" => "v1",
+ "groupVersion" => "networking.k8s.io/v1",
+ "resources" => [
+ { "name" => "ingresses", "namespaced" => true, "kind" => "Ingress" }
+ ]
+ }
+ end
+
def kube_istio_gateway_body(name, namespace)
{
"apiVersion" => "networking.istio.io/v1alpha3",
@@ -507,6 +541,13 @@ module KubernetesHelpers
}
end
+ def kube_ingresses_body
+ {
+ "kind" => "List",
+ "items" => [kube_ingress]
+ }
+ end
+
def kube_knative_pods_body(name, namespace)
{
"kind" => "PodList",
@@ -517,7 +558,7 @@ module KubernetesHelpers
def kube_knative_services_body(**options)
{
"kind" => "List",
- "items" => [knative_09_service(options)]
+ "items" => [knative_09_service(**options)]
}
end
@@ -548,6 +589,38 @@ module KubernetesHelpers
}
end
+ def kube_ingress(track: :stable)
+ additional_annotations =
+ if track == :canary
+ {
+ "nginx.ingress.kubernetes.io/canary" => "true",
+ "nginx.ingress.kubernetes.io/canary-by-header" => "canary",
+ "nginx.ingress.kubernetes.io/canary-weight" => "50"
+ }
+ else
+ {}
+ end
+
+ {
+ "metadata" => {
+ "name" => "production-auto-deploy",
+ "labels" => {
+ "app" => "production",
+ "app.kubernetes.io/managed-by" => "Helm",
+ "chart" => "auto-deploy-app-2.0.0-beta.2",
+ "heritage" => "Helm",
+ "release" => "production"
+ },
+ "annotations" => {
+ "kubernetes.io/ingress.class" => "nginx",
+ "kubernetes.io/tls-acme" => "true",
+ "meta.helm.sh/release-name" => "production",
+ "meta.helm.sh/release-namespace" => "awesome-app-1-production"
+ }.merge(additional_annotations)
+ }
+ }
+ end
+
# This is a partial response; in reality it has many more elements, but
# these are the ones we care about at the moment
def kube_node
@@ -604,7 +677,7 @@ module KubernetesHelpers
}
end
- def kube_deployment(name: "kube-deployment", environment_slug: "production", project_slug: "project-path-slug", track: nil)
+ def kube_deployment(name: "kube-deployment", environment_slug: "production", project_slug: "project-path-slug", track: nil, replicas: 3)
{
"metadata" => {
"name" => name,
@@ -617,7 +690,7 @@ module KubernetesHelpers
"track" => track
}.compact
},
- "spec" => { "replicas" => 3 },
+ "spec" => { "replicas" => replicas },
"status" => {
"observedGeneration" => 4
}
@@ -862,8 +935,8 @@ module KubernetesHelpers
end
end
- def kube_deployment_rollout_status
- ::Gitlab::Kubernetes::RolloutStatus.from_deployments(kube_deployment)
+ def kube_deployment_rollout_status(ingresses: [])
+ ::Gitlab::Kubernetes::RolloutStatus.from_deployments(kube_deployment, ingresses: ingresses)
end
def empty_deployment_rollout_status
diff --git a/spec/support/helpers/multipart_helpers.rb b/spec/support/helpers/multipart_helpers.rb
index 043cb6e1420..2e8db0e9e42 100644
--- a/spec/support/helpers/multipart_helpers.rb
+++ b/spec/support/helpers/multipart_helpers.rb
@@ -31,7 +31,7 @@ module MultipartHelpers
raise ArgumentError, "can't handle #{mode} mode"
end
- return result if ::Feature.disabled?(:upload_middleware_jwt_params_handler)
+ return result if ::Feature.disabled?(:upload_middleware_jwt_params_handler, default_enabled: true)
# the HandlerForJWTParams expects a jwt token with the upload parameters
# *without* the "#{key}." prefix
diff --git a/spec/support/helpers/rack_attack_spec_helpers.rb b/spec/support/helpers/rack_attack_spec_helpers.rb
index 65082ec690f..a8ae69885d8 100644
--- a/spec/support/helpers/rack_attack_spec_helpers.rb
+++ b/spec/support/helpers/rack_attack_spec_helpers.rb
@@ -1,10 +1,6 @@
# frozen_string_literal: true
module RackAttackSpecHelpers
- def post_args_with_token_headers(url, token_headers)
- [url, params: nil, headers: token_headers]
- end
-
def api_get_args_with_token_headers(partial_url, token_headers)
["/api/#{API::API.version}#{partial_url}", params: nil, headers: token_headers]
end
diff --git a/spec/support/helpers/search_helpers.rb b/spec/support/helpers/search_helpers.rb
index db6e47459e9..328f272724a 100644
--- a/spec/support/helpers/search_helpers.rb
+++ b/spec/support/helpers/search_helpers.rb
@@ -1,6 +1,14 @@
# frozen_string_literal: true
module SearchHelpers
+ def fill_in_search(text)
+ page.within('.search-input-wrap') do
+ fill_in('search', with: text)
+ end
+
+ wait_for_all_requests
+ end
+
def submit_search(query, scope: nil)
page.within('.search-form, .search-page-form') do
field = find_field('search')
@@ -11,6 +19,8 @@ module SearchHelpers
else
click_button('Search')
end
+
+ wait_for_all_requests
end
end
diff --git a/spec/support/helpers/snippet_helpers.rb b/spec/support/helpers/snippet_helpers.rb
index de64ad7d3e2..1ec50bce070 100644
--- a/spec/support/helpers/snippet_helpers.rb
+++ b/spec/support/helpers/snippet_helpers.rb
@@ -8,7 +8,7 @@ module SnippetHelpers
def snippet_blob_file(blob)
{
"path" => blob.path,
- "raw_url" => gitlab_raw_snippet_blob_url(blob.container, blob.path)
+ "raw_url" => gitlab_raw_snippet_blob_url(blob.container, blob.path, host: 'localhost')
}
end
end
diff --git a/spec/support/helpers/snowplow_helpers.rb b/spec/support/helpers/snowplow_helpers.rb
index 83a5b7e48bc..3bde01c6fbf 100644
--- a/spec/support/helpers/snowplow_helpers.rb
+++ b/spec/support/helpers/snowplow_helpers.rb
@@ -32,8 +32,16 @@ module SnowplowHelpers
# end
# end
def expect_snowplow_event(category:, action:, **kwargs)
- expect(Gitlab::Tracking).to have_received(:event)
- .with(category, action, **kwargs).at_least(:once)
+    # This check will no longer be needed with Ruby 2.7, which does not
+    # pass any extra argument when an empty **kwargs hash is splatted.
+    # https://gitlab.com/gitlab-org/gitlab/-/issues/263430
+ if kwargs.present?
+ expect(Gitlab::Tracking).to have_received(:event)
+ .with(category, action, **kwargs).at_least(:once)
+ else
+ expect(Gitlab::Tracking).to have_received(:event)
+ .with(category, action).at_least(:once)
+ end
end
# Asserts that no call to `Gitlab::Tracking#event` was made.
diff --git a/spec/support/helpers/stub_experiments.rb b/spec/support/helpers/stub_experiments.rb
index ff3b02dc3f6..7a6154d5ef9 100644
--- a/spec/support/helpers/stub_experiments.rb
+++ b/spec/support/helpers/stub_experiments.rb
@@ -22,10 +22,10 @@ module StubExperiments
# Examples
# - `stub_experiment_for_user(signup_flow: false)` ... Disable `signup_flow` experiment for user.
def stub_experiment_for_user(experiments)
- allow(Gitlab::Experimentation).to receive(:enabled_for_user?).and_call_original
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?).and_call_original
experiments.each do |experiment_key, enabled|
- allow(Gitlab::Experimentation).to receive(:enabled_for_user?).with(experiment_key, anything) { enabled }
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?).with(experiment_key, anything) { enabled }
end
end
end
diff --git a/spec/support/helpers/stub_feature_flags.rb b/spec/support/helpers/stub_feature_flags.rb
index 792a1c21c31..7f30a2a70cd 100644
--- a/spec/support/helpers/stub_feature_flags.rb
+++ b/spec/support/helpers/stub_feature_flags.rb
@@ -62,4 +62,8 @@ module StubFeatureFlags
StubFeatureGate.new(object)
end
+
+ def skip_feature_flags_yaml_validation
+ allow(Feature::Definition).to receive(:valid_usage!)
+ end
end
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index 476b7d34ee5..dba3d2b137e 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -82,13 +82,27 @@ module StubObjectStorage
**params)
end
- def stub_terraform_state_object_storage(uploader = described_class, **params)
+ def stub_terraform_state_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.terraform_state.object_store,
- uploader: uploader,
+ uploader: Terraform::VersionedStateUploader,
+ remote_directory: 'terraform',
+ **params)
+ end
+
+ def stub_terraform_state_version_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.terraform_state.object_store,
+ uploader: Terraform::StateUploader,
remote_directory: 'terraform',
**params)
end
+ def stub_pages_object_storage(uploader = described_class, **params)
+ stub_object_storage_uploader(config: Gitlab.config.pages.object_store,
+ uploader: uploader,
+ remote_directory: 'pages',
+ **params)
+ end
+
def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id")
stub_request(:post, %r{\A#{endpoint}tmp/uploads/[a-z0-9-]*\?uploads\z})
.to_return status: 200, body: <<-EOS.strip_heredoc
diff --git a/spec/support/helpers/stubbed_feature.rb b/spec/support/helpers/stubbed_feature.rb
index d4e9af7a031..67ceb7d9b35 100644
--- a/spec/support/helpers/stubbed_feature.rb
+++ b/spec/support/helpers/stubbed_feature.rb
@@ -4,6 +4,14 @@
module StubbedFeature
extend ActiveSupport::Concern
+ prepended do
+ cattr_reader(:persist_used) do
+      # Persist the set of used feature flags when running in CI;
+      # nil indicates that we do not want to persist used feature flags.
+ Gitlab::Utils.to_boolean(ENV['CI']) ? {} : nil
+ end
+ end
+
class_methods do
# Turn stubbed feature flags on or off.
def stub=(stub)
@@ -29,10 +37,12 @@ module StubbedFeature
end
# Replace #enabled? method with the optional stubbed/unstubbed version.
- def enabled?(*args)
- feature_flag = super(*args)
+ def enabled?(*args, **kwargs)
+ feature_flag = super
return feature_flag unless stub?
+ persist_used!(args.first)
+
# If feature flag is not persisted we mark the feature flag as enabled
# We do `m.call` as we want to validate the execution of method arguments
# and a feature flag state if it is not persisted
@@ -42,5 +52,17 @@ module StubbedFeature
feature_flag
end
+
+    # This method creates a temporary file in `tmp/feature_flags`
+    # if a feature flag was touched during execution.
+ def persist_used!(name)
+ return unless persist_used
+ return if persist_used[name]
+
+ persist_used[name] = true
+ FileUtils.touch(
+ Rails.root.join('tmp', 'feature_flags', name.to_s + ".used")
+ )
+ end
end
end
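A hedged sketch of how the `*.used` marker files written by `persist_used!` could be consumed after a CI run; the consumer below is hypothetical and not part of this patch:

# Hypothetical post-run step: list the feature flags touched while the suite ran in CI.
used_flags = Dir.glob(Rails.root.join('tmp', 'feature_flags', '*.used'))
                .map { |path| File.basename(path, '.used') }
puts used_flags.sort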
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index d92fcdc2d4a..2592d9f8b42 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -99,6 +99,7 @@ module UsageDataHelpers
projects_with_error_tracking_enabled
projects_with_alerts_service_enabled
projects_with_prometheus_alerts
+ projects_with_tracing_enabled
projects_with_expiration_policy_enabled
projects_with_expiration_policy_disabled
projects_with_expiration_policy_enabled_with_keep_n_unset
@@ -133,6 +134,7 @@ module UsageDataHelpers
todos
uploads
web_hooks
+ user_preferences_user_gitpod_enabled
).push(*SMAU_KEYS)
USAGE_DATA_KEYS = %i(
@@ -171,6 +173,10 @@ module UsageDataHelpers
allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
end
+ def clear_memoized_values(values)
+ values.each { |v| described_class.clear_memoization(v) }
+ end
+
def stub_object_store_settings
allow(Settings).to receive(:[]).with('artifacts')
.and_return(
@@ -228,9 +234,9 @@ module UsageDataHelpers
receive_matchers.each { |m| expect(prometheus_client).to m }
end
- def for_defined_days_back(days: [29, 2])
+ def for_defined_days_back(days: [31, 3])
days.each do |n|
- Timecop.travel(n.days.ago) do
+ travel_to(n.days.ago) do
yield
end
end
diff --git a/spec/support/helpers/wait_for_requests.rb b/spec/support/helpers/wait_for_requests.rb
index 2cfd47634ca..43060e571a9 100644
--- a/spec/support/helpers/wait_for_requests.rb
+++ b/spec/support/helpers/wait_for_requests.rb
@@ -48,17 +48,10 @@ module WaitForRequests
def finished_all_js_requests?
return true unless javascript_test?
- finished_all_ajax_requests? &&
- finished_all_axios_requests?
- end
-
- def finished_all_axios_requests?
- Capybara.page.evaluate_script('window.pendingRequests || 0').zero? # rubocop:disable Style/NumericPredicate
+ finished_all_ajax_requests?
end
def finished_all_ajax_requests?
- return true if Capybara.page.evaluate_script('typeof jQuery === "undefined"')
-
- Capybara.page.evaluate_script('jQuery.active').zero? # rubocop:disable Style/NumericPredicate
+ Capybara.page.evaluate_script('window.pendingRequests || window.pendingRailsUJSRequests || 0').zero? # rubocop:disable Style/NumericPredicate
end
end
diff --git a/spec/support/helpers/wiki_helpers.rb b/spec/support/helpers/wiki_helpers.rb
index e59c6bde264..8873a90579d 100644
--- a/spec/support/helpers/wiki_helpers.rb
+++ b/spec/support/helpers/wiki_helpers.rb
@@ -13,16 +13,16 @@ module WikiHelpers
find('.svg-content .js-lazy-loaded') if example.nil? || example.metadata.key?(:js)
end
- def upload_file_to_wiki(container, user, file_name)
- opts = {
+ def upload_file_to_wiki(wiki, user, file_name)
+ params = {
file_name: file_name,
file_content: File.read(expand_fixture_path(file_name))
}
::Wikis::CreateAttachmentService.new(
- container: container,
+ container: wiki.container,
current_user: user,
- params: opts
- ).execute[:result][:file_path]
+ params: params
+ ).execute.dig(:result, :file_path)
end
end
diff --git a/spec/support/matchers/be_sorted.rb b/spec/support/matchers/be_sorted.rb
new file mode 100644
index 00000000000..1455060fe71
--- /dev/null
+++ b/spec/support/matchers/be_sorted.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# Assert that this collection is sorted by the given field and order.
+#
+# By default, this checks that the collection is sorted in ascending order,
+# but you can check sorting by a specific field and direction by passing
+# them, e.g.:
+#
+# ```
+# expect(collection).to be_sorted(:field, :desc)
+# ```
+RSpec::Matchers.define :be_sorted do |by, order = :asc|
+ match do |actual|
+    next true unless actual.present? # empty collection is sorted
+
+ actual
+ .then { |collection| by ? collection.sort_by(&by) : collection.sort }
+ .then { |sorted_collection| order.to_sym == :desc ? sorted_collection.reverse : sorted_collection }
+ .then { |sorted_collection| sorted_collection == actual }
+ end
+end
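A minimal usage sketch for this matcher; `issues` below is a hypothetical collection whose elements respond to `#created_at`:

# Default: ascending order, no arguments.
expect([1, 2, 3]).to be_sorted

# Descending order on a specific field.
expect(issues).to be_sorted(:created_at, :desc)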
diff --git a/spec/support/migrations_helpers/cluster_helpers.rb b/spec/support/migrations_helpers/cluster_helpers.rb
index b54af15c29e..03104e22bcf 100644
--- a/spec/support/migrations_helpers/cluster_helpers.rb
+++ b/spec/support/migrations_helpers/cluster_helpers.rb
@@ -4,7 +4,7 @@ module MigrationHelpers
module ClusterHelpers
# Creates a list of cluster projects.
def create_cluster_project_list(quantity)
- group = namespaces_table.create(name: 'gitlab-org', path: 'gitlab-org')
+ group = namespaces_table.create!(name: 'gitlab-org', path: 'gitlab-org')
quantity.times do |id|
create_cluster_project(group, id)
@@ -25,14 +25,14 @@ module MigrationHelpers
namespace_id: group.id
)
- cluster = clusters_table.create(
+ cluster = clusters_table.create!(
name: 'test-cluster',
cluster_type: 3,
provider_type: :gcp,
platform_type: :kubernetes
)
- cluster_projects_table.create(project_id: project.id, cluster_id: cluster.id)
+ cluster_projects_table.create!(project_id: project.id, cluster_id: cluster.id)
provider_gcp_table.create!(
gcp_project_id: "test-gcp-project-#{id}",
@@ -43,7 +43,7 @@ module MigrationHelpers
zone: 'us-central1-a'
)
- platform_kubernetes_table.create(
+ platform_kubernetes_table.create!(
cluster_id: cluster.id,
api_url: 'https://kubernetes.example.com',
encrypted_token: 'a' * 40,
@@ -58,7 +58,7 @@ module MigrationHelpers
project = projects_table.find(cluster_project.project_id)
namespace = "#{project.path}-#{project.id}"
- cluster_kubernetes_namespaces_table.create(
+ cluster_kubernetes_namespaces_table.create!(
cluster_project_id: cluster_project.id,
cluster_id: cluster.id,
project_id: cluster_project.project_id,
diff --git a/spec/support/migrations_helpers/namespaces_helper.rb b/spec/support/migrations_helpers/namespaces_helper.rb
index 4ca01c87568..c62ef6a4620 100644
--- a/spec/support/migrations_helpers/namespaces_helper.rb
+++ b/spec/support/migrations_helpers/namespaces_helper.rb
@@ -3,7 +3,7 @@
module MigrationHelpers
module NamespacesHelpers
def create_namespace(name, visibility, options = {})
- table(:namespaces).create({
+ table(:namespaces).create!({
name: name,
path: name,
type: 'Group',
diff --git a/spec/support/migrations_helpers/schema_version_finder.rb b/spec/support/migrations_helpers/schema_version_finder.rb
new file mode 100644
index 00000000000..b677db7ea26
--- /dev/null
+++ b/spec/support/migrations_helpers/schema_version_finder.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+# Sometimes data migration specs require adding invalid test data in order to test
+# the migration (e.g. adding a row with a null foreign key). Certain db migrations that
+# add constraints (e.g. a NOT NULL constraint) prevent such invalid records from being added
+# and therefore prevent the data migration from being tested. For this reason, SchemaVersionFinder
+# can be used to find and run the spec against the schema prior to the specified migration.
+#
+# @example
+# RSpec.describe CleanupThings, :migration, schema: MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraint) do ...
+#
+# SchemaVersionFinder returns the schema version prior to the one specified, which allows
+# invalid records to be added to the database, which in turn allows the data migration to be tested properly.
+module MigrationHelpers
+ class SchemaVersionFinder
+ def self.migrations_paths
+ ActiveRecord::Migrator.migrations_paths
+ end
+
+ def self.migration_context
+ ActiveRecord::MigrationContext.new(migrations_paths, ActiveRecord::SchemaMigration)
+ end
+
+ def self.migrations
+ migration_context.migrations
+ end
+
+ def self.migration_prior(migration_klass)
+ migrations.each_cons(2) do |previous, migration|
+ break previous.version if migration.name == migration_klass.name
+ end
+ end
+ end
+end
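For illustration, `migration_prior` walks the migration list pairwise and returns the version of the migration immediately before the given class; the class name and version below are hypothetical:

# Hypothetical lookup: returns the version of the migration that precedes AddNotNullConstraint.
MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraint)
# => 20201001123456 (version of the preceding migration)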
diff --git a/spec/support/models/merge_request_without_merge_request_diff.rb b/spec/support/models/merge_request_without_merge_request_diff.rb
new file mode 100644
index 00000000000..5cdf1feb7a5
--- /dev/null
+++ b/spec/support/models/merge_request_without_merge_request_diff.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class MergeRequestWithoutMergeRequestDiff < ::MergeRequest
+ self.inheritance_column = :_type_disabled
+
+ def ensure_merge_request_diff; end
+end
diff --git a/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb b/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
index e3c1d0afa53..bfb719fd840 100644
--- a/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
+++ b/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
@@ -25,7 +25,7 @@ RSpec.shared_examples 'allowed user IDs are cached' do
expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
expect(subject).to be_truthy
- end.not_to exceed_query_limit(2)
+ end.not_to exceed_query_limit(3)
end
end
end
diff --git a/spec/support/shared_contexts/email_shared_context.rb b/spec/support/shared_contexts/email_shared_context.rb
index b4d7722f03d..298e03162c4 100644
--- a/spec/support/shared_contexts/email_shared_context.rb
+++ b/spec/support/shared_contexts/email_shared_context.rb
@@ -21,7 +21,7 @@ end
RSpec.shared_examples :reply_processing_shared_examples do
context "when the user could not be found" do
before do
- user.destroy
+ user.destroy!
end
it "raises a UserNotFoundError" do
diff --git a/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb
index 58ee48a98f1..2b6edb4c07d 100644
--- a/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/group_projects_finder_shared_contexts.rb
@@ -18,8 +18,8 @@ RSpec.shared_context 'GroupProjectsFinder context' do
let!(:subgroup_private_project) { create(:project, :private, path: '7', group: subgroup) }
before do
- shared_project_1.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
- shared_project_2.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
- shared_project_3.project_group_links.create(group_access: Gitlab::Access::MAINTAINER, group: group)
+ shared_project_1.project_group_links.create!(group_access: Gitlab::Access::MAINTAINER, group: group)
+ shared_project_2.project_group_links.create!(group_access: Gitlab::Access::MAINTAINER, group: group)
+ shared_project_3.project_group_links.create!(group_access: Gitlab::Access::MAINTAINER, group: group)
end
end
diff --git a/spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb
new file mode 100644
index 00000000000..6b9ddc70691
--- /dev/null
+++ b/spec/support/shared_contexts/lib/gitlab/import_export/relation_tree_restorer_shared_context.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'relation tree restorer shared context' do
+ include ImportExport::CommonUtil
+
+ let(:user) { create(:user) }
+ let(:shared) { Gitlab::ImportExport::Shared.new(importable) }
+ let(:attributes) { relation_reader.consume_attributes(importable_name) }
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(exported_members: {}, user: user, importable: importable)
+ end
+end
diff --git a/spec/support/shared_contexts/mailers/notify_shared_context.rb b/spec/support/shared_contexts/mailers/notify_shared_context.rb
index de8c0d5d2b4..4b7d028410a 100644
--- a/spec/support/shared_contexts/mailers/notify_shared_context.rb
+++ b/spec/support/shared_contexts/mailers/notify_shared_context.rb
@@ -11,7 +11,7 @@ RSpec.shared_context 'gitlab email notification' do
let(:new_user_address) { 'newguy@example.com' }
before do
- email = recipient.emails.create(email: "notifications@example.com")
+ email = recipient.emails.create!(email: "notifications@example.com")
recipient.update_attribute(:notification_email, email.email)
stub_incoming_email_setting(enabled: true, address: "reply+%{key}@#{Gitlab.config.gitlab.host}")
end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 747358fc1e0..9ebfdcb9522 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -44,7 +44,8 @@ RSpec.shared_context 'project navbar structure' do
_('Boards'),
_('Labels'),
_('Service Desk'),
- _('Milestones')
+ _('Milestones'),
+ (_('Iterations') if Gitlab.ee?)
]
},
{
@@ -64,14 +65,16 @@ RSpec.shared_context 'project navbar structure' do
nav_item: _('Operations'),
nav_sub_items: [
_('Metrics'),
+ _('Logs'),
+ _('Tracing'),
+ _('Error Tracking'),
_('Alerts'),
_('Incidents'),
- _('Environments'),
- _('Error Tracking'),
- _('Product Analytics'),
_('Serverless'),
- _('Logs'),
- _('Kubernetes')
+ _('Kubernetes'),
+ _('Environments'),
+ _('Feature Flags'),
+ _('Product Analytics')
]
},
analytics_nav_item,
diff --git a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
index 113252a6ab5..84910d0dfe4 100644
--- a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
@@ -85,7 +85,7 @@ RSpec.shared_examples 'multiple issue boards' do
wait_for_requests
- expect(page).to have_selector('.board', count: 3)
+ expect(page).to have_selector('.board', count: 5)
in_boards_switcher_dropdown do
click_link board.name
@@ -93,7 +93,7 @@ RSpec.shared_examples 'multiple issue boards' do
wait_for_requests
- expect(page).to have_selector('.board', count: 2)
+ expect(page).to have_selector('.board', count: 4)
end
it 'maintains sidebar state over board switch' do
diff --git a/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb b/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb
new file mode 100644
index 00000000000..54d41f9a68c
--- /dev/null
+++ b/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'project access tokens available #index' do
+ let_it_be(:active_project_access_token) { create(:personal_access_token, user: bot_user) }
+ let_it_be(:inactive_project_access_token) { create(:personal_access_token, :revoked, user: bot_user) }
+
+ it 'retrieves active project access tokens' do
+ subject
+
+ expect(assigns(:active_project_access_tokens)).to contain_exactly(active_project_access_token)
+ end
+
+ it 'retrieves inactive project access tokens' do
+ subject
+
+ expect(assigns(:inactive_project_access_tokens)).to contain_exactly(inactive_project_access_token)
+ end
+
+ it 'lists all available scopes' do
+ subject
+
+ expect(assigns(:scopes)).to eq(Gitlab::Auth.resource_bot_scopes)
+ end
+
+ it 'retrieves newly created personal access token value' do
+ token_value = 'random-value'
+ allow(PersonalAccessToken).to receive(:redis_getdel).with("#{user.id}:#{project.id}").and_return(token_value)
+
+ subject
+
+ expect(assigns(:new_project_access_token)).to eq(token_value)
+ end
+end
+
+RSpec.shared_examples 'project access tokens available #create' do
+ def created_token
+ PersonalAccessToken.order(:created_at).last
+ end
+
+ it 'returns success message' do
+ subject
+
+ expect(response.flash[:notice]).to match('Your new project access token has been created.')
+ end
+
+ it 'creates project access token' do
+ subject
+
+ expect(created_token.name).to eq(access_token_params[:name])
+ expect(created_token.scopes).to eq(access_token_params[:scopes])
+ expect(created_token.expires_at).to eq(access_token_params[:expires_at])
+ end
+
+ it 'creates project bot user' do
+ subject
+
+ expect(created_token.user).to be_project_bot
+ end
+
+  it 'stores the newly created token in the redis store' do
+ expect(PersonalAccessToken).to receive(:redis_store!)
+
+ subject
+ end
+
+ it { expect { subject }.to change { User.count }.by(1) }
+ it { expect { subject }.to change { PersonalAccessToken.count }.by(1) }
+
+ context 'when unsuccessful' do
+ before do
+ allow_next_instance_of(ResourceAccessTokens::CreateService) do |service|
+ allow(service).to receive(:execute).and_return ServiceResponse.error(message: 'Failed!')
+ end
+ end
+
+ it { expect(subject).to render_template(:index) }
+ end
+end
+
+RSpec.shared_examples 'project access tokens available #revoke' do
+ it 'calls delete user worker' do
+ expect(DeleteUserWorker).to receive(:perform_async).with(user.id, bot_user.id, skip_authorization: true)
+
+ subject
+ end
+
+ it 'removes membership of bot user' do
+ subject
+
+ expect(project.reload.bots).not_to include(bot_user)
+ end
+
+ it 'converts issuables of the bot user to ghost user' do
+ issue = create(:issue, author: bot_user)
+
+ subject
+
+ expect(issue.reload.author.ghost?).to be true
+ end
+
+ it 'deletes project bot user' do
+ subject
+
+ expect(User.exists?(bot_user.id)).to be_falsy
+ end
+end
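These shared examples rely on the including spec defining `user`, `project`, `bot_user`, and, for `#create`, `access_token_params`; a hedged sketch of an including controller spec (names and params are illustrative):

# Hypothetical including spec; the lets mirror the names referenced above.
it_behaves_like 'project access tokens available #index' do
  let(:user) { create(:user) }
  let(:project) { create(:project, namespace: user.namespace) }
  let(:bot_user) { create(:user) }

  subject { get :index, params: { namespace_id: project.namespace, project_id: project } }
end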
diff --git a/spec/support/shared_examples/controllers/cache_control_shared_examples.rb b/spec/support/shared_examples/controllers/cache_control_shared_examples.rb
index 426d7f95222..5496e04e26c 100644
--- a/spec/support/shared_examples/controllers/cache_control_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/cache_control_shared_examples.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'project cache control headers' do
before do
- project.update(visibility_level: visibility_level)
+ project.update!(visibility_level: visibility_level)
end
context 'when project is public' do
diff --git a/spec/support/shared_examples/controllers/destroy_hook_shared_examples.rb b/spec/support/shared_examples/controllers/destroy_hook_shared_examples.rb
new file mode 100644
index 00000000000..710aa333dec
--- /dev/null
+++ b/spec/support/shared_examples/controllers/destroy_hook_shared_examples.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'Web hook destroyer' do
+ it 'displays a message about synchronous delete', :aggregate_failures do
+ expect_next_instance_of(WebHooks::DestroyService) do |instance|
+ expect(instance).to receive(:execute).with(anything).and_call_original
+ end
+
+ delete :destroy, params: params
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:notice]).to eq("#{hook.model_name.human} was deleted")
+ end
+
+ it 'displays a message about async delete', :aggregate_failures do
+ expect_next_instance_of(WebHooks::DestroyService) do |instance|
+      expect(instance).to receive(:execute).with(anything).and_return({ status: :success, async: true })
+ end
+
+ delete :destroy, params: params
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:notice]).to eq("#{hook.model_name.human} was scheduled for deletion")
+ end
+
+ it 'displays an error if deletion failed', :aggregate_failures do
+ expect_next_instance_of(WebHooks::DestroyService) do |instance|
+      expect(instance).to receive(:execute).with(anything).and_return({ status: :error, async: true, message: "failed" })
+ end
+
+ delete :destroy, params: params
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(flash[:alert]).to eq("failed")
+ end
+end
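The examples above expect the including spec to define `hook` and `params`; a hedged sketch of an including spec (factory and routing params are illustrative):

# Hypothetical including spec for a project hooks controller.
it_behaves_like 'Web hook destroyer' do
  let(:hook) { create(:project_hook, project: project) }
  let(:params) { { namespace_id: project.namespace, project_id: project, id: hook } }
end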
diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
index 8bc91f72b8c..2fcc88ef36a 100644
--- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
@@ -262,7 +262,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
context "when the namespace is owned by the GitLab user" do
before do
user.username = other_username
- user.save
+ user.save!
end
it "takes the existing namespace" do
diff --git a/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb b/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
index 7f26155f9d6..3f147f942ba 100644
--- a/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
@@ -59,7 +59,7 @@ RSpec.shared_examples 'known sign in' do
it 'notifies the user when the cookie is expired' do
stub_cookie
- Timecop.freeze((KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY + 1.day).from_now) do
+ travel_to((KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY + 1.day).from_now) do
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end
diff --git a/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb b/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb
index 925c45005f0..2d35b1681ea 100644
--- a/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb
@@ -2,9 +2,28 @@
RSpec.shared_examples 'milestone tabs' do
def go(path, extra_params = {})
- params = { namespace_id: project.namespace.to_param, project_id: project, id: milestone.iid }
+ get path, params: request_params.merge(extra_params)
+ end
+
+ describe '#issues' do
+ context 'as html' do
+ before do
+ go(:issues, format: 'html')
+ end
- get path, params: params.merge(extra_params)
+ it 'redirects to milestone#show' do
+ expect(response).to redirect_to(milestone_path)
+ end
+ end
+
+ context 'as json' do
+ it 'renders the issues tab template to a string' do
+ go(:issues, format: 'json')
+
+ expect(response).to render_template('shared/milestones/_issues_tab')
+ expect(json_response).to have_key('html')
+ end
+ end
end
describe '#merge_requests' do
diff --git a/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb b/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb
index f2a97a86df6..b67eb0d99fd 100644
--- a/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/sessionless_auth_controller_shared_examples.rb
@@ -44,7 +44,7 @@ RSpec.shared_examples 'authenticates sessionless user' do |path, format, params|
.to increment(:user_unauthenticated_counter)
end
- personal_access_token.update(scopes: [:read_user])
+ personal_access_token.update!(scopes: [:read_user])
get path, params: default_params.merge(private_token: personal_access_token.token)
diff --git a/spec/support/shared_examples/controllers/unique_hll_events_examples.rb b/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
index 7e5a225f020..cf7ee17ea13 100644
--- a/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
+++ b/spec/support/shared_examples/controllers/unique_hll_events_examples.rb
@@ -1,47 +1,24 @@
# frozen_string_literal: true
+#
+# Requires a context containing:
+# - request
+# - expected_type
+# - target_id
RSpec.shared_examples 'tracking unique hll events' do |feature_flag|
- context 'when format is HTML' do
- let(:format) { :html }
+ it 'tracks unique event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(expected_type, target_id)
- it 'tracks unique event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(expected_type, target_id)
-
- subject
- end
-
- it 'tracks unique event if DNT is not enabled' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(expected_type, target_id)
- request.headers['DNT'] = '0'
-
- subject
- end
-
- it 'does not track unique event if DNT is enabled' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event).with(expected_type, target_id)
- request.headers['DNT'] = '1'
-
- subject
- end
-
- context 'when feature flag is disabled' do
- it 'does not track unique event' do
- stub_feature_flags(feature_flag => false)
-
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event).with(expected_type, target_id)
-
- subject
- end
- end
+ request
end
- context 'when format is JSON' do
- let(:format) { :json }
+ context 'when feature flag is disabled' do
+ it 'does not track unique event' do
+ stub_feature_flags(feature_flag => false)
- it 'does not track unique event if the format is JSON' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event).with(expected_type, target_id)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- subject
+ request
end
end
end
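A hedged sketch of an including spec that supplies the required `request`, `expected_type`, and `target_id` context; the action, feature flag, and event name are illustrative:

# Hypothetical including spec.
it_behaves_like 'tracking unique hll events', :track_unique_example_views do
  subject(:request) { get :show, params: { id: record.id }, format: :html }

  let(:target_id) { 'example_action' }
  let(:expected_type) { instance_of(String) }
end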
diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
index 4ca400dd87b..a6ad8fc594c 100644
--- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
@@ -15,10 +15,10 @@ RSpec.shared_examples 'wiki controller actions' do
end
describe 'GET #new' do
- subject { get :new, params: routing_params }
+ subject(:request) { get :new, params: routing_params }
it 'redirects to #show and appends a `random_title` param' do
- subject
+ request
expect(response).to be_redirect
expect(response.redirect_url).to match(%r{
@@ -35,7 +35,7 @@ RSpec.shared_examples 'wiki controller actions' do
end
it 'redirects to the wiki container and displays an error message' do
- subject
+ request
expect(response).to redirect_to(container)
expect(flash[:notice]).to eq('Could not create Wiki Repository at this time. Please try again later.')
@@ -146,13 +146,13 @@ RSpec.shared_examples 'wiki controller actions' do
let(:random_title) { nil }
- subject { get :show, params: routing_params.merge(id: id, random_title: random_title) }
+ subject(:request) { get :show, params: routing_params.merge(id: id, random_title: random_title) }
context 'when page exists' do
let(:id) { wiki_title }
it 'renders the page' do
- subject
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('shared/wikis/show')
@@ -161,19 +161,26 @@ RSpec.shared_examples 'wiki controller actions' do
expect(assigns(:sidebar_limited)).to be(false)
end
- it 'increases the page view counter' do
- expect do
- subject
+ context 'page view tracking' do
+ it_behaves_like 'tracking unique hll events', :track_unique_wiki_page_views do
+ let(:target_id) { 'wiki_action' }
+ let(:expected_type) { instance_of(String) }
+ end
- expect(response).to have_gitlab_http_status(:ok)
- end.to change { Gitlab::UsageDataCounters::WikiPageCounter.read(:view) }.by(1)
+ it 'increases the page view counter' do
+ expect do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end.to change { Gitlab::UsageDataCounters::WikiPageCounter.read(:view) }.by(1)
+ end
end
context 'when page content encoding is invalid' do
it 'sets flash error' do
allow(controller).to receive(:valid_encoding?).and_return(false)
- subject
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('shared/wikis/show')
@@ -187,7 +194,7 @@ RSpec.shared_examples 'wiki controller actions' do
context 'when the user can create pages' do
before do
- subject
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('shared/wikis/edit')
@@ -212,7 +219,7 @@ RSpec.shared_examples 'wiki controller actions' do
end
it 'shows the empty state' do
- subject
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template('shared/wikis/empty')
@@ -226,10 +233,10 @@ RSpec.shared_examples 'wiki controller actions' do
where(:file_name) { ['dk.png', 'unsanitized.svg', 'git-cheat-sheet.pdf'] }
with_them do
- let(:id) { upload_file_to_wiki(container, user, file_name) }
+ let(:id) { upload_file_to_wiki(wiki, user, file_name) }
it 'delivers the file with the correct headers' do
- subject
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['Content-Disposition']).to match(/^inline/)
@@ -255,7 +262,7 @@ RSpec.shared_examples 'wiki controller actions' do
let(:id_param) { 'invalid' }
it 'redirects to show' do
- subject
+ request
expect(response).to redirect_to_wiki(wiki, 'invalid')
end
@@ -265,7 +272,7 @@ RSpec.shared_examples 'wiki controller actions' do
let(:id_param) { ' ' }
it 'redirects to the home page' do
- subject
+ request
expect(response).to redirect_to_wiki(wiki, 'home')
end
@@ -275,7 +282,7 @@ RSpec.shared_examples 'wiki controller actions' do
it 'redirects to show' do
allow(controller).to receive(:valid_encoding?).and_return(false)
- subject
+ request
expect(response).to redirect_to_wiki(wiki, wiki.list_pages.first)
end
@@ -288,7 +295,7 @@ RSpec.shared_examples 'wiki controller actions' do
allow(page).to receive(:content).and_return(nil)
allow(controller).to receive(:page).and_return(page)
- subject
+ request
expect(response).to redirect_to_wiki(wiki, page)
end
@@ -298,7 +305,7 @@ RSpec.shared_examples 'wiki controller actions' do
describe 'GET #edit' do
let(:id_param) { wiki_title }
- subject { get(:edit, params: routing_params.merge(id: id_param)) }
+ subject(:request) { get(:edit, params: routing_params.merge(id: id_param)) }
it_behaves_like 'edit action'
@@ -306,7 +313,7 @@ RSpec.shared_examples 'wiki controller actions' do
render_views
it 'shows the edit page' do
- subject
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(response.body).to include(s_('Wiki|Edit Page'))
@@ -319,7 +326,7 @@ RSpec.shared_examples 'wiki controller actions' do
let(:new_content) { 'New content' }
let(:id_param) { wiki_title }
- subject do
+ subject(:request) do
patch(:update,
params: routing_params.merge(
id: id_param,
@@ -333,7 +340,7 @@ RSpec.shared_examples 'wiki controller actions' do
render_views
it 'updates the page' do
- subject
+ request
wiki_page = wiki.list_pages(load_content: true).first
@@ -348,7 +355,7 @@ RSpec.shared_examples 'wiki controller actions' do
end
it 'renders the empty state' do
- subject
+ request
expect(response).to render_template('shared/wikis/empty')
end
@@ -359,7 +366,7 @@ RSpec.shared_examples 'wiki controller actions' do
let(:new_title) { 'New title' }
let(:new_content) { 'New content' }
- subject do
+ subject(:request) do
post(:create,
params: routing_params.merge(
wiki: { title: new_title, content: new_content }
@@ -369,7 +376,7 @@ RSpec.shared_examples 'wiki controller actions' do
context 'when page is valid' do
it 'creates the page' do
expect do
- subject
+ request
end.to change { wiki.list_pages.size }.by 1
wiki_page = wiki.find_page(new_title)
@@ -384,7 +391,7 @@ RSpec.shared_examples 'wiki controller actions' do
it 'renders the edit state' do
expect do
- subject
+ request
end.not_to change { wiki.list_pages.size }
expect(response).to render_template('shared/wikis/edit')
@@ -395,7 +402,7 @@ RSpec.shared_examples 'wiki controller actions' do
describe 'DELETE #destroy' do
let(:id_param) { wiki_title }
- subject do
+ subject(:request) do
delete(:destroy,
params: routing_params.merge(
id: id_param
@@ -405,7 +412,7 @@ RSpec.shared_examples 'wiki controller actions' do
context 'when page exists' do
it 'deletes the page' do
expect do
- subject
+ request
end.to change { wiki.list_pages.size }.by(-1)
end
@@ -418,7 +425,7 @@ RSpec.shared_examples 'wiki controller actions' do
it 'renders the edit state' do
expect do
- subject
+ request
end.not_to change { wiki.list_pages.size }
expect(response).to render_template('shared/wikis/edit')
@@ -432,7 +439,7 @@ RSpec.shared_examples 'wiki controller actions' do
it 'renders 404' do
expect do
- subject
+ request
end.not_to change { wiki.list_pages.size }
expect(response).to have_gitlab_http_status(:not_found)
diff --git a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
index c9910487798..2fff4137934 100644
--- a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
@@ -11,6 +11,15 @@ RSpec.shared_examples 'an editable merge request' do
expect(page).to have_content user.name
end
+ find('.js-reviewer-search').click
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+ expect(find('input[name="merge_request[reviewer_ids][]"]', visible: false).value).to match(user.id.to_s)
+ page.within '.js-reviewer-search' do
+ expect(page).to have_content user.name
+ end
+
click_button 'Milestone'
page.within '.issue-milestone' do
click_link milestone.title
@@ -38,6 +47,10 @@ RSpec.shared_examples 'an editable merge request' do
expect(page).to have_content user.name
end
+ page.within '.reviewer' do
+ expect(page).to have_content user.name
+ end
+
page.within '.milestone' do
expect(page).to have_content milestone.title
end
@@ -69,7 +82,7 @@ RSpec.shared_examples 'an editable merge request' do
end
it 'warns about version conflict' do
- merge_request.update(title: "New title")
+ merge_request.update!(title: "New title")
fill_in 'merge_request_title', with: 'bug 345'
fill_in 'merge_request_description', with: 'bug description'
@@ -124,16 +137,3 @@ end
def get_textarea_height
page.evaluate_script('document.getElementById("merge_request_description").offsetHeight')
end
-
-RSpec.shared_examples 'an editable merge request with reviewers' do
- it 'updates merge request', :js do
- find('.js-reviewer-search').click
- page.within '.dropdown-menu-user' do
- click_link user.name
- end
- expect(find('input[name="merge_request[reviewer_ids][]"]', visible: false).value).to match(user.id.to_s)
- page.within '.js-reviewer-search' do
- expect(page).to have_content user.name
- end
- end
-end
diff --git a/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb b/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb
new file mode 100644
index 00000000000..ac1cc2da7e3
--- /dev/null
+++ b/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'issuable invite members experiments' do
+ context 'when invite_members_version_a experiment is enabled' do
+ before do
+ stub_experiment_for_user(invite_members_version_a: true)
+ end
+
+ it 'shows a link for inviting members and follows through to the members page' do
+ project.add_maintainer(user)
+ visit issuable_path
+
+ find('.block.assignee .edit-link').click
+
+ wait_for_requests
+
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_link('Invite Members', href: project_project_members_path(project))
+ expect(page).to have_selector('[data-track-event="click_invite_members"]')
+ expect(page).to have_selector('[data-track-label="edit_assignee"]')
+ end
+
+ click_link 'Invite Members'
+
+ expect(current_path).to eq project_project_members_path(project)
+ end
+ end
+
+ context 'when invite_members_version_b experiment is enabled' do
+ before do
+ stub_experiment_for_user(invite_members_version_b: true)
+ end
+
+ it 'shows a link for inviting members and follows through to modal' do
+ project.add_developer(user)
+ visit issuable_path
+
+ find('.block.assignee .edit-link').click
+
+ wait_for_requests
+
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_link('Invite Members', href: '#')
+ expect(page).to have_selector('[data-track-event="click_invite_members_version_b"]')
+ expect(page).to have_selector('[data-track-label="edit_assignee"]')
+ end
+
+ click_link 'Invite Members'
+
+ expect(page).to have_content("Oops, this feature isn't ready yet")
+ end
+ end
+
+ context 'when no invite members experiments are enabled' do
+ it 'shows author in assignee dropdown and no invite link' do
+ project.add_maintainer(user)
+ visit issuable_path
+
+ find('.block.assignee .edit-link').click
+
+ wait_for_requests
+
+ page.within '.dropdown-menu-user' do
+ expect(page).not_to have_link('Invite Members')
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
index 19a5750cf6d..9d023d9514a 100644
--- a/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
+++ b/spec/support/shared_examples/features/multiple_assignees_mr_shared_examples.rb
@@ -38,7 +38,7 @@ RSpec.shared_examples 'multiple assignees merge request' do |action, save_button
page.within '.issuable-sidebar' do
page.within '.assignee' do
# Closing dropdown to persist
- click_link 'Edit'
+ click_link 'Apply'
expect(page).to have_content user2.name
end
diff --git a/spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb
new file mode 100644
index 00000000000..48cde90bd9b
--- /dev/null
+++ b/spec/support/shared_examples/features/multiple_reviewers_mr_shared_examples.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'multiple reviewers merge request' do |action, save_button_title|
+ it "#{action} a MR with multiple reviewers", :js do
+ find('.js-reviewer-search').click
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ click_link user2.name
+ end
+
+ # Extra click needed in order to toggle the dropdown
+ find('.js-reviewer-search').click
+
+ expect(all('input[name="merge_request[reviewer_ids][]"]', visible: false).map(&:value))
+ .to match_array([user.id.to_s, user2.id.to_s])
+
+ page.within '.js-reviewer-search' do
+ expect(page).to have_content "#{user2.name} + 1 more"
+ end
+
+ click_button save_button_title
+
+ page.within '.issuable-sidebar' do
+ page.within '.reviewer' do
+ expect(page).to have_content '2 Reviewers'
+
+ click_link 'Edit'
+
+ expect(page).to have_content user.name
+ expect(page).to have_content user2.name
+ end
+ end
+
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+
+ page.within '.issuable-sidebar' do
+ page.within '.reviewer' do
+ # Closing dropdown to persist
+ click_link 'Edit'
+
+ expect(page).to have_content user2.name
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/navbar_shared_examples.rb b/spec/support/shared_examples/features/navbar_shared_examples.rb
index 91a4048fa7c..c768e95c45a 100644
--- a/spec/support/shared_examples/features/navbar_shared_examples.rb
+++ b/spec/support/shared_examples/features/navbar_shared_examples.rb
@@ -3,12 +3,14 @@
RSpec.shared_examples 'verified navigation bar' do
let(:expected_structure) do
structure.compact!
- structure.each { |s| s[:nav_sub_items].compact! }
+ structure.each { |s| s[:nav_sub_items]&.compact! }
structure
end
it 'renders correctly' do
current_structure = page.all('.sidebar-top-level-items > li', class: ['!hidden']).map do |item|
+ next if item.find_all('a').empty?
+
nav_item = item.find_all('a').first.text.gsub(/\s+\d+$/, '') # remove counts at the end
nav_sub_items = item.all('.sidebar-sub-level-items > li', class: ['!fly-out-top-item']).map do |list_item|
@@ -16,7 +18,7 @@ RSpec.shared_examples 'verified navigation bar' do
end
{ nav_item: nav_item, nav_sub_items: nav_sub_items }
- end
+ end.compact
expect(current_structure).to eq(expected_structure)
end
diff --git a/spec/support/shared_examples/features/packages_shared_examples.rb b/spec/support/shared_examples/features/packages_shared_examples.rb
index f201421e827..4d2e13aa5bc 100644
--- a/spec/support/shared_examples/features/packages_shared_examples.rb
+++ b/spec/support/shared_examples/features/packages_shared_examples.rb
@@ -84,11 +84,11 @@ RSpec.shared_examples 'shared package sorting' do
let(:packages) { [package_two, package_one] }
end
- it_behaves_like 'correctly sorted packages list', 'Created' do
+ it_behaves_like 'correctly sorted packages list', 'Published' do
let(:packages) { [package_two, package_one] }
end
- it_behaves_like 'correctly sorted packages list', 'Created', ascending: true do
+ it_behaves_like 'correctly sorted packages list', 'Published', ascending: true do
let(:packages) { [package_one, package_two] }
end
end
diff --git a/spec/support/shared_examples/features/page_description_shared_examples.rb b/spec/support/shared_examples/features/page_description_shared_examples.rb
new file mode 100644
index 00000000000..81653220b4c
--- /dev/null
+++ b/spec/support/shared_examples/features/page_description_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'page meta description' do |expected_description|
+ it 'renders the page with description, og:description, and twitter:description meta tags that contains a plain-text version of the markdown', :aggregate_failures do
+ %w(name='description' property='og:description' property='twitter:description').each do |selector|
+ expect(page).to have_selector("meta[#{selector}][content='#{expected_description}']", visible: false)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb b/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
new file mode 100644
index 00000000000..a2d2143271c
--- /dev/null
+++ b/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'when the deploy_keys_on_protected_branches FF is turned on' do
+ before do
+ stub_feature_flags(deploy_keys_on_protected_branches: true)
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ let(:dropdown_sections_minus_deploy_keys) { all_dropdown_sections - ['Deploy Keys'] }
+
+  context 'when deploy keys are enabled for this project' do
+ let!(:deploy_key_1) { create(:deploy_key, title: 'title 1', projects: [project]) }
+ let!(:deploy_key_2) { create(:deploy_key, title: 'title 2', projects: [project]) }
+
+ context 'when only one deploy key can push' do
+ before do
+ deploy_key_1.deploy_keys_projects.first.update!(can_push: true)
+ end
+
+ it "shows all dropdown sections in the 'Allowed to push' main dropdown, with only one deploy key" do
+ visit project_protected_branches_path(project)
+
+ find(".js-allowed-to-push").click
+ wait_for_requests
+
+ within('.qa-allowed-to-push-dropdown') do
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*all_dropdown_sections)
+ expect(page).to have_content('title 1')
+ expect(page).not_to have_content('title 2')
+ end
+ end
+
+ it "shows all sections but not deploy keys in the 'Allowed to merge' main dropdown" do
+ visit project_protected_branches_path(project)
+
+ find(".js-allowed-to-merge").click
+ wait_for_requests
+
+ within('.qa-allowed-to-merge-dropdown') do
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
+ end
+ end
+
+ it "shows all sections in the 'Allowed to push' update dropdown" do
+ create(:protected_branch, :no_one_can_push, project: project, name: 'master')
+
+ visit project_protected_branches_path(project)
+
+ within(".js-protected-branch-edit-form") do
+ find(".js-allowed-to-push").click
+ wait_for_requests
+
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*all_dropdown_sections)
+ end
+ end
+ end
+
+ context 'when no deploy key can push' do
+ it "just shows all sections but not deploy keys in the 'Allowed to push' dropdown" do
+ visit project_protected_branches_path(project)
+
+ find(".js-allowed-to-push").click
+ wait_for_requests
+
+ within('.qa-allowed-to-push-dropdown') do
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
+ end
+ end
+
+ it "just shows all sections but not deploy keys in the 'Allowed to push' update dropdown" do
+ create(:protected_branch, :no_one_can_push, project: project, name: 'master')
+
+ visit project_protected_branches_path(project)
+
+ within(".js-protected-branch-edit-form") do
+ find(".js-allowed-to-push").click
+ wait_for_requests
+
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/snippets_shared_examples.rb b/spec/support/shared_examples/features/snippets_shared_examples.rb
index 8d68b1e4c0a..bd1a67f3bb5 100644
--- a/spec/support/shared_examples/features/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/features/snippets_shared_examples.rb
@@ -84,7 +84,7 @@ RSpec.shared_examples 'show and render proper snippet blob' do
expect(page).not_to have_selector('.js-blob-viewer-switcher')
# shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ expect(page).to have_button('Copy file contents', disabled: false)
# shows a raw button
expect(page).to have_link('Open raw')
@@ -106,7 +106,6 @@ RSpec.shared_examples 'show and render proper snippet blob' do
it 'displays the blob using the rich viewer' do
aggregate_failures do
# hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
expect(page).to have_selector('.blob-viewer[data-type="rich"]')
# shows rendered Markdown
@@ -116,7 +115,7 @@ RSpec.shared_examples 'show and render proper snippet blob' do
expect(page).to have_selector('.js-blob-viewer-switcher')
# shows a disabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
+ expect(page).to have_button('Copy file contents', disabled: true)
# shows a raw button
expect(page).to have_link('Open raw')
@@ -128,7 +127,7 @@ RSpec.shared_examples 'show and render proper snippet blob' do
context 'switching to the simple viewer' do
before do
- find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
+ find_button('Display source').click
wait_for_requests
end
@@ -137,19 +136,18 @@ RSpec.shared_examples 'show and render proper snippet blob' do
aggregate_failures do
# hides the rich viewer
expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
# shows highlighted Markdown code
expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
# shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ expect(page).to have_button('Copy file contents', disabled: false)
end
end
context 'switching to the rich viewer again' do
before do
- find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
+ find_button('Display rendered file').click
wait_for_requests
end
@@ -157,11 +155,11 @@ RSpec.shared_examples 'show and render proper snippet blob' do
it 'displays the blob using the rich viewer' do
aggregate_failures do
# hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
expect(page).to have_selector('.blob-viewer[data-type="rich"]')
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ # Used to show an enabled copy button since the code has already been fetched
+ # Will be resolved in https://gitlab.com/gitlab-org/gitlab/-/issues/262389
+ expect(page).to have_button('Copy file contents', disabled: true)
end
end
end
@@ -169,7 +167,8 @@ RSpec.shared_examples 'show and render proper snippet blob' do
end
context 'visiting with a line number anchor' do
- let(:anchor) { 'L1' }
+ # L1 used to work and will be revisited in https://gitlab.com/gitlab-org/gitlab/-/issues/262391
+ let(:anchor) { 'LC1' }
it 'displays the blob using the simple viewer' do
subject
@@ -177,7 +176,6 @@ RSpec.shared_examples 'show and render proper snippet blob' do
aggregate_failures do
# hides the rich viewer
expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
# highlights the line in question
expect(page).to have_selector('#LC1.hll')
@@ -186,7 +184,7 @@ RSpec.shared_examples 'show and render proper snippet blob' do
expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
# shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ expect(page).to have_button('Copy file contents', disabled: false)
end
end
end
diff --git a/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb b/spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb
index d30e8241da0..0ef1ccdfe57 100644
--- a/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb
@@ -1,14 +1,12 @@
# frozen_string_literal: true
# Requires a context containing:
-# project
+# wiki
RSpec.shared_examples 'wiki file attachments' do
include DropzoneHelper
context 'uploading attachments', :js do
- let(:wiki) { project.wiki }
-
def attach_with_dropzone(wait = false)
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, wait)
end
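These examples now take the wiki from the surrounding context instead of deriving it from `project`; a hedged sketch of an including spec:

# Hypothetical including spec for a project wiki.
it_behaves_like 'wiki file attachments' do
  let(:project) { create(:project) }
  let(:wiki) { project.wiki }
end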
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
new file mode 100644
index 00000000000..44d82d2e753
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -0,0 +1,245 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User creates wiki page' do
+ include WikiHelpers
+
+ before do
+ sign_in(user)
+ end
+
+ context "when wiki is empty" do
+ before do |example|
+ visit wiki_path(wiki)
+
+ wait_for_svg_to_be_loaded(example)
+
+ click_link "Create your first page"
+ end
+
+ it "shows validation error message" do
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "")
+
+ click_on("Create page")
+ end
+
+ expect(page).to have_content("The form contains the following error:").and have_content("Content can't be blank")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "[link test](test)")
+
+ click_on("Create page")
+ end
+
+ expect(page).to have_content("Home").and have_content("link test")
+
+ click_link("link test")
+
+ expect(page).to have_content("Create New Page")
+ end
+
+ it "shows non-escaped link in the pages list" do
+ fill_in(:wiki_title, with: "one/two/three-test")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "wiki content")
+
+ click_on("Create page")
+ end
+
+ expect(current_path).to include("one/two/three-test")
+ expect(page).to have_link(href: wiki_page_path(wiki, 'one/two/three-test'))
+ end
+
+ it "has `Create home` as a commit message", :js do
+ wait_for_requests
+
+ expect(page).to have_field("wiki[message]", with: "Create home")
+ end
+
+ it "creates a page from the home page" do
+ fill_in(:wiki_content, with: "[test](test)\n[GitLab API doc](api)\n[Rake tasks](raketasks)\n# Wiki header\n")
+ fill_in(:wiki_message, with: "Adding links to wiki")
+
+ page.within(".wiki-form") do
+ click_button("Create page")
+ end
+
+ expect(current_path).to eq(wiki_page_path(wiki, "home"))
+ expect(page).to have_content("test GitLab API doc Rake tasks Wiki header")
+ .and have_content("Home")
+ .and have_content("Last edited by #{user.name}")
+ .and have_header_with_correct_id_and_link(1, "Wiki header", "wiki-header")
+
+ click_link("test")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "test"))
+
+ page.within(:css, ".nav-text") do
+ expect(page).to have_content("Create New Page")
+ end
+
+ click_link("Home")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "home"))
+
+ click_link("GitLab API")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "api"))
+
+ page.within(:css, ".nav-text") do
+ expect(page).to have_content("Create")
+ end
+
+ click_link("Home")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "home"))
+
+ click_link("Rake tasks")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "raketasks"))
+
+ page.within(:css, ".nav-text") do
+ expect(page).to have_content("Create")
+ end
+ end
+
+ it "creates ASCII wiki with LaTeX blocks", :js do
+ stub_application_setting(plantuml_url: "http://localhost", plantuml_enabled: true)
+
+ ascii_content = <<~MD
+ :stem: latexmath
+
+ [stem]
+ ++++
+ \\sqrt{4} = 2
+ ++++
+
+ another part
+
+ [latexmath]
+ ++++
+ \\beta_x \\gamma
+ ++++
+
+ stem:[2+2] is 4
+ MD
+
+ find("#wiki_format option[value=asciidoc]").select_option
+
+ fill_in(:wiki_content, with: ascii_content)
+
+ page.within(".wiki-form") do
+ click_button("Create page")
+ end
+
+ page.within ".md" do
+ expect(page).to have_selector(".katex", count: 3).and have_content("2+2 is 4")
+ end
+ end
+
+ it 'creates a wiki page with Org markup', :aggregate_failures do
+ org_content = <<~ORG
+ * Heading
+ ** Subheading
+ [[home][Link to Home]]
+ ORG
+
+ page.within('.wiki-form') do
+ find('#wiki_format option[value=org]').select_option
+ fill_in(:wiki_content, with: org_content)
+ click_button('Create page')
+ end
+
+ expect(page).to have_selector('h1', text: 'Heading')
+ expect(page).to have_selector('h2', text: 'Subheading')
+ expect(page).to have_link(href: wiki_page_path(wiki, 'home'))
+ end
+
+ it_behaves_like 'wiki file attachments'
+ end
+
+ context "when wiki is not empty", :js do
+ before do
+ create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page')
+
+ visit wiki_path(wiki)
+ end
+
+ context "via the `new wiki page` page" do
+ it "creates a page with a single word" do
+ click_link("New page")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_title, with: "foo")
+ fill_in(:wiki_content, with: "My awesome wiki!")
+ end
+
+ # Commit message field should have correct value.
+ expect(page).to have_field("wiki[message]", with: "Create foo")
+
+ click_button("Create page")
+
+ expect(page).to have_content("foo")
+ .and have_content("Last edited by #{user.name}")
+ .and have_content("My awesome wiki!")
+ end
+
+ it "creates a page with spaces in the name" do
+ click_link("New page")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_title, with: "Spaces in the name")
+ fill_in(:wiki_content, with: "My awesome wiki!")
+ end
+
+ # Commit message field should have correct value.
+ expect(page).to have_field("wiki[message]", with: "Create Spaces in the name")
+
+ click_button("Create page")
+
+ expect(page).to have_content("Spaces in the name")
+ .and have_content("Last edited by #{user.name}")
+ .and have_content("My awesome wiki!")
+ end
+
+ it "creates a page with hyphens in the name" do
+ click_link("New page")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_title, with: "hyphens-in-the-name")
+ fill_in(:wiki_content, with: "My awesome wiki!")
+ end
+
+ # Commit message field should have correct value.
+ expect(page).to have_field("wiki[message]", with: "Create hyphens in the name")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "My awesome wiki!")
+
+ click_button("Create page")
+ end
+
+ expect(page).to have_content("hyphens in the name")
+ .and have_content("Last edited by #{user.name}")
+ .and have_content("My awesome wiki!")
+ end
+ end
+
+ it "shows the emoji autocompletion dropdown" do
+ click_link("New page")
+
+ page.within(".wiki-form") do
+ find("#wiki_content").native.send_keys("")
+
+ fill_in(:wiki_content, with: ":")
+ end
+
+ expect(page).to have_selector(".atwho-view")
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
new file mode 100644
index 00000000000..e1fd9c8dbec
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User deletes wiki page' do
+ include WikiHelpers
+
+ let(:wiki_page) { create(:wiki_page, wiki: wiki) }
+
+ before do
+ sign_in(user)
+ visit wiki_page_path(wiki, wiki_page)
+ end
+
+ it 'deletes a page', :js do
+ click_on('Edit')
+ click_on('Delete')
+ find('.modal-footer .btn-danger').click
+
+ expect(page).to have_content('Page was successfully deleted')
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
new file mode 100644
index 00000000000..a22d98f20c4
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User previews wiki changes' do
+ let(:wiki_page) { build(:wiki_page, wiki: wiki) }
+
+ before do
+ sign_in(user)
+ end
+
+ shared_examples 'relative links' do
+ let_it_be(:page_content) do
+ <<~HEREDOC
+ Some text so key event for [ does not trigger an incorrect replacement.
+ [regular link](regular)
+ [relative link 1](../relative)
+ [relative link 2](./relative)
+ [relative link 3](./e/f/relative)
+ [spaced link](title with spaces)
+ HEREDOC
+ end
+
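+ # For illustration: with a page titled 'a/b/c/d', relative_path('../relative')
+ # should resolve to "#{wiki.wiki_base_path}/a/b/relative" (spaces in directory
+ # names are replaced with hyphens).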
+ def relative_path(path)
+ (Pathname.new(wiki.wiki_base_path) + File.dirname(wiki_page.path).tr(' ', '-') + path).to_s
+ end
+
+ shared_examples "rewrites relative links" do
+ specify do
+ expect(element).to have_link('regular link', href: wiki.wiki_base_path + '/regular')
+ expect(element).to have_link('spaced link', href: wiki.wiki_base_path + '/title%20with%20spaces')
+
+ expect(element).to have_link('relative link 1', href: relative_path('../relative'))
+ expect(element).to have_link('relative link 2', href: relative_path('./relative'))
+ expect(element).to have_link('relative link 3', href: relative_path('./e/f/relative'))
+ end
+ end
+
+ context "when there are no spaces or hyphens in the page name" do
+ let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a/b/c/d', content: page_content) }
+
+ it_behaves_like 'rewrites relative links'
+ end
+
+ context "when there are spaces in the page name" do
+ let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a page/b page/c page/d page', content: page_content) }
+
+ it_behaves_like 'rewrites relative links'
+ end
+
+ context "when there are hyphens in the page name" do
+ let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a-page/b-page/c-page/d-page', content: page_content) }
+
+ it_behaves_like 'rewrites relative links'
+ end
+ end
+
+ context "when rendering a new wiki page", :js do
+ before do
+ wiki_page.create # rubocop:disable Rails/SaveBang
+ visit wiki_page_path(wiki, wiki_page)
+ end
+
+ it_behaves_like 'relative links' do
+ let(:element) { page.find('[data-testid="wiki_page_content"]') }
+ end
+ end
+
+ context "when previewing an existing wiki page", :js do
+ let(:preview) { page.find('.md-preview-holder') }
+
+ before do
+ wiki_page.create # rubocop:disable Rails/SaveBang
+ visit wiki_page_path(wiki, wiki_page, action: :edit)
+ end
+
+ it_behaves_like 'relative links' do
+ before do
+ click_on 'Preview'
+ end
+
+ let(:element) { preview }
+ end
+
+ it 'renders content with CommonMark' do
+ fill_in :wiki_content, with: "1. one\n - sublist\n"
+ click_on "Preview"
+
+ # the above generates two separate lists (not embedded) in CommonMark
+ expect(preview).to have_content("sublist")
+ expect(preview).not_to have_xpath("//ol//li//ul")
+ end
+
+ it "does not linkify double brackets inside code blocks as expected" do
+ fill_in :wiki_content, with: <<-HEREDOC
+ `[[do_not_linkify]]`
+ ```
+ [[also_do_not_linkify]]
+ ```
+ HEREDOC
+ click_on "Preview"
+
+ expect(preview).to have_content("do_not_linkify")
+ expect(preview).to have_content('[[do_not_linkify]]')
+ expect(preview).to have_content('[[also_do_not_linkify]]')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
new file mode 100644
index 00000000000..1a5f8d7d8df
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -0,0 +1,231 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User updates wiki page' do
+ include WikiHelpers
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when wiki is empty' do
+ before do |example|
+ visit(wiki_path(wiki))
+
+ wait_for_svg_to_be_loaded(example)
+
+ click_link "Create your first page"
+ end
+
+ it 'redirects back to the wiki home page when cancelled' do
+ page.within(:css, '.wiki-form .form-actions') do
+ click_on('Cancel')
+ end
+
+ expect(current_path).to eq wiki_path(wiki)
+ end
+
+ it 'updates a page that has a path', :js do
+ fill_in(:wiki_title, with: 'one/two/three-test')
+
+ page.within '.wiki-form' do
+ fill_in(:wiki_content, with: 'wiki content')
+ click_on('Create page')
+ end
+
+ expect(current_path).to include('one/two/three-test')
+ expect(find('.wiki-pages')).to have_content('three')
+
+ first(:link, text: 'three').click
+
+ expect(find('[data-testid="wiki_page_title"]')).to have_content('three')
+
+ click_on('Edit')
+
+ expect(current_path).to include('one/two/three-test')
+ expect(page).to have_content('Edit Page')
+
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
+
+ expect(page).to have_content('Updated Wiki Content')
+ end
+
+ it_behaves_like 'wiki file attachments'
+ end
+
+ context 'when wiki is not empty' do
+ let!(:wiki_page) { create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page') }
+
+ before do
+ visit(wiki_path(wiki))
+
+ click_link('Edit')
+ end
+
+ it 'updates a page', :js do
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Update home')
+
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Save changes')
+
+ expect(page).to have_content('Home')
+ expect(page).to have_content("Last edited by #{user.name}")
+ expect(page).to have_content('My awesome wiki!')
+ end
+
+ it 'updates the commit message as the title is changed', :js do
+ fill_in(:wiki_title, with: '& < > \ \ { } &')
+
+ expect(page).to have_field('wiki[message]', with: 'Update & < > \ \ { } &')
+ end
+
+ it 'correctly escapes the commit message entities', :js do
+ fill_in(:wiki_title, with: 'Wiki title')
+
+ expect(page).to have_field('wiki[message]', with: 'Update Wiki title')
+ end
+
+ it 'shows a validation error message' do
+ fill_in(:wiki_content, with: '')
+ click_button('Save changes')
+
+ expect(page).to have_selector('.wiki-form')
+ expect(page).to have_content('Edit Page')
+ expect(page).to have_content('The form contains the following error:')
+ expect(page).to have_content("Content can't be blank")
+ expect(find('textarea#wiki_content').value).to eq('')
+ end
+
+ it 'shows the emoji autocompletion dropdown', :js do
+ find('#wiki_content').native.send_keys('')
+ fill_in(:wiki_content, with: ':')
+
+ expect(page).to have_selector('.atwho-view')
+ end
+
+ it 'shows a conflict error when the page was edited by someone else' do
+ wiki_page.update(content: 'Update') # rubocop:disable Rails/SaveBang
+
+ click_button('Save changes')
+
+ expect(page).to have_content('Someone edited the page the same time you did.')
+ end
+
+ it 'updates a page' do
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
+
+ expect(page).to have_content('Updated Wiki Content')
+ end
+
+ it 'cancels editing of a page' do
+ page.within(:css, '.wiki-form .form-actions') do
+ click_on('Cancel')
+ end
+
+ expect(current_path).to eq(wiki_page_path(wiki, wiki_page))
+ end
+
+ it_behaves_like 'wiki file attachments'
+ end
+
+ context 'when the page is in a subdir' do
+ let(:page_name) { 'page_name' }
+ let(:page_dir) { "foo/bar/#{page_name}" }
+ let!(:wiki_page) { create(:wiki_page, wiki: wiki, title: page_dir, content: 'Home page') }
+
+ before do
+ visit wiki_page_path(wiki, wiki_page, action: :edit)
+ end
+
+ it 'moves the page to the root folder' do
+ fill_in(:wiki_title, with: "/#{page_name}")
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, page_name))
+ end
+
+ it 'moves the page to other dir' do
+ new_page_dir = "foo1/bar1/#{page_name}"
+
+ fill_in(:wiki_title, with: new_page_dir)
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, new_page_dir))
+ end
+
+ it 'remains in the same place if the title has not changed' do
+ original_path = wiki_page_path(wiki, wiki_page)
+
+ fill_in(:wiki_title, with: page_name)
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(original_path)
+ end
+
+ it 'can be moved to a different dir with a different name' do
+ new_page_dir = "foo1/bar1/new_page_name"
+
+ fill_in(:wiki_title, with: new_page_dir)
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, new_page_dir))
+ end
+
+ it 'can be renamed and moved to the root folder' do
+ new_name = 'new_page_name'
+
+ fill_in(:wiki_title, with: "/#{new_name}")
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, new_name))
+ end
+
+ it 'squishes the title before saving the page' do
+ new_page_dir = " foo1 / bar1 / #{page_name} "
+
+ fill_in(:wiki_title, with: new_page_dir)
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, "foo1/bar1/#{page_name}"))
+ end
+
+ it_behaves_like 'wiki file attachments'
+ end
+
+ context 'when an existing page exceeds the content size limit' do
+ let!(:wiki_page) { create(:wiki_page, wiki: wiki, content: "one\ntwo\nthree") }
+
+ before do
+ stub_application_setting(wiki_page_max_content_bytes: 10)
+
+ visit wiki_page_path(wiki_page.wiki, wiki_page, action: :edit)
+ end
+
+ it 'allows changing the title if the content does not change' do
+ fill_in 'Title', with: 'new title'
+ click_on 'Save changes'
+
+ expect(page).to have_content('Wiki was successfully updated.')
+ end
+
+ it 'shows a validation error when trying to change the content' do
+ fill_in 'Content', with: 'new content'
+ click_on 'Save changes'
+
+ expect(page).to have_content('The form contains the following error:')
+ expect(page).to have_content('Content is too long (11 Bytes). The maximum size is 10 Bytes.')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb
new file mode 100644
index 00000000000..0330b345a18
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User uses wiki shortcuts' do
+ let(:wiki_page) { create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page') }
+
+ before do
+ sign_in(user)
+ visit wiki_page_path(wiki, wiki_page)
+ end
+
+ it 'visits the edit wiki page using the "e" keyboard shortcut', :js do
+ find('body').native.send_key('e')
+
+ expect(find('.wiki-page-title')).to have_content('Edit Page')
+ end
+end
diff --git a/spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb b/spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb
index 5c45e34595f..3b2fda4e05b 100644
--- a/spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb
@@ -1,11 +1,7 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe 'User views AsciiDoc page with includes', :js do
- let_it_be(:user) { create(:user) }
+RSpec.shared_examples 'User views AsciiDoc page with includes' do
let_it_be(:wiki_content_selector) { '[data-qa-selector=wiki_page_content]' }
- let(:project) { create(:project, :public, :wiki_repo) }
let!(:included_wiki_page) { create_wiki_page('included_page', content: 'Content from the included page')}
let!(:wiki_page) { create_wiki_page('home', content: "Content from the main page.\ninclude::included_page.asciidoc[]") }
@@ -16,16 +12,16 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
format: :asciidoc
}
- create(:wiki_page, wiki: project.wiki, **attrs)
+ create(:wiki_page, wiki: wiki, **attrs)
end
before do
sign_in(user)
end
- context 'when the file being included exists' do
+ context 'when the file being included exists', :js do
it 'includes the file contents' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
page.within(:css, wiki_content_selector) do
expect(page).to have_content('Content from the main page. Content from the included page')
@@ -34,8 +30,10 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
context 'when there are multiple versions of the wiki pages' do
before do
+ # rubocop:disable Rails/SaveBang
included_wiki_page.update(message: 'updated included file', content: 'Updated content from the included page')
wiki_page.update(message: 'updated wiki page', content: "Updated content from the main page.\ninclude::included_page.asciidoc[]")
+ # rubocop:enable Rails/SaveBang
end
let(:latest_version_id) { wiki_page.versions.first.id }
@@ -43,7 +41,7 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
context 'viewing the latest version' do
it 'includes the latest content' do
- visit(project_wiki_path(project, wiki_page, version_id: latest_version_id))
+ visit(wiki_page_path(wiki, wiki_page, version_id: latest_version_id))
page.within(:css, wiki_content_selector) do
expect(page).to have_content('Updated content from the main page. Updated content from the included page')
@@ -53,7 +51,7 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
context 'viewing the original version' do
it 'includes the content from the original version' do
- visit(project_wiki_path(project, wiki_page, version_id: oldest_version_id))
+ visit(wiki_page_path(wiki, wiki_page, version_id: oldest_version_id))
page.within(:css, wiki_content_selector) do
expect(page).to have_content('Content from the main page. Content from the included page')
@@ -63,13 +61,13 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
end
end
- context 'when the file being included does not exist' do
+ context 'when the file being included does not exist', :js do
before do
included_wiki_page.delete
end
it 'outputs an error' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
page.within(:css, wiki_content_selector) do
expect(page).to have_content('Content from the main page. [ERROR: include::included_page.asciidoc[] - unresolved directive]')
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
new file mode 100644
index 00000000000..d7f5b485a82
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
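+# project (only needed when including the `issuable: true` examples, which link to the issue tracker)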
+
+RSpec.shared_examples 'User views empty wiki' do
+ let(:element) { page.find('.row.empty-state') }
+ let(:container_name) { wiki.container.class.name.humanize(capitalize: false) }
+ let(:confluence_link) { 'Enable the Confluence Wiki integration' }
+
+ shared_examples 'wiki is not found' do
+ it 'shows an error message' do
+ visit wiki_path(wiki)
+
+ if @current_user
+ expect(page).to have_content('Page Not Found')
+ else
+ expect(page).to have_content('You need to sign in')
+ end
+ end
+ end
+
+ shared_examples 'empty wiki message' do |writable: false, issuable: false, confluence: false|
+ # This mirrors the logic in:
+ # - app/views/shared/empty_states/_wikis.html.haml
+ # - WikiHelper#wiki_empty_state_messages
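+ # For example, an including spec might use: include_examples 'empty wiki message', writable: true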
+ it 'shows the empty state message with the expected elements' do
+ visit wiki_path(wiki)
+
+ if writable
+ expect(element).to have_content("The wiki lets you write documentation for your #{container_name}")
+ else
+ expect(element).to have_content("This #{container_name} has no wiki pages")
+ expect(element).to have_content("You must be a #{container_name} member")
+ end
+
+ if issuable && !writable
+ expect(element).to have_content("improve the wiki for this #{container_name}")
+ expect(element).to have_link("issue tracker", href: project_issues_path(project))
+ expect(element).to have_link("Suggest wiki improvement", href: new_project_issue_path(project))
+ else
+ expect(element).not_to have_content("improve the wiki for this #{container_name}")
+ expect(element).not_to have_link("issue tracker")
+ expect(element).not_to have_link("Suggest wiki improvement")
+ end
+
+ if confluence
+ expect(element).to have_link(confluence_link)
+ else
+ expect(element).not_to have_link(confluence_link)
+ end
+
+ if writable
+ element.click_link 'Create your first page'
+
+ expect(page).to have_button('Create page')
+ else
+ expect(element).not_to have_link('Create your first page')
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
index e93689af0aa..85eedbf4cc5 100644
--- a/spec/features/projects/wiki/user_views_wiki_page_spec.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
@@ -1,14 +1,13 @@
# frozen_string_literal: true
-require 'spec_helper'
+# Requires a context containing:
+# wiki
+# user
-RSpec.describe 'User views a wiki page' do
+RSpec.shared_examples 'User views a wiki page' do
include WikiHelpers
- let(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
let(:path) { 'image.png' }
- let(:wiki) { project.wiki }
let(:wiki_page) do
create(:wiki_page,
wiki: wiki,
@@ -16,13 +15,12 @@ RSpec.describe 'User views a wiki page' do
end
before do
- project.add_maintainer(user)
sign_in(user)
end
context 'when wiki is empty', :js do
before do
- visit project_wikis_path(project)
+ visit wiki_path(wiki)
wait_for_svg_to_be_loaded
@@ -57,7 +55,7 @@ RSpec.describe 'User views a wiki page' do
first(:link, text: 'three').click
- expect(find('.nav-text')).to have_content('three')
+ expect(find('[data-testid="wiki_page_title"]')).to have_content('three')
click_on('Edit')
@@ -83,7 +81,7 @@ RSpec.describe 'User views a wiki page' do
context 'when a page does not have history' do
before do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
end
it 'shows all the pages' do
@@ -92,7 +90,7 @@ RSpec.describe 'User views a wiki page' do
end
context 'shows a file stored in a page' do
- let(:path) { upload_file_to_wiki(project, user, 'dk.png') }
+ let(:path) { upload_file_to_wiki(wiki, user, 'dk.png') }
it do
expect(page).to have_xpath("//img[@data-src='#{wiki.wiki_base_path}/#{path}']")
@@ -121,9 +119,9 @@ RSpec.describe 'User views a wiki page' do
end
it 'shows the page history' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
- expect(page).to have_selector('a.btn', text: 'Edit')
+ expect(page).to have_selector('[data-testid="wiki_edit_button"]')
click_on('Page history')
@@ -133,9 +131,9 @@ RSpec.describe 'User views a wiki page' do
end
it 'does not show the "Edit" button' do
- visit(project_wiki_path(project, wiki_page, version_id: wiki_page.versions.last.id))
+ visit(wiki_page_path(wiki, wiki_page, version_id: wiki_page.versions.last.id))
- expect(page).not_to have_selector('a.btn', text: 'Edit')
+ expect(page).not_to have_selector('[data-testid="wiki_edit_button"]')
end
context 'show the diff' do
@@ -150,7 +148,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'links to the correct diffs' do
- visit project_wiki_history_path(project, wiki_page)
+ visit wiki_page_path(wiki, wiki_page, action: :history)
commit1 = wiki.commit('HEAD^')
commit2 = wiki.commit
@@ -208,9 +206,9 @@ RSpec.describe 'User views a wiki page' do
end
it 'preserves the special characters' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
- expect(page).to have_css('.wiki-page-title', text: title)
+ expect(page).to have_css('[data-testid="wiki_page_title"]', text: title)
expect(page).to have_css('.wiki-pages li', text: title)
end
end
@@ -223,9 +221,9 @@ RSpec.describe 'User views a wiki page' do
end
it 'safely displays the page' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
- expect(page).to have_css('.wiki-page-title', text: title)
+ expect(page).to have_selector('[data-testid="wiki_page_title"]', text: title)
expect(page).to have_content('foo bar')
end
end
@@ -236,7 +234,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'safely displays the message' do
- visit(project_wiki_history_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page, action: :history))
expect(page).to have_content('<script>alert(true)<script>')
end
@@ -248,11 +246,11 @@ RSpec.describe 'User views a wiki page' do
before do
allow(Gitlab::EncodingHelper).to receive(:encode!).and_return(content)
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
end
it 'does not show "Edit" button' do
- expect(page).not_to have_selector('a.btn', text: 'Edit')
+ expect(page).not_to have_selector('[data-testid="wiki_edit_button"]')
end
it 'shows error' do
@@ -263,7 +261,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'opens a default wiki page', :js do
- visit project_path(project)
+ visit wiki.container.web_url
find('.shortcuts-wiki').click
diff --git a/spec/features/projects/wiki/user_views_wiki_pages_spec.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
index 4f29ae0cc8a..314c2074eee 100644
--- a/spec/features/projects/wiki/user_views_wiki_pages_spec.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
@@ -1,23 +1,22 @@
# frozen_string_literal: true
-require 'spec_helper'
+# Requires a context containing:
+# wiki
+# user
-RSpec.describe 'User views wiki pages' do
+RSpec.shared_examples 'User views wiki pages' do
include WikiHelpers
- let(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
-
let!(:wiki_page1) do
- create(:wiki_page, wiki: project.wiki, title: '3 home', content: '3')
+ create(:wiki_page, wiki: wiki, title: '3 home', content: '3')
end
let!(:wiki_page2) do
- create(:wiki_page, wiki: project.wiki, title: '1 home', content: '1')
+ create(:wiki_page, wiki: wiki, title: '1 home', content: '1')
end
let!(:wiki_page3) do
- create(:wiki_page, wiki: project.wiki, title: '2 home', content: '2')
+ create(:wiki_page, wiki: wiki, title: '2 home', content: '2')
end
let(:pages) do
@@ -25,9 +24,8 @@ RSpec.describe 'User views wiki pages' do
end
before do
- project.add_maintainer(user)
sign_in(user)
- visit(project_wikis_pages_path(project))
+ visit(wiki_path(wiki, action: :pages))
end
context 'ordered by title' do
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb
new file mode 100644
index 00000000000..a7ba7a8ad07
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User views wiki sidebar' do
+ include WikiHelpers
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when there are some existing pages' do
+ before do
+ create(:wiki_page, wiki: wiki, title: 'home', content: 'home')
+ create(:wiki_page, wiki: wiki, title: 'another', content: 'another')
+ end
+
+ it 'renders a default sidebar when there is no customized sidebar' do
+ visit wiki_path(wiki)
+
+ expect(page).to have_content('another')
+ expect(page).not_to have_link('View All Pages')
+ end
+
+ context 'when there is a customized sidebar' do
+ before do
+ create(:wiki_page, wiki: wiki, title: '_sidebar', content: 'My customized sidebar')
+ end
+
+ it 'renders my customized sidebar instead of the default one' do
+ visit wiki_path(wiki)
+
+ expect(page).to have_content('My customized sidebar')
+ expect(page).not_to have_content('Another')
+ end
+ end
+ end
+
+ context 'when there are 15 existing pages' do
+ before do
+ (1..5).each { |i| create(:wiki_page, wiki: wiki, title: "my page #{i}") }
+ (6..10).each { |i| create(:wiki_page, wiki: wiki, title: "parent/my page #{i}") }
+ (11..15).each { |i| create(:wiki_page, wiki: wiki, title: "grandparent/parent/my page #{i}") }
+ end
+
+ it 'shows all pages in the sidebar' do
+ visit wiki_path(wiki)
+
+ (1..15).each { |i| expect(page).to have_content("my page #{i}") }
+ expect(page).not_to have_link('View All Pages')
+ end
+
+ context 'when there are more than 15 existing pages' do
+ before do
+ create(:wiki_page, wiki: wiki, title: 'my page 16')
+ end
+
+ it 'shows the first 15 pages in the sidebar' do
+ visit wiki_path(wiki)
+
+ expect(page).to have_text('my page', count: 15)
+ expect(page).to have_link('View All Pages')
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/mutations/boards_create_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/boards_create_shared_examples.rb
new file mode 100644
index 00000000000..ec64519cd9c
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/boards_create_shared_examples.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
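+# Requires a context containing:
+#   parent (the group or project the board belongs to)
+#   params (the mutation arguments; a hash including `name` plus `group_path` or `project_path`)
+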
+RSpec.shared_examples 'boards create mutation' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user, reload: true) { create(:user) }
+ let(:name) { 'board name' }
+ let(:mutation) { graphql_mutation(:create_board, params) }
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ def mutation_response
+ graphql_mutation_response(:create_board)
+ end
+
+ context 'when the user does not have permission' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not create the board' do
+ expect { subject }.not_to change { Board.count }
+ end
+ end
+
+ context 'when the user has permission' do
+ before do
+ parent.add_maintainer(current_user)
+ end
+
+ context 'when the parent (project_path or group_path) param is given' do
+ context 'when everything is ok' do
+ it 'creates the board' do
+ expect { subject }.to change { Board.count }.from(0).to(1)
+ end
+
+ it 'returns the created board' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to have_key('board')
+ expect(mutation_response['board']['name']).to eq(name)
+ end
+ end
+
+ context 'when the Boards::CreateService returns an error response' do
+ before do
+ allow_next_instance_of(Boards::CreateService) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.error(message: 'There was an error.'))
+ end
+ end
+
+ it 'does not create a board' do
+ expect { subject }.not_to change { Board.count }
+ end
+
+ it 'returns an error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to have_key('board')
+ expect(mutation_response['board']).to be_nil
+ expect(mutation_response['errors'].first).to eq('There was an error.')
+ end
+ end
+ end
+
+ context 'when neither project_path nor group_path param is given' do
+ let(:params) { { name: name } }
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['group_path or project_path arguments are required']
+
+ it 'does not create the board' do
+ expect { subject }.not_to change { Board.count }
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/mutations/spammable_mutation_fields_examples.rb b/spec/support/shared_examples/graphql/mutations/spammable_mutation_fields_examples.rb
new file mode 100644
index 00000000000..54b3f84a6e6
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/spammable_mutation_fields_examples.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
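+# Requires a context containing:
+#   subject (the GraphQL mutation request)
+#   mutation_response
+#   service (the snippet service class expected to receive the spam parameters)
+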
+RSpec.shared_examples 'spam flag is present' do
+ specify :aggregate_failures do
+ subject
+
+ expect(mutation_response).to have_key('spam')
+ expect(mutation_response['spam']).to be_falsey
+ end
+end
+
+RSpec.shared_examples 'can raise spam flag' do
+ it 'passes spam parameters to the service' do
+ expect(service).to receive(:new).with(anything, anything, hash_including(api: true, request: instance_of(ActionDispatch::Request)))
+
+ subject
+ end
+
+ context 'when the snippet is detected as spam' do
+ it 'raises spam flag' do
+ allow_next_instance_of(service) do |instance|
+ allow(instance).to receive(:spam_check) do |snippet, user, _|
+ snippet.spam!
+ end
+ end
+
+ subject
+
+ expect(mutation_response['spam']).to be true
+ expect(mutation_response['errors']).to include("Your snippet has been recognized as spam and has been discarded.")
+ end
+ end
+
+ context 'when :snippet_spam flag is disabled' do
+ before do
+ stub_feature_flags(snippet_spam: false)
+ end
+
+ it 'does not pass the request parameter to the service' do
+ expect(service).to receive(:new).with(anything, anything, hash_not_including(request: instance_of(ActionDispatch::Request)))
+
+ subject
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb b/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb
index 522211340ea..24c8a247c93 100644
--- a/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb
@@ -52,11 +52,15 @@ RSpec.shared_examples 'a Note mutation when the given resource id is not for a N
it_behaves_like 'a Note mutation that does not create a Note'
- it_behaves_like 'a mutation that returns top-level errors', errors: ['Cannot add notes to this resource']
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/ does not represent an instance of Noteable/) }
+ end
end
RSpec.shared_examples 'a Note mutation when the given resource id is not for a Note' do
let(:note) { create(:issue) }
- it_behaves_like 'a mutation that returns top-level errors', errors: ['Resource is not a note']
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { include(/does not represent an instance of Note/) }
+ end
end
diff --git a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
index e93077c42e1..7707e79386c 100644
--- a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
@@ -1,12 +1,13 @@
# frozen_string_literal: true
-RSpec.shared_examples 'resource mentions migration' do |migration_class, resource_class|
+RSpec.shared_examples 'resource mentions migration' do |migration_class, resource_class_name|
it 'migrates resource mentions' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
+ resource_class = "#{Gitlab::BackgroundMigration::UserMentions::Models}::#{resource_class_name}".constantize
expect do
- subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
end.to change { user_mentions.count }.by(1)
user_mention = user_mentions.last
@@ -16,23 +17,23 @@ RSpec.shared_examples 'resource mentions migration' do |migration_class, resourc
# check that performing the same job twice does not fail and does not change counts
expect do
- subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
end.to change { user_mentions.count }.by(0)
end
end
-RSpec.shared_examples 'resource notes mentions migration' do |migration_class, resource_class|
+RSpec.shared_examples 'resource notes mentions migration' do |migration_class, resource_class_name|
it 'migrates mentions from note' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
# there are 5 notes for each noteable_type, but two do not have mentions and
# another one's noteable_id points to a nonexistent resource
- expect(notes.where(noteable_type: resource_class.to_s).count).to eq 5
+ expect(notes.where(noteable_type: resource_class_name).count).to eq 5
expect(user_mentions.count).to eq 0
expect do
- subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
end.to change { user_mentions.count }.by(2)
# check that the user_mention for regular note is created
@@ -51,7 +52,7 @@ RSpec.shared_examples 'resource notes mentions migration' do |migration_class, r
# check that performing the same job twice does not fail and does not change counts
expect do
- subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
end.to change { user_mentions.count }.by(0)
end
end
@@ -83,24 +84,25 @@ RSpec.shared_examples 'schedules resource mentions migration' do |resource_class
end
end
-RSpec.shared_examples 'resource migration not run' do |migration_class, resource_class|
+RSpec.shared_examples 'resource migration not run' do |migration_class, resource_class_name|
it 'does not migrate mentions' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
+ resource_class = "#{Gitlab::BackgroundMigration::UserMentions::Models}::#{resource_class_name}".constantize
expect do
- subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
end.to change { user_mentions.count }.by(0)
end
end
-RSpec.shared_examples 'resource notes migration not run' do |migration_class, resource_class|
+RSpec.shared_examples 'resource notes migration not run' do |migration_class, resource_class_name|
it 'does not migrate mentions' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
expect do
- subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
end.to change { user_mentions.count }.by(0)
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index db5e9461f3f..0df1af3b10a 100644
--- a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -227,7 +227,7 @@ RSpec.shared_examples 'common trace features' do
let(:token) { 'my_secret_token' }
before do
- build.project.update(runners_token: token)
+ build.project.update!(runners_token: token)
trace.append(token, 0)
end
@@ -240,7 +240,7 @@ RSpec.shared_examples 'common trace features' do
let(:token) { 'my_secret_token' }
before do
- build.update(token: token)
+ build.update!(token: token)
trace.append(token, 0)
end
@@ -531,7 +531,7 @@ RSpec.shared_examples 'trace with disabled live trace feature' do
context "when erase old trace with 'save'" do
before do
build.send(:write_attribute, :trace, nil)
- build.save
+ build.save # rubocop:disable Rails/SaveBang
end
it 'old trace is not deleted' do
diff --git a/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
index e43ce936b90..469c0c287b1 100644
--- a/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.shared_examples 'diff statistics' do |test_include_stats_flag: true|
- subject { described_class.new(diffable, collection_default_args) }
+ subject { described_class.new(diffable, **collection_default_args) }
def stub_stats_find_by_path(path, stats_mock)
expect_next_instance_of(Gitlab::Git::DiffStatsCollection) do |collection|
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb
new file mode 100644
index 00000000000..33061f17bde
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+# required context:
+# - importable: group or project
+# - relation_hash: a note relation that's being imported
+# - created_object: the object created with the relation factory
+RSpec.shared_examples 'Notes user references' do
+ let(:relation_sym) { :notes }
+ let(:mapped_user) { create(:user) }
+ let(:exported_member) do
+ {
+ 'id' => 111,
+ 'access_level' => 30,
+ 'source_id' => 1,
+ 'source_type' => importable.class.name == 'Project' ? 'Project' : 'Namespace',
+ 'user_id' => 3,
+ 'notification_level' => 3,
+ 'created_at' => '2016-11-18T09:29:42.634Z',
+ 'updated_at' => '2016-11-18T09:29:42.634Z',
+ 'user' => {
+ 'id' => 999,
+ 'email' => mapped_user.email,
+ 'username' => mapped_user.username
+ }
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member].compact,
+ user: importer_user,
+ importable: importable
+ )
+ end
+
+ shared_examples 'sets the note author to the importer user' do
+ it { expect(created_object.author).to eq(importer_user) }
+ end
+
+ shared_examples 'sets the note author to the mapped user' do
+ it { expect(created_object.author).to eq(mapped_user) }
+ end
+
+ shared_examples 'does not add original author note' do
+ it { expect(created_object.note).not_to include('*By Administrator') }
+ end
+
+ shared_examples 'adds original author note' do
+ it { expect(created_object.note).to include('*By Administrator') }
+ end
+
+ context 'when the importer is admin' do
+ let(:importer_user) { create(:admin) }
+
+ context 'and the note author is not mapped' do
+ let(:exported_member) { nil }
+
+ include_examples 'sets the note author to the importer user'
+
+ include_examples 'adds original author note'
+ end
+
+ context 'and the note author is the importer user' do
+ let(:mapped_user) { importer_user }
+
+ include_examples 'sets the note author to the mapped user'
+
+ include_examples 'does not add original author note'
+ end
+
+ context 'and the note author exists in the target instance' do
+ let(:mapped_user) { create(:user) }
+
+ include_examples 'sets the note author to the mapped user'
+
+ include_examples 'does not add original author note'
+ end
+ end
+
+ context 'when the importer is not admin' do
+ let(:importer_user) { create(:user) }
+
+ context 'and the note author is not mapped' do
+ let(:exported_member) { nil }
+
+ include_examples 'sets the note author to the importer user'
+
+ include_examples 'adds original author note'
+ end
+
+ context 'and the note author is the importer user' do
+ let(:mapped_user) { importer_user }
+
+ include_examples 'sets the note author to the importer user'
+
+ include_examples 'adds original author note'
+ end
+
+ context 'and the note author exists in the target instance' do
+ let(:mapped_user) { create(:user) }
+
+ include_examples 'sets the note author to the importer user'
+
+ include_examples 'adds original author note'
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/repository_size_checker_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/repository_size_checker_shared_examples.rb
new file mode 100644
index 00000000000..bb909ffe82a
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/repository_size_checker_shared_examples.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
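+# Requires a context containing:
+#   subject (the repository size checker under test)
+#   current_size (overridden by these examples; assumed to be in megabytes against a 50 MB limit)
+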
+RSpec.shared_examples 'checker size above limit' do
+ context 'when size is above the limit' do
+ let(:current_size) { 100 }
+
+ it 'returns true' do
+ expect(subject.above_size_limit?).to eq(true)
+ end
+ end
+end
+
+RSpec.shared_examples 'checker size not over limit' do
+ it 'returns false when not over the limit' do
+ expect(subject.above_size_limit?).to eq(false)
+ end
+end
+
+RSpec.shared_examples 'checker size exceeded' do
+ context 'when current size is below or equal to the limit' do
+ let(:current_size) { 50 }
+
+ it 'returns zero' do
+ expect(subject.exceeded_size).to eq(0)
+ end
+ end
+
+ context 'when current size is over the limit' do
+ let(:current_size) { 51 }
+
+ it 'returns the exceeded size' do
+ expect(subject.exceeded_size).to eq(1.megabytes)
+ end
+ end
+
+ context 'when change size will be over the limit' do
+ let(:current_size) { 50 }
+
+ it 'returns the exceeded size' do
+ expect(subject.exceeded_size(1.megabytes)).to eq(1.megabytes)
+ end
+ end
+
+ context 'when change size will not be over the limit' do
+ let(:current_size) { 49 }
+
+ it 'returns zero' do
+ expect(subject.exceeded_size(1.megabytes)).to eq(0)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/search/recent_items.rb b/spec/support/shared_examples/lib/gitlab/search/recent_items.rb
index f96ff4b101e..b3b33e434b9 100644
--- a/spec/support/shared_examples/lib/gitlab/search/recent_items.rb
+++ b/spec/support/shared_examples/lib/gitlab/search/recent_items.rb
@@ -1,12 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
-
RSpec.shared_examples 'search recent items' do
let_it_be(:user) { create(:user) }
- let_it_be(:recent_items) { described_class.new(user: user, items_limit: 5) }
- let(:item) { create_item(content: 'hello world 1', project: project) }
- let(:project) { create(:project, :public) }
+ let_it_be(:recent_items) { described_class.new(user: user) }
+ let(:item) { create_item(content: 'hello world 1', parent: parent) }
+ let(:parent) { create(parent_type, :public) }
describe '#log_view', :clean_gitlab_redis_shared_state do
it 'adds the item to the recent items' do
@@ -18,13 +17,15 @@ RSpec.shared_examples 'search recent items' do
end
it 'removes an item when it exceeds the size items_limit' do
- (1..6).each do |i|
- recent_items.log_view(create_item(content: "item #{i}", project: project))
+ recent_items = described_class.new(user: user, items_limit: 3)
+
+ 4.times do |i|
+ recent_items.log_view(create_item(content: "item #{i}", parent: parent))
end
results = recent_items.search('item')
- expect(results.map(&:title)).to contain_exactly('item 6', 'item 5', 'item 4', 'item 3', 'item 2')
+ expect(results.map(&:title)).to contain_exactly('item 3', 'item 2', 'item 1')
end
it 'expires the items after expires_after' do
@@ -39,7 +40,7 @@ RSpec.shared_examples 'search recent items' do
it 'does not include results logged for another user' do
another_user = create(:user)
- another_item = create_item(content: 'hello world 2', project: project)
+ another_item = create_item(content: 'hello world 2', parent: parent)
described_class.new(user: another_user).log_view(another_item)
recent_items.log_view(item)
@@ -50,11 +51,11 @@ RSpec.shared_examples 'search recent items' do
end
describe '#search', :clean_gitlab_redis_shared_state do
- let(:item1) { create_item(content: "matching item 1", project: project) }
- let(:item2) { create_item(content: "matching item 2", project: project) }
- let(:item3) { create_item(content: "matching item 3", project: project) }
- let(:non_matching_item) { create_item(content: "different item", project: project) }
- let!(:non_viewed_item) { create_item(content: "matching but not viewed item", project: project) }
+ let(:item1) { create_item(content: "matching item 1", parent: parent) }
+ let(:item2) { create_item(content: "matching item 2", parent: parent) }
+ let(:item3) { create_item(content: "matching item 3", parent: parent) }
+ let(:non_matching_item) { create_item(content: "different item", parent: parent) }
+ let!(:non_viewed_item) { create_item(content: "matching but not viewed item", parent: parent) }
before do
recent_items.log_view(item1)
@@ -74,14 +75,24 @@ RSpec.shared_examples 'search recent items' do
end
it 'does not leak items you no longer have access to' do
- private_project = create(:project, :public, namespace: create(:group))
- private_item = create_item(content: 'matching item title', project: private_project)
+ private_parent = create(parent_type, :public)
+ private_item = create_item(content: 'matching item title', parent: private_parent)
recent_items.log_view(private_item)
- private_project.update!(visibility_level: Project::PRIVATE)
+ private_parent.update!(visibility_level: ::Gitlab::VisibilityLevel::PRIVATE)
expect(recent_items.search('matching')).not_to include(private_item)
end
+
+ it "limits results to #{Gitlab::Search::RecentItems::SEARCH_LIMIT} items" do
+ (Gitlab::Search::RecentItems::SEARCH_LIMIT + 1).times do |i|
+ recent_items.log_view(create_item(content: "item #{i}", parent: parent))
+ end
+
+ results = recent_items.search('item')
+
+ expect(results.count).to eq(Gitlab::Search::RecentItems::SEARCH_LIMIT)
+ end
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/search_confidential_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_confidential_filter_shared_examples.rb
new file mode 100644
index 00000000000..d0bef2ad730
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/search_confidential_filter_shared_examples.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
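+# Requires a context containing:
+#   results (search results built using the `filters` defined below)
+#   confidential_result
+#   opened_result
+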
+RSpec.shared_examples 'search results filtered by confidential' do
+ context 'filter not provided (all behavior)' do
+ let(:filters) { {} }
+
+ context 'when Feature search_filter_by_confidential enabled' do
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+
+ context 'when Feature search_filter_by_confidential not enabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+ end
+
+ context 'confidential filter' do
+ let(:filters) { { confidential: true } }
+
+ context 'when Feature search_filter_by_confidential enabled' do
+ it 'returns only confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).not_to include opened_result
+ end
+ end
+
+ context 'when Feature search_filter_by_confidential not enabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+ end
+
+ context 'not confidential filter' do
+ let(:filters) { { confidential: false } }
+
+ context 'when Feature search_filter_by_confidential enabled' do
+ it 'returns not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).not_to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+
+ context 'when Feature search_filter_by_confidential not enabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb
new file mode 100644
index 00000000000..765279a78fe
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
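+# Requires a context containing:
+#   results (search results built using the `sort` value defined below)
+#   scope
+#   new_result, old_result, very_old_result (records created newest to oldest)
+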
+RSpec.shared_examples 'search results sorted' do
+ context 'sort: newest' do
+ let(:sort) { 'newest' }
+
+ it 'sorts results by created_at' do
+ expect(results.objects(scope).map(&:id)).to eq([new_result.id, old_result.id, very_old_result.id])
+ end
+ end
+
+ context 'sort: oldest' do
+ let(:sort) { 'oldest' }
+
+ it 'sorts results by created_at' do
+ expect(results.objects(scope).map(&:id)).to eq([very_old_result.id, old_result.id, new_result.id])
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/search_issue_state_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_state_filter_shared_examples.rb
index e80ec516407..e80ec516407 100644
--- a/spec/support/shared_examples/lib/gitlab/search_issue_state_filter_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/search_state_filter_shared_examples.rb
diff --git a/spec/support/shared_examples/mailers/notify_shared_examples.rb b/spec/support/shared_examples/mailers/notify_shared_examples.rb
index 1f5803b90a0..7ce7b2161f6 100644
--- a/spec/support/shared_examples/mailers/notify_shared_examples.rb
+++ b/spec/support/shared_examples/mailers/notify_shared_examples.rb
@@ -267,3 +267,9 @@ RSpec.shared_examples 'appearance header and footer not enabled' do
end
end
end
+
+RSpec.shared_examples 'no email is sent' do
+ it 'does not send an email' do
+ expect(subject.message).to be_a_kind_of(ActionMailer::Base::NullMail)
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb b/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
index f37ef3533c3..826ee453919 100644
--- a/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/has_repository_shared_examples.rb
@@ -6,6 +6,14 @@ RSpec.shared_examples 'model with repository' do
let(:expected_full_path) { raise NotImplementedError }
let(:expected_web_url_path) { expected_full_path }
let(:expected_repo_url_path) { expected_full_path }
+ let(:expected_lfs_enabled) { false }
+
+ it 'container class includes HasRepository' do
+ # NOTE: This is not enforced at runtime, since we also need to support Geo::DeletedProject
+ expect(described_class).to include_module(HasRepository)
+ expect(container).to be_kind_of(HasRepository)
+ expect(stubbed_container).to be_kind_of(HasRepository)
+ end
describe '#commits_by' do
let(:commits) { container.repository.commits('HEAD', limit: 3).commits }
@@ -74,6 +82,10 @@ RSpec.shared_examples 'model with repository' do
it 'returns valid repo' do
expect(container.repository).to be_kind_of(Repository)
end
+
+ it 'uses the same container' do
+ expect(container.repository.container).to be(container)
+ end
end
describe '#storage' do
@@ -88,6 +100,16 @@ RSpec.shared_examples 'model with repository' do
end
end
+ describe '#lfs_enabled?' do
+ before do
+ stub_lfs_setting(enabled: true)
+ end
+
+ it 'returns the expected value' do
+ expect(container.lfs_enabled?).to eq(expected_lfs_enabled)
+ end
+ end
+
describe '#empty_repo?' do
context 'when the repo does not exist' do
it 'returns true' do
diff --git a/spec/support/shared_examples/models/concerns/shardable_shared_examples.rb b/spec/support/shared_examples/models/concerns/shardable_shared_examples.rb
new file mode 100644
index 00000000000..fa929d5b791
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/shardable_shared_examples.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'shardable scopes' do
+ let_it_be(:secondary_shard) { create(:shard, name: 'test_second_storage') }
+
+ before do
+ record_2.update!(shard: secondary_shard)
+ end
+
+ describe '.for_repository_storage' do
+ it 'returns the objects for a given repository storage' do
+ expect(described_class.for_repository_storage('default')).to eq([record_1])
+ end
+ end
+
+ describe '.excluding_repository_storage' do
+ it 'returns the objects excluding the given repository storage' do
+ expect(described_class.excluding_repository_storage('default')).to eq([record_2])
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
index d199bae4170..f91e4bd8cf7 100644
--- a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
@@ -9,6 +9,11 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
let(:user) { create(:user) }
let(:timebox_table_name) { timebox_type.to_s.pluralize.to_sym }
+ # Values implementations can override
+ let(:mid_point) { Time.now.utc.to_date }
+ let(:open_on_left) { nil }
+ let(:open_on_right) { nil }
+
describe 'modules' do
context 'with a project' do
it_behaves_like 'AtomicInternalId' do
@@ -240,4 +245,85 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
expect(timebox.to_ability_name).to eq(timebox_type.to_s)
end
end
+
+ describe '.within_timeframe' do
+ let(:factory) { timebox_type }
+ let(:min_date) { mid_point - 10.days }
+ let(:max_date) { mid_point + 10.days }
+
+ def box(from, to)
+ create(factory, *timebox_args,
+ start_date: from || open_on_left,
+ due_date: to || open_on_right)
+ end
+
+ it 'can find overlapping timeboxes' do
+ fully_open = box(nil, nil)
+ # ----| ................ # Not overlapping
+ non_overlapping_open_on_left = box(nil, min_date - 1.day)
+ # |--| ................ # Not overlapping
+ non_overlapping_closed_on_left = box(min_date - 2.days, min_date - 1.day)
+ # ------|............... # Overlapping
+ overlapping_open_on_left_just = box(nil, min_date)
+ # -----------------------| # Overlapping
+ overlapping_open_on_left_fully = box(nil, max_date + 1.day)
+ # ---------|............ # Overlapping
+ overlapping_open_on_left_partial = box(nil, min_date + 1.day)
+ # |-----|............ # Overlapping
+ overlapping_closed_partial = box(min_date - 1.day, min_date + 1.day)
+ # |--------------| # Overlapping
+ exact_match = box(min_date, max_date)
+ # |--------------------| # Overlapping
+ larger = box(min_date - 1.day, max_date + 1.day)
+ # ...|-----|...... # Overlapping
+ smaller = box(min_date + 1.day, max_date - 1.day)
+ # .........|-----| # Overlapping
+ at_end = box(max_date - 1.day, max_date)
+ # .........|--------- # Overlapping
+ at_end_open = box(max_date - 1.day, nil)
+ # |-------------------- # Overlapping
+ cover_from_left = box(min_date - 1.day, nil)
+ # .........|--------| # Overlapping
+ cover_from_middle_closed = box(max_date - 1.day, max_date + 1.day)
+ # ...............|--| # Overlapping
+ overlapping_at_end_just = box(max_date, max_date + 1.day)
+ # ............... |-| # Not Overlapping
+ not_overlapping_at_right_closed = box(max_date + 1.day, max_date + 2.days)
+ # ............... |-- # Not Overlapping
+ not_overlapping_at_right_open = box(max_date + 1.day, nil)
+
+ matches = described_class.within_timeframe(min_date, max_date)
+
+ expect(matches).to include(
+ overlapping_open_on_left_just,
+ overlapping_open_on_left_fully,
+ overlapping_open_on_left_partial,
+ overlapping_closed_partial,
+ exact_match,
+ larger,
+ smaller,
+ at_end,
+ at_end_open,
+ cover_from_left,
+ cover_from_middle_closed,
+ overlapping_at_end_just
+ )
+
+ expect(matches).not_to include(
+ non_overlapping_open_on_left,
+ non_overlapping_closed_on_left,
+ not_overlapping_at_right_closed,
+ not_overlapping_at_right_open
+ )
+
+ # Whether we match the 'fully-open' range depends on whether
+ # it is in fact open (i.e. whether the class allows infinite
+ # ranges)
+ if open_on_left.nil? && open_on_right.nil?
+ expect(matches).not_to include(fully_open)
+ else
+ expect(matches).to include(fully_open)
+ end
+ end
+ end
end
diff --git a/spec/support/shared_examples/models/mentionable_shared_examples.rb b/spec/support/shared_examples/models/mentionable_shared_examples.rb
index 94c52bdaaa6..0ee0b7e6d88 100644
--- a/spec/support/shared_examples/models/mentionable_shared_examples.rb
+++ b/spec/support/shared_examples/models/mentionable_shared_examples.rb
@@ -207,29 +207,8 @@ RSpec.shared_examples 'an editable mentionable' do
end
RSpec.shared_examples 'mentions in description' do |mentionable_type|
- describe 'when store_mentioned_users_to_db feature disabled' do
+ describe 'when storing user mentions' do
before do
- stub_feature_flags(store_mentioned_users_to_db: false)
- mentionable.store_mentions!
- end
-
- context 'when mentionable description contains mentions' do
- let(:user) { create(:user) }
- let(:mentionable) { create(mentionable_type, description: "#{user.to_reference} some description") }
-
- it 'stores no mentions' do
- expect(mentionable.user_mentions.count).to eq 0
- end
-
- it 'renders description_html correctly' do
- expect(mentionable.description_html).to include("<a href=\"/#{user.username}\" data-user=\"#{user.id}\"")
- end
- end
- end
-
- describe 'when store_mentioned_users_to_db feature enabled' do
- before do
- stub_feature_flags(store_mentioned_users_to_db: true)
mentionable.store_mentions!
end
diff --git a/spec/support/shared_examples/models/project_latest_successful_build_for_shared_examples.rb b/spec/support/shared_examples/models/project_latest_successful_build_for_shared_examples.rb
index 7701ab42007..66cd8d1df12 100644
--- a/spec/support/shared_examples/models/project_latest_successful_build_for_shared_examples.rb
+++ b/spec/support/shared_examples/models/project_latest_successful_build_for_shared_examples.rb
@@ -60,4 +60,20 @@ RSpec.shared_examples 'latest successful build for sha or ref' do
expect(subject).to be_nil
end
end
+
+ context 'with build belonging to a child pipeline' do
+ let(:child_pipeline) { create_pipeline(project) }
+ let(:parent_bridge) { create(:ci_bridge, pipeline: pipeline, project: pipeline.project) }
+ let!(:pipeline_source) { create(:ci_sources_pipeline, source_job: parent_bridge, pipeline: child_pipeline) }
+ let!(:child_build) { create_build(child_pipeline, 'child-build') }
+ let(:build_name) { child_build.name }
+
+ before do
+ child_pipeline.update!(source: :parent_pipeline)
+ end
+
+ it 'returns the child build' do
+ expect(subject).to eq(child_build)
+ end
+ end
end
diff --git a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
index d1437244082..b8d12a6da59 100644
--- a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
+++ b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
@@ -31,6 +31,41 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
end
+ def as_item(item)
+ item # Override to perform a transformation, if necessary
+ end
+
+ def as_items(items)
+ items.map { |item| as_item(item) }
+ end
+
+ describe '#scoped_items' do
+ it 'includes all items with the same scope' do
+ scope = as_items([item1, item2, new_item, create_item])
+ irrelevant = create(factory, {}) # This should not share the scope
+ context = RelativePositioning.mover.context(item1)
+
+ same_scope = as_items(context.scoped_items)
+
+ expect(same_scope).to include(*scope)
+ expect(same_scope).not_to include(as_item(irrelevant))
+ end
+ end
+
+ describe '#relative_siblings' do
+ it 'includes all items with the same scope, except self' do
+ scope = as_items([item2, new_item, create_item])
+ irrelevant = create(factory, {}) # This should not share the scope
+ context = RelativePositioning.mover.context(item1)
+
+ siblings = as_items(context.relative_siblings)
+
+ expect(siblings).to include(*scope)
+ expect(siblings).not_to include(as_item(item1))
+ expect(siblings).not_to include(as_item(irrelevant))
+ end
+ end
+
describe '.move_nulls_to_end' do
let(:item3) { create_item }
let(:sibling_query) { item1.class.relative_positioning_query_base(item1) }
@@ -47,7 +82,7 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(item1.relative_position).to be(1000)
expect(sibling_query.where(relative_position: nil)).not_to exist
- expect(sibling_query.reorder(:relative_position, :id)).to eq([item1, item2, item3])
+ expect(as_items(sibling_query.reorder(:relative_position, :id))).to eq(as_items([item1, item2, item3]))
end
it 'preserves relative position' do
@@ -117,19 +152,36 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(bunch.map(&:relative_position)).to all(be < nils.map(&:relative_position).min)
end
+ it 'manages to move nulls found in the relative scope' do
+ nils = create_items_with_positions([nil] * 4)
+
+ described_class.move_nulls_to_end(sibling_query.to_a)
+ positions = nils.map { |item| item.reset.relative_position }
+
+ expect(positions).to all(be_present)
+ expect(positions).to all(be_valid_position)
+ end
+
+ it 'can move many nulls' do
+ nils = create_items_with_positions([nil] * 101)
+
+ described_class.move_nulls_to_end(nils)
+
+ expect(nils.map(&:relative_position)).to all(be_valid_position)
+ end
+
it 'does not have an N+1 issue' do
create_items_with_positions(10..12)
-
- a, b, c, d, e, f = create_items_with_positions([nil, nil, nil, nil, nil, nil])
+ a, b, c, d, e, f, *xs = create_items_with_positions([nil] * 10)
baseline = ActiveRecord::QueryRecorder.new do
- described_class.move_nulls_to_end([a, e])
+ described_class.move_nulls_to_end([a, b])
end
- expect { described_class.move_nulls_to_end([b, c, d]) }
+ expect { described_class.move_nulls_to_end([c, d, e, f]) }
.not_to exceed_query_limit(baseline)
- expect { described_class.move_nulls_to_end([f]) }
+ expect { described_class.move_nulls_to_end(xs) }
.not_to exceed_query_limit(baseline.count)
end
end
@@ -149,7 +201,7 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(items.sort_by(&:relative_position)).to eq(items)
expect(sibling_query.where(relative_position: nil)).not_to exist
- expect(sibling_query.reorder(:relative_position, :id)).to eq(items)
+ expect(as_items(sibling_query.reorder(:relative_position, :id))).to eq(as_items(items))
expect(item3.relative_position).to be(1000)
end
@@ -652,3 +704,119 @@ RSpec.shared_examples 'a class that supports relative positioning' do
(RelativePositioning::MIN_POSITION..).take(size)
end
end
+
+RSpec.shared_examples 'no-op relative positioning' do
+ def create_item(**params)
+ create(factory, params.merge(default_params))
+ end
+
+ let_it_be(:item1) { create_item }
+ let_it_be(:item2) { create_item }
+ let_it_be(:new_item) { create_item(relative_position: nil) }
+
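+ # Snapshot of [id, relative_position] pairs, used to assert that no positions change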
+ def any_relative_positions
+ new_item.class.reorder(:relative_position, :id).pluck(:id, :relative_position)
+ end
+
+ shared_examples 'a no-op method' do
+ it 'does not raise errors' do
+ expect { perform }.not_to raise_error
+ end
+
+ it 'does not perform any DB queries' do
+ expect { perform }.not_to exceed_query_limit(0)
+ end
+
+ it 'does not change any relative_position' do
+ expect { perform }.not_to change { any_relative_positions }
+ end
+ end
+
+ describe '.scoped_items' do
+ subject { RelativePositioning.mover.context(item1).scoped_items }
+
+ it 'is empty' do
+ expect(subject).to be_empty
+ end
+ end
+
+ describe '.relative_siblings' do
+ subject { RelativePositioning.mover.context(item1).relative_siblings }
+
+ it 'is empty' do
+ expect(subject).to be_empty
+ end
+ end
+
+ describe '.move_nulls_to_end' do
+ subject { item1.class.move_nulls_to_end([new_item, item1]) }
+
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject
+ end
+ end
+
+ it 'does not move any items' do
+ expect(subject).to eq(0)
+ end
+ end
+
+ describe '.move_nulls_to_start' do
+ subject { item1.class.move_nulls_to_start([new_item, item1]) }
+
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject
+ end
+ end
+
+ it 'does not move any items' do
+ expect(subject).to eq(0)
+ end
+ end
+
+ describe 'instance methods' do
+ subject { new_item }
+
+ describe '#move_to_start' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_to_start
+ end
+ end
+ end
+
+ describe '#move_to_end' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_to_end
+ end
+ end
+ end
+
+ describe '#move_between' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_between(item1, item2)
+ end
+ end
+ end
+
+ describe '#move_before' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_before(item1)
+ end
+ end
+ end
+
+ describe '#move_after' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_after(item1)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb b/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb
index 07552b62cdd..5198508d48b 100644
--- a/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb
+++ b/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb
@@ -73,3 +73,13 @@ RSpec.shared_examples 'timebox resource event actions' do
end
end
end
+
+RSpec.shared_examples 'timebox resource tracks issue metrics' do |type|
+ describe '#usage_metrics' do
+ it 'tracks usage' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:"track_issue_#{type}_changed_action")
+
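+ # The factory name is derived from the described event class, e.g. ResourceMilestoneEvent => :resource_milestone_event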
+ create(described_class.name.underscore.to_sym, issue: create(:issue))
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/snippet_shared_examples.rb b/spec/support/shared_examples/models/snippet_shared_examples.rb
new file mode 100644
index 00000000000..a8fdf9bb81e
--- /dev/null
+++ b/spec/support/shared_examples/models/snippet_shared_examples.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'size checker for snippet' do |action|
+ it 'sets up size checker', :aggregate_failures do
+ expect(checker.current_size).to eq(current_size.megabytes)
+ expect(checker.limit).to eq(Gitlab::CurrentSettings.snippet_size_limit)
+ expect(checker.enabled?).to eq(true)
+ expect(checker.instance_variable_get(:@namespace)).to eq(namespace)
+ end
+end
diff --git a/spec/support/shared_examples/models/throttled_touch_shared_examples.rb b/spec/support/shared_examples/models/throttled_touch_shared_examples.rb
index 14b851d2828..e869cbce6ae 100644
--- a/spec/support/shared_examples/models/throttled_touch_shared_examples.rb
+++ b/spec/support/shared_examples/models/throttled_touch_shared_examples.rb
@@ -13,8 +13,8 @@ RSpec.shared_examples 'throttled touch' do
first_updated_at = Time.zone.now - (ThrottledTouch::TOUCH_INTERVAL * 2)
second_updated_at = Time.zone.now - (ThrottledTouch::TOUCH_INTERVAL * 1.5)
- Timecop.freeze(first_updated_at) { subject.touch }
- Timecop.freeze(second_updated_at) { subject.touch }
+ travel_to(first_updated_at) { subject.touch }
+ travel_to(second_updated_at) { subject.touch }
expect(subject.updated_at).to be_like_time(first_updated_at)
end
diff --git a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
index 557025569b8..7b591ad84d1 100644
--- a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
+++ b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'UpdateProjectStatistics' do
+RSpec.shared_examples 'UpdateProjectStatistics' do |with_counter_attribute|
let(:project) { subject.project }
let(:project_statistics_name) { described_class.project_statistics_name }
let(:statistic_attribute) { described_class.statistic_attribute }
@@ -13,108 +13,230 @@ RSpec.shared_examples 'UpdateProjectStatistics' do
subject.read_attribute(statistic_attribute).to_i
end
- it { is_expected.to be_new_record }
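+ # Reads the increment buffered in Redis by CounterAttribute before FlushCounterIncrementsWorker flushes it into the statistics row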
+ def read_pending_increment
+ Gitlab::Redis::SharedState.with do |redis|
+ key = project.statistics.counter_key(project_statistics_name)
+ redis.get(key).to_i
+ end
+ end
- context 'when creating' do
- it 'updates the project statistics' do
- delta0 = reload_stat
+ it { is_expected.to be_new_record }
- subject.save!
+ context 'when feature flag efficient_counter_attribute is disabled' do
+ before do
+ stub_feature_flags(efficient_counter_attribute: false)
+ end
- delta1 = reload_stat
+ context 'when creating' do
+ it 'updates the project statistics' do
+ delta0 = reload_stat
- expect(delta1).to eq(delta0 + read_attribute)
- expect(delta1).to be > delta0
- end
+ subject.save!
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
+ delta1 = reload_stat
- subject.save!
- end
- end
+ expect(delta1).to eq(delta0 + read_attribute)
+ expect(delta1).to be > delta0
+ end
- context 'when updating' do
- let(:delta) { 42 }
+ it 'schedules a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async).once
- before do
- subject.save!
+ subject.save!
+ end
end
- it 'updates project statistics' do
- expect(ProjectStatistics)
- .to receive(:increment_statistic)
- .and_call_original
+ context 'when updating' do
+ let(:delta) { 42 }
- subject.write_attribute(statistic_attribute, read_attribute + delta)
+ before do
+ subject.save!
+ end
- expect { subject.save! }
- .to change { reload_stat }
- .by(delta)
- end
+ it 'updates project statistics' do
+ expect(ProjectStatistics)
+ .to receive(:increment_statistic)
+ .and_call_original
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
- subject.write_attribute(statistic_attribute, read_attribute + delta)
- subject.save!
- end
+ expect { subject.save! }
+ .to change { reload_stat }
+ .by(delta)
+ end
- it 'avoids N + 1 queries' do
- subject.write_attribute(statistic_attribute, read_attribute + delta)
+ it 'schedules a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async).once
- control_count = ActiveRecord::QueryRecorder.new do
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
subject.save!
end
- subject.write_attribute(statistic_attribute, read_attribute + delta)
+ it 'avoids N + 1 queries' do
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
- expect do
- subject.save!
- end.not_to exceed_query_limit(control_count)
- end
- end
+ control_count = ActiveRecord::QueryRecorder.new do
+ subject.save!
+ end
- context 'when destroying' do
- before do
- subject.save!
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
+
+ expect do
+ subject.save!
+ end.not_to exceed_query_limit(control_count)
+ end
end
- it 'updates the project statistics' do
- delta0 = reload_stat
+ context 'when destroying' do
+ before do
+ subject.save!
+ end
- subject.destroy!
+ it 'updates the project statistics' do
+ delta0 = reload_stat
- delta1 = reload_stat
+ subject.destroy!
- expect(delta1).to eq(delta0 - read_attribute)
- expect(delta1).to be < delta0
- end
+ delta1 = reload_stat
+
+ expect(delta1).to eq(delta0 - read_attribute)
+ expect(delta1).to be < delta0
+ end
+
+ it 'schedules a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async).once
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
+ subject.destroy!
+ end
+
+ context 'when it is destroyed from the project level' do
+ it 'does not update the project statistics' do
+ expect(ProjectStatistics)
+ .not_to receive(:increment_statistic)
+
+ project.update!(pending_delete: true)
+ project.destroy!
+ end
+
+ it 'does not schedule a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
- subject.destroy!
+ project.update!(pending_delete: true)
+ project.destroy!
+ end
+ end
end
+ end
- context 'when it is destroyed from the project level' do
- it 'does not update the project statistics' do
- expect(ProjectStatistics)
- .not_to receive(:increment_statistic)
+ def expect_flush_counter_increments_worker_performed
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, project.statistics.class.name, project.statistics.id, project_statistics_name)
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, project.statistics.class.name, project.statistics.id, :storage_size)
- project.update!(pending_delete: true)
- project.destroy!
+ yield
+
+ # simulate worker running now
+ expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ FlushCounterIncrementsWorker.new.perform(project.statistics.class.name, project.statistics.id, project_statistics_name)
+ end
+
+ if with_counter_attribute
+ context 'when statistic is a counter attribute', :clean_gitlab_redis_shared_state do
+ context 'when creating' do
+ it 'stores pending increments for async update' do
+ initial_stat = reload_stat
+ expected_increment = read_attribute
+
+ expect_flush_counter_increments_worker_performed do
+ subject.save!
+
+ expect(read_pending_increment).to eq(expected_increment)
+ expect(expected_increment).to be > initial_stat
+ expect(expected_increment).to be_positive
+ end
+ end
end
- it 'does not schedule a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .not_to receive(:perform_async)
+ context 'when updating' do
+ let(:delta) { 42 }
+
+ before do
+ subject.save!
+ redis_shared_state_cleanup!
+ end
+
+ it 'stores pending increments for async update' do
+ expect(ProjectStatistics)
+ .to receive(:increment_statistic)
+ .and_call_original
+
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
+
+ expect_flush_counter_increments_worker_performed do
+ subject.save!
+
+ expect(read_pending_increment).to eq(delta)
+ end
+ end
+
+ it 'avoids N + 1 queries' do
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ subject.save!
+ end
+
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
+
+ expect do
+ subject.save!
+ end.not_to exceed_query_limit(control_count)
+ end
+ end
- project.update!(pending_delete: true)
- project.destroy!
+ context 'when destroying' do
+ before do
+ subject.save!
+ redis_shared_state_cleanup!
+ end
+
+ it 'stores pending increment for async update' do
+ initial_stat = reload_stat
+ expected_increment = -read_attribute
+
+ expect_flush_counter_increments_worker_performed do
+ subject.destroy!
+
+ expect(read_pending_increment).to eq(expected_increment)
+ expect(expected_increment).to be < initial_stat
+ expect(expected_increment).to be_negative
+ end
+ end
+
+ context 'when it is destroyed from the project level' do
+ it 'does not update the project statistics' do
+ expect(ProjectStatistics)
+ .not_to receive(:increment_statistic)
+
+ project.update!(pending_delete: true)
+ project.destroy!
+ end
+
+ it 'does not schedule a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+
+ project.update!(pending_delete: true)
+ project.destroy!
+ end
+ end
end
end
end
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index b87f7fe97e1..62da9e15259 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -4,21 +4,99 @@ RSpec.shared_examples 'wiki model' do
let_it_be(:user) { create(:user, :commit_email) }
let(:wiki_container) { raise NotImplementedError }
let(:wiki_container_without_repo) { raise NotImplementedError }
+ let(:wiki_lfs_enabled) { false }
let(:wiki) { described_class.new(wiki_container, user) }
let(:commit) { subject.repository.head_commit }
subject { wiki }
+ it 'container class includes HasWiki' do
+ # NOTE: This is not enforced at runtime, since we also need to support Geo::DeletedProject
+ expect(wiki_container).to be_kind_of(HasWiki)
+ expect(wiki_container_without_repo).to be_kind_of(HasWiki)
+ end
+
it_behaves_like 'model with repository' do
let(:container) { wiki }
let(:stubbed_container) { described_class.new(wiki_container_without_repo, user) }
let(:expected_full_path) { "#{container.container.full_path}.wiki" }
let(:expected_web_url_path) { "#{container.container.web_url(only_path: true).sub(%r{^/}, '')}/-/wikis/home" }
+ let(:expected_lfs_enabled) { wiki_lfs_enabled }
+ end
+
+ describe '.container_class' do
+ it 'is set to the container class' do
+ expect(described_class.container_class).to eq(wiki_container.class)
+ end
+ end
+
+ describe '.find_by_id' do
+ it 'returns a wiki instance if the container is found' do
+ wiki = described_class.find_by_id(wiki_container.id)
+
+ expect(wiki).to be_a(described_class)
+ expect(wiki.container).to eq(wiki_container)
+ end
+
+ it 'returns nil if the container is not found' do
+ expect(described_class.find_by_id(-1)).to be_nil
+ end
+ end
+
+ describe '#initialize' do
+ it 'accepts a valid user' do
+ expect do
+ described_class.new(wiki_container, user)
+ end.not_to raise_error
+ end
+
+ it 'accepts a blank user' do
+ expect do
+ described_class.new(wiki_container, nil)
+ end.not_to raise_error
+ end
+
+ it 'raises an error for invalid users' do
+ expect do
+ described_class.new(wiki_container, Object.new)
+ end.to raise_error(ArgumentError, 'user must be a User, got Object')
+ end
+ end
+
+ describe '#run_after_commit' do
+ it 'delegates to the container' do
+ expect(wiki_container).to receive(:run_after_commit)
+
+ wiki.run_after_commit
+ end
+ end
+
+ describe '#==' do
+ it 'returns true for wikis from the same container' do
+ expect(wiki).to eq(described_class.new(wiki_container))
+ end
+
+ it 'returns false for wikis from different containers' do
+ expect(wiki).not_to eq(described_class.new(wiki_container_without_repo))
+ end
+ end
+
+ describe '#id' do
+ it 'returns the ID of the container' do
+ expect(wiki.id).to eq(wiki_container.id)
+ end
+ end
+
+ describe '#to_global_id' do
+ it 'returns a global ID' do
+ expect(wiki.to_global_id.to_s).to eq("gid://gitlab/#{wiki.class.name}/#{wiki.id}")
+ end
end
describe '#repository' do
it 'returns a wiki repository' do
expect(subject.repository.repo_type).to be_wiki
+ expect(subject.repository.container).to be(subject)
end
end
@@ -164,7 +242,7 @@ RSpec.shared_examples 'wiki model' do
def total_pages(entries)
entries.sum do |entry|
- entry.is_a?(WikiDirectory) ? entry.pages.size : 1
+ entry.is_a?(WikiDirectory) ? total_pages(entry.entries) : 1
end
end
@@ -204,8 +282,9 @@ RSpec.shared_examples 'wiki model' do
expect(page.title).to eq('index page')
end
- it 'returns nil if the page does not exist' do
- expect(subject.find_page('non-existent')).to eq(nil)
+ it 'returns nil if the page or version does not exist' do
+ expect(subject.find_page('non-existent')).to be_nil
+ expect(subject.find_page('index page', 'non-existent')).to be_nil
end
it 'can find a page by slug' do
diff --git a/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb b/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb
new file mode 100644
index 00000000000..7710e756e5b
--- /dev/null
+++ b/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'Self-managed Core resource access tokens' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(false)
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(:admin_resource_access_tokens) }
+ end
+
+ context 'with developer' do
+ let(:current_user) { developer }
+
+ it { is_expected.not_to be_allowed(:admin_resource_access_tokens) }
+ end
+end
+
+RSpec.shared_examples 'GitLab.com Core resource access tokens' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(true)
+ stub_ee_application_setting(should_check_namespace_plan: true)
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.not_to be_allowed(:admin_resource_access_tokens) }
+ end
+end
diff --git a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
index 50a8b81b518..3cdba315d1f 100644
--- a/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/issuable_quick_actions_shared_examples.rb
@@ -109,7 +109,7 @@ RSpec.shared_examples 'issuable quick actions' do
QuickAction.new(
action_text: "/unlock",
before_action: -> {
- issuable.update(discussion_locked: true)
+ issuable.update!(discussion_locked: true)
},
expectation: ->(noteable, can_use_quick_action) {
if can_use_quick_action
@@ -128,7 +128,7 @@ RSpec.shared_examples 'issuable quick actions' do
QuickAction.new(
action_text: "/remove_milestone",
before_action: -> {
- issuable.update(milestone_id: milestone.id)
+ issuable.update!(milestone_id: milestone.id)
},
expectation: ->(noteable, can_use_quick_action) {
if can_use_quick_action
@@ -171,7 +171,7 @@ RSpec.shared_examples 'issuable quick actions' do
QuickAction.new(
action_text: "/remove_estimate",
before_action: -> {
- issuable.update(time_estimate: 30000)
+ issuable.update!(time_estimate: 30000)
},
expectation: ->(noteable, can_use_quick_action) {
if can_use_quick_action
@@ -228,7 +228,7 @@ RSpec.shared_examples 'issuable quick actions' do
before do
project.add_developer(old_assignee)
- issuable.update(assignees: [old_assignee])
+ issuable.update!(assignees: [old_assignee])
end
context 'when user can update issuable' do
diff --git a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
index 258d9ab85e4..acbc6429646 100644
--- a/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/merge_request/merge_quick_action_shared_examples.rb
@@ -52,7 +52,7 @@ RSpec.shared_examples 'merge quick action' do
context 'when the head diff changes in the meanwhile' do
before do
merge_request.source_branch = 'another_branch'
- merge_request.save
+ merge_request.save!
sign_in(user)
visit project_merge_request_path(project, merge_request)
end
diff --git a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
index 5c122b4b5d6..4b5299cebec 100644
--- a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
@@ -75,7 +75,7 @@ RSpec.shared_examples 'Composer package creation' do |user_type, status, add_mem
expect(response).to have_gitlab_http_status(status)
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
end
end
diff --git a/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb b/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
index 3e058838773..e776cf13217 100644
--- a/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
@@ -79,11 +79,3 @@ RSpec.shared_examples 'returns repositories for allowed users' do |user_type, sc
end
end
end
-
-RSpec.shared_examples 'a gitlab tracking event' do |category, action|
- it "creates a gitlab tracking event #{action}" do
- expect(Gitlab::Tracking).to receive(:event).with(category, action, {})
-
- subject
- end
-end
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
new file mode 100644
index 00000000000..ec32cb4b2ff
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -0,0 +1,309 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'Debian repository shared context' do |object_type|
+ before do
+ stub_feature_flags(debian_packages: true)
+ end
+
+ if object_type == :project
+ let(:project) { create(:project, :public) }
+ elsif object_type == :group
+ let(:group) { create(:group, :public) }
+ end
+
+ let(:user) { create(:user) }
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ let(:distribution) { 'bullseye' }
+ let(:component) { 'main' }
+ let(:architecture) { 'amd64' }
+ let(:source_package) { 'sample' }
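+ # Debian pool layout: 'lib*' source packages are filed under their first four letters (e.g. 'libs'), everything else under the first letter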
+ let(:letter) { source_package[0..2] == 'lib' ? source_package[0..3] : source_package[0] }
+ let(:package_name) { 'libsample0' }
+ let(:package_version) { '1.2.3~alpha2-1' }
+ let(:file_name) { "#{package_name}_#{package_version}_#{architecture}.deb" }
+
+ let(:method) { :get }
+
+ let(:workhorse_params) do
+ if method == :put
+ file_upload = fixture_file_upload("spec/fixtures/packages/debian/#{file_name}")
+ { file: file_upload }
+ else
+ {}
+ end
+ end
+
+ let(:params) { workhorse_params }
+
+ let(:auth_headers) { {} }
+ let(:workhorse_headers) do
+ if method == :put
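+ # Uploads are finalized through Workhorse, which authenticates with a JWT signed using the shared Workhorse secret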
+ workhorse_token = JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256')
+ { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token }
+ else
+ {}
+ end
+ end
+
+ let(:headers) { auth_headers.merge(workhorse_headers) }
+
+ let(:send_rewritten_field) { true }
+
+ subject do
+ if method == :put
+ workhorse_finalize(
+ api(url),
+ method: method,
+ file_key: :file,
+ params: params,
+ headers: headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ else
+ send method, api(url), headers: headers, params: params
+ end
+ end
+end
+
+RSpec.shared_context 'Debian repository auth headers' do |user_role, user_token, auth_method = :token|
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+
+ let(:auth_headers) do
+ if user_role == :anonymous
+ {}
+ elsif auth_method == :token
+ { 'Private-Token' => token }
+ else
+ basic_auth_header(user.username, token)
+ end
+ end
+end
+
+RSpec.shared_context 'Debian repository project access' do |project_visibility_level, user_role, user_token, auth_method|
+ include_context 'Debian repository auth headers', user_role, user_token, auth_method do
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian project repository GET request' do |user_role, add_member, status, body|
+ context "for user type #{user_role}" do
+ before do
+ project.send("add_#{user_role}", user) if add_member && user_role != :anonymous
+ end
+
+ and_body = body.nil? ? '' : ' and expected body'
+
+ it "returns #{status}#{and_body}" do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian project repository PUT request' do |user_role, add_member, status, body|
+ context "for user type #{user_role}" do
+ before do
+ project.send("add_#{user_role}", user) if add_member && user_role != :anonymous
+ end
+
+ and_body = body.nil? ? '' : ' and expected body'
+
+ if status == :created
+ it 'creates package files' do
+ pending "Debian package creation not implemented"
+ expect { subject }
+ .to change { project.packages.debian.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
+ else
+ it "returns #{status}#{and_body}" do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'rejects Debian access with unknown project id' do
+ context 'with an unknown project' do
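+ # No real project is created; an id that matches no record is enough to exercise the 404 path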
+ let(:project) { double(id: non_existing_record_id) }
+
+ context 'as anonymous' do
+ it_behaves_like 'Debian project repository GET request', :anonymous, true, :not_found, nil
+ end
+
+ context 'as authenticated user' do
+ subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
+
+ it_behaves_like 'Debian project repository GET request', :anonymous, true, :not_found, nil
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian project repository GET endpoint' do |success_status, success_body|
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
+ 'PUBLIC' | :developer | true | true | success_status | success_body
+ 'PUBLIC' | :guest | true | true | success_status | success_body
+ 'PUBLIC' | :developer | true | false | success_status | success_body
+ 'PUBLIC' | :guest | true | false | success_status | success_body
+ 'PUBLIC' | :developer | false | true | success_status | success_body
+ 'PUBLIC' | :guest | false | true | success_status | success_body
+ 'PUBLIC' | :developer | false | false | success_status | success_body
+ 'PUBLIC' | :guest | false | false | success_status | success_body
+ 'PUBLIC' | :anonymous | false | true | success_status | success_body
+ 'PRIVATE' | :developer | true | true | success_status | success_body
+ 'PRIVATE' | :guest | true | true | :forbidden | nil
+ 'PRIVATE' | :developer | true | false | :not_found | nil
+ 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | false | true | :not_found | nil
+ 'PRIVATE' | :guest | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :not_found | nil
+ 'PRIVATE' | :guest | false | false | :not_found | nil
+ 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ end
+
+ with_them do
+ include_context 'Debian repository project access', params[:project_visibility_level], params[:user_role], params[:user_token], :basic do
+ it_behaves_like 'Debian project repository GET request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ end
+ end
+ end
+
+ it_behaves_like 'rejects Debian access with unknown project id'
+end
+
+RSpec.shared_examples 'Debian project repository PUT endpoint' do |success_status, success_body|
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
+ 'PUBLIC' | :developer | true | true | success_status | nil
+ 'PUBLIC' | :guest | true | true | :forbidden | nil
+ 'PUBLIC' | :developer | true | false | :unauthorized | nil
+ 'PUBLIC' | :guest | true | false | :unauthorized | nil
+ 'PUBLIC' | :developer | false | true | :forbidden | nil
+ 'PUBLIC' | :guest | false | true | :forbidden | nil
+ 'PUBLIC' | :developer | false | false | :unauthorized | nil
+ 'PUBLIC' | :guest | false | false | :unauthorized | nil
+ 'PUBLIC' | :anonymous | false | true | :unauthorized | nil
+ 'PRIVATE' | :developer | true | true | success_status | nil
+ 'PRIVATE' | :guest | true | true | :forbidden | nil
+ 'PRIVATE' | :developer | true | false | :not_found | nil
+ 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | false | true | :not_found | nil
+ 'PRIVATE' | :guest | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :not_found | nil
+ 'PRIVATE' | :guest | false | false | :not_found | nil
+ 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ end
+
+ with_them do
+ include_context 'Debian repository project access', params[:project_visibility_level], params[:user_role], params[:user_token], :basic do
+ it_behaves_like 'Debian project repository PUT request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ end
+ end
+ end
+
+ it_behaves_like 'rejects Debian access with unknown project id'
+end
+
+RSpec.shared_context 'Debian repository group access' do |group_visibility_level, user_role, user_token, auth_method|
+ include_context 'Debian repository auth headers', user_role, user_token, auth_method do
+ before do
+ group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility_level, false))
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian group repository GET request' do |user_role, add_member, status, body|
+ context "for user type #{user_role}" do
+ before do
+ group.send("add_#{user_role}", user) if add_member && user_role != :anonymous
+ end
+
+ and_body = body.nil? ? '' : ' and expected body'
+
+ it "returns #{status}#{and_body}" do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'rejects Debian access with unknown group id' do
+ context 'with an unknown group' do
+ let(:group) { double(id: non_existing_record_id) }
+
+ context 'as anonymous' do
+ it_behaves_like 'Debian group repository GET request', :anonymous, true, :not_found, nil
+ end
+
+ context 'as authenticated user' do
+ subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
+
+ it_behaves_like 'Debian group repository GET request', :anonymous, true, :not_found, nil
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian group repository GET endpoint' do |success_status, success_body|
+ context 'with valid group' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:group_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
+ 'PUBLIC' | :developer | true | true | success_status | success_body
+ 'PUBLIC' | :guest | true | true | success_status | success_body
+ 'PUBLIC' | :developer | true | false | success_status | success_body
+ 'PUBLIC' | :guest | true | false | success_status | success_body
+ 'PUBLIC' | :developer | false | true | success_status | success_body
+ 'PUBLIC' | :guest | false | true | success_status | success_body
+ 'PUBLIC' | :developer | false | false | success_status | success_body
+ 'PUBLIC' | :guest | false | false | success_status | success_body
+ 'PUBLIC' | :anonymous | false | true | success_status | success_body
+ 'PRIVATE' | :developer | true | true | success_status | success_body
+ 'PRIVATE' | :guest | true | true | :forbidden | nil
+ 'PRIVATE' | :developer | true | false | :not_found | nil
+ 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | false | true | :not_found | nil
+ 'PRIVATE' | :guest | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :not_found | nil
+ 'PRIVATE' | :guest | false | false | :not_found | nil
+ 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ end
+
+ with_them do
+ include_context 'Debian repository group access', params[:group_visibility_level], params[:user_role], params[:user_token], :basic do
+ it_behaves_like 'Debian group repository GET request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ end
+ end
+ end
+
+ it_behaves_like 'rejects Debian access with unknown group id'
+end
diff --git a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
index f26af6cb766..5145880ef9a 100644
--- a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
@@ -90,7 +90,7 @@ RSpec.shared_examples 'group and project boards query' do
it_behaves_like 'a working graphql query' do
before do
- post_graphql(query_single_board, current_user: current_user)
+ post_graphql(query_single_board("id: \"gid://gitlab/Board/1\""), current_user: current_user)
end
end
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index 6aac51a5903..58e99776fd9 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -26,7 +26,7 @@ RSpec.shared_examples 'process nuget service index request' do |user_type, statu
it_behaves_like 'returning response status', status
- it_behaves_like 'a gitlab tracking event', described_class.name, 'nuget_service_index'
+ it_behaves_like 'a package tracking event', described_class.name, 'cli_metadata'
it 'returns a valid json response' do
subject
@@ -169,7 +169,7 @@ RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member =
context 'with correct params' do
it_behaves_like 'package workhorse uploads'
it_behaves_like 'creates nuget package files'
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
end
end
@@ -286,7 +286,7 @@ RSpec.shared_examples 'process nuget download content request' do |user_type, st
it_behaves_like 'returning response status', status
- it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
it 'returns a valid package archive' do
subject
@@ -336,7 +336,7 @@ RSpec.shared_examples 'process nuget search request' do |user_type, status, add_
it_behaves_like 'returns a valid json search response', status, 4, [1, 5, 5, 1]
- it_behaves_like 'a gitlab tracking event', described_class.name, 'search_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'search_package'
context 'with skip set to 2' do
let(:skip) { 2 }
diff --git a/spec/support/shared_examples/requests/api/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
index c9a33701161..d730ed53109 100644
--- a/spec/support/shared_examples/requests/api/packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
@@ -126,3 +126,11 @@ RSpec.shared_examples 'job token for package uploads' do
end
end
end
+
+RSpec.shared_examples 'a package tracking event' do |category, action|
+ it "creates a gitlab tracking event #{action}" do
+ expect(Gitlab::Tracking).to receive(:event).with(category, action, {})
+
+ expect { subject }.to change { Packages::Event.count }.by(1)
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
index 715c494840e..bbcf856350d 100644
--- a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'PyPi package creation' do |user_type, status, add_member = true|
+RSpec.shared_examples 'PyPI package creation' do |user_type, status, add_member = true|
RSpec.shared_examples 'creating pypi package files' do
it 'creates package files' do
expect { subject }
@@ -52,7 +52,7 @@ RSpec.shared_examples 'PyPi package creation' do |user_type, status, add_member
context 'with correct params' do
it_behaves_like 'package workhorse uploads'
it_behaves_like 'creating pypi package files'
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
end
end
@@ -106,7 +106,7 @@ RSpec.shared_examples 'PyPi package creation' do |user_type, status, add_member
end
end
-RSpec.shared_examples 'PyPi package versions' do |user_type, status, add_member = true|
+RSpec.shared_examples 'PyPI package versions' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
@@ -119,11 +119,11 @@ RSpec.shared_examples 'PyPi package versions' do |user_type, status, add_member
end
it_behaves_like 'returning response status', status
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_package'
end
end
-RSpec.shared_examples 'PyPi package download' do |user_type, status, add_member = true|
+RSpec.shared_examples 'PyPI package download' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
@@ -136,11 +136,11 @@ RSpec.shared_examples 'PyPi package download' do |user_type, status, add_member
end
it_behaves_like 'returning response status', status
- it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
end
end
-RSpec.shared_examples 'process PyPi api request' do |user_type, status, add_member = true|
+RSpec.shared_examples 'process PyPI api request' do |user_type, status, add_member = true|
context "for user type #{user_type}" do
before do
project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
@@ -155,13 +155,13 @@ RSpec.shared_examples 'rejects PyPI access with unknown project id' do
let(:project) { OpenStruct.new(id: 1234567890) }
context 'as anonymous' do
- it_behaves_like 'process PyPi api request', :anonymous, :not_found
+ it_behaves_like 'process PyPI api request', :anonymous, :not_found
end
context 'as authenticated user' do
subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
- it_behaves_like 'process PyPi api request', :anonymous, :not_found
+ it_behaves_like 'process PyPI api request', :anonymous, :not_found
end
end
end
diff --git a/spec/support/shared_examples/requests/api/snippets_shared_examples.rb b/spec/support/shared_examples/requests/api/snippets_shared_examples.rb
index 051367fbe96..2b72c69cb37 100644
--- a/spec/support/shared_examples/requests/api/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/snippets_shared_examples.rb
@@ -1,46 +1,30 @@
# frozen_string_literal: true
RSpec.shared_examples 'raw snippet files' do
- let_it_be(:unauthorized_user) { create(:user) }
+ let_it_be(:user_token) { create(:personal_access_token, user: snippet.author) }
let(:snippet_id) { snippet.id }
let(:user) { snippet.author }
let(:file_path) { '%2Egitattributes' }
let(:ref) { 'master' }
- context 'with no user' do
- it 'requires authentication' do
- get api(api_path)
+ subject { get api(api_path, personal_access_token: user_token) }
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
- end
+ context 'with an invalid snippet ID' do
+ let(:snippet_id) { non_existing_record_id }
- shared_examples 'not found' do
it 'returns 404' do
- get api(api_path, user)
+ subject
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Snippet Not Found')
end
end
- context 'when not authorized' do
- let(:user) { unauthorized_user }
-
- it_behaves_like 'not found'
- end
-
- context 'with an invalid snippet ID' do
- let(:snippet_id) { 'invalid' }
-
- it_behaves_like 'not found'
- end
-
context 'with valid params' do
it 'returns the raw file info' do
expect(Gitlab::Workhorse).to receive(:send_git_blob).and_call_original
- get api(api_path, user)
+ subject
aggregate_failures do
expect(response).to have_gitlab_http_status(:ok)
@@ -52,6 +36,17 @@ RSpec.shared_examples 'raw snippet files' do
end
end
+ context 'with unauthorized user' do
+ let(:user_token) { create(:personal_access_token) }
+
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Snippet Not Found')
+ end
+ end
+
context 'with invalid params' do
using RSpec::Parameterized::TableSyntax
@@ -68,12 +63,12 @@ RSpec.shared_examples 'raw snippet files' do
end
with_them do
- before do
- get api(api_path, user)
- end
+ it 'returns the proper response code and message' do
+ subject
- it { expect(response).to have_gitlab_http_status(status) }
- it { expect(json_response[key]).to eq(message) }
+ expect(response).to have_gitlab_http_status(status)
+ expect(json_response[key]).to eq(message)
+ end
end
end
end
@@ -216,3 +211,133 @@ RSpec.shared_examples 'invalid snippet updates' do
expect(json_response['error']).to eq 'title is empty'
end
end
+
+RSpec.shared_examples 'snippet access with different users' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:requester, :visibility, :status) do
+ :admin | :public | :ok
+ :admin | :private | :ok
+ :admin | :internal | :ok
+ :author | :public | :ok
+ :author | :private | :ok
+ :author | :internal | :ok
+ :other | :public | :ok
+ :other | :private | :not_found
+ :other | :internal | :ok
+ nil | :public | :ok
+ nil | :private | :not_found
+ nil | :internal | :not_found
+ end
+
+ with_them do
+ let(:snippet) { snippet_for(visibility) }
+
+ it 'returns the correct response' do
+ request_user = user_for(requester)
+
+ get api(path, request_user)
+
+ expect(response).to have_gitlab_http_status(status)
+ end
+ end
+
+ def user_for(user_type)
+ case user_type
+ when :author
+ user
+ when :other
+ other_user
+ when :admin
+ admin
+ else
+ nil
+ end
+ end
+
+ def snippet_for(snippet_type)
+ case snippet_type
+ when :private
+ private_snippet
+ when :internal
+ internal_snippet
+ when :public
+ public_snippet
+ end
+ end
+end
+
+RSpec.shared_examples 'expected response status' do
+ it 'returns the correct response' do
+ get api(path, personal_access_token: user_token)
+
+ expect(response).to have_gitlab_http_status(status)
+ end
+end
+
+RSpec.shared_examples 'unauthenticated project snippet access' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:user_token) { nil }
+
+ where(:project_visibility, :snippet_visibility, :status) do
+ :public | :public | :ok
+ :public | :private | :not_found
+ :public | :internal | :not_found
+ :internal | :public | :not_found
+ :private | :public | :not_found
+ end
+
+ with_them do
+ it_behaves_like 'expected response status'
+ end
+end
+
+RSpec.shared_examples 'non-member project snippet access' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility, :snippet_visibility, :status) do
+ :public | :public | :ok
+ :public | :internal | :ok
+ :internal | :public | :ok
+ :public | :private | :not_found
+ :private | :public | :not_found
+ end
+
+ with_them do
+ it_behaves_like 'expected response status'
+ end
+end
+
+RSpec.shared_examples 'member project snippet access' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ project.add_guest(user)
+ end
+
+ where(:project_visibility, :snippet_visibility, :status) do
+ :public | :public | :ok
+ :public | :internal | :ok
+ :internal | :public | :ok
+ :public | :private | :ok
+ :private | :public | :ok
+ end
+
+ with_them do
+ it_behaves_like 'expected response status'
+ end
+end
+
+RSpec.shared_examples 'project snippet access levels' do
+ let_it_be(:user_token) { create(:personal_access_token, user: user) }
+
+ let(:project) { create(:project, project_visibility) }
+ let(:snippet) { create(:project_snippet, :repository, snippet_visibility, project: project) }
+
+ it_behaves_like 'unauthenticated project snippet access'
+
+ it_behaves_like 'non-member project snippet access'
+
+ it_behaves_like 'member project snippet access'
+end
diff --git a/spec/support/shared_examples/requests/api/tracking_shared_examples.rb b/spec/support/shared_examples/requests/api/tracking_shared_examples.rb
new file mode 100644
index 00000000000..2e6feae3f98
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/tracking_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a gitlab tracking event' do |category, action|
+ it "creates a gitlab tracking event #{action}" do
+ expect(Gitlab::Tracking).to receive(:event).with(category, action, {})
+
+ subject
+ end
+end
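A minimal usage sketch for the tracking shared example above; the endpoint, subject, and category/action arguments are assumptions for illustration:

# Hypothetical caller (endpoint and event names assumed)
describe 'POST /projects/:id/something' do
  subject { post api("/projects/#{project.id}/something", user) }

  it_behaves_like 'a gitlab tracking event', 'API::Something', 'create_something'
end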
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index 08ccbd4a9c1..730df4dc5ab 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -48,7 +48,7 @@ RSpec.shared_examples 'rate-limited token-authenticated requests' do
expect_rejection { make_request(request_args) }
- Timecop.travel(period.from_now) do
+ travel_to(period.from_now) do
requests_per_period.times do
make_request(request_args)
expect(response).not_to have_gitlab_http_status(:too_many_requests)
@@ -175,7 +175,7 @@ RSpec.shared_examples 'rate-limited web authenticated requests' do
expect_rejection { request_authenticated_web_url }
- Timecop.travel(period.from_now) do
+ travel_to(period.from_now) do
requests_per_period.times do
request_authenticated_web_url
expect(response).not_to have_gitlab_http_status(:too_many_requests)
diff --git a/spec/support/shared_examples/requests/snippet_shared_examples.rb b/spec/support/shared_examples/requests/snippet_shared_examples.rb
index 84ef7723b9b..dae3a3e74be 100644
--- a/spec/support/shared_examples/requests/snippet_shared_examples.rb
+++ b/spec/support/shared_examples/requests/snippet_shared_examples.rb
@@ -99,18 +99,6 @@ RSpec.shared_examples 'snippet blob content' do
end
end
-RSpec.shared_examples 'snippet_multiple_files feature disabled' do
- before do
- stub_feature_flags(snippet_multiple_files: false)
-
- subject
- end
-
- it 'does not return files attributes' do
- expect(json_response).not_to have_key('files')
- end
-end
-
RSpec.shared_examples 'snippet creation with files parameter' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/support/shared_examples/requests/user_activity_shared_examples.rb b/spec/support/shared_examples/requests/user_activity_shared_examples.rb
new file mode 100644
index 00000000000..37da1ce5c63
--- /dev/null
+++ b/spec/support/shared_examples/requests/user_activity_shared_examples.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'updating of user activity' do |paths_to_visit|
+ let(:user) { create(:user, last_activity_on: nil) }
+
+ before do
+ group = create(:group, name: 'group')
+ project = create(:project, :public, namespace: group, name: 'project')
+
+ create(:issue, project: project, iid: 10)
+ create(:merge_request, source_project: project, iid: 15)
+
+ project.add_maintainer(user)
+ end
+
+ context 'without an authenticated user' do
+ it 'does not set the last activity cookie' do
+ get "/group/project"
+
+ expect(response.cookies['user_last_activity_on']).to be_nil
+ end
+ end
+
+ context 'with an authenticated user' do
+ before do
+ login_as(user)
+ end
+
+ context 'with a POST request' do
+ it 'does not set the last activity cookie' do
+ post "/group/project/archive"
+
+ expect(response.cookies['user_last_activity_on']).to be_nil
+ end
+ end
+
+ paths_to_visit.each do |path|
+ context "on GET to #{path}" do
+ it 'updates the last activity date' do
+ expect(Users::ActivityService).to receive(:new).and_call_original
+
+ get path
+
+ expect(user.last_activity_on).to eq(Date.today)
+ end
+
+ context 'when calling it twice' do
+ it 'updates last_activity_on just once' do
+ expect(Users::ActivityService).to receive(:new).once.and_call_original
+
+ 2.times do
+ get path
+ end
+ end
+ end
+
+ context 'when last_activity_on is nil' do
+ before do
+ user.update_attribute(:last_activity_on, nil)
+ end
+
+ it 'updates the last activity date' do
+ expect(user.last_activity_on).to be_nil
+
+ get path
+
+ expect(user.last_activity_on).to eq(Date.today)
+ end
+ end
+
+ context 'when last_activity_on is stale' do
+ before do
+ user.update_attribute(:last_activity_on, 2.days.ago.to_date)
+ end
+
+ it 'updates the last activity date' do
+ get path
+
+ expect(user.last_activity_on).to eq(Date.today)
+ end
+ end
+
+ context 'when last_activity_on is up to date' do
+ before do
+ user.update_attribute(:last_activity_on, Date.today)
+ end
+
+ it 'does not try to update it' do
+ expect(Users::ActivityService).not_to receive(:new)
+
+ get path
+ end
+ end
+ end
+ end
+ end
+end
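A minimal usage sketch for the shared example above; the paths follow the group/project fixtures created in its before block, but the calling spec itself is an assumption:

# Hypothetical caller (request spec)
RSpec.describe 'User activity tracking', type: :request do
  it_behaves_like 'updating of user activity', [
    '/group/project',
    '/group/project/-/issues/10',
    '/group/project/-/merge_requests/15'
  ]
end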
diff --git a/spec/support/shared_examples/serializers/note_entity_shared_examples.rb b/spec/support/shared_examples/serializers/note_entity_shared_examples.rb
index 7b2ec02c7b6..a90a2dc3667 100644
--- a/spec/support/shared_examples/serializers/note_entity_shared_examples.rb
+++ b/spec/support/shared_examples/serializers/note_entity_shared_examples.rb
@@ -24,7 +24,7 @@ RSpec.shared_examples 'note entity' do
context 'when note was edited' do
before do
- note.update(updated_at: 1.minute.from_now, updated_by: user)
+ note.update!(updated_at: 1.minute.from_now, updated_by: user)
end
it 'exposes last_edited_at and last_edited_by elements' do
@@ -34,7 +34,7 @@ RSpec.shared_examples 'note entity' do
context 'when note is a system note' do
before do
- note.update(system: true)
+ note.update!(system: true)
end
it 'exposes system_note_icon_name element' do
diff --git a/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb b/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb
index fced2e59ace..f28c78aec97 100644
--- a/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb
@@ -7,7 +7,7 @@ RSpec.shared_examples 'boards create service' do
end
it 'creates the default lists' do
- board = service.execute
+ board = service.execute.payload
expect(board.lists.size).to eq 2
expect(board.lists.first).to be_backlog
diff --git a/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
index f352b430cc7..4aa5d7d890b 100644
--- a/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
@@ -131,7 +131,7 @@ RSpec.shared_examples 'issues move service' do |group|
updated_at1 = issue1.updated_at
updated_at2 = issue2.updated_at
- Timecop.travel(1.minute.from_now) do
+ travel_to(1.minute.from_now) do
described_class.new(parent, user, params).execute(issue)
end
diff --git a/spec/support/shared_examples/services/incident_shared_examples.rb b/spec/support/shared_examples/services/incident_shared_examples.rb
index d6e79931df5..39c22ac8aa3 100644
--- a/spec/support/shared_examples/services/incident_shared_examples.rb
+++ b/spec/support/shared_examples/services/incident_shared_examples.rb
@@ -45,3 +45,74 @@ RSpec.shared_examples 'not an incident issue' do
expect(issue.labels).not_to include(have_attributes(label_properties))
end
end
+
+# This shared example tests the execution of incident management label services,
+# for example:
+# - IncidentManagement::CreateIncidentSlaExceededLabelService
+# - IncidentManagement::CreateIncidentLabelService
+
+# It does not require any variables to be defined.
+
+RSpec.shared_examples 'incident management label service' do
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:user) { User.alert_bot }
+ let(:service) { described_class.new(project, user) }
+
+ subject(:execute) { service.execute }
+
+ describe 'execute' do
+ let(:incident_label_attributes) { described_class::LABEL_PROPERTIES }
+ let(:title) { incident_label_attributes[:title] }
+ let(:color) { incident_label_attributes[:color] }
+ let(:description) { incident_label_attributes[:description] }
+
+ shared_examples 'existing label' do
+ it 'returns the existing label' do
+ expect { execute }.not_to change(Label, :count)
+
+ expect(execute).to be_success
+ expect(execute.payload).to eq(label: label)
+ end
+ end
+
+ shared_examples 'new label' do
+ it 'creates a new label' do
+ expect { execute }.to change(Label, :count).by(1)
+
+ label = project.reload.labels.last
+ expect(execute).to be_success
+ expect(execute.payload).to eq(label: label)
+ expect(label.title).to eq(title)
+ expect(label.color).to eq(color)
+ expect(label.description).to eq(description)
+ end
+ end
+
+ context 'with predefined project label' do
+ it_behaves_like 'existing label' do
+ let!(:label) { create(:label, project: project, title: title) }
+ end
+ end
+
+ context 'with predefined group label' do
+ let(:project) { create(:project, group: group) }
+ let(:group) { create(:group) }
+
+ it_behaves_like 'existing label' do
+ let!(:label) { create(:group_label, group: group, title: title) }
+ end
+ end
+
+ context 'without label' do
+ context 'when user has permissions to create labels' do
+ it_behaves_like 'new label'
+ end
+
+ context 'when user has no permissions to create labels' do
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'new label'
+ end
+ end
+ end
+end
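Per the comment at the top of this shared example, a caller only needs to include it from the service spec; a minimal sketch:

RSpec.describe IncidentManagement::CreateIncidentLabelService do
  it_behaves_like 'incident management label service'
end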
diff --git a/spec/support/shared_examples/services/merge_request_shared_examples.rb b/spec/support/shared_examples/services/merge_request_shared_examples.rb
index a7032640217..2bd06ac3e9c 100644
--- a/spec/support/shared_examples/services/merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/services/merge_request_shared_examples.rb
@@ -13,11 +13,10 @@ RSpec.shared_examples 'reviewer_ids filter' do
end
context 'with reviewer_ids' do
- let(:reviewer_ids_param) { { reviewer_ids: [reviewer1.id, reviewer2.id, reviewer3.id] } }
+ let(:reviewer_ids_param) { { reviewer_ids: [reviewer1.id, reviewer2.id] } }
let(:reviewer1) { create(:user) }
let(:reviewer2) { create(:user) }
- let(:reviewer3) { create(:user) }
context 'when the current user can admin the merge_request' do
context 'when merge_request_reviewer feature is enabled' do
@@ -25,14 +24,13 @@ RSpec.shared_examples 'reviewer_ids filter' do
stub_feature_flags(merge_request_reviewer: true)
end
- context 'with reviewers who can read the merge_request' do
+ context 'with a reviewer who can read the merge_request' do
before do
project.add_developer(reviewer1)
- project.add_developer(reviewer2)
end
it 'contains reviewers who can read the merge_request' do
- expect(execute.reviewers).to contain_exactly(reviewer1, reviewer2)
+ expect(execute.reviewers).to contain_exactly(reviewer1)
end
end
end
diff --git a/spec/support/shared_examples/services/packages_shared_examples.rb b/spec/support/shared_examples/services/packages_shared_examples.rb
index 7fd59c3d963..65f4b3b5513 100644
--- a/spec/support/shared_examples/services/packages_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages_shared_examples.rb
@@ -170,6 +170,8 @@ RSpec.shared_examples 'filters on each package_type' do |is_project: false|
let_it_be(:package5) { create(:pypi_package, project: project) }
let_it_be(:package6) { create(:composer_package, project: project) }
let_it_be(:package7) { create(:generic_package, project: project) }
+ let_it_be(:package8) { create(:golang_package, project: project) }
+ let_it_be(:package9) { create(:debian_package, project: project) }
Packages::Package.package_types.keys.each do |package_type|
context "for package type #{package_type}" do
diff --git a/spec/support/shared_examples/services/projects/urls_with_escaped_elements_shared_example.rb b/spec/support/shared_examples/services/projects/urls_with_escaped_elements_shared_example.rb
new file mode 100644
index 00000000000..df8b1f91629
--- /dev/null
+++ b/spec/support/shared_examples/services/projects/urls_with_escaped_elements_shared_example.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# Shared examples that test requests against URLs with escaped elements
+#
+RSpec.shared_examples "URLs containing escaped elements return expected status" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:url, :result_status) do
+ "https://user:0a%23@test.example.com/project.git" | :success
+ "https://git.example.com:1%2F%2F@source.developers.google.com/project.git" | :success
+ CGI.escape("git://localhost:1234/some-path?some-query=some-val\#@example.com/") | :error
+ CGI.escape(CGI.escape("https://user:0a%23@test.example.com/project.git")) | :error
+ end
+
+ with_them do
+ it "returns expected status" do
+ expect(result[:status]).to eq(result_status)
+ end
+ end
+end
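A minimal usage sketch for the shared example above; it expects the caller to define `result` as a hash with a :status key, and the service shown here is an assumption:

# Hypothetical caller (service name and call are assumed)
RSpec.describe Projects::SomeImportService do
  it_behaves_like 'URLs containing escaped elements return expected status' do
    let(:result) { described_class.new(import_url: url).execute }
  end
end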
diff --git a/spec/support/shared_examples/validators/ip_address_validator_shared_examples.rb b/spec/support/shared_examples/validators/ip_address_validator_shared_examples.rb
new file mode 100644
index 00000000000..5680d4f772c
--- /dev/null
+++ b/spec/support/shared_examples/validators/ip_address_validator_shared_examples.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'validates IP address' do
+ subject { object }
+
+ it { is_expected.to allow_value('192.168.17.43').for(attribute.to_sym) }
+ it { is_expected.to allow_value('2001:0db8:85a3:0000:0000:8a2e:0370:7334').for(attribute.to_sym) }
+
+ it { is_expected.not_to allow_value('invalid IP').for(attribute.to_sym) }
+end
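A minimal usage sketch for the validator shared example above; it only requires `object` and `attribute` to be defined, and the model and factory shown here are assumptions:

# Hypothetical caller (model and factory names assumed)
RSpec.describe SomeAgent, type: :model do
  it_behaves_like 'validates IP address' do
    let(:object) { build(:some_agent) }
    let(:attribute) { :ip_address }
  end
end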
diff --git a/spec/support/test_reports/test_reports_helper.rb b/spec/support/test_reports/test_reports_helper.rb
index ad9ecb6f460..18b40a20cf1 100644
--- a/spec/support/test_reports/test_reports_helper.rb
+++ b/spec/support/test_reports/test_reports_helper.rb
@@ -3,6 +3,7 @@
module TestReportsHelper
def create_test_case_rspec_success(name = 'test_spec')
Gitlab::Ci::Reports::TestCase.new(
+ suite_name: 'rspec',
name: 'Test#sum when a is 1 and b is 3 returns summary',
classname: "spec.#{name}",
file: './spec/test_spec.rb',
@@ -12,6 +13,7 @@ module TestReportsHelper
def create_test_case_rspec_failed(name = 'test_spec', execution_time = 2.22)
Gitlab::Ci::Reports::TestCase.new(
+ suite_name: 'rspec',
name: 'Test#sum when a is 1 and b is 3 returns summary',
classname: "spec.#{name}",
file: './spec/test_spec.rb',
@@ -22,6 +24,7 @@ module TestReportsHelper
def create_test_case_rspec_skipped(name = 'test_spec')
Gitlab::Ci::Reports::TestCase.new(
+ suite_name: 'rspec',
name: 'Test#sum when a is 3 and b is 3 returns summary',
classname: "spec.#{name}",
file: './spec/test_spec.rb',
@@ -31,6 +34,7 @@ module TestReportsHelper
def create_test_case_rspec_error(name = 'test_spec')
Gitlab::Ci::Reports::TestCase.new(
+ suite_name: 'rspec',
name: 'Test#sum when a is 4 and b is 4 returns summary',
classname: "spec.#{name}",
file: './spec/test_spec.rb',
@@ -52,6 +56,7 @@ module TestReportsHelper
def create_test_case_java_success(name = 'addTest')
Gitlab::Ci::Reports::TestCase.new(
+ suite_name: 'java',
name: name,
classname: 'CalculatorTest',
execution_time: 5.55,
@@ -60,6 +65,7 @@ module TestReportsHelper
def create_test_case_java_failed(name = 'addTest')
Gitlab::Ci::Reports::TestCase.new(
+ suite_name: 'java',
name: name,
classname: 'CalculatorTest',
execution_time: 6.66,
@@ -69,6 +75,7 @@ module TestReportsHelper
def create_test_case_java_skipped(name = 'addTest')
Gitlab::Ci::Reports::TestCase.new(
+ suite_name: 'java',
name: name,
classname: 'CalculatorTest',
execution_time: 7.77,
@@ -77,6 +84,7 @@ module TestReportsHelper
def create_test_case_java_error(name = 'addTest')
Gitlab::Ci::Reports::TestCase.new(
+ suite_name: 'java',
name: name,
classname: 'CalculatorTest',
execution_time: 8.88,
diff --git a/spec/support_specs/helpers/stub_feature_flags_spec.rb b/spec/support_specs/helpers/stub_feature_flags_spec.rb
index 5d1e4e1627d..57dd3015f5e 100644
--- a/spec/support_specs/helpers/stub_feature_flags_spec.rb
+++ b/spec/support_specs/helpers/stub_feature_flags_spec.rb
@@ -3,12 +3,31 @@
require 'spec_helper'
RSpec.describe StubFeatureFlags do
- let(:feature_name) { :test_feature }
+ let_it_be(:dummy_feature_flag) { :dummy_feature_flag }
+
+ # We inject a dummy feature flag definition
+ # to ensure that we strongly validate its usage
+ # as well
+ before(:all) do
+ definition = Feature::Definition.new(
+ nil,
+ name: dummy_feature_flag,
+ type: 'development',
+ # we allow ambiguous usage of `default_enabled:`
+ default_enabled: [false, true]
+ )
+
+ Feature::Definition.definitions[dummy_feature_flag] = definition
+ end
+
+ after(:all) do
+ Feature::Definition.definitions.delete(dummy_feature_flag)
+ end
describe '#stub_feature_flags' do
using RSpec::Parameterized::TableSyntax
- let(:feature_name) { :test_feature }
+ let(:feature_name) { dummy_feature_flag }
context 'when checking global state' do
where(:feature_actors, :expected_result) do
@@ -121,14 +140,14 @@ RSpec.describe StubFeatureFlags do
describe 'stub timing' do
context 'let_it_be variable' do
- let_it_be(:let_it_be_var) { Feature.enabled?(:any_feature_flag) }
+ let_it_be(:let_it_be_var) { Feature.enabled?(dummy_feature_flag) }
it { expect(let_it_be_var).to eq true }
end
context 'before_all variable' do
before_all do
- @suite_var = Feature.enabled?(:any_feature_flag)
+ @suite_var = Feature.enabled?(dummy_feature_flag)
end
it { expect(@suite_var).to eq true }
@@ -136,14 +155,14 @@ RSpec.describe StubFeatureFlags do
context 'before(:all) variable' do
before(:all) do
- @suite_var = Feature.enabled?(:any_feature_flag)
+ @suite_var = Feature.enabled?(dummy_feature_flag)
end
it { expect(@suite_var).to eq true }
end
context 'with stub_feature_flags meta' do
- let(:var) { Feature.enabled?(:any_feature_flag) }
+ let(:var) { Feature.enabled?(dummy_feature_flag) }
context 'as true', :stub_feature_flags do
it { expect(var).to eq true }
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index a2cc2b12e5e..8963164ac53 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -284,62 +284,75 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
context 'multiple repository storages' do
- let_it_be(:default_storage_hash) { Gitlab.config.repositories.storages.default.to_h }
+ include StubConfiguration
+
+ let(:default_storage_name) { 'default' }
+ let(:second_storage_name) { 'test_second_storage' }
before do
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,registry')
-
- allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
-
- # Avoid asking gitaly about the root ref (which will fail because of the
- # mocked storages)
- allow_any_instance_of(Repository).to receive(:empty?).and_return(false)
-
- FileUtils.mkdir_p(b_storage_dir)
-
- # Even when overriding the storage, we have to move it there, so it exists
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- FileUtils.mv(
- File.join(Settings.absolute(storages['default'].legacy_disk_path), project_b.repository.disk_path + '.git'),
- Rails.root.join(storages['test_second_storage'].legacy_disk_path, project_b.repository.disk_path + '.git')
- )
- end
- end
-
- after do
- FileUtils.rm_rf(test_second_storage_dir)
+ stub_storage_settings( second_storage_name => {
+ 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address,
+ 'path' => TestEnv::SECOND_STORAGE_PATH
+ })
end
- let(:test_second_storage_dir) { Dir.mktmpdir }
+ shared_examples 'includes repositories in all repository storages' do
+ specify :aggregate_failures do
+ project_a = create(:project, :repository)
+ project_a.track_project_repository
+ project_snippet_a = create(:project_snippet, :repository, project: project_a, author: project_a.owner)
+ project_b = create(:project, :repository, repository_storage: second_storage_name)
+ project_b.track_project_repository
+ project_snippet_b = create(:project_snippet, :repository, project: project_b, author: project_b.owner)
+ project_snippet_b.snippet_repository.update!(shard: project_b.project_repository.shard)
+ create(:wiki_page, container: project_a)
+ create(:design, :with_file, issue: create(:issue, project: project_a))
+
+ move_repository_to_secondary(project_b)
+ move_repository_to_secondary(project_snippet_b)
- let(:test_second_storage) do
- Gitlab::GitalyClient::StorageSettings.new(default_storage_hash.merge('path' => test_second_storage_dir))
- end
-
- let(:storages) do
- {
- 'default' => Gitlab.config.repositories.storages.default,
- 'test_second_storage' => test_second_storage
- }
- end
-
- let!(:project_a) { create(:project, :repository) }
- let!(:project_b) { create(:project, :repository, repository_storage: 'test_second_storage') }
- let!(:b_storage_dir) { File.join(test_second_storage_dir, File.dirname(project_b.disk_path)) }
-
- context 'no concurrency' do
- it 'includes repositories in all repository storages' do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
tar_contents, exit_status = Gitlab::Popen.popen(
%W{tar -tvf #{backup_tar} repositories}
)
+ tar_lines = tar_contents.lines.grep(/\.bundle/)
+
expect(exit_status).to eq(0)
- expect(tar_contents).to match("repositories/#{project_a.disk_path}.bundle")
- expect(tar_contents).to match("repositories/#{project_b.disk_path}.bundle")
+
+ [
+ "#{project_a.disk_path}.bundle",
+ "#{project_a.disk_path}.wiki.bundle",
+ "#{project_a.disk_path}.design.bundle",
+ "#{project_b.disk_path}.bundle",
+ "#{project_snippet_a.disk_path}.bundle",
+ "#{project_snippet_b.disk_path}.bundle"
+ ].each do |repo_name|
+ expect(tar_lines.grep(/#{repo_name}/).size).to eq 1
+ end
end
+
+ def move_repository_to_secondary(record)
+ Gitlab::GitalyClient::StorageSettings.allow_disk_access do
+ default_shard_legacy_path = Gitlab.config.repositories.storages.default.legacy_disk_path
+ secondary_legacy_path = Gitlab.config.repositories.storages[second_storage_name].legacy_disk_path
+ dst_dir = File.join(secondary_legacy_path, File.dirname(record.disk_path))
+
+ FileUtils.mkdir_p(dst_dir) unless Dir.exist?(dst_dir)
+
+ FileUtils.mv(
+ File.join(default_shard_legacy_path, record.disk_path + '.git'),
+ File.join(secondary_legacy_path, record.disk_path + '.git')
+ )
+ end
+ end
+ end
+
+ context 'no concurrency' do
+ it_behaves_like 'includes repositories in all repository storages'
end
context 'with concurrency' do
@@ -347,17 +360,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 4)
end
- it 'includes repositories in all repository storages' do
- expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
-
- tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{backup_tar} repositories}
- )
-
- expect(exit_status).to eq(0)
- expect(tar_contents).to match("repositories/#{project_a.disk_path}.bundle")
- expect(tar_contents).to match("repositories/#{project_b.disk_path}.bundle")
- end
+ it_behaves_like 'includes repositories in all repository storages'
end
end
@@ -370,7 +373,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
it 'has defaults' do
- expect_next_instance_of(::Backup::Repository) do |instance|
+ expect_next_instance_of(::Backup::Repositories) do |instance|
expect(instance).to receive(:dump)
.with(max_concurrency: 1, max_storage_concurrency: 1)
.and_call_original
@@ -383,7 +386,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 5)
stub_env('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 2)
- expect_next_instance_of(::Backup::Repository) do |instance|
+ expect_next_instance_of(::Backup::Repositories) do |instance|
expect(instance).to receive(:dump)
.with(max_concurrency: 5, max_storage_concurrency: 2)
.and_call_original
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 99efd394e83..046e066a45f 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -98,6 +98,29 @@ RSpec.describe 'gitlab:db namespace rake task' do
end
end
+ describe 'unattended' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:schema_migration_table_exists, :needs_migrations, :rake_output) do
+ false | false | "unattended_migrations_completed"
+ false | true | "unattended_migrations_completed"
+ true | false | "unattended_migrations_static"
+ true | true | "unattended_migrations_completed"
+ end
+
+ before do
+ allow(Rake::Task['gitlab:db:configure']).to receive(:invoke).and_return(true)
+ end
+
+ with_them do
+ it 'outputs changed message for automation after operations happen' do
+ allow(ActiveRecord::Base.connection.schema_migration).to receive(:table_exists?).and_return(schema_migration_table_exists)
+ allow_any_instance_of(ActiveRecord::MigrationContext).to receive(:needs_migration?).and_return(needs_migrations)
+ expect { run_rake_task('gitlab:db:unattended') }.to output(/^#{rake_output}$/).to_stdout
+ end
+ end
+ end
+
describe 'clean_structure_sql' do
let_it_be(:clean_rake_task) { 'gitlab:db:clean_structure_sql' }
let_it_be(:test_task_name) { 'gitlab:db:_test_multiple_structure_cleans' }
@@ -164,25 +187,77 @@ RSpec.describe 'gitlab:db namespace rake task' do
end
end
+ describe 'drop_tables' do
+ subject { run_rake_task('gitlab:db:drop_tables') }
+
+ let(:tables) { %w(one two) }
+ let(:views) { %w(three four) }
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ allow(connection).to receive(:execute).and_return(nil)
+
+ allow(connection).to receive(:tables).and_return(tables)
+ allow(connection).to receive(:views).and_return(views)
+ end
+
+ it 'drops all tables, except schema_migrations' do
+ expect(connection).to receive(:execute).with('DROP TABLE IF EXISTS "one" CASCADE')
+ expect(connection).to receive(:execute).with('DROP TABLE IF EXISTS "two" CASCADE')
+
+ subject
+ end
+
+ it 'drops all views' do
+ expect(connection).to receive(:execute).with('DROP VIEW IF EXISTS "three" CASCADE')
+ expect(connection).to receive(:execute).with('DROP VIEW IF EXISTS "four" CASCADE')
+
+ subject
+ end
+
+ it 'truncates schema_migrations table' do
+ expect(connection).to receive(:execute).with('TRUNCATE schema_migrations')
+
+ subject
+ end
+
+ it 'drops extra schemas' do
+ Gitlab::Database::EXTRA_SCHEMAS.each do |schema|
+ expect(connection).to receive(:execute).with("DROP SCHEMA IF EXISTS \"#{schema}\"")
+ end
+
+ subject
+ end
+ end
+
describe 'reindex' do
+ let(:reindex) { double('reindex') }
+ let(:indexes) { double('indexes') }
+
context 'when no index_name is given' do
- it 'raises an error' do
- expect do
- run_rake_task('gitlab:db:reindex', '')
- end.to raise_error(ArgumentError, /must give the index name/)
+ it 'rebuilds a random number of large indexes' do
+ expect(Gitlab::Database::Reindexing).to receive_message_chain('candidate_indexes.random_few').and_return(indexes)
+ expect(Gitlab::Database::Reindexing).to receive(:perform).with(indexes)
+
+ run_rake_task('gitlab:db:reindex')
end
end
- it 'calls the index rebuilder with the proper arguments' do
- reindex = double('rebuilder')
+ context 'with index name given' do
+ let(:index) { double('index') }
+
+ it 'calls the index rebuilder with the proper arguments' do
+ expect(Gitlab::Database::PostgresIndex).to receive(:by_identifier).with('public.foo_idx').and_return(index)
+ expect(Gitlab::Database::Reindexing).to receive(:perform).with([index])
- expect(Gitlab::Database::ConcurrentReindex).to receive(:new)
- .with('some_index_name', logger: instance_of(Logger))
- .and_return(reindex)
+ run_rake_task('gitlab:db:reindex', '[public.foo_idx]')
+ end
- expect(reindex).to receive(:execute)
+ it 'raises an error if the index does not exist' do
+ expect(Gitlab::Database::PostgresIndex).to receive(:by_identifier).with('public.absent_index').and_raise(ActiveRecord::RecordNotFound)
- run_rake_task('gitlab:db:reindex', '[some_index_name]')
+ expect { run_rake_task('gitlab:db:reindex', '[public.absent_index]') }.to raise_error(ActiveRecord::RecordNotFound)
+ end
end
end
diff --git a/spec/tasks/gitlab/web_hook_rake_spec.rb b/spec/tasks/gitlab/web_hook_rake_spec.rb
index 3e18ce5ab29..9f373e3a20a 100644
--- a/spec/tasks/gitlab/web_hook_rake_spec.rb
+++ b/spec/tasks/gitlab/web_hook_rake_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'gitlab:web_hook namespace rake tasks' do
it 'raises an error if an unknown namespace is specified' do
stub_env('URL' => url, 'NAMESPACE' => group.full_path)
- group.destroy
+ group.destroy!
expect { run_rake_task('gitlab:web_hook:add') }.to raise_error(SystemExit)
end
@@ -69,7 +69,7 @@ RSpec.describe 'gitlab:web_hook namespace rake tasks' do
it 'raises an error if an unknown namespace is specified' do
stub_env('URL' => url, 'NAMESPACE' => group.full_path)
- group.destroy
+ group.destroy!
expect { run_rake_task('gitlab:web_hook:rm') }.to raise_error(SystemExit)
end
diff --git a/spec/uploaders/file_uploader_spec.rb b/spec/uploaders/file_uploader_spec.rb
index 9f1d276d092..1287b809223 100644
--- a/spec/uploaders/file_uploader_spec.rb
+++ b/spec/uploaders/file_uploader_spec.rb
@@ -285,7 +285,7 @@ RSpec.describe FileUploader do
end
let!(:fog_file) do
- fog_connection.directories.new(key: 'uploads').files.create(
+ fog_connection.directories.new(key: 'uploads').files.create( # rubocop:disable Rails/SaveBang
key: 'tmp/uploads/test/123123',
body: 'content'
)
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index c73a9a7aab1..ba8d0ccbd02 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -795,7 +795,7 @@ RSpec.describe ObjectStorage do
end
let!(:fog_file) do
- fog_connection.directories.new(key: 'uploads').files.create(
+ fog_connection.directories.new(key: 'uploads').files.create( # rubocop:disable Rails/SaveBang
key: 'tmp/uploads/test/123123',
body: 'content'
)
diff --git a/spec/uploaders/pages/deployment_uploader_spec.rb b/spec/uploaders/pages/deployment_uploader_spec.rb
new file mode 100644
index 00000000000..1832f73bd67
--- /dev/null
+++ b/spec/uploaders/pages/deployment_uploader_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::DeploymentUploader do
+ let(:pages_deployment) { create(:pages_deployment) }
+ let(:uploader) { described_class.new(pages_deployment, :file) }
+
+ let(:file) do
+ fixture_file_upload("spec/fixtures/pages.zip")
+ end
+
+ subject { uploader }
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[/\h{2}/\h{2}/\h{64}/pages_deployments/\d+],
+ cache_dir: %r[pages/@hashed/tmp/cache],
+ work_dir: %r[pages/@hashed/tmp/work]
+
+ context 'when object store is REMOTE' do
+ before do
+ stub_pages_object_storage
+ end
+
+ it_behaves_like 'builds correct paths', store_dir: %r[\A\h{2}/\h{2}/\h{64}/pages_deployments/\d+\z]
+
+ it 'preserves original file when stores it' do
+ uploader.store!(file)
+
+ expect(File.exist?(file.path)).to be true
+ end
+ end
+
+ context 'when file is stored in valid local_path' do
+ before do
+ uploader.store!(file)
+ end
+
+ subject { uploader.file.path }
+
+ it { is_expected.to match(%r[#{uploader.root}/@hashed/\h{2}/\h{2}/\h{64}/pages_deployments/#{pages_deployment.id}/pages.zip]) }
+
+ it 'preserves original file when stores it' do
+ expect(File.exist?(file.path)).to be true
+ end
+ end
+
+ describe '.default_store' do
+ it 'returns local store when object storage is not enabled' do
+ expect(described_class.default_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+
+ it 'returns remote store when object storage is enabled' do
+ stub_pages_object_storage
+
+ expect(described_class.default_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+end
diff --git a/spec/uploaders/terraform/versioned_state_uploader_spec.rb b/spec/uploaders/terraform/versioned_state_uploader_spec.rb
index ecc3f943480..eeb54cb61c7 100644
--- a/spec/uploaders/terraform/versioned_state_uploader_spec.rb
+++ b/spec/uploaders/terraform/versioned_state_uploader_spec.rb
@@ -12,9 +12,18 @@ RSpec.describe Terraform::VersionedStateUploader do
end
describe '#filename' do
- it 'contains the UUID of the terraform state record' do
+ it 'contains the version of the terraform state record' do
expect(subject.filename).to eq("#{model.version}.tfstate")
end
+
+ context 'legacy state with versioning disabled' do
+ let(:state) { create(:legacy_terraform_state) }
+ let(:model) { create(:terraform_state_version, terraform_state: state) }
+
+ it 'contains the UUID of the terraform state record' do
+ expect(subject.filename).to eq("#{model.uuid}.tfstate")
+ end
+ end
end
describe '#store_dir' do
@@ -25,5 +34,14 @@ RSpec.describe Terraform::VersionedStateUploader do
expect(subject.store_dir).to eq(:store_dir)
end
+
+ context 'legacy state with versioning disabled' do
+ let(:state) { create(:legacy_terraform_state) }
+ let(:model) { create(:terraform_state_version, terraform_state: state) }
+
+ it 'contains the ID of the project' do
+ expect(subject.store_dir).to include(model.project_id.to_s)
+ end
+ end
end
end
diff --git a/spec/validators/ip_address_validator_spec.rb b/spec/validators/ip_address_validator_spec.rb
new file mode 100644
index 00000000000..382250378c2
--- /dev/null
+++ b/spec/validators/ip_address_validator_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IpAddressValidator do
+ let(:model) do
+ Class.new do
+ include ActiveModel::Model
+ include ActiveModel::Validations
+
+ attr_accessor :ip_address
+ alias_method :ip_address_before_type_cast, :ip_address
+
+ validates :ip_address, ip_address: true
+ end.new
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:ip_address, :validity, :errors) do
+ 'invalid IP' | false | { ip_address: ['must be a valid IPv4 or IPv6 address'] }
+ '192.168.17.43' | true | {}
+ '2001:0db8:85a3::8a2e:0370:7334' | true | {}
+ nil | true | {}
+ '' | true | {}
+ end
+
+ with_them do
+ before do
+ model.ip_address = ip_address
+ model.validate
+ end
+
+ it { expect(model.valid?).to eq(validity) }
+ it { expect(model.errors.messages).to eq(errors) }
+ end
+end
diff --git a/spec/views/admin/dashboard/index.html.haml_spec.rb b/spec/views/admin/dashboard/index.html.haml_spec.rb
index 70fb77944cc..e9223c84674 100644
--- a/spec/views/admin/dashboard/index.html.haml_spec.rb
+++ b/spec/views/admin/dashboard/index.html.haml_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe 'admin/dashboard/index.html.haml' do
render
expect(rendered).not_to have_content "Users in License"
- expect(rendered).not_to have_content "Active Users"
+ expect(rendered).not_to have_content "Billable Users"
expect(rendered).not_to have_content "Maximum Users"
expect(rendered).not_to have_content "Users over License"
end
diff --git a/spec/views/groups/edit.html.haml_spec.rb b/spec/views/groups/edit.html.haml_spec.rb
index 83623ea7bb4..f40b03fda2a 100644
--- a/spec/views/groups/edit.html.haml_spec.rb
+++ b/spec/views/groups/edit.html.haml_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'groups/edit.html.haml' do
expect(rendered).to have_content("Prevent sharing a project within #{test_group.name} with other groups")
expect(rendered).to have_css('.js-descr', text: 'help text here')
- expect(rendered).to have_field('group_share_with_group_lock', checkbox_options)
+ expect(rendered).to have_field('group_share_with_group_lock', **checkbox_options)
end
end
diff --git a/spec/views/jira_connect/subscriptions/index.html.haml_spec.rb b/spec/views/jira_connect/subscriptions/index.html.haml_spec.rb
new file mode 100644
index 00000000000..dcc36c93327
--- /dev/null
+++ b/spec/views/jira_connect/subscriptions/index.html.haml_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'jira_connect/subscriptions/index.html.haml' do
+ let(:user) { build_stubbed(:user) }
+
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ assign(:subscriptions, [])
+ end
+
+ context 'when the user is signed in' do
+ it 'shows link to user profile' do
+ render
+
+ expect(rendered).to have_link(user.to_reference)
+ end
+ end
+
+ context 'when the user is not signed in' do
+ let(:user) { nil }
+
+ it 'shows "Sign in" link' do
+ render
+
+ expect(rendered).to have_link('Sign in to GitLab')
+ end
+ end
+end
diff --git a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
index 777dc0c8571..2c37565328a 100644
--- a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
@@ -92,7 +92,11 @@ RSpec.describe 'layouts/nav/sidebar/_admin' do
end
context 'on settings' do
+ let(:gitlab_com?) { false }
+
before do
+ allow(::Gitlab).to receive(:com?) { gitlab_com? }
+
render
end
@@ -100,6 +104,20 @@ RSpec.describe 'layouts/nav/sidebar/_admin' do
expect(rendered).to have_link('General', href: general_admin_application_settings_path)
end
+ context 'when GitLab.com' do
+ let(:gitlab_com?) { true }
+
+ it 'does not include Integrations link' do
+ expect(rendered).not_to have_link('Integrations', href: integrations_admin_application_settings_path)
+ end
+ end
+
+ context 'when not GitLab.com' do
+ it 'includes Integrations link' do
+ expect(rendered).to have_link('Integrations', href: integrations_admin_application_settings_path)
+ end
+ end
+
context 'when GitLab FOSS' do
it 'does not include Templates link' do
expect(rendered).not_to have_link('Templates', href: '/admin/application_settings/templates')
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index bf5b5785b8d..3fb9fb54b01 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'layouts/nav/sidebar/_project' do
- let(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:project) { create(:project, :repository) }
before do
assign(:project, project)
@@ -246,6 +246,30 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
+ describe 'Tracing' do
+ it 'is not visible to unauthorized user' do
+ allow(view).to receive(:can?).and_return(false)
+
+ render
+
+ expect(rendered).not_to have_text 'Tracing'
+ end
+
+ it 'links to Tracing page' do
+ render
+
+ expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
+ end
+
+ context 'without project.tracing_external_url' do
+ it 'links to Tracing page' do
+ render
+
+ expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
+ end
+ end
+ end
+
describe 'Alert Management' do
it 'shows the Alerts sidebar entry' do
render
@@ -299,10 +323,10 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
allow(Gitlab).to receive(:com?).and_return(true)
end
- it 'does not display "Access Tokens" nav item' do
+ it 'displays "Access Tokens" nav item' do
render
- expect(rendered).not_to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
+ expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
end
end
end
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
index 1b8b28367c1..aab50209953 100644
--- a/spec/views/profiles/preferences/show.html.haml_spec.rb
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -20,6 +20,14 @@ RSpec.describe 'profiles/preferences/show' do
it 'has an id for anchoring' do
expect(rendered).to have_css('#navigation-theme')
end
+
+ it 'has correct stylesheet tags' do
+ Gitlab::Themes.each do |theme|
+ next unless theme.css_filename
+
+ expect(rendered).to have_selector("link[href*=\"themes/#{theme.css_filename}\"]", visible: false)
+ end
+ end
end
context 'syntax highlighting theme' do
@@ -67,7 +75,7 @@ RSpec.describe 'profiles/preferences/show' do
end
def have_integrations_section
- have_css('#integrations.profile-settings-sidebar', { text: 'Integrations' })
+ have_css('#integrations.profile-settings-sidebar', text: 'Integrations')
end
before do
diff --git a/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb b/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb
deleted file mode 100644
index 7cdc817d784..00000000000
--- a/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/merge_requests/diffs/_diffs.html.haml' do
- include Devise::Test::ControllerHelpers
-
- let(:user) { create(:user) }
- let(:project) { create(:project, :public, :repository) }
- let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project, author: user) }
-
- before do
- allow(view).to receive(:url_for).and_return(controller.request.fullpath)
-
- assign(:merge_request, merge_request)
- assign(:environment, merge_request.environments_for(user).last)
- assign(:diffs, merge_request.diffs)
- assign(:merge_request_diffs, merge_request.diffs)
- assign(:diff_notes_disabled, true) # disable note creation
- assign(:use_legacy_diff_notes, false)
- assign(:grouped_diff_discussions, {})
- assign(:notes, [])
- end
-
- context 'for a commit' do
- let(:commit) { merge_request.commits.last }
-
- before do
- assign(:commit, commit)
- end
-
- it "shows the commit scope" do
- render
-
- expect(rendered).to have_content "Only comments from the following commit are shown below"
- end
- end
-end
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index b4d20da0a5c..24ab64b20f5 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -6,37 +6,85 @@ RSpec.describe 'projects/settings/operations/show' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+ let_it_be(:error_tracking_setting) do
+ create(:project_error_tracking_setting, project: project)
+ end
+
+ let_it_be_with_reload(:tracing_setting) do
+ create(:project_tracing_setting, project: project)
+ end
+
+ let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
+ let_it_be(:alerts_service) { create(:alerts_service, project: project) }
+
let(:operations_show_locals) do
{
- prometheus_service: project.find_or_initialize_service('prometheus'),
- alerts_service: project.find_or_initialize_service('alerts')
+ prometheus_service: prometheus_service,
+ alerts_service: alerts_service
}
end
+ before_all do
+ project.add_reporter(user)
+ end
+
before do
assign :project, project
+
+ allow(view).to receive(:error_tracking_setting)
+ .and_return(error_tracking_setting)
+ allow(view).to receive(:tracing_setting)
+ .and_return(tracing_setting)
+ allow(view).to receive(:current_user).and_return(user)
end
describe 'Operations > Error Tracking' do
- before do
- project.add_reporter(user)
+ context 'Settings page ' do
+ it 'renders the Operations Settings page' do
+ render template: 'projects/settings/operations/show', locals: operations_show_locals
- allow(view).to receive(:error_tracking_setting)
- .and_return(error_tracking_setting)
- allow(view).to receive(:current_user).and_return(user)
- allow(view).to receive(:incident_management_available?) { false }
+ expect(rendered).to have_content _('Error tracking')
+ expect(rendered).to have_content _('To link Sentry to GitLab, enter your Sentry URL and Auth Token')
+ end
end
+ end
- let_it_be(:error_tracking_setting) do
- create(:project_error_tracking_setting, project: project)
+ describe 'Operations > Tracing' do
+ context 'with project.tracing_external_url' do
+ it 'links to project.tracing_external_url' do
+ render template: 'projects/settings/operations/show', locals: operations_show_locals
+
+ expect(rendered).to have_link('Tracing', href: tracing_setting.external_url)
+ end
+
+ context 'with malicious external_url' do
+ let(:malicious_tracing_url) { "https://replaceme.com/'><script>alert(document.cookie)</script>" }
+ let(:cleaned_url) { "https://replaceme.com/'>" }
+
+ before do
+ tracing_setting.update_column(:external_url, malicious_tracing_url)
+ end
+
+ it 'sanitizes external_url' do
+ render template: 'projects/settings/operations/show', locals: operations_show_locals
+
+ expect(tracing_setting.external_url).to eq(malicious_tracing_url)
+ expect(rendered).to have_link('Tracing', href: cleaned_url)
+ end
+ end
end
- context 'Settings page ' do
- it 'renders the Operations Settings page' do
+ context 'without project.tracing_external_url' do
+ let(:tracing_setting) { build(:project_tracing_setting, project: project) }
+
+ before do
+ tracing_setting.external_url = nil
+ end
+
+ it 'links to Tracing page' do
render template: 'projects/settings/operations/show', locals: operations_show_locals
- expect(rendered).to have_content _('Error tracking')
- expect(rendered).to have_content _('To link Sentry to GitLab, enter your Sentry URL and Auth Token')
+ expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
end
end
end
diff --git a/spec/views/projects/tracing/show.html.haml_spec.rb b/spec/views/projects/tracing/show.html.haml_spec.rb
new file mode 100644
index 00000000000..96dc6a18fc7
--- /dev/null
+++ b/spec/views/projects/tracing/show.html.haml_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/tracings/show' do
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be(:error_tracking_setting) { create(:project_error_tracking_setting, project: project) }
+
+ before do
+ assign(:project, project)
+ allow(view).to receive(:error_tracking_setting)
+ .and_return(error_tracking_setting)
+ end
+
+ context 'with project.tracing_external_url' do
+ let_it_be(:tracing_url) { 'https://tracing.url' }
+ let_it_be(:tracing_setting) { create(:project_tracing_setting, project: project, external_url: tracing_url) }
+
+ before do
+ allow(view).to receive(:can?).and_return(true)
+ allow(view).to receive(:tracing_setting).and_return(tracing_setting)
+ end
+
+ it 'renders iframe' do
+ render
+
+ expect(rendered).to match(/iframe/)
+ end
+
+ context 'with malicious external_url' do
+ let(:malicious_tracing_url) { "https://replaceme.com/'><script>alert(document.cookie)</script>" }
+ let(:cleaned_url) { "https://replaceme.com/'&gt;" }
+
+ before do
+ tracing_setting.update_column(:external_url, malicious_tracing_url)
+ end
+
+ it 'sanitizes external_url' do
+ render
+
+ expect(tracing_setting.external_url).to eq(malicious_tracing_url)
+ expect(rendered).to have_xpath("//iframe[@src=\"#{cleaned_url}\"]")
+ end
+ end
+ end
+
+ context 'without project.tracing_external_url' do
+ before do
+ allow(view).to receive(:can?).and_return(true)
+ end
+
+ it 'renders empty state' do
+ render
+
+ expect(rendered).to have_link('Add Jaeger URL')
+ expect(rendered).not_to match(/iframe/)
+ end
+ end
+end
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index 9e95dc40ff8..033b2304e33 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -60,6 +60,28 @@ RSpec.describe 'search/_results' do
expect(rendered).to have_selector('#js-search-filter-by-state')
end
+
+ context 'Feature search_filter_by_confidential' do
+ context 'when disabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'does not render the confidential drop down' do
+ render
+
+ expect(rendered).not_to have_selector('#js-search-filter-by-confidential')
+ end
+ end
+
+ context 'when enabled' do
+ it 'renders the confidential drop down' do
+ render
+
+ expect(rendered).to have_selector('#js-search-filter-by-confidential')
+ end
+ end
+ end
end
end
end
diff --git a/spec/views/shared/_label_row.html.haml_spec.rb b/spec/views/shared/_label_row.html.haml_spec.rb
index 8f8aa3072e2..e9a0bfdcd4e 100644
--- a/spec/views/shared/_label_row.html.haml_spec.rb
+++ b/spec/views/shared/_label_row.html.haml_spec.rb
@@ -2,43 +2,126 @@
require 'spec_helper'
RSpec.describe 'shared/_label_row.html.haml' do
- label_types = {
- 'project label': :label,
- 'group label': :group_label
- }
+ let_it_be(:group) { create(:group) }
+ let(:label) { build_stubbed(:group_label, group: group).present(issuable_subject: group) }
- label_types.each do |label_type, label_factory|
- let!(:label) do
- label_record = create(label_factory) # rubocop: disable Rails/SaveBang
- label_record.present(issuable_subject: label_record.subject)
+ before do
+ allow(view).to receive(:label) { label }
+ end
+
+ context 'with a project context' do
+ let_it_be(:project) { create(:project, group: group) }
+ let(:label) { build_stubbed(:label, project: project).present(issuable_subject: project) }
+
+ before do
+ assign(:project, label.project)
+
+ render
+ end
+
+ it 'has label title' do
+ expect(rendered).to have_text(label.title)
+ end
+
+ it 'has a non-linked label title' do
+ expect(rendered).not_to have_link(label.title)
+ end
+
+ it 'has Issues link' do
+ expect(rendered).to have_link('Issues')
+ end
+
+ it 'has Merge request link' do
+ expect(rendered).to have_link('Merge requests')
+ end
+
+ it 'shows the path from where the label was created' do
+ expect(rendered).to have_css('.label-badge', text: project.full_name)
+ end
+ end
+
+ context 'with a subgroup context' do
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let(:label) { build_stubbed(:group_label, group: subgroup).present(issuable_subject: subgroup) }
+
+ before do
+ assign(:group, label.group)
+
+ render
+ end
+
+ it 'has label title' do
+ expect(rendered).to have_text(label.title)
+ end
+
+ it 'has a non-linked label title' do
+ expect(rendered).not_to have_link(label.title)
+ end
+
+ it 'has Issues link' do
+ expect(rendered).to have_link('Issues')
+ end
+
+ it 'has Merge request link' do
+ expect(rendered).to have_link('Merge requests')
+ end
+
+ it 'shows the path from where the label was created' do
+ expect(rendered).to have_css('.label-badge', text: subgroup.full_name)
+ end
+ end
+
+ context 'with a group context' do
+ before do
+ assign(:group, label.group)
+
+ render
+ end
+
+ it 'has label title' do
+ expect(rendered).to have_text(label.title)
+ end
+
+ it 'has a non-linked label title' do
+ expect(rendered).not_to have_link(label.title)
+ end
+
+ it 'has Issues link' do
+ expect(rendered).to have_link('Issues')
end
- context "for a #{label_type}" do
- before do
- if label.project_label?
- @project = label.project
- else
- @group = label.group
- end
- end
+ it 'has Merge request link' do
+ expect(rendered).to have_link('Merge requests')
+ end
+
+ it 'does not show a path from where the label was created' do
+ expect(rendered).not_to have_css('.label-badge')
+ end
+ end
- it 'has a non-linked label title' do
- render 'shared/label_row', label: label
+ context 'with an admin context' do
+ before do
+ render
+ end
- expect(rendered).not_to have_css('a', text: label.title)
- end
+ it 'has label title' do
+ expect(rendered).to have_text(label.title)
+ end
- it "has Issues link for #{label_type}" do
- render 'shared/label_row', label: label
+ it 'has a non-linked label title' do
+ expect(rendered).not_to have_link(label.title)
+ end
- expect(rendered).to have_css('a', text: 'Issues')
- end
+ it 'does not show Issues link' do
+ expect(rendered).not_to have_link('Issues')
+ end
- it "has Merge request link for #{label_type}" do
- render 'shared/label_row', label: label
+ it 'does not show Merge request link' do
+ expect(rendered).not_to have_link('Merge requests')
+ end
- expect(rendered).to have_css('a', text: 'Merge requests')
- end
+ it 'does not show a path from where the label was created' do
+ expect(rendered).not_to have_css('.label-badge')
end
end
end
diff --git a/spec/views/shared/milestones/_issuables.html.haml_spec.rb b/spec/views/shared/milestones/_issuables.html.haml_spec.rb
index 70ab6914580..5eed2c96a45 100644
--- a/spec/views/shared/milestones/_issuables.html.haml_spec.rb
+++ b/spec/views/shared/milestones/_issuables.html.haml_spec.rb
@@ -6,8 +6,7 @@ RSpec.describe 'shared/milestones/_issuables.html.haml' do
let(:issuables_size) { 100 }
before do
- allow(view).to receive_messages(title: nil, id: nil, show_project_name: nil,
- show_full_project_name: nil, dom_class: '',
+ allow(view).to receive_messages(title: nil, id: nil, show_project_name: nil, dom_class: '',
issuables: double(length: issuables_size).as_null_object)
stub_template 'shared/milestones/_issuable.html.haml' => ''
diff --git a/spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb b/spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb
index 620900b3402..ff692d0eda6 100644
--- a/spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb
+++ b/spec/workers/analytics/instance_statistics/count_job_trigger_worker_spec.rb
@@ -14,16 +14,4 @@ RSpec.describe Analytics::InstanceStatistics::CountJobTriggerWorker do
expect(Analytics::InstanceStatistics::CounterJobWorker.jobs.count).to eq(expected_count)
end
end
-
- context 'when the `store_instance_statistics_measurements` feature flag is off' do
- before do
- stub_feature_flags(store_instance_statistics_measurements: false)
- end
-
- it 'does not trigger any CounterJobWorker job' do
- subject.perform
-
- expect(Analytics::InstanceStatistics::CounterJobWorker.jobs.count).to eq(0)
- end
- end
end
diff --git a/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb b/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb
index 8db86071dc4..667ec0bcb75 100644
--- a/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb
+++ b/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do
it 'counts a scope and stores the result' do
subject
- measurement = Analytics::InstanceStatistics::Measurement.first
+ measurement = Analytics::InstanceStatistics::Measurement.users.first
expect(measurement.recorded_at).to be_like_time(recorded_at)
expect(measurement.identifier).to eq('users')
expect(measurement.count).to eq(2)
@@ -33,7 +33,7 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do
it 'sets 0 as the count' do
subject
- measurement = Analytics::InstanceStatistics::Measurement.first
+ measurement = Analytics::InstanceStatistics::Measurement.groups.first
expect(measurement.recorded_at).to be_like_time(recorded_at)
expect(measurement.identifier).to eq('groups')
expect(measurement.count).to eq(0)
@@ -51,4 +51,20 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do
expect { subject }.not_to change { Analytics::InstanceStatistics::Measurement.count }
end
+
+ context 'when pipelines_succeeded identifier is passed' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :success) }
+
+ let(:successful_pipelines_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:pipelines_succeeded) }
+ let(:job_args) { [successful_pipelines_measurement_identifier, pipeline.id, pipeline.id, recorded_at] }
+
+ it 'counts successful pipelines' do
+ subject
+
+ measurement = Analytics::InstanceStatistics::Measurement.pipelines_succeeded.first
+ expect(measurement.recorded_at).to be_like_time(recorded_at)
+ expect(measurement.identifier).to eq('pipelines_succeeded')
+ expect(measurement.count).to eq(1)
+ end
+ end
end
diff --git a/spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb b/spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb
index 2d633828ae3..9d4d48d0568 100644
--- a/spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb
+++ b/spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb
@@ -11,17 +11,5 @@ RSpec.describe AuthorizedProjectUpdate::PeriodicRecalculateWorker do
subject.perform
end
-
- context 'feature flag :periodic_project_authorization_recalculation is disabled' do
- before do
- stub_feature_flags(periodic_project_authorization_recalculation: false)
- end
-
- it 'does not call AuthorizedProjectUpdate::PeriodicRecalculateService' do
- expect(AuthorizedProjectUpdate::PeriodicRecalculateService).not_to receive(:new)
-
- subject.perform
- end
- end
end
end
diff --git a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
index c49e4c453bf..a27c431523e 100644
--- a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
@@ -14,17 +14,5 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker do
subject.perform(start_user_id, end_user_id)
end
-
- context 'feature flag :periodic_project_authorization_recalculation is disabled' do
- before do
- stub_feature_flags(periodic_project_authorization_recalculation: false)
- end
-
- it 'does not call AuthorizedProjectUpdate::RecalculateForUserRangeService' do
- expect(AuthorizedProjectUpdate::RecalculateForUserRangeService).not_to receive(:new)
-
- subject.perform(start_user_id, end_user_id)
- end
- end
end
end
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index e7f7ae84621..11b50961e9e 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -20,10 +20,10 @@ RSpec.describe BuildFinishedWorker do
expect_any_instance_of(BuildTraceSectionsWorker).to receive(:perform)
expect_any_instance_of(BuildCoverageWorker).to receive(:perform)
expect(BuildHooksWorker).to receive(:perform_async)
- expect(ArchiveTraceWorker).to receive(:perform_async)
expect(ExpirePipelineCacheWorker).to receive(:perform_async)
expect(ChatNotificationWorker).not_to receive(:perform_async)
expect(Ci::BuildReportResultWorker).not_to receive(:perform)
+ expect(ArchiveTraceWorker).to receive(:perform_in)
subject
end
diff --git a/spec/workers/ci/build_trace_chunk_flush_worker_spec.rb b/spec/workers/ci/build_trace_chunk_flush_worker_spec.rb
index 352ad6d4cf6..8aac80a02be 100644
--- a/spec/workers/ci/build_trace_chunk_flush_worker_spec.rb
+++ b/spec/workers/ci/build_trace_chunk_flush_worker_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Ci::BuildTraceChunkFlushWorker do
described_class.new.perform(chunk.id)
- expect(chunk.reload).to be_persisted
+ expect(chunk.reload).to be_migrated
end
describe '#perform' do
@@ -24,7 +24,7 @@ RSpec.describe Ci::BuildTraceChunkFlushWorker do
it 'migrates build trace chunk to a safe store' do
subject
- expect(chunk.reload).to be_persisted
+ expect(chunk.reload).to be_migrated
end
end
end
diff --git a/spec/workers/ci/delete_objects_worker_spec.rb b/spec/workers/ci/delete_objects_worker_spec.rb
new file mode 100644
index 00000000000..6cb8e0cba37
--- /dev/null
+++ b/spec/workers/ci/delete_objects_worker_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DeleteObjectsWorker do
+ let(:worker) { described_class.new }
+
+ it { expect(described_class.idempotent?).to be_truthy }
+
+ describe '#perform' do
+ it 'executes a service' do
+ expect_next_instance_of(Ci::DeleteObjectsService) do |instance|
+ expect(instance).to receive(:execute)
+ expect(instance).to receive(:remaining_batches_count).once.and_call_original
+ end
+
+ worker.perform
+ end
+ end
+
+ describe '#max_running_jobs' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ stub_feature_flags(
+ ci_delete_objects_low_concurrency: low,
+ ci_delete_objects_medium_concurrency: medium,
+ ci_delete_objects_high_concurrency: high
+ )
+ end
+
+ subject(:max_running_jobs) { worker.max_running_jobs }
+
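+ # Per the table below: the low flag caps concurrency at 2, medium at 20, high at 50; the lowest enabled tier wins, and with no flags enabled no jobs run.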
+ where(:low, :medium, :high, :expected) do
+ false | false | false | 0
+ true | true | true | 2
+ true | false | false | 2
+ false | true | false | 20
+ false | true | true | 20
+ false | false | true | 50
+ end
+
+ with_them do
+ it 'sets up concurrency depending on the feature flags' do
+ expect(max_running_jobs).to eq(expected)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/schedule_delete_objects_cron_worker_spec.rb b/spec/workers/ci/schedule_delete_objects_cron_worker_spec.rb
new file mode 100644
index 00000000000..142df271f90
--- /dev/null
+++ b/spec/workers/ci/schedule_delete_objects_cron_worker_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ScheduleDeleteObjectsCronWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'enqueues DeleteObjectsWorker jobs' do
+ expect(Ci::DeleteObjectsWorker).to receive(:perform_with_capacity)
+
+ worker.perform
+ end
+ end
+end
diff --git a/spec/workers/cleanup_container_repository_worker_spec.rb b/spec/workers/cleanup_container_repository_worker_spec.rb
index 0545f7a35e4..9cf8974a2a1 100644
--- a/spec/workers/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/cleanup_container_repository_worker_spec.rb
@@ -40,14 +40,35 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
context 'container expiration policy' do
let(:params) { { key: 'value', 'container_expiration_policy' => true } }
+ before do
+ allow(ContainerRepository)
+ .to receive(:find_by_id).with(repository.id).and_return(repository)
+ end
+
it 'executes the destroy service' do
+ expect(repository).to receive(:start_expiration_policy!).and_call_original
+ expect(repository).to receive(:reset_expiration_policy_started_at!).and_call_original
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
.with(project, nil, params.merge('container_expiration_policy' => true))
.and_return(service)
- expect(service).to receive(:execute)
+ expect(service).to receive(:execute).and_return(status: :success)
+
+ subject.perform(nil, repository.id, params)
+ expect(repository.reload.expiration_policy_started_at).to be_nil
+ end
+
+ it "doesn't reset the expiration policy started at if the destroy service returns an error" do
+ expect(repository).to receive(:start_expiration_policy!).and_call_original
+ expect(repository).not_to receive(:reset_expiration_policy_started_at!)
+ expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
+ .with(project, nil, params.merge('container_expiration_policy' => true))
+ .and_return(service)
+
+ expect(service).to receive(:execute).and_return(status: :error, message: 'timeout while deleting tags')
subject.perform(nil, repository.id, params)
+ expect(repository.reload.expiration_policy_started_at).not_to be_nil
end
end
end
diff --git a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
new file mode 100644
index 00000000000..2c79f347903
--- /dev/null
+++ b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
+ let(:job_tracker) do
+ described_class.new('namespace')
+ end
+
+ describe '#register' do
+ it 'adds jid to the set' do
+ job_tracker.register('a-job-id')
+
+ expect(job_tracker.running_jids).to contain_exactly('a-job-id')
+ end
+
+ it 'updates the counter' do
+ expect { job_tracker.register('a-job-id') }
+ .to change { job_tracker.count }
+ .from(0)
+ .to(1)
+ end
+
+ it 'does it in only one Redis call' do
+ expect(job_tracker).to receive(:with_redis).once.and_call_original
+
+ job_tracker.register('a-job-id')
+ end
+ end
+
+ describe '#remove' do
+ before do
+ job_tracker.register(%w[a-job-id other-job-id])
+ end
+
+ it 'removes jid from the set' do
+ job_tracker.remove('other-job-id')
+
+ expect(job_tracker.running_jids).to contain_exactly('a-job-id')
+ end
+
+ it 'updates the counter' do
+ expect { job_tracker.remove('other-job-id') }
+ .to change { job_tracker.count }
+ .from(2)
+ .to(1)
+ end
+
+ it 'does it in only one Redis call' do
+ expect(job_tracker).to receive(:with_redis).once.and_call_original
+
+ job_tracker.remove('other-job-id')
+ end
+ end
+
+ describe '#clean_up' do
+ before do
+ job_tracker.register('a-job-id')
+ end
+
+ context 'with running jobs' do
+ before do
+ expect(Gitlab::SidekiqStatus).to receive(:completed_jids)
+ .with(%w[a-job-id])
+ .and_return([])
+ end
+
+ it 'does not remove the jid from the set' do
+ expect { job_tracker.clean_up }
+ .not_to change { job_tracker.running_jids.include?('a-job-id') }
+ end
+
+ it 'does only one Redis call to get the job ids' do
+ expect(job_tracker).to receive(:with_redis).once.and_call_original
+
+ job_tracker.clean_up
+ end
+ end
+
+ context 'with completed jobs' do
+ it 'removes the jid from the set' do
+ expect { job_tracker.clean_up }
+ .to change { job_tracker.running_jids.include?('a-job-id') }
+ end
+
+ it 'updates the counter' do
+ expect { job_tracker.clean_up }
+ .to change { job_tracker.count }
+ .from(1)
+ .to(0)
+ end
+
+ it 'gets the job ids, removes them, and updates the counter with only two Redis calls' do
+ expect(job_tracker).to receive(:with_redis).twice.and_call_original
+
+ job_tracker.clean_up
+ end
+ end
+ end
+end
diff --git a/spec/workers/concerns/limited_capacity/worker_spec.rb b/spec/workers/concerns/limited_capacity/worker_spec.rb
new file mode 100644
index 00000000000..8a15675c04d
--- /dev/null
+++ b/spec/workers/concerns/limited_capacity/worker_spec.rb
@@ -0,0 +1,285 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_failures do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
+ include LimitedCapacity::Worker
+ end
+ end
+
+ let(:worker) { worker_class.new }
+
+ let(:job_tracker) do
+ LimitedCapacity::JobTracker.new(worker_class.name)
+ end
+
+ before do
+ worker.jid = 'my-jid'
+ allow(worker).to receive(:job_tracker).and_return(job_tracker)
+ end
+
+ describe 'required methods' do
+ it { expect { worker.perform_work }.to raise_error(NotImplementedError) }
+ it { expect { worker.remaining_work_count }.to raise_error(NotImplementedError) }
+ it { expect { worker.max_running_jobs }.to raise_error(NotImplementedError) }
+ end
+
+ describe 'Sidekiq options' do
+ it 'does not retry failed jobs' do
+ expect(worker_class.sidekiq_options['retry']).to eq(0)
+ end
+
+ it 'does not deduplicate jobs' do
+ expect(worker_class.get_deduplicate_strategy).to eq(:none)
+ end
+ end
+
+ describe '.perform_with_capacity' do
+ subject(:perform_with_capacity) { worker_class.perform_with_capacity(:arg) }
+
+ before do
+ expect_next_instance_of(worker_class) do |instance|
+ expect(instance).to receive(:remove_failed_jobs)
+ expect(instance).to receive(:report_prometheus_metrics)
+
+ allow(instance).to receive(:remaining_work_count).and_return(remaining_work_count)
+ allow(instance).to receive(:remaining_capacity).and_return(remaining_capacity)
+ end
+ end
+
+ context 'when capacity is larger than work' do
+ let(:remaining_work_count) { 2 }
+ let(:remaining_capacity) { 3 }
+
+ it 'enqueues jobs for remaining work' do
+ expect(worker_class)
+ .to receive(:bulk_perform_async)
+ .with([[:arg], [:arg]])
+
+ perform_with_capacity
+ end
+ end
+
+ context 'when capacity is lower than work' do
+ let(:remaining_work_count) { 5 }
+ let(:remaining_capacity) { 3 }
+
+ it 'enqueues jobs up to the remaining capacity' do
+ expect(worker_class)
+ .to receive(:bulk_perform_async)
+ .with([[:arg], [:arg], [:arg]])
+
+ perform_with_capacity
+ end
+ end
+ end
+
+ describe '#perform' do
+ subject(:perform) { worker.perform(:arg) }
+
+ context 'with capacity' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(worker).to receive(:running_jobs_count).and_return(0)
+ allow(worker).to receive(:remaining_work_count).and_return(0)
+ end
+
+ it 'calls perform_work' do
+ expect(worker).to receive(:perform_work).with(:arg)
+
+ perform
+ end
+
+ it 're-enqueues itself' do
+ allow(worker).to receive(:perform_work)
+ expect(worker).to receive(:re_enqueue).with(:arg)
+
+ perform
+ end
+
+ it 'registers itself in the running set' do
+ allow(worker).to receive(:perform_work)
+ expect(job_tracker).to receive(:register).with('my-jid')
+
+ perform
+ end
+
+ it 'removes itself from the running set' do
+ expect(job_tracker).to receive(:remove).with('my-jid')
+
+ allow(worker).to receive(:perform_work)
+
+ perform
+ end
+
+ it 'reports prometheus metrics' do
+ allow(worker).to receive(:perform_work)
+ expect(worker).to receive(:report_prometheus_metrics)
+
+ perform
+ end
+ end
+
+ context 'with capacity and without work' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(worker).to receive(:running_jobs_count).and_return(0)
+ allow(worker).to receive(:remaining_work_count).and_return(0)
+ allow(worker).to receive(:perform_work)
+ end
+
+ it 'does not re-enqueue itself' do
+ expect(worker_class).not_to receive(:perform_async)
+
+ perform
+ end
+ end
+
+ context 'without capacity' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(worker).to receive(:running_jobs_count).and_return(15)
+ allow(worker).to receive(:remaining_work_count).and_return(10)
+ end
+
+ it 'does not call perform_work' do
+ expect(worker).not_to receive(:perform_work)
+
+ perform
+ end
+
+ it 'does not re-enqueue itself' do
+ expect(worker_class).not_to receive(:perform_async)
+
+ perform
+ end
+
+ it 'does not register in the running set' do
+ expect(job_tracker).not_to receive(:register)
+
+ perform
+ end
+
+ it 'removes itself from the running set' do
+ expect(job_tracker).to receive(:remove).with('my-jid')
+
+ perform
+ end
+
+ it 'reports prometheus metrics' do
+ expect(worker).to receive(:report_prometheus_metrics)
+
+ perform
+ end
+ end
+
+ context 'when perform_work fails' do
+ it 'does not re-enqueue itself' do
+ expect(worker).not_to receive(:re_enqueue)
+
+ expect { perform }.to raise_error(NotImplementedError)
+ end
+
+ it 'removes itself from the running set' do
+ expect(job_tracker).to receive(:remove)
+
+ expect { perform }.to raise_error(NotImplementedError)
+ end
+
+ it 'reports prometheus metrics' do
+ expect(worker).to receive(:report_prometheus_metrics)
+
+ expect { perform }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+
+ describe '#remaining_capacity' do
+ subject(:remaining_capacity) { worker.remaining_capacity }
+
+ before do
+ expect(worker).to receive(:max_running_jobs).and_return(max_capacity)
+ end
+
+ context 'when changing the capacity to a lower value' do
+ let(:max_capacity) { -1 }
+
+ it { expect(remaining_capacity).to eq(0) }
+ end
+
+ context 'when registering new jobs' do
+ let(:max_capacity) { 2 }
+
+ before do
+ job_tracker.register('a-job-id')
+ end
+
+ it { expect(remaining_capacity).to eq(1) }
+ end
+
+ context 'with jobs in the queue' do
+ let(:max_capacity) { 2 }
+
+ before do
+ expect(worker_class).to receive(:queue_size).and_return(1)
+ end
+
+ it { expect(remaining_capacity).to eq(1) }
+ end
+
+ context 'with both running jobs and queued jobs' do
+ let(:max_capacity) { 10 }
+
+ before do
+ expect(worker_class).to receive(:queue_size).and_return(5)
+ expect(worker).to receive(:running_jobs_count).and_return(3)
+ end
+
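+ # remaining capacity = max_running_jobs (10) - queued jobs (5) - running jobs (3) = 2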
+ it { expect(remaining_capacity).to eq(2) }
+ end
+ end
+
+ describe '#remove_failed_jobs' do
+ subject(:remove_failed_jobs) { worker.remove_failed_jobs }
+
+ before do
+ job_tracker.register('a-job-id')
+ allow(worker).to receive(:max_running_jobs).and_return(2)
+
+ expect(job_tracker).to receive(:clean_up).and_call_original
+ end
+
+ context 'with failed jobs' do
+ it 'updates the available capacity' do
+ expect { remove_failed_jobs }.to change { worker.remaining_capacity }.by(1)
+ end
+ end
+ end
+
+ describe '#report_prometheus_metrics' do
+ subject(:report_prometheus_metrics) { worker.report_prometheus_metrics }
+
+ before do
+ allow(worker).to receive(:running_jobs_count).and_return(5)
+ allow(worker).to receive(:max_running_jobs).and_return(7)
+ allow(worker).to receive(:remaining_work_count).and_return(9)
+ end
+
+ it 'reports number of running jobs' do
+ labels = { worker: 'DummyWorker' }
+
+ report_prometheus_metrics
+
+ expect(Gitlab::Metrics.registry.get(:limited_capacity_worker_running_jobs).get(labels)).to eq(5)
+ expect(Gitlab::Metrics.registry.get(:limited_capacity_worker_max_running_jobs).get(labels)).to eq(7)
+ expect(Gitlab::Metrics.registry.get(:limited_capacity_worker_remaining_work_count).get(labels)).to eq(9)
+ end
+ end
+end
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index 868eb6b192e..6b185c30670 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -7,19 +7,24 @@ RSpec.describe ContainerExpirationPolicyWorker do
subject { described_class.new.perform }
- context 'With no container expiration policies' do
- it 'Does not execute any policies' do
+ RSpec.shared_examples 'not executing any policy' do
+ it 'does not run any policy' do
expect(ContainerExpirationPolicyService).not_to receive(:new)
subject
end
end
+ context 'With no container expiration policies' do
+ it_behaves_like 'not executing any policy'
+ end
+
context 'With container expiration policies' do
- context 'a valid policy' do
- let!(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
- let(:user) { container_expiration_policy.project.owner }
+ let_it_be(:container_expiration_policy, reload: true) { create(:container_expiration_policy, :runnable) }
+ let_it_be(:container_repository) { create(:container_repository, project: container_expiration_policy.project) }
+ let_it_be(:user) { container_expiration_policy.project.owner }
+ context 'a valid policy' do
it 'runs the policy' do
service = instance_double(ContainerExpirationPolicyService, execute: true)
@@ -31,33 +36,30 @@ RSpec.describe ContainerExpirationPolicyWorker do
end
context 'a disabled policy' do
- let!(:container_expiration_policy) { create(:container_expiration_policy, :runnable, :disabled) }
- let(:user) {container_expiration_policy.project.owner }
-
- it 'does not run the policy' do
- expect(ContainerExpirationPolicyService)
- .not_to receive(:new).with(container_expiration_policy, user)
-
- subject
+ before do
+ container_expiration_policy.disable!
end
+
+ it_behaves_like 'not executing any policy'
end
context 'a policy that is not due for a run' do
- let!(:container_expiration_policy) { create(:container_expiration_policy) }
- let(:user) {container_expiration_policy.project.owner }
+ before do
+ container_expiration_policy.update_column(:next_run_at, 2.minutes.from_now)
+ end
- it 'does not run the policy' do
- expect(ContainerExpirationPolicyService)
- .not_to receive(:new).with(container_expiration_policy, user)
+ it_behaves_like 'not executing any policy'
+ end
- subject
+ context 'a policy linked to no container repository' do
+ before do
+ container_expiration_policy.container_repositories.delete_all
end
+
+ it_behaves_like 'not executing any policy'
end
context 'an invalid policy' do
- let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
- let_it_be(:user) {container_expiration_policy.project.owner }
-
before do
container_expiration_policy.update_column(:name_regex, '*production')
end
diff --git a/spec/workers/deployments/drop_older_deployments_worker_spec.rb b/spec/workers/deployments/drop_older_deployments_worker_spec.rb
new file mode 100644
index 00000000000..0cf524ca16f
--- /dev/null
+++ b/spec/workers/deployments/drop_older_deployments_worker_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deployments::DropOlderDeploymentsWorker do
+ subject { described_class.new.perform(deployment&.id) }
+
+ describe '#perform' do
+ let(:deployment) { create(:deployment, :success) }
+
+ it 'executes Deployments::OlderDeploymentsDropService' do
+ expect(Deployments::OlderDeploymentsDropService)
+ .to receive(:new).with(deployment.id).and_call_original
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/deployments/execute_hooks_worker_spec.rb b/spec/workers/deployments/execute_hooks_worker_spec.rb
new file mode 100644
index 00000000000..fb1dc8cf290
--- /dev/null
+++ b/spec/workers/deployments/execute_hooks_worker_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deployments::ExecuteHooksWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ before do
+ allow(ProjectServiceWorker).to receive(:perform_async)
+ end
+
+ it 'executes project services for deployment_hooks' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ service = create(:service, type: 'SlackService', project: project, deployment_events: true, active: true)
+
+ expect(ProjectServiceWorker).to receive(:perform_async).with(service.id, an_instance_of(Hash))
+
+ worker.perform(deployment.id)
+ end
+
+ it 'does not execute an inactive service' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ create(:service, type: 'SlackService', project: project, deployment_events: true, active: false)
+
+ expect(ProjectServiceWorker).not_to receive(:perform_async)
+
+ worker.perform(deployment.id)
+ end
+
+ it 'does not execute if a deployment does not exist' do
+ expect(ProjectServiceWorker).not_to receive(:perform_async)
+
+ worker.perform(non_existing_record_id)
+ end
+
+ it 'executes webhooks' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ web_hook = create(:project_hook, deployment_events: true, project: project)
+
+ expect_next_instance_of(WebHookService, web_hook, an_instance_of(Hash), "deployment_hooks") do |service|
+ expect(service).to receive(:async_execute)
+ end
+
+ worker.perform(deployment.id)
+ end
+ end
+end
diff --git a/spec/workers/deployments/link_merge_request_worker_spec.rb b/spec/workers/deployments/link_merge_request_worker_spec.rb
new file mode 100644
index 00000000000..a55dd897bc7
--- /dev/null
+++ b/spec/workers/deployments/link_merge_request_worker_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deployments::LinkMergeRequestWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'links merge requests to the deployment' do
+ deployment = create(:deployment)
+ service = instance_double(Deployments::LinkMergeRequestsService)
+
+ expect(Deployments::LinkMergeRequestsService)
+ .to receive(:new)
+ .with(deployment)
+ .and_return(service)
+
+ expect(service).to receive(:execute)
+
+ worker.perform(deployment.id)
+ end
+
+ it 'does not link merge requests when the deployment is not found' do
+ expect(Deployments::LinkMergeRequestsService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id)
+ end
+ end
+
+ context 'idempotent' do
+ include_examples 'an idempotent worker' do
+ let(:project) { create(:project, :repository) }
+ let(:environment) { create(:environment, project: project) }
+ let(:deployment) { create(:deployment, :success, project: project, environment: environment) }
+ let(:job_args) { deployment.id }
+
+ it 'links merge requests to deployment' do
+ mr1 = create(
+ :merge_request,
+ :merged,
+ source_project: project,
+ target_project: project,
+ source_branch: 'source1',
+ target_branch: deployment.ref
+ )
+
+ mr2 = create(
+ :merge_request,
+ :merged,
+ source_project: project,
+ target_project: project,
+ source_branch: 'source2',
+ target_branch: deployment.ref
+ )
+
+ mr3 = create(
+ :merge_request,
+ :merged,
+ source_project: project,
+ target_project: project,
+ target_branch: 'foo'
+ )
+
+ subject
+
+ expect(deployment.merge_requests).to include(mr1, mr2)
+ expect(deployment.merge_requests).not_to include(mr3)
+ end
+ end
+ end
+end
diff --git a/spec/workers/deployments/success_worker_spec.rb b/spec/workers/deployments/success_worker_spec.rb
index 7c21a3147a7..d9996e66919 100644
--- a/spec/workers/deployments/success_worker_spec.rb
+++ b/spec/workers/deployments/success_worker_spec.rb
@@ -8,8 +8,8 @@ RSpec.describe Deployments::SuccessWorker do
context 'when successful deployment' do
let(:deployment) { create(:deployment, :success) }
- it 'executes Deployments::AfterCreateService' do
- expect(Deployments::AfterCreateService)
+ it 'executes Deployments::UpdateEnvironmentService' do
+ expect(Deployments::UpdateEnvironmentService)
.to receive(:new).with(deployment).and_call_original
subject
@@ -19,8 +19,8 @@ RSpec.describe Deployments::SuccessWorker do
context 'when canceled deployment' do
let(:deployment) { create(:deployment, :canceled) }
- it 'does not execute Deployments::AfterCreateService' do
- expect(Deployments::AfterCreateService).not_to receive(:new)
+ it 'does not execute Deployments::UpdateEnvironmentService' do
+ expect(Deployments::UpdateEnvironmentService).not_to receive(:new)
subject
end
@@ -29,8 +29,8 @@ RSpec.describe Deployments::SuccessWorker do
context 'when deploy record does not exist' do
let(:deployment) { nil }
- it 'does not execute Deployments::AfterCreateService' do
- expect(Deployments::AfterCreateService).not_to receive(:new)
+ it 'does not execute Deployments::UpdateEnvironmentService' do
+ expect(Deployments::UpdateEnvironmentService).not_to receive(:new)
subject
end
diff --git a/spec/workers/deployments/update_environment_worker_spec.rb b/spec/workers/deployments/update_environment_worker_spec.rb
new file mode 100644
index 00000000000..d67cbd62616
--- /dev/null
+++ b/spec/workers/deployments/update_environment_worker_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deployments::UpdateEnvironmentWorker do
+ subject(:worker) { described_class.new }
+
+ context 'when successful deployment' do
+ let(:deployment) { create(:deployment, :success) }
+
+ it 'executes Deployments::UpdateEnvironmentService' do
+ service = instance_double(Deployments::UpdateEnvironmentService)
+
+ expect(Deployments::UpdateEnvironmentService)
+ .to receive(:new)
+ .with(deployment)
+ .and_return(service)
+
+ expect(service).to receive(:execute)
+
+ worker.perform(deployment.id)
+ end
+ end
+
+ context 'when canceled deployment' do
+ let(:deployment) { create(:deployment, :canceled) }
+
+ it 'does not execute Deployments::UpdateEnvironmentService' do
+ expect(Deployments::UpdateEnvironmentService).not_to receive(:new)
+
+ worker.perform(deployment.id)
+ end
+ end
+
+ context 'when deploy record does not exist' do
+ it 'does not execute Deployments::UpdateEnvironmentService' do
+ expect(Deployments::UpdateEnvironmentService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id)
+ end
+ end
+
+ context 'idempotent' do
+ include_examples 'an idempotent worker' do
+ let(:project) { create(:project, :repository) }
+ let(:environment) { create(:environment, name: 'production') }
+ let(:deployment) { create(:deployment, :success, project: project, environment: environment) }
+ let(:merge_request) { create(:merge_request, target_branch: 'master', source_branch: 'feature', source_project: project) }
+ let(:job_args) { deployment.id }
+
+ before do
+ merge_request.metrics.update!(merged_at: 1.hour.ago)
+ end
+
+ it 'updates merge requests metrics' do
+ subject
+
+ expect(merge_request.reload.metrics.first_deployed_to_production_at)
+ .to be_like_time(deployment.finished_at)
+ end
+ end
+ end
+end
diff --git a/spec/workers/design_management/copy_design_collection_worker_spec.rb b/spec/workers/design_management/copy_design_collection_worker_spec.rb
new file mode 100644
index 00000000000..45bfc47ca7e
--- /dev/null
+++ b/spec/workers/design_management/copy_design_collection_worker_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DesignManagement::CopyDesignCollectionWorker, :clean_gitlab_redis_shared_state do
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:target_issue) { create(:issue) }
+
+ subject { described_class.new.perform(user.id, issue.id, target_issue.id) }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [user.id, issue.id, target_issue.id] }
+
+ specify { subject }
+ end
+
+ it 'calls DesignManagement::CopyDesignCollection::CopyService' do
+ expect_next_instance_of(DesignManagement::CopyDesignCollection::CopyService) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success)
+ end
+
+ subject
+ end
+
+ it 'logs if there was an error calling the service' do
+ message = 'Error message'
+
+ allow_next_instance_of(DesignManagement::CopyDesignCollection::CopyService) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.error(message: message))
+ end
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(message)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/design_management/new_version_worker_spec.rb b/spec/workers/design_management/new_version_worker_spec.rb
index 4d57c46487e..3320d7a062d 100644
--- a/spec/workers/design_management/new_version_worker_spec.rb
+++ b/spec/workers/design_management/new_version_worker_spec.rb
@@ -36,6 +36,10 @@ RSpec.describe DesignManagement::NewVersionWorker do
expect { worker.perform(version.id) }.to change { Note.system.count }.by(1)
end
+ it 'does not create a system note if skip_system_notes is true' do
+ expect { worker.perform(version.id, true) }.not_to change { Note.system.count }
+ end
+
it 'invokes GenerateImageVersionsService' do
expect_next_instance_of(DesignManagement::GenerateImageVersionsService) do |service|
expect(service).to receive(:execute)
diff --git a/spec/workers/disallow_two_factor_for_group_worker_spec.rb b/spec/workers/disallow_two_factor_for_group_worker_spec.rb
new file mode 100644
index 00000000000..a69dd893f81
--- /dev/null
+++ b/spec/workers/disallow_two_factor_for_group_worker_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DisallowTwoFactorForGroupWorker do
+ let_it_be(:group) { create(:group, require_two_factor_authentication: true) }
+ let_it_be(:user) { create(:user, require_two_factor_authentication_from_group: true) }
+
+ it "updates group" do
+ described_class.new.perform(group.id)
+
+ expect(group.reload.require_two_factor_authentication).to eq(false)
+ end
+
+ it "updates group members" do
+ group.add_user(user, GroupMember::DEVELOPER)
+
+ described_class.new.perform(group.id)
+
+ expect(user.reload.require_two_factor_authentication_from_group).to eq(false)
+ end
+end
diff --git a/spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb b/spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb
new file mode 100644
index 00000000000..c3be8263171
--- /dev/null
+++ b/spec/workers/disallow_two_factor_for_subgroups_worker_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DisallowTwoFactorForSubgroupsWorker do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup_with_2fa) { create(:group, parent: group, require_two_factor_authentication: true) }
+ let_it_be(:subgroup_without_2fa) { create(:group, parent: group, require_two_factor_authentication: false) }
+ let_it_be(:subsubgroup_with_2fa) { create(:group, parent: subgroup_with_2fa, require_two_factor_authentication: true) }
+
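+ # only subgroups that require 2FA get a job, scheduled 2 seconds apart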
+ it "schedules updating subgroups" do
+ expect(DisallowTwoFactorForGroupWorker).to receive(:perform_in).with(0, subgroup_with_2fa.id)
+ expect(DisallowTwoFactorForGroupWorker).to receive(:perform_in).with(2, subsubgroup_with_2fa.id)
+
+ described_class.new.perform(group.id)
+ end
+end
diff --git a/spec/workers/export_csv_worker_spec.rb b/spec/workers/export_csv_worker_spec.rb
index 1a5b17ee35b..88ccfac0a02 100644
--- a/spec/workers/export_csv_worker_spec.rb
+++ b/spec/workers/export_csv_worker_spec.rb
@@ -10,25 +10,9 @@ RSpec.describe ExportCsvWorker do
described_class.new.perform(user.id, project.id, params)
end
- it 'emails a CSV' do
- expect {perform}.to change(ActionMailer::Base.deliveries, :size).by(1)
- end
-
- it 'ensures that project_id is passed to issues_finder' do
- expect(IssuesFinder).to receive(:new).with(anything, hash_including(project_id: project.id)).and_call_original
+ it 'delegates call to IssuableExportCsvWorker' do
+ expect(IssuableExportCsvWorker).to receive(:perform_async).with(:issue, user.id, project.id, anything)
perform
end
-
- it 'removes sort parameter' do
- expect(IssuesFinder).to receive(:new).with(anything, hash_not_including(:sort)).and_call_original
-
- perform
- end
-
- it 'converts controller string keys to symbol keys for IssuesFinder' do
- expect(IssuesFinder).to receive(:new).with(anything, hash_including(test_key: true)).and_call_original
-
- perform('test_key' => true)
- end
end
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index 1be6e86b650..fc9115a5ea1 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe GitGarbageCollectWorker do
it "flushes ref caches when the task if 'gc'" do
expect(subject).to receive(:renew_lease).with(lease_key, lease_uuid).and_call_original
- expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
+ expect_any_instance_of(Repository).to receive(:expire_branches_cache).and_call_original
expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
expect_any_instance_of(Gitlab::Git::Repository).to receive(:has_visible_content?).and_call_original
@@ -77,7 +77,7 @@ RSpec.describe GitGarbageCollectWorker do
end
it 'returns silently' do
- expect_any_instance_of(Repository).not_to receive(:after_create_branch).and_call_original
+ expect_any_instance_of(Repository).not_to receive(:expire_branches_cache).and_call_original
expect_any_instance_of(Repository).not_to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).not_to receive(:has_visible_content?).and_call_original
@@ -102,7 +102,7 @@ RSpec.describe GitGarbageCollectWorker do
it "flushes ref caches when the task if 'gc'" do
expect(subject).to receive(:get_lease_uuid).with("git_gc:#{task}:#{project.id}").and_return(false)
- expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
+ expect_any_instance_of(Repository).to receive(:expire_branches_cache).and_call_original
expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
expect_any_instance_of(Gitlab::Git::Repository).to receive(:has_visible_content?).and_call_original
@@ -129,46 +129,36 @@ RSpec.describe GitGarbageCollectWorker do
let_it_be(:lfs_reference) { create(:lfs_objects_project, project: project) }
let(:lfs_object) { lfs_reference.lfs_object }
- context 'with cleanup_lfs_during_gc feature flag enabled' do
- before do
- stub_feature_flags(cleanup_lfs_during_gc: true)
+ it 'cleans up unreferenced LFS objects' do
+ expect_next_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences) do |svc|
+ expect(svc.project).to eq(project)
+ expect(svc.dry_run).to be_falsy
+ expect(svc).to receive(:run!).and_call_original
end
- it 'cleans up unreferenced LFS objects' do
- expect_next_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences) do |svc|
- expect(svc.project).to eq(project)
- expect(svc.dry_run).to be_falsy
- expect(svc).to receive(:run!).and_call_original
- end
-
- subject.perform(*params)
-
- expect(project.lfs_objects.reload).not_to include(lfs_object)
- end
+ subject.perform(*params)
- it 'does nothing if the database is read-only' do
- allow(Gitlab::Database).to receive(:read_only?) { true }
+ expect(project.lfs_objects.reload).not_to include(lfs_object)
+ end
- expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
+ it 'catches and logs exceptions' do
+ expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences)
+ .to receive(:run!)
+ .and_raise('Failed')
- subject.perform(*params)
+ expect(Gitlab::GitLogger).to receive(:warn)
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(project.lfs_objects.reload).to include(lfs_object)
- end
+ subject.perform(*params)
end
- context 'with cleanup_lfs_during_gc feature flag disabled' do
- before do
- stub_feature_flags(cleanup_lfs_during_gc: false)
- end
-
- it 'does not clean up unreferenced LFS objects' do
- expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
+ it 'does nothing if the database is read-only' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
- subject.perform(*params)
+ subject.perform(*params)
- expect(project.lfs_objects.reload).to include(lfs_object)
- end
+ expect(project.lfs_objects.reload).to include(lfs_object)
end
end
end
@@ -180,7 +170,7 @@ RSpec.describe GitGarbageCollectWorker do
it 'returns silently' do
expect(subject).not_to receive(:command)
- expect_any_instance_of(Repository).not_to receive(:after_create_branch).and_call_original
+ expect_any_instance_of(Repository).not_to receive(:expire_branches_cache).and_call_original
expect_any_instance_of(Repository).not_to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).not_to receive(:has_visible_content?).and_call_original
diff --git a/spec/workers/group_export_worker_spec.rb b/spec/workers/group_export_worker_spec.rb
index 5697e66b7d1..4e58e3886a4 100644
--- a/spec/workers/group_export_worker_spec.rb
+++ b/spec/workers/group_export_worker_spec.rb
@@ -26,4 +26,14 @@ RSpec.describe GroupExportWorker do
end
end
end
+
+ describe 'sidekiq options' do
+ it 'disables retry' do
+ expect(described_class.sidekiq_options['retry']).to eq(false)
+ end
+
+ it 'disables dead' do
+ expect(described_class.sidekiq_options['dead']).to eq(false)
+ end
+ end
end
diff --git a/spec/workers/group_import_worker_spec.rb b/spec/workers/group_import_worker_spec.rb
index fb2d49c21af..5171de7086b 100644
--- a/spec/workers/group_import_worker_spec.rb
+++ b/spec/workers/group_import_worker_spec.rb
@@ -3,17 +3,29 @@
require 'spec_helper'
RSpec.describe GroupImportWorker do
- let!(:user) { create(:user) }
- let!(:group) { create(:group) }
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
subject { described_class.new }
before do
+ create(:group_import_state, group: group, user: user)
+
allow_next_instance_of(described_class) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
end
+ describe 'sidekiq options' do
+ it 'disables retry' do
+ expect(described_class.sidekiq_options['retry']).to eq(false)
+ end
+
+ it 'disables dead' do
+ expect(described_class.sidekiq_options['dead']).to eq(false)
+ end
+ end
+
describe '#perform' do
context 'when it succeeds' do
before do
@@ -26,44 +38,11 @@ RSpec.describe GroupImportWorker do
subject.perform(user.id, group.id)
end
- context 'when the import state does not exist' do
- it 'creates group import' do
- expect(group.import_state).to be_nil
-
- subject.perform(user.id, group.id)
- import_state = group.reload.import_state
-
- expect(import_state).to be_instance_of(GroupImportState)
- expect(import_state.status_name).to eq(:finished)
- expect(import_state.jid).not_to be_empty
- end
-
- it 'sets the group import status to started' do
- expect_next_instance_of(GroupImportState) do |import|
- expect(import).to receive(:start!).and_call_original
- end
-
- subject.perform(user.id, group.id)
- end
-
- it 'sets the group import status to finished' do
- expect_next_instance_of(GroupImportState) do |import|
- expect(import).to receive(:finish!).and_call_original
- end
-
- subject.perform(user.id, group.id)
- end
- end
-
- context 'when the import state already exists' do
- it 'updates the existing state' do
- existing_state = create(:group_import_state, group: group)
-
- expect { subject.perform(user.id, group.id) }
- .not_to change { GroupImportState.count }
+ it 'updates the existing state' do
+ expect { subject.perform(user.id, group.id) }
+ .not_to change { GroupImportState.count }
- expect(existing_state.reload).to be_finished
- end
+ expect(group.import_state.reload).to be_finished
end
end
@@ -83,11 +62,9 @@ RSpec.describe GroupImportWorker do
end
it 'sets the group import status to failed' do
- expect_next_instance_of(GroupImportState) do |import|
- expect(import).to receive(:fail_op).and_call_original
- end
-
expect { subject.perform(user.id, group.id) }.to raise_exception(Gitlab::ImportExport::Error)
+
+ expect(group.import_state.reload.status).to eq(-1)
end
end
end
diff --git a/spec/workers/incident_management/add_severity_system_note_worker_spec.rb b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
new file mode 100644
index 00000000000..203c62ffe6f
--- /dev/null
+++ b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::AddSeveritySystemNoteWorker do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project) }
+ let_it_be(:issuable_severity) { create(:issuable_severity, issue: incident, severity: :medium) }
+
+ describe '#perform' do
+ let(:incident_id) { incident.id }
+ let(:user_id) { user.id }
+
+ subject(:perform) { described_class.new.perform(incident_id, user_id) }
+
+ shared_examples 'does not add a system note' do
+ it 'does not change incident notes count' do
+ expect { perform }.not_to change { incident.notes.count }
+ end
+ end
+
+ context 'when incident and user exist' do
+ it 'creates a system note' do
+ expect { perform }.to change { incident.notes.where(author: user).count }.by(1)
+ end
+ end
+
+ context 'when incident does not exist' do
+ let(:incident_id) { -1 }
+
+ it_behaves_like 'does not add a system note'
+ end
+
+ context 'when incident_id is nil' do
+ let(:incident_id) { nil }
+
+ it_behaves_like 'does not add a system note'
+ end
+
+ context 'when issue is not an incident' do
+ let_it_be(:issue) { create(:issue, project: project) }
+ let(:incident_id) { issue.id }
+
+ it_behaves_like 'does not add a system note'
+ end
+
+ context 'when user does not exist' do
+ let(:user_id) { -1 }
+
+ it_behaves_like 'does not add a system note'
+ end
+
+ context 'when user_id is nil' do
+ let(:user_id) { nil }
+
+ it_behaves_like 'does not add a system note'
+ end
+ end
+end
diff --git a/spec/workers/incident_management/process_alert_worker_spec.rb b/spec/workers/incident_management/process_alert_worker_spec.rb
index 20ab283b49b..41d4f31da24 100644
--- a/spec/workers/incident_management/process_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_alert_worker_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
describe '#perform' do
let_it_be(:started_at) { Time.now.rfc3339 }
let_it_be(:payload) { { 'title' => 'title', 'start_time' => started_at } }
- let_it_be(:parsed_payload) { Gitlab::Alerting::NotificationPayloadParser.call(payload, project) }
let_it_be(:alert) { create(:alert_management_alert, project: project, payload: payload, started_at: started_at) }
let(:created_issue) { Issue.last! }
@@ -68,7 +67,6 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
context 'prometheus alert' do
let_it_be(:alert) { create(:alert_management_alert, :prometheus, project: project, started_at: started_at) }
- let_it_be(:parsed_payload) { alert.payload }
it_behaves_like 'creates issue successfully'
end
diff --git a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
index c294892a66f..2ca4193aa72 100644
--- a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe IncidentManagement::ProcessPrometheusAlertWorker do
describe '#perform' do
let_it_be(:project) { create(:project) }
let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
- let(:payload_key) { Gitlab::Alerting::Alert.new(project: project, payload: alert_params).gitlab_fingerprint }
+ let(:payload_key) { Gitlab::AlertManagement::Payload::Prometheus.new(project: project, payload: alert_params).gitlab_fingerprint }
let!(:prometheus_alert_event) { create(:prometheus_alert_event, prometheus_alert: prometheus_alert, payload_key: payload_key) }
let!(:settings) { create(:project_incident_management_setting, project: project, create_issue: true) }
diff --git a/spec/workers/issuable_export_csv_worker_spec.rb b/spec/workers/issuable_export_csv_worker_spec.rb
new file mode 100644
index 00000000000..bcc2420996d
--- /dev/null
+++ b/spec/workers/issuable_export_csv_worker_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IssuableExportCsvWorker do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, creator: user) }
+ let(:params) { {} }
+
+ subject { described_class.new.perform(issuable_type, user.id, project.id, params) }
+
+ context 'when issuable type is Issue' do
+ let(:issuable_type) { :issue }
+
+ it 'emails a CSV' do
+ expect { subject }.to change(ActionMailer::Base.deliveries, :size).by(1)
+ end
+
+ it 'ensures that project_id is passed to issues_finder' do
+ expect(IssuesFinder).to receive(:new).with(anything, hash_including(project_id: project.id)).and_call_original
+
+ subject
+ end
+
+ it 'removes sort parameter' do
+ expect(IssuesFinder).to receive(:new).with(anything, hash_not_including(:sort)).and_call_original
+
+ subject
+ end
+
+ it 'calls the issue export service' do
+ expect(Issues::ExportCsvService).to receive(:new).once.and_call_original
+
+ subject
+ end
+
+ context 'with params' do
+ let(:params) { { 'test_key' => true } }
+
+ it 'converts controller string keys to symbol keys for IssuesFinder' do
+ expect(IssuesFinder).to receive(:new).with(user, hash_including(test_key: true)).and_call_original
+
+ subject
+ end
+ end
+ end
+
+ context 'when issuable type is MergeRequest' do
+ let(:issuable_type) { :merge_request }
+
+ it 'emails a CSV' do
+ expect { subject }.to change(ActionMailer::Base.deliveries, :size).by(1)
+ end
+
+ it 'calls the MR export service' do
+ expect(MergeRequests::ExportCsvService).to receive(:new).with(anything, project).once.and_call_original
+
+ subject
+ end
+
+ it 'calls the MergeRequest finder' do
+ expect(MergeRequestsFinder).to receive(:new).once.and_call_original
+
+ subject
+ end
+ end
+
+ context 'when issuable type is User' do
+ let(:issuable_type) { :user }
+
+ it { expect { subject }.to raise_error(ArgumentError) }
+ end
+end
diff --git a/spec/workers/member_invitation_reminder_emails_worker_spec.rb b/spec/workers/member_invitation_reminder_emails_worker_spec.rb
new file mode 100644
index 00000000000..bfd08792c7c
--- /dev/null
+++ b/spec/workers/member_invitation_reminder_emails_worker_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MemberInvitationReminderEmailsWorker do
+ describe '#perform' do
+ subject { described_class.new.perform }
+
+ before do
+ create(:group_member, :invited, created_at: 2.days.ago)
+ end
+
+ context 'experiment disabled' do
+ before do
+ stub_experiment(invitation_reminders: false)
+ end
+
+ it 'does not attempt to execute the invitation reminder service' do
+ expect(Members::InvitationReminderEmailService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'experiment enabled' do
+ before do
+ stub_experiment(invitation_reminders: true)
+ end
+
+ it 'executes the invitation reminder email service' do
+ expect_next_instance_of(Members::InvitationReminderEmailService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb b/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
index d93612afe37..11343f69d6f 100644
--- a/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
+++ b/spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Metrics::Dashboard::PruneOldAnnotationsWorker do
# is idempotent in the scope of 24h
expect { described_class.new.perform }.not_to change { Metrics::Dashboard::Annotation.all.to_a }
- Timecop.travel(24.hours.from_now) do
+ travel_to(24.hours.from_now) do
described_class.new.perform
expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation])
end
diff --git a/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb b/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
new file mode 100644
index 00000000000..19b79835825
--- /dev/null
+++ b/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Metrics::Dashboard::SyncDashboardsWorker do
+ include MetricsDashboardHelpers
+ subject(:worker) { described_class.new }
+
+ let(:project) { project_with_dashboard(dashboard_path) }
+ let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
+
+ describe ".perform" do
+ it 'imports metrics' do
+ expect { worker.perform(project.id) }.to change(PrometheusMetric, :count).by(3)
+ end
+
+ it 'is idempotent' do
+ 2.times do
+ worker.perform(project.id)
+ end
+
+ expect(PrometheusMetric.count).to eq(3)
+ end
+ end
+end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 50d164d1705..77c1d16428f 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -281,7 +281,7 @@ RSpec.describe PostReceive do
before do
# Need to mock here so we can expect calls on project
- allow(Gitlab::GlRepository).to receive(:parse).and_return([project, project, Gitlab::GlRepository::WIKI])
+ allow(Gitlab::GlRepository).to receive(:parse).and_return([project.wiki, project, Gitlab::GlRepository::WIKI])
end
it 'updates project activity' do
@@ -290,7 +290,7 @@ RSpec.describe PostReceive do
# MySQL drops milliseconds in the timestamps, so advance at least
# a second to ensure we see changes.
- Timecop.freeze(1.second.from_now) do
+ travel_to(1.second.from_now) do
expect do
perform
project.reload
diff --git a/spec/workers/project_export_worker_spec.rb b/spec/workers/project_export_worker_spec.rb
index 1f54b6766a4..defecefc3cc 100644
--- a/spec/workers/project_export_worker_spec.rb
+++ b/spec/workers/project_export_worker_spec.rb
@@ -75,6 +75,10 @@ RSpec.describe ProjectExportWorker do
expect(described_class.sidekiq_options['retry']).to eq(false)
end
+ it 'disables dead' do
+ expect(described_class.sidekiq_options['dead']).to eq(false)
+ end
+
it 'sets default status expiration' do
expect(described_class.sidekiq_options['status_expiration']).to eq(StuckExportJobsWorker::EXPORT_JOBS_EXPIRATION)
end
diff --git a/spec/workers/propagate_integration_group_worker_spec.rb b/spec/workers/propagate_integration_group_worker_spec.rb
new file mode 100644
index 00000000000..fbf1fbf1fea
--- /dev/null
+++ b/spec/workers/propagate_integration_group_worker_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PropagateIntegrationGroupWorker do
+ describe '#perform' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:another_group) { create(:group) }
+ let_it_be(:subgroup1) { create(:group, parent: group) }
+ let_it_be(:subgroup2) { create(:group, parent: group) }
+ let_it_be(:integration) { create(:redmine_service, :instance) }
+ let(:job_args) { [integration.id, group.id, subgroup2.id] }
+
+ it_behaves_like 'an idempotent worker' do
+ it 'calls BulkCreateIntegrationService' do
+ expect(BulkCreateIntegrationService).to receive(:new)
+ .with(integration, match_array([group, another_group, subgroup1, subgroup2]), 'group').twice
+ .and_return(double(execute: nil))
+
+ subject
+ end
+
+ context 'with a group integration' do
+ let_it_be(:integration) { create(:redmine_service, group: group, project: nil) }
+
+ it 'calls BulkCreateIntegrationService' do
+ expect(BulkCreateIntegrationService).to receive(:new)
+ .with(integration, match_array([subgroup1, subgroup2]), 'group').twice
+ .and_return(double(execute: nil))
+
+ subject
+ end
+ end
+ end
+
+ context 'with an invalid integration id' do
+ it 'returns without failure' do
+ expect(BulkCreateIntegrationService).not_to receive(:new)
+
+ subject.perform(0, 1, 100)
+ end
+ end
+ end
+end
diff --git a/spec/workers/propagate_integration_inherit_worker_spec.rb b/spec/workers/propagate_integration_inherit_worker_spec.rb
new file mode 100644
index 00000000000..cbfee29a6a0
--- /dev/null
+++ b/spec/workers/propagate_integration_inherit_worker_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PropagateIntegrationInheritWorker do
+ describe '#perform' do
+ let_it_be(:integration) { create(:redmine_service, :instance) }
+ let_it_be(:integration1) { create(:redmine_service, inherit_from_id: integration.id) }
+ let_it_be(:integration2) { create(:bugzilla_service, inherit_from_id: integration.id) }
+ let_it_be(:integration3) { create(:redmine_service) }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [integration.id, integration1.id, integration3.id] }
+
+ it 'calls BulkUpdateIntegrationService' do
+ expect(BulkUpdateIntegrationService).to receive(:new)
+ .with(integration, match_array(integration1)).twice
+ .and_return(double(execute: nil))
+
+ subject
+ end
+ end
+
+ context 'with an invalid integration id' do
+ it 'returns without failure' do
+ expect(BulkUpdateIntegrationService).not_to receive(:new)
+
+ subject.perform(0, integration1.id, integration3.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/propagate_integration_project_worker_spec.rb b/spec/workers/propagate_integration_project_worker_spec.rb
new file mode 100644
index 00000000000..0302af2acc9
--- /dev/null
+++ b/spec/workers/propagate_integration_project_worker_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PropagateIntegrationProjectWorker do
+ describe '#perform' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project, group: group) }
+ let_it_be(:project3) { create(:project, group: group) }
+ let_it_be(:integration) { create(:redmine_service, :instance) }
+ let(:job_args) { [integration.id, project1.id, project3.id] }
+
+ it_behaves_like 'an idempotent worker' do
+ it 'calls BulkCreateIntegrationService' do
+ expect(BulkCreateIntegrationService).to receive(:new)
+ .with(integration, match_array([project1, project2, project3]), 'project').twice
+ .and_return(double(execute: nil))
+
+ subject
+ end
+
+ context 'with a group integration' do
+ let_it_be(:integration) { create(:redmine_service, group: group, project: nil) }
+
+ it 'calls BulkCreateIntegrationService' do
+ expect(BulkCreateIntegrationService).to receive(:new)
+ .with(integration, match_array([project2, project3]), 'project').twice
+ .and_return(double(execute: nil))
+
+ subject
+ end
+ end
+ end
+
+ context 'with an invalid integration id' do
+ it 'returns without failure' do
+ expect(BulkCreateIntegrationService).not_to receive(:new)
+
+ subject.perform(0, 1, 100)
+ end
+ end
+ end
+end
diff --git a/spec/workers/web_hooks/destroy_worker_spec.rb b/spec/workers/web_hooks/destroy_worker_spec.rb
new file mode 100644
index 00000000000..fd26c8591ee
--- /dev/null
+++ b/spec/workers/web_hooks/destroy_worker_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::DestroyWorker do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ subject { described_class.new }
+
+ describe "#perform" do
+ context 'with a Web hook' do
+ let!(:hook) { create(:project_hook, project: project) }
+ let!(:other_hook) { create(:project_hook, project: project) }
+ let!(:log) { create(:web_hook_log, web_hook: hook) }
+ let!(:other_log) { create(:web_hook_log, web_hook: other_hook) }
+
+ it "deletes the Web hook and logs", :aggregate_failures do
+ expect { subject.perform(user.id, hook.id) }
+ .to change { WebHookLog.count }.from(2).to(1)
+ .and change { WebHook.count }.from(2).to(1)
+
+ expect(WebHook.find(other_hook.id)).to be_present
+ expect(WebHookLog.find(other_log.id)).to be_present
+ end
+
+ it "raises and tracks an error if destroy failed" do
+ allow_next_instance_of(::WebHooks::DestroyService) do |instance|
+ expect(instance).to receive(:sync_destroy).with(anything).and_return({ status: :error, message: "failed" })
+ end
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(an_instance_of(::WebHooks::DestroyService::DestroyError), web_hook_id: hook.id)
+ .and_call_original
+ expect { subject.perform(user.id, hook.id) }.to raise_error(::WebHooks::DestroyService::DestroyError)
+ end
+
+ context 'with unknown hook' do
+ it 'does not raise an error' do
+ expect { subject.perform(user.id, non_existing_record_id) }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ end
+ end
+
+ context 'with unknown user' do
+ it 'does not raise an error' do
+ expect { subject.perform(non_existing_record_id, hook.id) }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ end
+ end
+ end
+ end
+end