author      GitLab Bot <gitlab-bot@gitlab.com>   2021-05-19 15:44:42 +0000
committer   GitLab Bot <gitlab-bot@gitlab.com>   2021-05-19 15:44:42 +0000
commit      4555e1b21c365ed8303ffb7a3325d773c9b8bf31 (patch)
tree        5423a1c7516cffe36384133ade12572cf709398d /spec
parent      e570267f2f6b326480d284e0164a6464ba4081bc (diff)
download    gitlab-ce-4555e1b21c365ed8303ffb7a3325d773c9b8bf31.tar.gz

Add latest changes from gitlab-org/gitlab@13-12-stable-ee (v13.12.0-rc42)
Diffstat (limited to 'spec')
-rw-r--r--spec/benchmarks/banzai_benchmark.rb17
-rw-r--r--spec/channels/issues_channel_spec.rb36
-rw-r--r--spec/config/inject_enterprise_edition_module_spec.rb129
-rw-r--r--spec/config/mail_room_spec.rb2
-rw-r--r--spec/config/object_store_settings_spec.rb4
-rw-r--r--spec/controllers/admin/cohorts_controller_spec.rb2
-rw-r--r--spec/controllers/admin/dev_ops_report_controller_spec.rb8
-rw-r--r--spec/controllers/admin/groups_controller_spec.rb6
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb4
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb11
-rw-r--r--spec/controllers/admin/users_controller_spec.rb78
-rw-r--r--spec/controllers/application_controller_spec.rb42
-rw-r--r--spec/controllers/boards/issues_controller_spec.rb13
-rw-r--r--spec/controllers/concerns/confirm_email_warning_spec.rb2
-rw-r--r--spec/controllers/concerns/graceful_timeout_handling_spec.rb2
-rw-r--r--spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb2
-rw-r--r--spec/controllers/concerns/renders_commits_spec.rb1
-rw-r--r--spec/controllers/groups/group_members_controller_spec.rb66
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb3
-rw-r--r--spec/controllers/groups/settings/ci_cd_controller_spec.rb19
-rw-r--r--spec/controllers/groups/settings/integrations_controller_spec.rb4
-rw-r--r--spec/controllers/groups/settings/repository_controller_spec.rb2
-rw-r--r--spec/controllers/invites_controller_spec.rb142
-rw-r--r--spec/controllers/oauth/authorizations_controller_spec.rb69
-rw-r--r--spec/controllers/oauth/jira/authorizations_controller_spec.rb12
-rw-r--r--spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb67
-rw-r--r--spec/controllers/projects/analytics/cycle_analytics/value_streams_controller_spec.rb43
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb119
-rw-r--r--spec/controllers/projects/hooks_controller_spec.rb56
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb17
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb2
-rw-r--r--spec/controllers/projects/mattermosts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb11
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb6
-rw-r--r--spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb4
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb150
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb14
-rw-r--r--spec/controllers/projects/runners_controller_spec.rb88
-rw-r--r--spec/controllers/projects/services_controller_spec.rb8
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb52
-rw-r--r--spec/controllers/projects/settings/repository_controller_spec.rb2
-rw-r--r--spec/controllers/projects/static_site_editor_controller_spec.rb2
-rw-r--r--spec/controllers/registrations/experience_levels_controller_spec.rb4
-rw-r--r--spec/controllers/registrations/welcome_controller_spec.rb24
-rw-r--r--spec/controllers/registrations_controller_spec.rb113
-rw-r--r--spec/controllers/sessions_controller_spec.rb7
-rw-r--r--spec/db/schema_spec.rb2
-rw-r--r--spec/deprecation_toolkit_env.rb3
-rw-r--r--spec/experiments/application_experiment_spec.rb6
-rw-r--r--spec/experiments/concerns/project_commit_count_spec.rb41
-rw-r--r--spec/experiments/empty_repo_upload_experiment_spec.rb49
-rw-r--r--spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb22
-rw-r--r--spec/experiments/members/invite_email_experiment_spec.rb10
-rw-r--r--spec/experiments/new_project_readme_experiment_spec.rb22
-rw-r--r--spec/factories/alert_management/alerts.rb18
-rw-r--r--spec/factories/analytics/cycle_analytics/project_stages.rb1
-rw-r--r--spec/factories/analytics/cycle_analytics/project_value_streams.rb9
-rw-r--r--spec/factories/bulk_import/export_uploads.rb7
-rw-r--r--spec/factories/bulk_import/exports.rb24
-rw-r--r--spec/factories/chat_names.rb4
-rw-r--r--spec/factories/ci/builds.rb11
-rw-r--r--spec/factories/ci/job_artifacts.rb12
-rw-r--r--spec/factories/ci/pipeline_artifacts.rb4
-rw-r--r--spec/factories/ci/pipelines.rb6
-rw-r--r--spec/factories/ci/runner_namespaces.rb8
-rw-r--r--spec/factories/clusters/integrations/elastic_stack.rb12
-rw-r--r--spec/factories/gitlab/database/background_migration/batched_jobs.rb1
-rw-r--r--spec/factories/gitlab/database/background_migration/batched_migrations.rb1
-rw-r--r--spec/factories/gitlab/jwt_token.rb21
-rw-r--r--spec/factories/integration_data.rb (renamed from spec/factories/services_data.rb)6
-rw-r--r--spec/factories/integrations.rb (renamed from spec/factories/services.rb)45
-rw-r--r--spec/factories/issues.rb8
-rw-r--r--spec/factories/lfs_objects.rb2
-rw-r--r--spec/factories/merge_requests.rb2
-rw-r--r--spec/factories/namespace_package_settings.rb3
-rw-r--r--spec/factories/namespaces.rb17
-rw-r--r--spec/factories/notes.rb2
-rw-r--r--spec/factories/packages.rb35
-rw-r--r--spec/factories/packages/helm/file_metadatum.rb9
-rw-r--r--spec/factories/packages/package_file.rb30
-rw-r--r--spec/factories/projects.rb2
-rw-r--r--spec/factories/service_hooks.rb2
-rw-r--r--spec/factories/usage_data.rb2
-rw-r--r--spec/factories/users.rb10
-rw-r--r--spec/factories/users/credit_card_validations.rb9
-rw-r--r--spec/features/action_cable_logging_spec.rb6
-rw-r--r--spec/features/admin/admin_appearance_spec.rb5
-rw-r--r--spec/features/admin/admin_dev_ops_report_spec.rb6
-rw-r--r--spec/features/admin/admin_groups_spec.rb10
-rw-r--r--spec/features/admin/admin_labels_spec.rb2
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb70
-rw-r--r--spec/features/admin/admin_mode_spec.rb235
-rw-r--r--spec/features/admin/admin_projects_spec.rb36
-rw-r--r--spec/features/admin/admin_settings_spec.rb73
-rw-r--r--spec/features/admin/admin_users_spec.rb44
-rw-r--r--spec/features/admin/services/admin_visits_service_templates_spec.rb2
-rw-r--r--spec/features/admin/users/user_spec.rb177
-rw-r--r--spec/features/admin/users/users_spec.rb586
-rw-r--r--spec/features/boards/boards_spec.rb49
-rw-r--r--spec/features/boards/new_issue_spec.rb93
-rw-r--r--spec/features/boards/sidebar_assignee_spec.rb25
-rw-r--r--spec/features/boards/sidebar_labels_in_namespaces_spec.rb34
-rw-r--r--spec/features/boards/sub_group_project_spec.rb46
-rw-r--r--spec/features/boards/user_visits_board_spec.rb78
-rw-r--r--spec/features/calendar_spec.rb4
-rw-r--r--spec/features/dashboard/active_tab_spec.rb56
-rw-r--r--spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb74
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb106
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb2
-rw-r--r--spec/features/frequently_visited_projects_and_groups_spec.rb76
-rw-r--r--spec/features/groups/group_page_with_external_authorization_service_spec.rb6
-rw-r--r--spec/features/groups/issues_spec.rb2
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb42
-rw-r--r--spec/features/groups/members/manage_members_spec.rb110
-rw-r--r--spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb12
-rw-r--r--spec/features/groups/milestone_spec.rb4
-rw-r--r--spec/features/groups/milestones/gfm_autocomplete_spec.rb80
-rw-r--r--spec/features/groups/navbar_spec.rb62
-rw-r--r--spec/features/groups/settings/packages_and_registries_spec.rb31
-rw-r--r--spec/features/groups_spec.rb41
-rw-r--r--spec/features/invites_spec.rb319
-rw-r--r--spec/features/issuables/sorting_list_spec.rb36
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb2
-rw-r--r--spec/features/issues/filtered_search/filter_issues_spec.rb22
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb81
-rw-r--r--spec/features/issues/resource_label_events_spec.rb2
-rw-r--r--spec/features/issues/service_desk_spec.rb4
-rw-r--r--spec/features/issues/spam_issues_spec.rb1
-rw-r--r--spec/features/issues/user_bulk_edits_issues_labels_spec.rb (renamed from spec/features/issues/bulk_assignment_labels_spec.rb)91
-rw-r--r--spec/features/issues/user_bulk_edits_issues_spec.rb (renamed from spec/features/issues/update_issues_spec.rb)46
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb8
-rw-r--r--spec/features/issues/user_interacts_with_awards_spec.rb53
-rw-r--r--spec/features/issues/user_toggles_subscription_spec.rb4
-rw-r--r--spec/features/markdown/copy_as_gfm_spec.rb4
-rw-r--r--spec/features/markdown/math_spec.rb18
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb4
-rw-r--r--spec/features/merge_request/user_creates_merge_request_spec.rb1
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb6
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb1
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb2
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb8
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_wip_help_message_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_open_merge_request_spec.rb2
-rw-r--r--spec/features/merge_requests/user_mass_updates_spec.rb32
-rw-r--r--spec/features/monitor_sidebar_link_spec.rb145
-rw-r--r--spec/features/operations_sidebar_link_spec.rb144
-rw-r--r--spec/features/populate_new_pipeline_vars_with_params_spec.rb17
-rw-r--r--spec/features/profiles/chat_names_spec.rb6
-rw-r--r--spec/features/profiles/user_edit_preferences_spec.rb19
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb24
-rw-r--r--spec/features/profiles/user_visits_notifications_tab_spec.rb8
-rw-r--r--spec/features/project_variables_spec.rb2
-rw-r--r--spec/features/projects/active_tabs_spec.rb58
-rw-r--r--spec/features/projects/badges/pipeline_badge_spec.rb1
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb75
-rw-r--r--spec/features/projects/branches/user_deletes_branch_spec.rb1
-rw-r--r--spec/features/projects/commit/builds_spec.rb2
-rw-r--r--spec/features/projects/commit/cherry_pick_spec.rb1
-rw-r--r--spec/features/projects/commit/user_comments_on_commit_spec.rb1
-rw-r--r--spec/features/projects/commit/user_reverts_commit_spec.rb1
-rw-r--r--spec/features/projects/commit/user_views_user_status_on_commit_spec.rb1
-rw-r--r--spec/features/projects/compare_spec.rb28
-rw-r--r--spec/features/projects/confluence/user_views_confluence_page_spec.rb1
-rw-r--r--spec/features/projects/deploy_keys_spec.rb5
-rw-r--r--spec/features/projects/diffs/diff_show_spec.rb2
-rw-r--r--spec/features/projects/features_visibility_spec.rb17
-rw-r--r--spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb69
-rw-r--r--spec/features/projects/files/user_edits_files_spec.rb4
-rw-r--r--spec/features/projects/fork_spec.rb4
-rw-r--r--spec/features/projects/graph_spec.rb2
-rw-r--r--spec/features/projects/integrations/user_activates_asana_spec.rb (renamed from spec/features/projects/services/user_activates_asana_spec.rb)0
-rw-r--r--spec/features/projects/integrations/user_activates_assembla_spec.rb (renamed from spec/features/projects/services/user_activates_assembla_spec.rb)0
-rw-r--r--spec/features/projects/integrations/user_activates_atlassian_bamboo_ci_spec.rb (renamed from spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb)0
-rw-r--r--spec/features/projects/issues/design_management/user_views_design_images_spec.rb1
-rw-r--r--spec/features/projects/jobs_spec.rb4
-rw-r--r--spec/features/projects/labels/issues_sorted_by_priority_spec.rb24
-rw-r--r--spec/features/projects/labels/user_sees_links_to_issuables_spec.rb1
-rw-r--r--spec/features/projects/labels/user_views_labels_spec.rb1
-rw-r--r--spec/features/projects/members/invite_group_spec.rb106
-rw-r--r--spec/features/projects/members/list_spec.rb27
-rw-r--r--spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb14
-rw-r--r--spec/features/projects/merge_request_button_spec.rb1
-rw-r--r--spec/features/projects/milestones/gfm_autocomplete_spec.rb80
-rw-r--r--spec/features/projects/navbar_spec.rb166
-rw-r--r--spec/features/projects/new_project_from_template_spec.rb26
-rw-r--r--spec/features/projects/new_project_spec.rb560
-rw-r--r--spec/features/projects/pages/user_adds_domain_spec.rb1
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb1
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb70
-rw-r--r--spec/features/projects/product_analytics/events_spec.rb1
-rw-r--r--spec/features/projects/releases/user_views_releases_spec.rb172
-rw-r--r--spec/features/projects/services/user_activates_issue_tracker_spec.rb2
-rw-r--r--spec/features/projects/settings/access_tokens_spec.rb2
-rw-r--r--spec/features/projects/settings/monitor_settings_spec.rb (renamed from spec/features/projects/settings/operations_settings_spec.rb)26
-rw-r--r--spec/features/projects/settings/packages_settings_spec.rb26
-rw-r--r--spec/features/projects/settings/project_settings_spec.rb1
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb160
-rw-r--r--spec/features/projects/settings/repository_settings_spec.rb4
-rw-r--r--spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb12
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb28
-rw-r--r--spec/features/projects/snippets/user_views_snippets_spec.rb1
-rw-r--r--spec/features/projects/user_changes_project_visibility_spec.rb4
-rw-r--r--spec/features/projects/user_sees_sidebar_spec.rb2
-rw-r--r--spec/features/projects/user_sees_user_popover_spec.rb1
-rw-r--r--spec/features/projects/user_uses_shortcuts_spec.rb67
-rw-r--r--spec/features/runners_spec.rb96
-rw-r--r--spec/features/snippets/spam_snippets_spec.rb1
-rw-r--r--spec/features/unsubscribe_links_spec.rb2
-rw-r--r--spec/features/user_can_display_performance_bar_spec.rb36
-rw-r--r--spec/features/users/add_email_to_existing_account_spec.rb18
-rw-r--r--spec/features/users/signup_spec.rb12
-rw-r--r--spec/features/users/user_browses_projects_on_user_page_spec.rb2
-rw-r--r--spec/features/whats_new_spec.rb18
-rw-r--r--spec/finders/analytics/cycle_analytics/stage_finder_spec.rb24
-rw-r--r--spec/finders/ci/runners_finder_spec.rb51
-rw-r--r--spec/finders/concerns/packages/finder_helper_spec.rb25
-rw-r--r--spec/finders/deploy_tokens/tokens_finder_spec.rb135
-rw-r--r--spec/finders/deployments_finder_spec.rb225
-rw-r--r--spec/finders/environments/environment_names_finder_spec.rb (renamed from spec/finders/environment_names_finder_spec.rb)2
-rw-r--r--spec/finders/environments/environments_by_deployments_finder_spec.rb (renamed from spec/finders/environments_by_deployments_finder_spec.rb)2
-rw-r--r--spec/finders/environments/environments_finder_spec.rb (renamed from spec/finders/environments_finder_spec.rb)2
-rw-r--r--spec/finders/issues_finder_spec.rb2
-rw-r--r--spec/finders/merge_requests_finder_spec.rb31
-rw-r--r--spec/finders/packages/composer/packages_finder_spec.rb25
-rw-r--r--spec/finders/packages/conan/package_finder_spec.rb3
-rw-r--r--spec/finders/packages/generic/package_finder_spec.rb7
-rw-r--r--spec/finders/packages/go/package_finder_spec.rb13
-rw-r--r--spec/finders/packages/group_or_project_package_finder_spec.rb22
-rw-r--r--spec/finders/packages/group_packages_finder_spec.rb2
-rw-r--r--spec/finders/packages/maven/package_finder_spec.rb77
-rw-r--r--spec/finders/packages/npm/package_finder_spec.rb10
-rw-r--r--spec/finders/packages/nuget/package_finder_spec.rb10
-rw-r--r--spec/finders/packages/package_finder_spec.rb14
-rw-r--r--spec/finders/packages/packages_finder_spec.rb2
-rw-r--r--spec/finders/packages/pypi/package_finder_spec.rb45
-rw-r--r--spec/finders/packages/pypi/packages_finder_spec.rb70
-rw-r--r--spec/finders/projects/groups_finder_spec.rb33
-rw-r--r--spec/finders/projects/members/effective_access_level_finder_spec.rb257
-rw-r--r--spec/finders/projects_finder_spec.rb2
-rw-r--r--spec/finders/repositories/branch_names_finder_spec.rb39
-rw-r--r--spec/finders/template_finder_spec.rb3
-rw-r--r--spec/finders/users_with_pending_todos_finder_spec.rb19
-rw-r--r--spec/fixtures/api/schemas/entities/dag_job.json2
-rw-r--r--spec/fixtures/api/schemas/entities/discussion.json8
-rw-r--r--spec/fixtures/api/schemas/entities/downloadable_artifact.json19
-rw-r--r--spec/fixtures/api/schemas/entities/github/commit.json4
-rw-r--r--spec/fixtures/api/schemas/entities/github/pull_request.json18
-rw-r--r--spec/fixtures/api/schemas/entities/issue.json2
-rw-r--r--spec/fixtures/api/schemas/entities/issue_board.json2
-rw-r--r--spec/fixtures/api/schemas/entities/issue_sidebar.json2
-rw-r--r--spec/fixtures/api/schemas/entities/lint_job_entity.json6
-rw-r--r--spec/fixtures/api/schemas/entities/lint_result_entity.json2
-rw-r--r--spec/fixtures/api/schemas/evidences/issue.json4
-rw-r--r--spec/fixtures/api/schemas/evidences/milestone.json4
-rw-r--r--spec/fixtures/api/schemas/evidences/project.json2
-rw-r--r--spec/fixtures/api/schemas/evidences/release.json2
-rw-r--r--spec/fixtures/api/schemas/feature_flag.json4
-rw-r--r--spec/fixtures/api/schemas/feature_flag_scope.json4
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json13
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_details.json9
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_maven_metadata.json28
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_nuget_metadata.json19
-rw-r--r--spec/fixtures/api/schemas/issue.json3
-rw-r--r--spec/fixtures/api/schemas/pipeline_schedule.json16
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/board.json8
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/commit/basic.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/commit_note.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/deploy_token.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/environment.json5
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flag.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/issue.json16
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/issue_link.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/job.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/label_basic.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/members.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/merge_request.json16
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/milestone.json8
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json8
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/notes.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json1
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/package.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/package_files.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/module.json12
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/modules.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/version.json38
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/versions.json9
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/pipeline/detail.json10
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/project.json45
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/project/identity.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/projects.json46
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/release.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/release/evidence.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json2
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/snippets.json4
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/user/public.json8
-rw-r--r--spec/fixtures/api/schemas/registry/tag.json4
-rw-r--r--spec/fixtures/api/schemas/release.json2
-rw-r--r--spec/fixtures/bulk_imports/labels.ndjson.gz  bin 0 -> 202 bytes
-rw-r--r--spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml25
-rw-r--r--spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml23
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml19
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml10
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_full_syntax.json3
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_options.json3
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_values.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/dashboard.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_panel_groups.json3
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/links.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_full_syntax.json3
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_options.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json3
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json5
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/templating.json3
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_full_syntax.json3
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_options.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/variables.json7
-rw-r--r--spec/fixtures/packages/helm/rook-ceph-v1.5.8.tgz  bin 0 -> 15831 bytes
-rw-r--r--spec/fixtures/packages/terraform_module/module-system-v1.0.0.tgz  bin 0 -> 806 bytes
-rw-r--r--spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json42
-rw-r--r--spec/fixtures/product_intelligence/survey_response_schema.json52
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-report-minimal.json68
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-report.json4
-rw-r--r--spec/fixtures/whats_new/20201225_01_04.yml19
-rw-r--r--spec/frontend/__helpers__/mock_apollo_helper.js13
-rw-r--r--spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap1
-rw-r--r--spec/frontend/actioncable_link_spec.js110
-rw-r--r--spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js2
-rw-r--r--spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js134
-rw-r--r--spec/frontend/admin/analytics/devops_score/mock_data.js46
-rw-r--r--spec/frontend/admin/users/components/actions/actions_spec.js5
-rw-r--r--spec/frontend/admin/users/components/user_actions_spec.js12
-rw-r--r--spec/frontend/admin/users/components/users_table_spec.js87
-rw-r--r--spec/frontend/admin/users/mock_data.js15
-rw-r--r--spec/frontend/admin/users/tabs_spec.js37
-rw-r--r--spec/frontend/alert_management/components/alert_management_table_spec.js19
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_form_spec.js3
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js141
-rw-r--r--spec/frontend/alerts_settings/components/util.js24
-rw-r--r--spec/frontend/alerts_settings/utils/mapping_transformations_spec.js35
-rw-r--r--spec/frontend/api_spec.js4
-rw-r--r--spec/frontend/batch_comments/components/preview_dropdown_spec.js71
-rw-r--r--spec/frontend/behaviors/date_picker_spec.js30
-rw-r--r--spec/frontend/behaviors/shortcuts/shortcut_spec.js96
-rw-r--r--spec/frontend/boards/components/board_card_spec.js126
-rw-r--r--spec/frontend/boards/components/board_content_sidebar_spec.js4
-rw-r--r--spec/frontend/boards/components/board_filtered_search_spec.js146
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js32
-rw-r--r--spec/frontend/boards/mock_data.js6
-rw-r--r--spec/frontend/boards/stores/actions_spec.js166
-rw-r--r--spec/frontend/boards/stores/getters_spec.js9
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js46
-rw-r--r--spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap4
-rw-r--r--spec/frontend/branches/components/divergence_graph_spec.js12
-rw-r--r--spec/frontend/branches/divergence_graph_spec.js8
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js2
-rw-r--r--spec/frontend/code_navigation/components/app_spec.js2
-rw-r--r--spec/frontend/code_navigation/store/mutations_spec.js4
-rw-r--r--spec/frontend/code_quality_walkthrough/components/__snapshots__/step_spec.js.snap174
-rw-r--r--spec/frontend/code_quality_walkthrough/components/step_spec.js156
-rw-r--r--spec/frontend/commit/pipelines/pipelines_spec.js1
-rw-r--r--spec/frontend/commits_spec.js4
-rw-r--r--spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap9
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js53
-rw-r--r--spec/frontend/content_editor/components/toolbar_button_spec.js98
-rw-r--r--spec/frontend/content_editor/components/top_toolbar_spec.js76
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec.js7
-rw-r--r--spec/frontend/content_editor/services/build_serializer_config_spec.js38
-rw-r--r--spec/frontend/content_editor/services/create_content_editor_spec.js51
-rw-r--r--spec/frontend/content_editor/services/create_editor_spec.js39
-rw-r--r--spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js108
-rw-r--r--spec/frontend/content_editor/test_utils.js34
-rw-r--r--spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap2
-rw-r--r--spec/frontend/contributors/component/contributors_spec.js2
-rw-r--r--spec/frontend/create_merge_request_dropdown_spec.js6
-rw-r--r--spec/frontend/cycle_analytics/mock_data.js186
-rw-r--r--spec/frontend/cycle_analytics/store/actions_spec.js130
-rw-r--r--spec/frontend/cycle_analytics/store/mutations_spec.js83
-rw-r--r--spec/frontend/cycle_analytics/utils_spec.js77
-rw-r--r--spec/frontend/deploy_freeze/store/mutations_spec.js12
-rw-r--r--spec/frontend/deploy_keys/components/action_btn_spec.js29
-rw-r--r--spec/frontend/deploy_keys/components/app_spec.js20
-rw-r--r--spec/frontend/deploy_keys/components/confirm_modal_spec.js28
-rw-r--r--spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap4
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js9
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js47
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js67
-rw-r--r--spec/frontend/diffs/components/inline_diff_table_row_spec.js13
-rw-r--r--spec/frontend/diffs/mock_data/diff_metadata.js2
-rw-r--r--spec/frontend/editor/editor_lite_extension_base_spec.js32
-rw-r--r--spec/frontend/environments/environment_table_spec.js44
-rw-r--r--spec/frontend/environments/environments_store_spec.js23
-rw-r--r--spec/frontend/environments/mock_data.js8
-rw-r--r--spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js15
-rw-r--r--spec/frontend/experimentation/components/gitlab_experiment_spec.js (renamed from spec/frontend/experimentation/components/experiment_spec.js)2
-rw-r--r--spec/frontend/experimentation/utils_spec.js20
-rw-r--r--spec/frontend/feature_flags/components/edit_feature_flag_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/form_spec.js2
-rw-r--r--spec/frontend/fixtures/api_markdown.yml4
-rw-r--r--spec/frontend/fixtures/merge_requests.rb8
-rw-r--r--spec/frontend/fixtures/raw.rb4
-rw-r--r--spec/frontend/fixtures/releases.rb20
-rw-r--r--spec/frontend/flash_spec.js14
-rw-r--r--spec/frontend/frequent_items/components/app_spec.js365
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_item_spec.js16
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_spec.js10
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_search_input_spec.js16
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js5
-rw-r--r--spec/frontend/groups/components/invite_members_banner_spec.js26
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js1
-rw-r--r--spec/frontend/ide/lib/alerts/environment_spec.js21
-rw-r--r--spec/frontend/ide/services/index_spec.js33
-rw-r--r--spec/frontend/ide/stores/actions/alert_spec.js46
-rw-r--r--spec/frontend/ide/stores/actions_spec.js19
-rw-r--r--spec/frontend/ide/stores/getters/alert_spec.js46
-rw-r--r--spec/frontend/ide/stores/mutations/alert_spec.js26
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_row_spec.js19
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js69
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js231
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/fixtures.js6
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js109
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js14
-rw-r--r--spec/frontend/incidents/components/incidents_list_spec.js35
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap2
-rw-r--r--spec/frontend/integrations/edit/components/active_checkbox_spec.js16
-rw-r--r--spec/frontend/integrations/edit/components/jira_issues_fields_spec.js14
-rw-r--r--spec/frontend/invite_member/components/invite_member_modal_spec.js67
-rw-r--r--spec/frontend/invite_member/components/invite_member_trigger_mock_data.js7
-rw-r--r--spec/frontend/invite_member/components/invite_member_trigger_spec.js48
-rw-r--r--spec/frontend/issuable/components/csv_export_modal_spec.js2
-rw-r--r--spec/frontend/issuable/components/status_box_spec.js (renamed from spec/frontend/merge_request/components/status_box_spec.js)18
-rw-r--r--spec/frontend/issuable_form_spec.js7
-rw-r--r--spec/frontend/issuable_list/components/issuable_item_spec.js26
-rw-r--r--spec/frontend/issue_show/components/form_spec.js54
-rw-r--r--spec/frontend/issues_list/components/issues_list_app_spec.js191
-rw-r--r--spec/frontend/issues_list/mock_data.js127
-rw-r--r--spec/frontend/issues_list/utils_spec.js109
-rw-r--r--spec/frontend/jira_connect/components/groups_list_spec.js137
-rw-r--r--spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap12
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js1
-rw-r--r--spec/frontend/jobs/components/table/cells.vue/duration_cell_spec.js81
-rw-r--r--spec/frontend/jobs/components/table/cells.vue/job_cell_spec.js140
-rw-r--r--spec/frontend/jobs/components/table/cells.vue/pipeline_cell_spec.js82
-rw-r--r--spec/frontend/jobs/components/table/job_table_app_spec.js110
-rw-r--r--spec/frontend/jobs/components/table/jobs_table_empty_state_spec.js37
-rw-r--r--spec/frontend/jobs/components/table/jobs_table_spec.js49
-rw-r--r--spec/frontend/jobs/mock_data.js191
-rw-r--r--spec/frontend/learn_gitlab/track_learn_gitlab_spec.js21
-rw-r--r--spec/frontend/lib/utils/number_utility_spec.js26
-rw-r--r--spec/frontend/lib/utils/recurrence_spec.js333
-rw-r--r--spec/frontend/lib/utils/text_markdown_spec.js19
-rw-r--r--spec/frontend/lib/utils/uuids_spec.js (renamed from spec/frontend/diffs/utils/uuids_spec.js)2
-rw-r--r--spec/frontend/lib/utils/vuex_module_mappers_spec.js138
-rw-r--r--spec/frontend/logs/components/log_advanced_filters_spec.js3
-rw-r--r--spec/frontend/logs/stores/actions_spec.js2
-rw-r--r--spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js31
-rw-r--r--spec/frontend/members/components/members_tabs_spec.js194
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js98
-rw-r--r--spec/frontend/members/index_spec.js13
-rw-r--r--spec/frontend/members/mock_data.js25
-rw-r--r--spec/frontend/members/utils_spec.js13
-rw-r--r--spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js14
-rw-r--r--spec/frontend/merge_conflicts/mock_data.js4
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap2
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_builder_spec.js1
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js28
-rw-r--r--spec/frontend/monitoring/components/dashboard_url_time_spec.js1
-rw-r--r--spec/frontend/monitoring/components/dashboards_dropdown_spec.js2
-rw-r--r--spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js2
-rw-r--r--spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js2
-rw-r--r--spec/frontend/monitoring/mock_data.js18
-rw-r--r--spec/frontend/monitoring/router_spec.js3
-rw-r--r--spec/frontend/nav/components/top_nav_app_spec.js68
-rw-r--r--spec/frontend/nav/components/top_nav_container_view_spec.js114
-rw-r--r--spec/frontend/nav/components/top_nav_dropdown_menu_spec.js157
-rw-r--r--spec/frontend/nav/components/top_nav_menu_item_spec.js74
-rw-r--r--spec/frontend/nav/mock_data.js35
-rw-r--r--spec/frontend/notebook/cells/markdown_spec.js37
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js4
-rw-r--r--spec/frontend/notes/old_notes_spec.js2
-rw-r--r--spec/frontend/notes/stores/actions_spec.js79
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap37
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap22
-rw-r--r--spec/frontend/packages/details/components/maven_installation_spec.js47
-rw-r--r--spec/frontend/packages/details/components/npm_installation_spec.js58
-rw-r--r--spec/frontend/packages/details/components/package_files_spec.js11
-rw-r--r--spec/frontend/packages/details/store/getters_spec.js20
-rw-r--r--spec/frontend/packages/list/stores/actions_spec.js26
-rw-r--r--spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap2
-rw-r--r--spec/frontend/packages/shared/components/package_list_row_spec.js38
-rw-r--r--spec/frontend/packages/shared/components/package_path_spec.js86
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/__snapshots__/settings_titles_spec.js.snap18
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js146
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/generic_settings_spec.js54
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js84
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js150
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/settings_titles_spec.js25
-rw-r--r--spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/settings/group/mock_data.js14
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap (renamed from spec/frontend/registry/settings/__snapshots__/utils_spec.js.snap)0
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap (renamed from spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap)0
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/expiration_dropdown_spec.js (renamed from spec/frontend/registry/settings/components/expiration_dropdown_spec.js)2
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/expiration_input_spec.js (renamed from spec/frontend/registry/settings/components/expiration_input_spec.js)4
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/expiration_run_text_spec.js (renamed from spec/frontend/registry/settings/components/expiration_run_text_spec.js)7
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/expiration_toggle_spec.js (renamed from spec/frontend/registry/settings/components/expiration_toggle_spec.js)4
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js (renamed from spec/frontend/registry/settings/components/registry_settings_app_spec.js)8
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js (renamed from spec/frontend/registry/settings/components/settings_form_spec.js)10
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/graphql/cache_updated_spec.js (renamed from spec/frontend/registry/settings/graphql/cache_updated_spec.js)4
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/mock_data.js (renamed from spec/frontend/registry/settings/mock_data.js)0
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/utils_spec.js (renamed from spec/frontend/registry/settings/utils_spec.js)2
-rw-r--r--spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap4
-rw-r--r--spec/frontend/pages/admin/users/components/delete_user_modal_spec.js21
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_form_spec.js152
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap73
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap31
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap2
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js4
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js3
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/mock_data.js12
-rw-r--r--spec/frontend/pages/projects/new/components/app_spec.js77
-rw-r--r--spec/frontend/pages/projects/new/components/new_project_push_tip_popover_spec.js (renamed from spec/frontend/projects/experiment_new_project_creation/components/new_project_push_tip_popover_spec.js)2
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js269
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js47
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/cards/getting_started_card_spec.js26
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js51
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/cards/visualize_and_lint_card_spec.js26
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js142
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/ui/demo_job_pill_spec.js27
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/ui/pipeline_visual_reference_spec.js31
-rw-r--r--spec/frontend/pipeline_editor/components/editor/text_editor_spec.js5
-rw-r--r--spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js293
-rw-r--r--spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js20
-rw-r--r--spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js107
-rw-r--r--spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js19
-rw-r--r--spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js137
-rw-r--r--spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap4
-rw-r--r--spec/frontend/pipeline_editor/graphql/resolvers_spec.js18
-rw-r--r--spec/frontend/pipeline_editor/mock_data.js63
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js99
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_home_spec.js26
-rw-r--r--spec/frontend/pipeline_new/components/pipeline_new_form_spec.js49
-rw-r--r--spec/frontend/pipeline_new/components/refs_dropdown_spec.js4
-rw-r--r--spec/frontend/pipeline_new/mock_data.js26
-rw-r--r--spec/frontend/pipeline_new/utils/filter_variables_spec.js21
-rw-r--r--spec/frontend/pipeline_new/utils/format_refs_spec.js2
-rw-r--r--spec/frontend/pipelines/__snapshots__/parsing_utils_spec.js.snap373
-rw-r--r--spec/frontend/pipelines/components/pipelines_filtered_search_spec.js11
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js1
-rw-r--r--spec/frontend/pipelines/graph/graph_component_wrapper_spec.js154
-rw-r--r--spec/frontend/pipelines/graph/graph_view_selector_spec.js189
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_column_spec.js21
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_mock_data.js4
-rw-r--r--spec/frontend/pipelines/graph/mock_data.js21
-rw-r--r--spec/frontend/pipelines/graph/mock_data_legacy.js8
-rw-r--r--spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap6
-rw-r--r--spec/frontend/pipelines/graph_shared/links_inner_spec.js155
-rw-r--r--spec/frontend/pipelines/graph_shared/links_layer_spec.js200
-rw-r--r--spec/frontend/pipelines/mock_data.js6
-rw-r--r--spec/frontend/pipelines/parsing_utils_spec.js (renamed from spec/frontend/pipelines/components/dag/parsing_utils_spec.js)46
-rw-r--r--spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js18
-rw-r--r--spec/frontend/pipelines/pipeline_multi_actions_spec.js112
-rw-r--r--spec/frontend/pipelines/pipelines_artifacts_spec.js110
-rw-r--r--spec/frontend/pipelines/pipelines_ci_templates_spec.js2
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js29
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js5
-rw-r--r--spec/frontend/pipelines/test_reports/empty_state_spec.js45
-rw-r--r--spec/frontend/pipelines/test_reports/test_case_details_spec.js55
-rw-r--r--spec/frontend/pipelines/test_reports/test_reports_spec.js45
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js2
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js2
-rw-r--r--spec/frontend/project_find_file_spec.js6
-rw-r--r--spec/frontend/projects/compare/components/app_legacy_spec.js2
-rw-r--r--spec/frontend/projects/compare/components/app_spec.js77
-rw-r--r--spec/frontend/projects/compare/components/mock_data.js37
-rw-r--r--spec/frontend/projects/compare/components/repo_dropdown_spec.js56
-rw-r--r--spec/frontend/projects/compare/components/revision_card_spec.js8
-rw-r--r--spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js2
-rw-r--r--spec/frontend/projects/compare/components/revision_dropdown_spec.js22
-rw-r--r--spec/frontend/projects/experiment_new_project_creation/components/app_spec.js144
-rw-r--r--spec/frontend/projects/pipelines/charts/components/app_spec.js20
-rw-r--r--spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_area_chart_spec.js2
-rw-r--r--spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js6
-rw-r--r--spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js2
-rw-r--r--spec/frontend/ref/stores/mutations_spec.js4
-rw-r--r--spec/frontend/registry/explorer/components/details_page/details_header_spec.js103
-rw-r--r--spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js79
-rw-r--r--spec/frontend/registry/explorer/components/details_page/tags_list_spec.js218
-rw-r--r--spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js47
-rw-r--r--spec/frontend/registry/explorer/mock_data.js25
-rw-r--r--spec/frontend/registry/explorer/pages/details_spec.js107
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap62
-rw-r--r--spec/frontend/releases/components/app_index_spec.js291
-rw-r--r--spec/frontend/releases/components/app_show_spec.js4
-rw-r--r--spec/frontend/releases/components/releases_pagination_graphql_spec.js175
-rw-r--r--spec/frontend/releases/components/releases_pagination_rest_spec.js72
-rw-r--r--spec/frontend/releases/components/releases_pagination_spec.js169
-rw-r--r--spec/frontend/releases/components/tag_field_new_spec.js72
-rw-r--r--spec/frontend/releases/stores/getters_spec.js22
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js344
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js89
-rw-r--r--spec/frontend/releases/stores/modules/list/actions_spec.js172
-rw-r--r--spec/frontend/releases/stores/modules/list/mutations_spec.js28
-rw-r--r--spec/frontend/releases/util_spec.js164
-rw-r--r--spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js16
-rw-r--r--spec/frontend/reports/codequality_report/mock_data.js91
-rw-r--r--spec/frontend/reports/codequality_report/store/actions_spec.js183
-rw-r--r--spec/frontend/reports/codequality_report/store/mutations_spec.js12
-rw-r--r--spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js153
-rw-r--r--spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js74
-rw-r--r--spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js4
-rw-r--r--spec/frontend/reports/grouped_test_report/store/mutations_spec.js2
-rw-r--r--spec/frontend/reports/mock_data/mock_data.js2
-rw-r--r--spec/frontend/reports/mock_data/recent_failures_report.json4
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js155
-rw-r--r--spec/frontend/repository/components/table/index_spec.js4
-rw-r--r--spec/frontend/repository/components/table/parent_row_spec.js14
-rw-r--r--spec/frontend/repository/components/table/row_spec.js6
-rw-r--r--spec/frontend/repository/components/upload_blob_modal_spec.js6
-rw-r--r--spec/frontend/repository/log_tree_spec.js6
-rw-r--r--spec/frontend/repository/pages/blob_spec.js4
-rw-r--r--spec/frontend/repository/router_spec.js16
-rw-r--r--spec/frontend/repository/utils/title_spec.js4
-rw-r--r--spec/frontend/runner/components/runner_type_badge_spec.js40
-rw-r--r--spec/frontend/runner/runner_detail/runner_detail_app_spec.js29
-rw-r--r--spec/frontend/runner/runner_detail/runner_details_app_spec.js71
-rw-r--r--spec/frontend/security_configuration/components/feature_card_spec.js245
-rw-r--r--spec/frontend/security_configuration/components/utils.js8
-rw-r--r--spec/frontend/security_configuration/configuration_table_spec.js16
-rw-r--r--spec/frontend/security_configuration/manage_sast_spec.js136
-rw-r--r--spec/frontend/security_configuration/upgrade_spec.js2
-rw-r--r--spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js13
-rw-r--r--spec/frontend/sidebar/assignees_realtime_spec.js108
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js255
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js38
-rw-r--r--spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js183
-rw-r--r--spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js62
-rw-r--r--spec/frontend/sidebar/components/date/sidebar_inherit_date_spec.js53
-rw-r--r--spec/frontend/sidebar/components/due_date/sidebar_due_date_widget_spec.js106
-rw-r--r--spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js89
-rw-r--r--spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js131
-rw-r--r--spec/frontend/sidebar/components/time_tracking/mock_data.js102
-rw-r--r--spec/frontend/sidebar/components/time_tracking/report_spec.js125
-rw-r--r--spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js28
-rw-r--r--spec/frontend/sidebar/mock_data.js198
-rw-r--r--spec/frontend/sidebar/sidebar_assignees_spec.js1
-rw-r--r--spec/frontend/sidebar/sidebar_subscriptions_spec.js36
-rw-r--r--spec/frontend/static_site_editor/mock_data.js6
-rw-r--r--spec/frontend/static_site_editor/pages/home_spec.js1
-rw-r--r--spec/frontend/static_site_editor/services/generate_branch_name_spec.js8
-rw-r--r--spec/frontend/static_site_editor/services/renderers/render_image_spec.js10
-rw-r--r--spec/frontend/static_site_editor/services/submit_content_changes_spec.js23
-rw-r--r--spec/frontend/task_list_spec.js50
-rw-r--r--spec/frontend/tracking_spec.js26
-rw-r--r--spec/frontend/users_select/index_spec.js192
-rw-r--r--spec/frontend/users_select/test_helper.js152
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js28
-rw-r--r--spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap8
-rw-r--r--spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_ready_to_merge_spec.js.snap3
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js10
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js6
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js560
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js6
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js40
-rw-r--r--spec/frontend/vue_mr_widget/mock_data.js12
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js29
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_assignees_spec.js (renamed from spec/frontend/vue_shared/alert_details/sidebar/alert_managment_sidebar_assignees_spec.js)83
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js68
-rw-r--r--spec/frontend/vue_shared/components/alerts_deprecation_warning_spec.js48
-rw-r--r--spec/frontend/vue_shared/components/commit_spec.js30
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js87
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js38
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js228
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js78
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js37
-rw-r--r--spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/keep_alive_slots_spec.js122
-rw-r--r--spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap2
-rw-r--r--spec/frontend/vue_shared/components/markdown/header_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/registry/list_item_spec.js8
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js40
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js50
-rw-r--r--spec/frontend/vue_shared/components/time_ago_tooltip_spec.js71
-rw-r--r--spec/frontend/vue_shared/components/user_select_spec.js311
-rw-r--r--spec/frontend/vue_shared/components/vuex_module_provider_spec.js47
-rw-r--r--spec/frontend/vue_shared/directives/validation_spec.js201
-rw-r--r--spec/frontend/vue_shared/new_namespace/components/legacy_container_spec.js (renamed from spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js)2
-rw-r--r--spec/frontend/vue_shared/new_namespace/components/welcome_spec.js (renamed from spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js)36
-rw-r--r--spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js114
-rw-r--r--spec/frontend/vue_shared/security_reports/components/apollo_mocks.js12
-rw-r--r--spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js184
-rw-r--r--spec/frontend/vue_shared/security_reports/mock_data.js120
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js13
-rw-r--r--spec/frontend/vue_shared/security_reports/utils_spec.js25
-rw-r--r--spec/frontend/whats_new/components/app_spec.js11
-rw-r--r--spec/frontend/whats_new/components/feature_spec.js46
-rw-r--r--spec/frontend/whats_new/store/actions_spec.js21
-rw-r--r--spec/graphql/graphql_triggers_spec.rb20
-rw-r--r--spec/graphql/mutations/alert_management/alerts/todo/create_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/create_alert_issue_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/http_integration/create_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/http_integration/destroy_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/http_integration/reset_token_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/http_integration/update_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb1
-rw-r--r--spec/graphql/mutations/alert_management/update_alert_status_spec.rb1
-rw-r--r--spec/graphql/mutations/boards/lists/update_spec.rb58
-rw-r--r--spec/graphql/mutations/commits/create_spec.rb10
-rw-r--r--spec/graphql/mutations/issues/create_spec.rb13
-rw-r--r--spec/graphql/mutations/issues/set_due_date_spec.rb19
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb38
-rw-r--r--spec/graphql/mutations/merge_requests/set_draft_spec.rb55
-rw-r--r--spec/graphql/mutations/merge_requests/set_locked_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_wip_spec.rb2
-rw-r--r--spec/graphql/mutations/namespace/package_settings/update_spec.rb12
-rw-r--r--spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb115
-rw-r--r--spec/graphql/mutations/security/ci_configuration/configure_secret_detection_spec.rb13
-rw-r--r--spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/alert_management/alert_status_counts_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/boards_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/branch_commit_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/ci/runners_resolver_spec.rb136
-rw-r--r--spec/graphql/resolvers/ci/template_resolver_spec.rb33
-rw-r--r--spec/graphql/resolvers/design_management/designs_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/design_management/version/designs_at_version_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/design_management/versions_resolver_spec.rb14
-rw-r--r--spec/graphql/resolvers/group_milestones_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/group_packages_resolver_spec.rb9
-rw-r--r--spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/metadata_resolver_spec.rb5
-rw-r--r--spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/packages_base_resolver_spec.rb15
-rw-r--r--spec/graphql/resolvers/project_packages_resolver_spec.rb11
-rw-r--r--spec/graphql/resolvers/project_pipeline_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/project_pipelines_resolver_spec.rb1
-rw-r--r--spec/graphql/resolvers/repository_branch_names_resolver_spec.rb45
-rw-r--r--spec/graphql/subscriptions/issuable_updated_spec.rb68
-rw-r--r--spec/graphql/types/blob_viewer_type_spec.rb12
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb7
-rw-r--r--spec/graphql/types/ci/pipeline_type_spec.rb5
-rw-r--r--spec/graphql/types/ci/runner_type_spec.rb16
-rw-r--r--spec/graphql/types/ci/template_type_spec.rb16
-rw-r--r--spec/graphql/types/design_management/version_type_spec.rb2
-rw-r--r--spec/graphql/types/duration_type_spec.rb29
-rw-r--r--spec/graphql/types/issuable_type_spec.rb23
-rw-r--r--spec/graphql/types/label_type_spec.rb1
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb2
-rw-r--r--spec/graphql/types/metadata/kas_type_spec.rb8
-rw-r--r--spec/graphql/types/mutation_type_spec.rb18
-rw-r--r--spec/graphql/types/packages/maven/metadatum_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/nuget/metadatum_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/package_status_enum_spec.rb9
-rw-r--r--spec/graphql/types/packages/package_type_enum_spec.rb2
-rw-r--r--spec/graphql/types/packages/package_type_spec.rb1
-rw-r--r--spec/graphql/types/permission_types/ci/job_spec.rb13
-rw-r--r--spec/graphql/types/project_type_spec.rb10
-rw-r--r--spec/graphql/types/projects/services_enum_spec.rb2
-rw-r--r--spec/graphql/types/query_type_spec.rb27
-rw-r--r--spec/graphql/types/repository/blob_type_spec.rb29
-rw-r--r--spec/graphql/types/repository_type_spec.rb2
-rw-r--r--spec/graphql/types/subscription_type_spec.rb13
-rw-r--r--spec/graphql/types/timelog_type_spec.rb10
-rw-r--r--spec/graphql/types/user_type_spec.rb6
-rw-r--r--spec/haml_lint/linter/documentation_links_spec.rb6
-rw-r--r--spec/helpers/application_helper_spec.rb29
-rw-r--r--spec/helpers/auth_helper_spec.rb47
-rw-r--r--spec/helpers/avatars_helper_spec.rb31
-rw-r--r--spec/helpers/boards_helper_spec.rb39
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb64
-rw-r--r--spec/helpers/commits_helper_spec.rb54
-rw-r--r--spec/helpers/dev_ops_report_helper_spec.rb41
-rw-r--r--spec/helpers/environments_helper_spec.rb51
-rw-r--r--spec/helpers/gitlab_routing_helper_spec.rb25
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb108
-rw-r--r--spec/helpers/groups_helper_spec.rb285
-rw-r--r--spec/helpers/ide_helper_spec.rb30
-rw-r--r--spec/helpers/invite_members_helper_spec.rb107
-rw-r--r--spec/helpers/issuables_helper_spec.rb8
-rw-r--r--spec/helpers/issues_helper_spec.rb72
-rw-r--r--spec/helpers/learn_gitlab_helper_spec.rb66
-rw-r--r--spec/helpers/namespaces_helper_spec.rb28
-rw-r--r--spec/helpers/nav/top_nav_helper_spec.rb376
-rw-r--r--spec/helpers/nav_helper_spec.rb4
-rw-r--r--spec/helpers/page_layout_helper_spec.rb2
-rw-r--r--spec/helpers/preferences_helper_spec.rb14
-rw-r--r--spec/helpers/projects/alert_management_helper_spec.rb35
-rw-r--r--spec/helpers/projects/project_members_helper_spec.rb90
-rw-r--r--spec/helpers/projects_helper_spec.rb143
-rw-r--r--spec/helpers/registrations_helper_spec.rb29
-rw-r--r--spec/helpers/users_helper_spec.rb12
-rw-r--r--spec/helpers/webpack_helper_spec.rb36
-rw-r--r--spec/helpers/whats_new_helper_spec.rb57
-rw-r--r--spec/initializers/6_validations_spec.rb2
-rw-r--r--spec/initializers/mail_encoding_patch_spec.rb2
-rw-r--r--spec/initializers/pages_storage_check_spec.rb80
-rw-r--r--spec/initializers/secret_token_spec.rb4
-rw-r--r--spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js1
-rw-r--r--spec/lib/api/entities/bulk_imports/export_status_spec.rb20
-rw-r--r--spec/lib/api/entities/release_spec.rb39
-rw-r--r--spec/lib/api/helpers/caching_spec.rb160
-rw-r--r--spec/lib/api/helpers/related_resources_helpers_spec.rb6
-rw-r--r--spec/lib/api/helpers_spec.rb98
-rw-r--r--spec/lib/banzai/cross_project_reference_spec.rb15
-rw-r--r--spec/lib/banzai/filter/custom_emoji_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb80
-rw-r--r--spec/lib/banzai/filter/references/design_reference_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/references/issue_reference_filter_spec.rb24
-rw-r--r--spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb11
-rw-r--r--spec/lib/banzai/filter/references/project_reference_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/references/reference_cache_spec.rb144
-rw-r--r--spec/lib/banzai/filter/references/reference_filter_spec.rb27
-rw-r--r--spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb27
-rw-r--r--spec/lib/banzai/filter/references/user_reference_filter_spec.rb25
-rw-r--r--spec/lib/banzai/pipeline/post_process_pipeline_spec.rb54
-rw-r--r--spec/lib/banzai/reference_parser/merge_request_parser_spec.rb47
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb102
-rw-r--r--spec/lib/bulk_imports/stage_spec.rb34
-rw-r--r--spec/lib/declarative_policy/overrides_spec.rb82
-rw-r--r--spec/lib/declarative_policy_spec.rb38
-rw-r--r--spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb68
-rw-r--r--spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb27
-rw-r--r--spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb27
-rw-r--r--spec/lib/gitlab/alert_management/payload/base_spec.rb6
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb25
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb2
-rw-r--r--spec/lib/gitlab/api_authentication/token_locator_spec.rb109
-rw-r--r--spec/lib/gitlab/api_authentication/token_resolver_spec.rb49
-rw-r--r--spec/lib/gitlab/auth_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb21
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb21
-rw-r--r--spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb73
-rw-r--r--spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb126
-rw-r--r--spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb29
-rw-r--r--spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb12
-rw-r--r--spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb241
-rw-r--r--spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb52
-rw-r--r--spec/lib/gitlab/cache_spec.rb29
-rw-r--r--spec/lib/gitlab/chat/responder_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/cache_spec.rb98
-rw-r--r--spec/lib/gitlab/ci/build/policy/changes_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/build/releaser_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/build/step_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb352
-rw-r--r--spec/lib/gitlab/ci/config/entry/caches_spec.rb70
-rw-r--r--spec/lib/gitlab/ci/config/entry/default_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/hidden_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb36
-rw-r--r--spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb127
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/external/file/project_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/external/file/template_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/build_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb22
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb247
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb86
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/reports/test_failure_history_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/status/core_spec.rb17
-rw-r--r--spec/lib/gitlab/ci/syntax_templates_spec.rb25
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/templates_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/trace/chunked_io_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/trace_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb149
-rw-r--r--spec/lib/gitlab/class_attributes_spec.rb62
-rw-r--r--spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb2
-rw-r--r--spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb2
-rw-r--r--spec/lib/gitlab/conan_token_spec.rb4
-rw-r--r--spec/lib/gitlab/conflict/file_spec.rb12
-rw-r--r--spec/lib/gitlab/content_security_policy/config_loader_spec.rb29
-rw-r--r--spec/lib/gitlab/data_builder/build_spec.rb6
-rw-r--r--spec/lib/gitlab/data_builder/deployment_spec.rb8
-rw-r--r--spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb102
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_job_spec.rb87
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb110
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb120
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb106
-rw-r--r--spec/lib/gitlab/database/migration_helpers/cascading_namespace_settings_spec.rb50
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb232
-rw-r--r--spec/lib/gitlab/database/migrations/instrumentation_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/observers/query_log_spec.rb38
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb1
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb1
-rw-r--r--spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb36
-rw-r--r--spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb83
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb244
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb18
-rw-r--r--spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb6
-rw-r--r--spec/lib/gitlab/database_spec.rb30
-rw-r--r--spec/lib/gitlab/default_branch_spec.rb24
-rw-r--r--spec/lib/gitlab/diff/highlight_spec.rb32
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb5
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb84
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb27
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb41
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb27
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb53
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing_spec.rb32
-rw-r--r--spec/lib/gitlab/email/receiver_spec.rb15
-rw-r--r--spec/lib/gitlab/encoding_helper_spec.rb59
-rw-r--r--spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb31
-rw-r--r--spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb31
-rw-r--r--spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb31
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb18
-rw-r--r--spec/lib/gitlab/etag_caching/router_spec.rb8
-rw-r--r--spec/lib/gitlab/experimentation/controller_concern_spec.rb27
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb1
-rw-r--r--spec/lib/gitlab/external_authorization/access_spec.rb2
-rw-r--r--spec/lib/gitlab/external_authorization/client_spec.rb2
-rw-r--r--spec/lib/gitlab/git/blame_spec.rb5
-rw-r--r--spec/lib/gitlab/git/branch_spec.rb10
-rw-r--r--spec/lib/gitlab/git/lfs_changes_spec.rb1
-rw-r--r--spec/lib/gitlab/git/merge_base_spec.rb1
-rw-r--r--spec/lib/gitlab/git/push_spec.rb1
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb10
-rw-r--r--spec/lib/gitlab/git/tree_spec.rb6
-rw-r--r--spec/lib/gitlab/git/wiki_spec.rb16
-rw-r--r--spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb2
-rw-r--r--spec/lib/gitlab/git_access_design_spec.rb1
-rw-r--r--spec/lib/gitlab/git_access_project_spec.rb1
-rw-r--r--spec/lib/gitlab/git_access_spec.rb12
-rw-r--r--spec/lib/gitlab/git_access_wiki_spec.rb1
-rw-r--r--spec/lib/gitlab/gitaly_client/remote_service_spec.rb52
-rw-r--r--spec/lib/gitlab/github_import/client_spec.rb5
-rw-r--r--spec/lib/gitlab/github_import/importer/note_importer_spec.rb21
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb26
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb13
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb18
-rw-r--r--spec/lib/gitlab/github_import/markdown_text_spec.rb14
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/deprecation_spec.rb4
-rw-r--r--spec/lib/gitlab/graphql/docs/renderer_spec.rb407
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb5
-rw-r--r--spec/lib/gitlab/graphql/present/field_extension_spec.rb42
-rw-r--r--spec/lib/gitlab/graphql/queries_spec.rb17
-rw-r--r--spec/lib/gitlab/highlight_spec.rb58
-rw-r--r--spec/lib/gitlab/hook_data/key_builder_spec.rb73
-rw-r--r--spec/lib/gitlab/hook_data/project_builder_spec.rb83
-rw-r--r--spec/lib/gitlab/i18n_spec.rb15
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml16
-rw-r--r--spec/lib/gitlab/import_export/attribute_configuration_spec.rb5
-rw-r--r--spec/lib/gitlab/import_export/base/relation_factory_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/command_line_util_spec.rb15
-rw-r--r--spec/lib/gitlab/import_export/config_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/file_importer_spec.rb16
-rw-r--r--spec/lib/gitlab/import_export/group/relation_factory_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb58
-rw-r--r--spec/lib/gitlab/import_export/project/relation_factory_spec.rb72
-rw-r--r--spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/references_configuration_spec.rb5
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml3
-rw-r--r--spec/lib/gitlab/integrations/sti_type_spec.rb116
-rw-r--r--spec/lib/gitlab/jwt_token_spec.rb6
-rw-r--r--spec/lib/gitlab/kas_spec.rb40
-rw-r--r--spec/lib/gitlab/lfs/client_spec.rb2
-rw-r--r--spec/lib/gitlab/memory/instrumentation_spec.rb6
-rw-r--r--spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb42
-rw-r--r--spec/lib/gitlab/middleware/speedscope_spec.rb61
-rw-r--r--spec/lib/gitlab/nav/top_nav_menu_item_spec.rb23
-rw-r--r--spec/lib/gitlab/object_hierarchy_spec.rb2
-rw-r--r--spec/lib/gitlab/pages/settings_spec.rb8
-rw-r--r--spec/lib/gitlab/pages/stores/local_store_spec.rb25
-rw-r--r--spec/lib/gitlab/pagination/keyset/iterator_spec.rb127
-rw-r--r--spec/lib/gitlab/pagination/keyset/order_spec.rb743
-rw-r--r--spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb91
-rw-r--r--spec/lib/gitlab/performance_bar_spec.rb26
-rw-r--r--spec/lib/gitlab/prometheus/adapter_spec.rb4
-rw-r--r--spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb6
-rw-r--r--spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb6
-rw-r--r--spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb2
-rw-r--r--spec/lib/gitlab/regex_spec.rb187
-rw-r--r--spec/lib/gitlab/relative_positioning/item_context_spec.rb16
-rw-r--r--spec/lib/gitlab/repository_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_hash_cache_spec.rb1
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_size_checker_spec.rb1
-rw-r--r--spec/lib/gitlab/repository_size_error_message_spec.rb1
-rw-r--r--spec/lib/gitlab/runtime_spec.rb14
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb82
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_router_spec.rb212
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_spec.rb3
-rw-r--r--spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb14
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb215
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb2
-rw-r--r--spec/lib/gitlab/spamcheck/client_spec.rb121
-rw-r--r--spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb7
-rw-r--r--spec/lib/gitlab/subscription_portal_spec.rb53
-rw-r--r--spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb17
-rw-r--r--spec/lib/gitlab/terraform_registry_token_spec.rb41
-rw-r--r--spec/lib/gitlab/tracking/docs/helper_spec.rb91
-rw-r--r--spec/lib/gitlab/tracking/docs/renderer_spec.rb23
-rw-r--r--spec/lib/gitlab/tracking/event_definition_spec.rb101
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb16
-rw-r--r--spec/lib/gitlab/tracking_spec.rb3
-rw-r--r--spec/lib/gitlab/tree_summary_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb16
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb105
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb89
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb60
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb35
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb9
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb9
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb17
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb15
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb7
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb7
-rw-r--r--spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb23
-rw-r--r--spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb1
-rw-r--r--spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb56
-rw-r--r--spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_metrics_spec.rb47
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb129
-rw-r--r--spec/lib/learn_gitlab/onboarding_spec.rb46
-rw-r--r--spec/lib/learn_gitlab/project_spec.rb (renamed from spec/lib/learn_gitlab_spec.rb)8
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb10
-rw-r--r--spec/lib/security/ci_configuration/sast_build_action_spec.rb (renamed from spec/lib/security/ci_configuration/sast_build_actions_spec.rb)89
-rw-r--r--spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb159
-rw-r--r--spec/lib/sidebars/concerns/container_with_html_options_spec.rb (renamed from spec/models/concerns/sidebars/container_with_html_options_spec.rb)10
-rw-r--r--spec/lib/sidebars/menu_item_spec.rb21
-rw-r--r--spec/lib/sidebars/menu_spec.rb147
-rw-r--r--spec/lib/sidebars/panel_spec.rb127
-rw-r--r--spec/lib/sidebars/projects/context_spec.rb (renamed from spec/models/sidebars/projects/context_spec.rb)0
-rw-r--r--spec/lib/sidebars/projects/menus/analytics_menu_spec.rb120
-rw-r--r--spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb70
-rw-r--r--spec/lib/sidebars/projects/menus/confluence_menu_spec.rb44
-rw-r--r--spec/lib/sidebars/projects/menus/deployments_menu_spec.rb71
-rw-r--r--spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb40
-rw-r--r--spec/lib/sidebars/projects/menus/external_wiki_menu_spec.rb39
-rw-r--r--spec/lib/sidebars/projects/menus/hidden_menu_spec.rb102
-rw-r--r--spec/lib/sidebars/projects/menus/issues_menu_spec.rb86
-rw-r--r--spec/lib/sidebars/projects/menus/labels_menu_spec.rb61
-rw-r--r--spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb85
-rw-r--r--spec/lib/sidebars/projects/menus/members_menu_spec.rb35
-rw-r--r--spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb63
-rw-r--r--spec/lib/sidebars/projects/menus/monitor_menu_spec.rb217
-rw-r--r--spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb143
-rw-r--r--spec/lib/sidebars/projects/menus/project_information_menu_spec.rb74
-rw-r--r--spec/lib/sidebars/projects/menus/repository_menu_spec.rb (renamed from spec/models/sidebars/projects/menus/repository/menu_spec.rb)4
-rw-r--r--spec/lib/sidebars/projects/menus/settings_menu_spec.rb177
-rw-r--r--spec/lib/sidebars/projects/menus/snippets_menu_spec.rb27
-rw-r--r--spec/lib/sidebars/projects/menus/wiki_menu_spec.rb31
-rw-r--r--spec/lib/sidebars/projects/panel_spec.rb42
-rw-r--r--spec/lib/version_check_spec.rb11
-rw-r--r--spec/mailers/emails/in_product_marketing_spec.rb11
-rw-r--r--spec/mailers/notify_spec.rb6
-rw-r--r--spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb114
-rw-r--r--spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb47
-rw-r--r--spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb25
-rw-r--r--spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb41
-rw-r--r--spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb120
-rw-r--r--spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb34
-rw-r--r--spec/migrations/change_web_hook_events_default_spec.rb36
-rw-r--r--spec/migrations/cleanup_projects_with_missing_namespace_spec.rb12
-rw-r--r--spec/migrations/generate_ci_jwt_signing_key_spec.rb2
-rw-r--r--spec/migrations/move_container_registry_enabled_to_project_features3_spec.rb (renamed from spec/migrations/move_container_registry_enabled_to_project_features2_spec.rb)16
-rw-r--r--spec/migrations/remove_hipchat_service_records_spec.rb23
-rw-r--r--spec/migrations/schedule_update_timelogs_project_id_spec.rb33
-rw-r--r--spec/migrations/update_invalid_web_hooks_spec.rb30
-rw-r--r--spec/models/analytics/cycle_analytics/project_stage_spec.rb1
-rw-r--r--spec/models/analytics/cycle_analytics/project_value_stream_spec.rb39
-rw-r--r--spec/models/appearance_spec.rb4
-rw-r--r--spec/models/application_record_spec.rb14
-rw-r--r--spec/models/application_setting_spec.rb41
-rw-r--r--spec/models/board_group_recent_visit_spec.rb60
-rw-r--r--spec/models/board_project_recent_visit_spec.rb60
-rw-r--r--spec/models/board_spec.rb42
-rw-r--r--spec/models/broadcast_message_spec.rb6
-rw-r--r--spec/models/bulk_imports/entity_spec.rb9
-rw-r--r--spec/models/bulk_imports/export_spec.rb85
-rw-r--r--spec/models/bulk_imports/export_upload_spec.rb23
-rw-r--r--spec/models/bulk_imports/file_transfer/group_config_spec.rb38
-rw-r--r--spec/models/bulk_imports/file_transfer/project_config_spec.rb38
-rw-r--r--spec/models/bulk_imports/file_transfer_spec.rb25
-rw-r--r--spec/models/bulk_imports/stage_spec.rb50
-rw-r--r--spec/models/chat_name_spec.rb6
-rw-r--r--spec/models/ci/build_dependencies_spec.rb22
-rw-r--r--spec/models/ci/build_spec.rb82
-rw-r--r--spec/models/ci/commit_with_pipeline_spec.rb40
-rw-r--r--spec/models/ci/job_artifact_spec.rb28
-rw-r--r--spec/models/ci/pipeline_artifact_spec.rb24
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb30
-rw-r--r--spec/models/ci/pipeline_spec.rb123
-rw-r--r--spec/models/ci/runner_namespace_spec.rb9
-rw-r--r--spec/models/ci/runner_project_spec.rb9
-rw-r--r--spec/models/ci/stage_spec.rb12
-rw-r--r--spec/models/clusters/agent_spec.rb1
-rw-r--r--spec/models/clusters/agent_token_spec.rb13
-rw-r--r--spec/models/clusters/applications/elastic_stack_spec.rb110
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb87
-rw-r--r--spec/models/clusters/integrations/elastic_stack_spec.rb19
-rw-r--r--spec/models/clusters/integrations/prometheus_spec.rb56
-rw-r--r--spec/models/commit_status_spec.rb34
-rw-r--r--spec/models/concerns/bulk_insert_safe_spec.rb36
-rw-r--r--spec/models/concerns/cascading_namespace_setting_attribute_spec.rb36
-rw-r--r--spec/models/concerns/chronic_duration_attribute_spec.rb3
-rw-r--r--spec/models/concerns/ci/maskable_spec.rb2
-rw-r--r--spec/models/concerns/cron_schedulable_spec.rb17
-rw-r--r--spec/models/concerns/has_integrations_spec.rb33
-rw-r--r--spec/models/concerns/has_timelogs_report_spec.rb22
-rw-r--r--spec/models/concerns/noteable_spec.rb2
-rw-r--r--spec/models/concerns/routable_spec.rb74
-rw-r--r--spec/models/concerns/sidebars/positionable_list_spec.rb59
-rw-r--r--spec/models/container_repository_spec.rb95
-rw-r--r--spec/models/context_commits_diff_spec.rb59
-rw-r--r--spec/models/custom_emoji_spec.rb2
-rw-r--r--spec/models/deployment_spec.rb81
-rw-r--r--spec/models/design_management/design_spec.rb2
-rw-r--r--spec/models/email_spec.rb11
-rw-r--r--spec/models/external_pull_request_spec.rb17
-rw-r--r--spec/models/group_spec.rb502
-rw-r--r--spec/models/hooks/project_hook_spec.rb9
-rw-r--r--spec/models/hooks/service_hook_spec.rb12
-rw-r--r--spec/models/hooks/system_hook_spec.rb8
-rw-r--r--spec/models/hooks/web_hook_log_archived_spec.rb52
-rw-r--r--spec/models/hooks/web_hook_spec.rb198
-rw-r--r--spec/models/instance_metadata/kas_spec.rb33
-rw-r--r--spec/models/instance_metadata_spec.rb3
-rw-r--r--spec/models/integration_spec.rb952
-rw-r--r--spec/models/integrations/asana_spec.rb (renamed from spec/models/project_services/asana_service_spec.rb)18
-rw-r--r--spec/models/integrations/assembla_spec.rb (renamed from spec/models/project_services/assembla_service_spec.rb)2
-rw-r--r--spec/models/integrations/bamboo_spec.rb (renamed from spec/models/project_services/bamboo_service_spec.rb)2
-rw-r--r--spec/models/integrations/campfire_spec.rb (renamed from spec/models/project_services/campfire_service_spec.rb)2
-rw-r--r--spec/models/integrations/chat_message/alert_message_spec.rb (renamed from spec/models/project_services/chat_message/alert_message_spec.rb)2
-rw-r--r--spec/models/integrations/chat_message/base_message_spec.rb (renamed from spec/models/project_services/chat_message/base_message_spec.rb)2
-rw-r--r--spec/models/integrations/chat_message/deployment_message_spec.rb (renamed from spec/models/project_services/chat_message/deployment_message_spec.rb)6
-rw-r--r--spec/models/integrations/chat_message/issue_message_spec.rb (renamed from spec/models/project_services/chat_message/issue_message_spec.rb)2
-rw-r--r--spec/models/integrations/chat_message/merge_message_spec.rb (renamed from spec/models/project_services/chat_message/merge_message_spec.rb)2
-rw-r--r--spec/models/integrations/chat_message/note_message_spec.rb (renamed from spec/models/project_services/chat_message/note_message_spec.rb)2
-rw-r--r--spec/models/integrations/chat_message/pipeline_message_spec.rb (renamed from spec/models/project_services/chat_message/pipeline_message_spec.rb)2
-rw-r--r--spec/models/integrations/chat_message/push_message_spec.rb (renamed from spec/models/project_services/chat_message/push_message_spec.rb)2
-rw-r--r--spec/models/integrations/chat_message/wiki_page_message_spec.rb (renamed from spec/models/project_services/chat_message/wiki_page_message_spec.rb)2
-rw-r--r--spec/models/integrations/confluence_spec.rb (renamed from spec/models/project_services/confluence_service_spec.rb)2
-rw-r--r--spec/models/integrations/datadog_spec.rb (renamed from spec/models/project_services/datadog_service_spec.rb)2
-rw-r--r--spec/models/integrations/emails_on_push_spec.rb (renamed from spec/models/project_services/emails_on_push_service_spec.rb)2
-rw-r--r--spec/models/internal_id_spec.rb9
-rw-r--r--spec/models/issue/metrics_spec.rb15
-rw-r--r--spec/models/issue_spec.rb52
-rw-r--r--spec/models/label_link_spec.rb24
-rw-r--r--spec/models/member_spec.rb64
-rw-r--r--spec/models/members/group_member_spec.rb4
-rw-r--r--spec/models/members/project_member_spec.rb4
-rw-r--r--spec/models/merge_request_diff_spec.rb94
-rw-r--r--spec/models/merge_request_spec.rb20
-rw-r--r--spec/models/milestone_spec.rb16
-rw-r--r--spec/models/namespace/package_setting_spec.rb25
-rw-r--r--spec/models/namespace/traversal_hierarchy_spec.rb30
-rw-r--r--spec/models/namespace_spec.rb111
-rw-r--r--spec/models/note_spec.rb10
-rw-r--r--spec/models/packages/dependency_spec.rb5
-rw-r--r--spec/models/packages/go/module_version_spec.rb11
-rw-r--r--spec/models/packages/helm/file_metadatum_spec.rb60
-rw-r--r--spec/models/packages/package_file_spec.rb47
-rw-r--r--spec/models/packages/package_spec.rb116
-rw-r--r--spec/models/packages/tag_spec.rb1
-rw-r--r--spec/models/pages/lookup_path_spec.rb25
-rw-r--r--spec/models/plan_limits_spec.rb1
-rw-r--r--spec/models/project_auto_devops_spec.rb16
-rw-r--r--spec/models/project_feature_spec.rb2
-rw-r--r--spec/models/project_services/chat_notification_service_spec.rb150
-rw-r--r--spec/models/project_services/data_fields_spec.rb4
-rw-r--r--spec/models/project_services/hipchat_service_spec.rb94
-rw-r--r--spec/models/project_services/issue_tracker_data_spec.rb6
-rw-r--r--spec/models/project_services/jira_service_spec.rb4
-rw-r--r--spec/models/project_services/jira_tracker_data_spec.rb2
-rw-r--r--spec/models/project_services/mattermost_slash_commands_service_spec.rb2
-rw-r--r--spec/models/project_services/microsoft_teams_service_spec.rb6
-rw-r--r--spec/models/project_services/open_project_tracker_data_spec.rb4
-rw-r--r--spec/models/project_services/slack_service_spec.rb10
-rw-r--r--spec/models/project_spec.rb230
-rw-r--r--spec/models/project_team_spec.rb8
-rw-r--r--spec/models/release_highlight_spec.rb50
-rw-r--r--spec/models/release_spec.rb17
-rw-r--r--spec/models/releases/evidence_spec.rb1
-rw-r--r--spec/models/releases/source_spec.rb1
-rw-r--r--spec/models/repository_spec.rb61
-rw-r--r--spec/models/service_spec.rb887
-rw-r--r--spec/models/sidebars/menu_spec.rb67
-rw-r--r--spec/models/sidebars/panel_spec.rb34
-rw-r--r--spec/models/sidebars/projects/menus/learn_gitlab/menu_spec.rb31
-rw-r--r--spec/models/sidebars/projects/menus/project_overview/menu_items/releases_spec.rb38
-rw-r--r--spec/models/sidebars/projects/menus/project_overview/menu_spec.rb18
-rw-r--r--spec/models/sidebars/projects/panel_spec.rb14
-rw-r--r--spec/models/snippet_spec.rb12
-rw-r--r--spec/models/timelog_spec.rb46
-rw-r--r--spec/models/todo_spec.rb12
-rw-r--r--spec/models/user_preference_spec.rb2
-rw-r--r--spec/models/user_spec.rb221
-rw-r--r--spec/models/user_status_spec.rb2
-rw-r--r--spec/models/users/credit_card_validation_spec.rb7
-rw-r--r--spec/models/wiki_page/meta_spec.rb6
-rw-r--r--spec/models/wiki_page_spec.rb18
-rw-r--r--spec/policies/integration_policy_spec.rb (renamed from spec/policies/service_policy_spec.rb)2
-rw-r--r--spec/policies/project_member_policy_spec.rb10
-rw-r--r--spec/policies/project_policy_spec.rb40
-rw-r--r--spec/presenters/alert_management/alert_presenter_spec.rb1
-rw-r--r--spec/presenters/blob_presenter_spec.rb137
-rw-r--r--spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb3
-rw-r--r--spec/presenters/ci/pipeline_presenter_spec.rb2
-rw-r--r--spec/presenters/group_member_presenter_spec.rb2
-rw-r--r--spec/presenters/label_presenter_spec.rb1
-rw-r--r--spec/presenters/packages/composer/packages_presenter_spec.rb1
-rw-r--r--spec/presenters/packages/conan/package_presenter_spec.rb1
-rw-r--r--spec/presenters/packages/detail/package_presenter_spec.rb2
-rw-r--r--spec/presenters/packages/npm/package_presenter_spec.rb9
-rw-r--r--spec/presenters/packages/nuget/search_results_presenter_spec.rb1
-rw-r--r--spec/presenters/project_presenter_spec.rb8
-rw-r--r--spec/presenters/prometheus_alert_presenter_spec.rb1
-rw-r--r--spec/presenters/release_presenter_spec.rb1
-rw-r--r--spec/presenters/service_hook_presenter_spec.rb2
-rw-r--r--spec/presenters/snippet_blob_presenter_spec.rb39
-rw-r--r--spec/presenters/terraform/modules_presenter_spec.rb22
-rw-r--r--spec/presenters/user_presenter_spec.rb1
-rw-r--r--spec/presenters/web_hook_log_presenter_spec.rb4
-rw-r--r--spec/requests/api/api_guard/admin_mode_middleware_spec.rb2
-rw-r--r--spec/requests/api/api_spec.rb14
-rw-r--r--spec/requests/api/branches_spec.rb6
-rw-r--r--spec/requests/api/ci/pipelines_spec.rb19
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb6
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb30
-rw-r--r--spec/requests/api/ci/runners_spec.rb13
-rw-r--r--spec/requests/api/debian_group_packages_spec.rb32
-rw-r--r--spec/requests/api/debian_project_packages_spec.rb30
-rw-r--r--spec/requests/api/deploy_tokens_spec.rb57
-rw-r--r--spec/requests/api/deployments_spec.rb25
-rw-r--r--spec/requests/api/environments_spec.rb109
-rw-r--r--spec/requests/api/graphql/ci/job_spec.rb13
-rw-r--r--spec/requests/api/graphql/ci/pipelines_spec.rb124
-rw-r--r--spec/requests/api/graphql/ci/runner_spec.rb144
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb114
-rw-r--r--spec/requests/api/graphql/ci/template_spec.rb48
-rw-r--r--spec/requests/api/graphql/group/milestones_spec.rb12
-rw-r--r--spec/requests/api/graphql/group/packages_spec.rb48
-rw-r--r--spec/requests/api/graphql/issue/issue_spec.rb6
-rw-r--r--spec/requests/api/graphql/merge_request/merge_request_spec.rb111
-rw-r--r--spec/requests/api/graphql/metadata_query_spec.rb46
-rw-r--r--spec/requests/api/graphql/mutations/boards/destroy_spec.rb3
-rw-r--r--spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb73
-rw-r--r--spec/requests/api/graphql/mutations/boards/lists/update_spec.rb41
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_play_spec.rb46
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_retry_spec.rb46
-rw-r--r--spec/requests/api/graphql/mutations/issues/create_spec.rb5
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb31
-rw-r--r--spec/requests/api/graphql/mutations/issues/update_spec.rb3
-rw-r--r--spec/requests/api/graphql/mutations/labels/create_spec.rb3
-rw-r--r--spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb12
-rw-r--r--spec/requests/api/graphql/mutations/security/ci_configuration/configure_secret_detection_spec.rb26
-rw-r--r--spec/requests/api/graphql/packages/composer_spec.rb64
-rw-r--r--spec/requests/api/graphql/packages/conan_spec.rb90
-rw-r--r--spec/requests/api/graphql/packages/maven_spec.rb94
-rw-r--r--spec/requests/api/graphql/packages/nuget_spec.rb74
-rw-r--r--spec/requests/api/graphql/packages/package_spec.rb78
-rw-r--r--spec/requests/api/graphql/project/issue/design_collection/versions_spec.rb17
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb46
-rw-r--r--spec/requests/api/graphql/project/packages_spec.rb33
-rw-r--r--spec/requests/api/graphql/project/project_members_spec.rb16
-rw-r--r--spec/requests/api/graphql/project/release_spec.rb17
-rw-r--r--spec/requests/api/graphql/project/releases_spec.rb17
-rw-r--r--spec/requests/api/graphql/project/repository_spec.rb24
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb16
-rw-r--r--spec/requests/api/graphql_spec.rb57
-rw-r--r--spec/requests/api/group_export_spec.rb70
-rw-r--r--spec/requests/api/group_labels_spec.rb4
-rw-r--r--spec/requests/api/helpers_spec.rb2
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb12
-rw-r--r--spec/requests/api/issues/get_project_issues_spec.rb5
-rw-r--r--spec/requests/api/issues/issues_spec.rb29
-rw-r--r--spec/requests/api/issues/put_projects_issues_spec.rb11
-rw-r--r--spec/requests/api/labels_spec.rb14
-rw-r--r--spec/requests/api/maven_packages_spec.rb101
-rw-r--r--spec/requests/api/merge_requests_spec.rb78
-rw-r--r--spec/requests/api/package_files_spec.rb81
-rw-r--r--spec/requests/api/project_attributes.yml2
-rw-r--r--spec/requests/api/project_container_repositories_spec.rb505
-rw-r--r--spec/requests/api/project_import_spec.rb76
-rw-r--r--spec/requests/api/project_packages_spec.rb10
-rw-r--r--spec/requests/api/project_templates_spec.rb17
-rw-r--r--spec/requests/api/projects_spec.rb71
-rw-r--r--spec/requests/api/releases_spec.rb92
-rw-r--r--spec/requests/api/services_spec.rb52
-rw-r--r--spec/requests/api/settings_spec.rb56
-rw-r--r--spec/requests/api/terraform/modules/v1/packages_spec.rb360
-rw-r--r--spec/requests/api/users_spec.rb42
-rw-r--r--spec/requests/groups/autocomplete_sources_spec.rb68
-rw-r--r--spec/requests/groups/email_campaigns_controller_spec.rb3
-rw-r--r--spec/requests/invite_registration_spec.rb68
-rw-r--r--spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb14
-rw-r--r--spec/requests/rack_attack_global_spec.rb186
-rw-r--r--spec/requests/terraform/services_controller_spec.rb15
-rw-r--r--spec/requests/whats_new_controller_spec.rb14
-rw-r--r--spec/rubocop/cop/active_model_errors_direct_manipulation_spec.rb62
-rw-r--r--spec/rubocop/cop/inject_enterprise_edition_module_spec.rb178
-rw-r--r--spec/rubocop/cop/performance/ar_count_each_spec.rb2
-rw-r--r--spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb2
-rw-r--r--spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb14
-rw-r--r--spec/serializers/ci/codequality_mr_diff_entity_spec.rb10
-rw-r--r--spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb10
-rw-r--r--spec/serializers/ci/downloadable_artifact_entity_spec.rb28
-rw-r--r--spec/serializers/ci/downloadable_artifact_serializer_spec.rb17
-rw-r--r--spec/serializers/ci/pipeline_entity_spec.rb12
-rw-r--r--spec/serializers/context_commits_diff_entity_spec.rb25
-rw-r--r--spec/serializers/diffs_metadata_entity_spec.rb2
-rw-r--r--spec/serializers/group_issuable_autocomplete_entity_spec.rb17
-rw-r--r--spec/serializers/issue_entity_spec.rb4
-rw-r--r--spec/serializers/job_entity_spec.rb4
-rw-r--r--spec/serializers/member_serializer_spec.rb33
-rw-r--r--spec/serializers/merge_requests/pipeline_entity_spec.rb2
-rw-r--r--spec/serializers/pipeline_details_entity_spec.rb18
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb9
-rw-r--r--spec/serializers/test_case_entity_spec.rb44
-rw-r--r--spec/services/admin/propagate_service_template_spec.rb6
-rw-r--r--spec/services/alert_management/process_prometheus_alert_service_spec.rb236
-rw-r--r--spec/services/analytics/cycle_analytics/stages/list_service_spec.rb25
-rw-r--r--spec/services/application_settings/update_service_spec.rb26
-rw-r--r--spec/services/auto_merge/base_service_spec.rb8
-rw-r--r--spec/services/boards/lists/destroy_service_spec.rb26
-rw-r--r--spec/services/boards/lists/update_service_spec.rb4
-rw-r--r--spec/services/boards/visits/create_service_spec.rb43
-rw-r--r--spec/services/branches/delete_service_spec.rb2
-rw-r--r--spec/services/bulk_create_integration_service_spec.rb4
-rw-r--r--spec/services/bulk_imports/export_service_spec.rb43
-rw-r--r--spec/services/bulk_imports/relation_export_service_spec.rb116
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb2
-rw-r--r--spec/services/chat_names/find_user_service_spec.rb8
-rw-r--r--spec/services/ci/change_variable_service_spec.rb1
-rw-r--r--spec/services/ci/change_variables_service_spec.rb1
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb45
-rw-r--r--spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/custom_config_content_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/dry_run_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/environment_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/parameter_content_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb11
-rw-r--r--spec/services/ci/create_web_ide_terminal_service_spec.rb8
-rw-r--r--spec/services/ci/delete_unit_tests_service_spec.rb52
-rw-r--r--spec/services/ci/destroy_pipeline_service_spec.rb12
-rw-r--r--spec/services/ci/expire_pipeline_cache_service_spec.rb8
-rw-r--r--spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb1
-rw-r--r--spec/services/ci/find_exposed_artifacts_service_spec.rb1
-rw-r--r--spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb4
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb1
-rw-r--r--spec/services/ci/job_artifacts/destroy_associations_service_spec.rb54
-rw-r--r--spec/services/ci/job_artifacts/destroy_batch_service_spec.rb39
-rw-r--r--spec/services/ci/parse_dotenv_artifact_service_spec.rb3
-rw-r--r--spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb88
-rw-r--r--spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb20
-rw-r--r--spec/services/ci/pipeline_bridge_status_service_spec.rb1
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb4
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_post_test_needs_deploy_is_stage.yml50
-rw-r--r--spec/services/ci/pipeline_trigger_service_spec.rb43
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb13
-rw-r--r--spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb27
-rw-r--r--spec/services/ci/register_job_service_spec.rb117
-rw-r--r--spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb1
-rw-r--r--spec/services/ci/retry_build_service_spec.rb1
-rw-r--r--spec/services/ci/stop_environments_service_spec.rb1
-rw-r--r--spec/services/clusters/applications/prometheus_update_service_spec.rb119
-rw-r--r--spec/services/clusters/applications/schedule_update_service_spec.rb26
-rw-r--r--spec/services/clusters/integrations/create_service_spec.rb103
-rw-r--r--spec/services/clusters/management/create_project_service_spec.rb126
-rw-r--r--spec/services/container_expiration_policies/cleanup_service_spec.rb250
-rw-r--r--spec/services/deployments/create_service_spec.rb6
-rw-r--r--spec/services/deployments/update_environment_service_spec.rb3
-rw-r--r--spec/services/discussions/capture_diff_note_positions_service_spec.rb2
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb2
-rw-r--r--spec/services/feature_flags/create_service_spec.rb1
-rw-r--r--spec/services/feature_flags/destroy_service_spec.rb1
-rw-r--r--spec/services/feature_flags/disable_service_spec.rb1
-rw-r--r--spec/services/feature_flags/enable_service_spec.rb1
-rw-r--r--spec/services/feature_flags/update_service_spec.rb1
-rw-r--r--spec/services/git/branch_hooks_service_spec.rb28
-rw-r--r--spec/services/git/wiki_push_service_spec.rb8
-rw-r--r--spec/services/groups/autocomplete_service_spec.rb119
-rw-r--r--spec/services/groups/create_service_spec.rb18
-rw-r--r--spec/services/groups/open_issues_count_service_spec.rb11
-rw-r--r--spec/services/groups/transfer_service_spec.rb9
-rw-r--r--spec/services/import/gitlab_projects/create_project_from_remote_file_service_spec.rb182
-rw-r--r--spec/services/import/gitlab_projects/create_project_from_uploaded_file_service_spec.rb71
-rw-r--r--spec/services/issuable/bulk_update_service_spec.rb30
-rw-r--r--spec/services/issuable/common_system_notes_service_spec.rb4
-rw-r--r--spec/services/issuable/destroy_label_links_service_spec.rb19
-rw-r--r--spec/services/issuable/destroy_service_spec.rb10
-rw-r--r--spec/services/issue_rebalancing_service_spec.rb71
-rw-r--r--spec/services/issues/after_create_service_spec.rb2
-rw-r--r--spec/services/issues/build_service_spec.rb14
-rw-r--r--spec/services/issues/clone_service_spec.rb2
-rw-r--r--spec/services/issues/close_service_spec.rb117
-rw-r--r--spec/services/issues/create_service_spec.rb60
-rw-r--r--spec/services/issues/duplicate_service_spec.rb2
-rw-r--r--spec/services/issues/move_service_spec.rb2
-rw-r--r--spec/services/issues/referenced_merge_requests_service_spec.rb4
-rw-r--r--spec/services/issues/related_branches_service_spec.rb4
-rw-r--r--spec/services/issues/reopen_service_spec.rb14
-rw-r--r--spec/services/issues/reorder_service_spec.rb4
-rw-r--r--spec/services/issues/resolve_discussions_spec.rb38
-rw-r--r--spec/services/issues/update_service_spec.rb70
-rw-r--r--spec/services/issues/zoom_link_service_spec.rb2
-rw-r--r--spec/services/labels/available_labels_service_spec.rb9
-rw-r--r--spec/services/labels/find_or_create_service_spec.rb29
-rw-r--r--spec/services/lfs/push_service_spec.rb2
-rw-r--r--spec/services/merge_requests/add_context_service_spec.rb6
-rw-r--r--spec/services/merge_requests/add_spent_time_service_spec.rb63
-rw-r--r--spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb2
-rw-r--r--spec/services/merge_requests/after_create_service_spec.rb4
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb2
-rw-r--r--spec/services/merge_requests/assign_issues_service_spec.rb22
-rw-r--r--spec/services/merge_requests/base_service_spec.rb2
-rw-r--r--spec/services/merge_requests/build_service_spec.rb4
-rw-r--r--spec/services/merge_requests/cleanup_refs_service_spec.rb2
-rw-r--r--spec/services/merge_requests/close_service_spec.rb14
-rw-r--r--spec/services/merge_requests/create_from_issue_service_spec.rb14
-rw-r--r--spec/services/merge_requests/create_pipeline_service_spec.rb2
-rw-r--r--spec/services/merge_requests/create_service_spec.rb26
-rw-r--r--spec/services/merge_requests/ff_merge_service_spec.rb4
-rw-r--r--spec/services/merge_requests/get_urls_service_spec.rb4
-rw-r--r--spec/services/merge_requests/handle_assignees_change_service_spec.rb14
-rw-r--r--spec/services/merge_requests/link_lfs_objects_service_spec.rb2
-rw-r--r--spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb6
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb46
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb8
-rw-r--r--spec/services/merge_requests/mergeability_check_service_spec.rb12
-rw-r--r--spec/services/merge_requests/post_merge_service_spec.rb3
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb2
-rw-r--r--spec/services/merge_requests/pushed_branches_service_spec.rb2
-rw-r--r--spec/services/merge_requests/rebase_service_spec.rb2
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb62
-rw-r--r--spec/services/merge_requests/reload_merge_head_diff_service_spec.rb2
-rw-r--r--spec/services/merge_requests/remove_approval_service_spec.rb2
-rw-r--r--spec/services/merge_requests/reopen_service_spec.rb12
-rw-r--r--spec/services/merge_requests/request_review_service_spec.rb4
-rw-r--r--spec/services/merge_requests/resolve_todos_service_spec.rb12
-rw-r--r--spec/services/merge_requests/resolved_discussion_notification_service_spec.rb2
-rw-r--r--spec/services/merge_requests/retarget_chain_service_spec.rb2
-rw-r--r--spec/services/merge_requests/squash_service_spec.rb6
-rw-r--r--spec/services/merge_requests/update_assignees_service_spec.rb24
-rw-r--r--spec/services/merge_requests/update_service_spec.rb83
-rw-r--r--spec/services/namespaces/package_settings/update_service_spec.rb11
-rw-r--r--spec/services/notes/build_service_spec.rb2
-rw-r--r--spec/services/notes/copy_service_spec.rb2
-rw-r--r--spec/services/notes/create_service_spec.rb2
-rw-r--r--spec/services/notes/quick_actions_service_spec.rb32
-rw-r--r--spec/services/notification_service_spec.rb14
-rw-r--r--spec/services/packages/debian/generate_distribution_key_service_spec.rb35
-rw-r--r--spec/services/packages/debian/generate_distribution_service_spec.rb182
-rw-r--r--spec/services/packages/debian/process_changes_service_spec.rb1
-rw-r--r--spec/services/packages/generic/create_package_file_service_spec.rb42
-rw-r--r--spec/services/packages/maven/find_or_create_package_service_spec.rb10
-rw-r--r--spec/services/packages/nuget/search_service_spec.rb12
-rw-r--r--spec/services/packages/rubygems/dependency_resolver_service_spec.rb2
-rw-r--r--spec/services/packages/rubygems/process_gem_service_spec.rb5
-rw-r--r--spec/services/packages/terraform_module/create_package_service_spec.rb57
-rw-r--r--spec/services/post_receive_service_spec.rb2
-rw-r--r--spec/services/projects/alerting/notify_service_spec.rb258
-rw-r--r--spec/services/projects/create_service_spec.rb45
-rw-r--r--spec/services/projects/destroy_service_spec.rb48
-rw-r--r--spec/services/projects/housekeeping_service_spec.rb20
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb119
-rw-r--r--spec/services/projects/transfer_service_spec.rb2
-rw-r--r--spec/services/projects/unlink_fork_service_spec.rb12
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb2
-rw-r--r--spec/services/projects/update_statistics_service_spec.rb46
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb28
-rw-r--r--spec/services/security/ci_configuration/sast_create_service_spec.rb75
-rw-r--r--spec/services/security/ci_configuration/secret_detection_create_service_spec.rb19
-rw-r--r--spec/services/snippets/update_repository_storage_service_spec.rb2
-rw-r--r--spec/services/spam/spam_action_service_spec.rb54
-rw-r--r--spec/services/spam/spam_verdict_service_spec.rb189
-rw-r--r--spec/services/submit_usage_ping_service_spec.rb2
-rw-r--r--spec/services/suggestions/apply_service_spec.rb75
-rw-r--r--spec/services/system_hooks_service_spec.rb159
-rw-r--r--spec/services/todo_service_spec.rb15
-rw-r--r--spec/services/users/ban_service_spec.rb50
-rw-r--r--spec/services/users/build_service_spec.rb244
-rw-r--r--spec/services/users/registrations_build_service_spec.rb73
-rw-r--r--spec/services/users/update_assigned_open_issue_count_service_spec.rb49
-rw-r--r--spec/services/users/update_todo_count_cache_service_spec.rb32
-rw-r--r--spec/services/users/upsert_credit_card_validation_service_spec.rb84
-rw-r--r--spec/services/web_hook_service_spec.rb244
-rw-r--r--spec/services/web_hooks/destroy_service_spec.rb8
-rw-r--r--spec/spec_helper.rb24
-rw-r--r--spec/support/atlassian/jira_connect/schemata.rb2
-rw-r--r--spec/support/capybara.rb10
-rw-r--r--spec/support/db_cleaner.rb2
-rw-r--r--spec/support/factory_bot.rb8
-rw-r--r--spec/support/factory_default.rb12
-rw-r--r--spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb13
-rw-r--r--spec/support/gitlab_experiment.rb4
-rw-r--r--spec/support/helpers/board_helpers.rb17
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb38
-rw-r--r--spec/support/helpers/dns_helpers.rb2
-rw-r--r--spec/support/helpers/features/invite_members_modal_helper.rb50
-rw-r--r--spec/support/helpers/features/members_table_helpers.rb4
-rw-r--r--spec/support/helpers/gitaly_setup.rb195
-rw-r--r--spec/support/helpers/graphql_helpers.rb4
-rw-r--r--spec/support/helpers/ldap_helpers.rb2
-rw-r--r--spec/support/helpers/license_helper.rb2
-rw-r--r--spec/support/helpers/login_helpers.rb2
-rw-r--r--spec/support/helpers/migrations_helpers.rb2
-rw-r--r--spec/support/helpers/next_found_instance_of.rb2
-rw-r--r--spec/support/helpers/query_recorder.rb4
-rw-r--r--spec/support/helpers/redis_without_keys.rb4
-rw-r--r--spec/support/helpers/reload_helpers.rb2
-rw-r--r--spec/support/helpers/require_migration.rb4
-rw-r--r--spec/support/helpers/snowplow_helpers.rb12
-rw-r--r--spec/support/helpers/stub_configuration.rb2
-rw-r--r--spec/support/helpers/stub_gitlab_calls.rb2
-rw-r--r--spec/support/helpers/test_env.rb8
-rw-r--r--spec/support/helpers/usage_data_helpers.rb1
-rw-r--r--spec/support/matchers/access_matchers_generic.rb2
-rw-r--r--spec/support/matchers/markdown_matchers.rb2
-rw-r--r--spec/support/matchers/schema_matcher.rb58
-rw-r--r--spec/support/renameable_upload.rb2
-rw-r--r--spec/support/services/issuable_update_service_shared_examples.rb4
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb132
-rw-r--r--spec/support/shared_contexts/policies/project_policy_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/project_service_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/services_shared_context.rb9
-rw-r--r--spec/support/shared_contexts/spam_constants.rb1
-rw-r--r--spec/support/shared_examples/alert_notification_service_shared_examples.rb44
-rw-r--r--spec/support/shared_examples/boards/lists/update_service_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/board_sidebar_labels_examples.rb77
-rw-r--r--spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb27
-rw-r--r--spec/support/shared_examples/features/sidebar_shared_examples.rb39
-rw-r--r--spec/support/shared_examples/features/variable_list_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb22
-rw-r--r--spec/support/shared_examples/finders/packages_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/graphql/mutations/boards/update_list_shared_examples.rb48
-rw-r--r--spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb114
-rw-r--r--spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb63
-rw-r--r--spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/lib/gitlab/jwt_token_shared_examples.rb49
-rw-r--r--spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/chat_service_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/chat_slash_commands_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/clusters/elastic_stack_client_shared.rb82
-rw-r--r--spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/concerns/cron_schedulable_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb55
-rw-r--r--spec/support/shared_examples/models/packages/debian/component_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb45
-rw-r--r--spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/wiki_shared_examples.rb42
-rw-r--r--spec/support/shared_examples/namespaces/traversal_examples.rb (renamed from spec/support/shared_examples/namespaces/namespace_traversal_examples.rb)17
-rw-r--r--spec/support/shared_examples/policies/project_policy_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb345
-rw-r--r--spec/support/shared_examples/requests/api/graphql/mutations/boards/update_list_shared_examples.rb43
-rw-r--r--spec/support/shared_examples/requests/api/graphql/mutations/destroy_list_shared_examples.rb87
-rw-r--r--spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb129
-rw-r--r--spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/multiple_and_scoped_issue_boards_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb251
-rw-r--r--spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/rack_attack_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/row_lock_shared_examples.rb27
-rw-r--r--spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/serializers/pipeline_artifacts_shared_example.rb21
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb161
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb113
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb52
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb44
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/system_notes_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/services/alert_management_shared_examples.rb136
-rw-r--r--spec/support/shared_examples/services/boards/boards_recent_visit_shared_examples.rb65
-rw-r--r--spec/support/shared_examples/services/boards/create_service_shared_examples.rb25
-rw-r--r--spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/services/common_system_notes_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/destroy_label_links_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/services/issuable_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/merge_request_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/packages_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb91
-rw-r--r--spec/support/shared_examples/services/updating_mentions_shared_examples.rb2
-rw-r--r--spec/support/stored_repositories.rb2
-rw-r--r--spec/support/stub_languages_translation_percentage.rb19
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb42
-rw-r--r--spec/tasks/gitlab/sidekiq_rake_spec.rb53
-rw-r--r--spec/tooling/danger/changelog_spec.rb220
-rw-r--r--spec/tooling/danger/project_helper_spec.rb6
-rw-r--r--spec/uploaders/object_storage_spec.rb19
-rw-r--r--spec/validators/addressable_url_validator_spec.rb12
-rw-r--r--spec/validators/array_members_validator_spec.rb4
-rw-r--r--spec/validators/devise_email_validator_spec.rb4
-rw-r--r--spec/validators/gitlab/utils/zoom_url_validator_spec.rb2
-rw-r--r--spec/validators/qualified_domain_array_validator_spec.rb8
-rw-r--r--spec/views/admin/application_settings/repository.html.haml_spec.rb32
-rw-r--r--spec/views/devise/shared/_signup_box.html.haml_spec.rb71
-rw-r--r--spec/views/groups/show.html.haml_spec.rb118
-rw-r--r--spec/views/help/index.html.haml_spec.rb9
-rw-r--r--spec/views/layouts/header/_new_dropdown.haml_spec.rb1
-rw-r--r--spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb68
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb1169
-rw-r--r--spec/views/layouts/nav/sidebar/_project_security_link.html.haml_spec.rb29
-rw-r--r--spec/views/notify/change_in_merge_request_draft_status_email.html.haml_spec.rb3
-rw-r--r--spec/views/profiles/keys/_form.html.haml_spec.rb2
-rw-r--r--spec/views/projects/pipelines/new.html.haml_spec.rb34
-rw-r--r--spec/views/projects/pipelines/show.html.haml_spec.rb2
-rw-r--r--spec/views/projects/settings/operations/show.html.haml_spec.rb2
-rw-r--r--spec/views/projects/tags/index.html.haml_spec.rb6
-rw-r--r--spec/views/registrations/welcome/show.html.haml_spec.rb2
-rw-r--r--spec/views/shared/nav/_sidebar.html.haml_spec.rb4
-rw-r--r--spec/views/shared/runners/_runner_details.html.haml_spec.rb (renamed from spec/views/shared/runners/show.html.haml_spec.rb)8
-rw-r--r--spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb2
-rw-r--r--spec/workers/build_finished_worker_spec.rb1
-rw-r--r--spec/workers/build_hooks_worker_spec.rb18
-rw-r--r--spec/workers/bulk_import_worker_spec.rb3
-rw-r--r--spec/workers/bulk_imports/export_request_worker_spec.rb30
-rw-r--r--spec/workers/bulk_imports/relation_export_worker_spec.rb47
-rw-r--r--spec/workers/ci/create_cross_project_pipeline_worker_spec.rb1
-rw-r--r--spec/workers/ci/delete_unit_tests_worker_spec.rb33
-rw-r--r--spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb2
-rw-r--r--spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb4
-rw-r--r--spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb2
-rw-r--r--spec/workers/ci/retry_pipeline_worker_spec.rb51
-rw-r--r--spec/workers/cluster_update_app_worker_spec.rb11
-rw-r--r--spec/workers/concerns/application_worker_spec.rb95
-rw-r--r--spec/workers/concerns/gitlab/github_import/object_importer_spec.rb100
-rw-r--r--spec/workers/concerns/limited_capacity/job_tracker_spec.rb48
-rw-r--r--spec/workers/concerns/limited_capacity/worker_spec.rb137
-rw-r--r--spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb496
-rw-r--r--spec/workers/container_expiration_policy_worker_spec.rb12
-rw-r--r--spec/workers/deployments/hooks_worker_spec.rb53
-rw-r--r--spec/workers/email_receiver_worker_spec.rb21
-rw-r--r--spec/workers/environments/canary_ingress/update_worker_spec.rb1
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb370
-rw-r--r--spec/workers/expire_job_cache_worker_spec.rb1
-rw-r--r--spec/workers/git_garbage_collect_worker_spec.rb26
-rw-r--r--spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/github_import/import_issue_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/github_import/import_note_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/jira_import/import_issue_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb1
-rw-r--r--spec/workers/import_issues_csv_worker_spec.rb1
-rw-r--r--spec/workers/incident_management/add_severity_system_note_worker_spec.rb1
-rw-r--r--spec/workers/incident_management/process_alert_worker_spec.rb1
-rw-r--r--spec/workers/incident_management/process_alert_worker_v2_spec.rb96
-rw-r--r--spec/workers/incident_management/process_prometheus_alert_worker_spec.rb1
-rw-r--r--spec/workers/issuable/label_links_destroy_worker_spec.rb17
-rw-r--r--spec/workers/issuables/clear_groups_issue_counter_worker_spec.rb42
-rw-r--r--spec/workers/issue_placement_worker_spec.rb21
-rw-r--r--spec/workers/issue_rebalancing_worker_spec.rb16
-rw-r--r--spec/workers/jira_connect/sync_project_worker_spec.rb1
-rw-r--r--spec/workers/merge_requests/create_pipeline_worker_spec.rb2
-rw-r--r--spec/workers/merge_worker_spec.rb18
-rw-r--r--spec/workers/namespaces/onboarding_issue_created_worker_spec.rb1
-rw-r--r--spec/workers/packages/composer/cache_update_worker_spec.rb1
-rw-r--r--spec/workers/packages/debian/process_changes_worker_spec.rb113
-rw-r--r--spec/workers/packages/nuget/extraction_worker_spec.rb18
-rw-r--r--spec/workers/packages/rubygems/extraction_worker_spec.rb36
-rw-r--r--spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb1
-rw-r--r--spec/workers/pipeline_process_worker_spec.rb5
-rw-r--r--spec/workers/post_receive_spec.rb24
-rw-r--r--spec/workers/process_commit_worker_spec.rb2
-rw-r--r--spec/workers/project_service_worker_spec.rb2
-rw-r--r--spec/workers/projects/git_garbage_collect_worker_spec.rb1
-rw-r--r--spec/workers/projects/post_creation_worker_spec.rb2
-rw-r--r--spec/workers/prometheus/create_default_alerts_worker_spec.rb1
-rw-r--r--spec/workers/propagate_integration_group_worker_spec.rb1
-rw-r--r--spec/workers/propagate_integration_project_worker_spec.rb1
-rw-r--r--spec/workers/rebase_worker_spec.rb2
-rw-r--r--spec/workers/run_pipeline_schedule_worker_spec.rb1
-rw-r--r--spec/workers/service_desk_email_receiver_worker_spec.rb6
-rw-r--r--spec/workers/update_external_pull_requests_worker_spec.rb1
-rw-r--r--spec/workers/update_merge_requests_worker_spec.rb2
-rw-r--r--spec/workers/users/deactivate_dormant_users_worker_spec.rb61
-rw-r--r--spec/workers/users/update_open_issue_count_worker_spec.rb65
-rw-r--r--spec/workers/web_hook_worker_spec.rb18
1733 files changed, 51014 insertions, 18583 deletions
diff --git a/spec/benchmarks/banzai_benchmark.rb b/spec/benchmarks/banzai_benchmark.rb
index 05c41eed889..86f7ee7e90b 100644
--- a/spec/benchmarks/banzai_benchmark.rb
+++ b/spec/benchmarks/banzai_benchmark.rb
@@ -88,12 +88,18 @@ RSpec.describe 'GitLab Markdown Benchmark', :aggregate_failures do
def build_filter_text(pipeline, initial_text)
filter_source = {}
input_text = initial_text
+ result = nil
pipeline.filters.each do |filter_klass|
- filter_source[filter_klass] = input_text
+ # store inputs for current filter_klass
+ filter_source[filter_klass] = { input_text: input_text, input_result: result }
- output = filter_klass.call(input_text, context)
+ filter = filter_klass.new(input_text, context, result)
+ output = filter.call
+
+ # save these for the next filter_klass
input_text = output
+ result = filter.result
end
filter_source
@@ -111,7 +117,12 @@ RSpec.describe 'GitLab Markdown Benchmark', :aggregate_failures do
pipeline.filters.each do |filter_klass|
label = filter_klass.name.demodulize.delete_suffix('Filter').truncate(20)
- x.report(label) { filter_klass.call(filter_source[filter_klass], context) }
+ x.report(label) do
+ filter = filter_klass.new(filter_source[filter_klass][:input_text],
+ context,
+ filter_source[filter_klass][:input_result])
+ filter.call
+ end
end
x.compare!
diff --git a/spec/channels/issues_channel_spec.rb b/spec/channels/issues_channel_spec.rb
deleted file mode 100644
index 4c860402f03..00000000000
--- a/spec/channels/issues_channel_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe IssuesChannel do
- let_it_be(:issue) { create(:issue) }
-
- it 'rejects when project path is invalid' do
- subscribe(project_path: 'invalid_project_path', iid: issue.iid)
-
- expect(subscription).to be_rejected
- end
-
- it 'rejects when iid is invalid' do
- subscribe(project_path: issue.project.full_path, iid: non_existing_record_iid)
-
- expect(subscription).to be_rejected
- end
-
- it 'rejects when the user does not have access' do
- stub_action_cable_connection current_user: nil
-
- subscribe(project_path: issue.project.full_path, iid: issue.iid)
-
- expect(subscription).to be_rejected
- end
-
- it 'subscribes to a stream when the user has access' do
- stub_action_cable_connection current_user: issue.author
-
- subscribe(project_path: issue.project.full_path, iid: issue.iid)
-
- expect(subscription).to be_confirmed
- expect(subscription).to have_stream_for(issue)
- end
-end
diff --git a/spec/config/inject_enterprise_edition_module_spec.rb b/spec/config/inject_enterprise_edition_module_spec.rb
new file mode 100644
index 00000000000..61b40e46001
--- /dev/null
+++ b/spec/config/inject_enterprise_edition_module_spec.rb
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe InjectEnterpriseEditionModule do
+ let(:extension_name) { 'FF' }
+ let(:extension_namespace) { Module.new }
+ let(:fish_name) { 'Fish' }
+ let(:fish_class) { Class.new }
+ let(:fish_extension) { Module.new }
+
+ before do
+ # Make sure we're not relying on which mode we're running under
+ allow(Gitlab).to receive(:extensions).and_return([extension_name.downcase])
+
+ # Test on an imagined extension and imagined class
+ stub_const(fish_name, fish_class) # Fish
+ allow(fish_class).to receive(:name).and_return(fish_name)
+ end
+
+ shared_examples 'expand the extension with' do |method|
+ context 'when extension namespace is set at top-level' do
+ before do
+ stub_const(extension_name, extension_namespace) # FF
+ extension_namespace.const_set(fish_name, fish_extension) # FF::Fish
+ end
+
+ it "calls #{method} with the extension module" do
+ expect(fish_class).to receive(method).with(fish_extension)
+
+ fish_class.__send__("#{method}_mod_with", fish_name)
+ end
+ end
+
+ context 'when extension namespace is set at another namespace' do
+ let(:another_namespace) { Module.new } # QA
+
+ before do
+ another_namespace.const_set(extension_name, extension_namespace) # QA::FF
+ extension_namespace.const_set(fish_name, fish_extension) # QA::FF::Fish
+ end
+
+ it "calls #{method} with the extension module from the additional namespace" do
+ expect(fish_class).to receive(method).with(fish_extension)
+
+ fish_class.__send__("#{method}_mod_with", fish_name, namespace: another_namespace)
+ end
+ end
+
+ context 'when extension namespace exists but not the extension' do
+ before do
+ stub_const(extension_name, extension_namespace) # FF
+ end
+
+ it "does not call #{method}" do
+ expect(fish_class).not_to receive(method).with(fish_extension)
+
+ fish_class.__send__("#{method}_mod_with", fish_name)
+ end
+ end
+
+ context 'when extension namespace does not exist' do
+ it "does not call #{method}" do
+ expect(fish_class).not_to receive(method).with(fish_extension)
+
+ fish_class.__send__("#{method}_mod_with", fish_name)
+ end
+ end
+ end
+
+ shared_examples 'expand the assumed extension with' do |method|
+ context 'when extension namespace is set at top-level' do
+ before do
+ stub_const(extension_name, extension_namespace) # FF
+ extension_namespace.const_set(fish_name, fish_extension) # FF::Fish
+ end
+
+ it "calls #{method} with the extension module" do
+ expect(fish_class).to receive(method).with(fish_extension)
+
+ fish_class.__send__("#{method}_mod")
+ end
+ end
+
+ context 'when extension namespace exists but not the extension' do
+ before do
+ stub_const(extension_name, extension_namespace) # FF
+ end
+
+ it "does not call #{method}" do
+ expect(fish_class).not_to receive(method).with(fish_extension)
+
+ fish_class.__send__("#{method}_mod")
+ end
+ end
+
+ context 'when extension namespace does not exist' do
+ it "does not call #{method}" do
+ expect(fish_class).not_to receive(method).with(fish_extension)
+
+ fish_class.__send__("#{method}_mod")
+ end
+ end
+ end
+
+ describe '#prepend_mod_with' do
+ it_behaves_like 'expand the extension with', :prepend
+ end
+
+ describe '#extend_mod_with' do
+ it_behaves_like 'expand the extension with', :extend
+ end
+
+ describe '#include_mod_with' do
+ it_behaves_like 'expand the extension with', :include
+ end
+
+ describe '#prepend_mod' do
+ it_behaves_like 'expand the assumed extension with', :prepend
+ end
+
+ describe '#extend_mod' do
+ it_behaves_like 'expand the assumed extension with', :extend
+ end
+
+ describe '#include_mod' do
+ it_behaves_like 'expand the assumed extension with', :include
+ end
+end
diff --git a/spec/config/mail_room_spec.rb b/spec/config/mail_room_spec.rb
index 6265b54931a..ce514bd8905 100644
--- a/spec/config/mail_room_spec.rb
+++ b/spec/config/mail_room_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'mail_room.yml' do
status = result.status
raise "Error interpreting #{mailroom_config_path}: #{output}" unless status == 0
- YAML.load(output)
+ YAML.safe_load(output, permitted_classes: [Symbol])
end
before do
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
index 68b37197ca7..33443509e4a 100644
--- a/spec/config/object_store_settings_spec.rb
+++ b/spec/config/object_store_settings_spec.rb
@@ -74,6 +74,7 @@ RSpec.describe ObjectStoreSettings do
expect(settings.artifacts['object_store']['proxy_download']).to be false
expect(settings.artifacts['object_store']['remote_directory']).to eq('artifacts')
expect(settings.artifacts['object_store']['consolidated_settings']).to be true
+ expect(settings.artifacts).to eq(settings['artifacts'])
expect(settings.lfs['enabled']).to be true
expect(settings.lfs['object_store']['enabled']).to be true
@@ -83,15 +84,18 @@ RSpec.describe ObjectStoreSettings do
expect(settings.lfs['object_store']['proxy_download']).to be true
expect(settings.lfs['object_store']['remote_directory']).to eq('lfs-objects')
expect(settings.lfs['object_store']['consolidated_settings']).to be true
+ expect(settings.lfs).to eq(settings['lfs'])
expect(settings.pages['enabled']).to be true
expect(settings.pages['object_store']['enabled']).to be true
expect(settings.pages['object_store']['connection']).to eq(connection)
expect(settings.pages['object_store']['remote_directory']).to eq('pages')
expect(settings.pages['object_store']['consolidated_settings']).to be true
+ expect(settings.pages).to eq(settings['pages'])
expect(settings.external_diffs['enabled']).to be false
expect(settings.external_diffs['object_store']).to be_nil
+ expect(settings.external_diffs).to eq(settings['external_diffs'])
end
it 'raises an error when a bucket is missing' do
diff --git a/spec/controllers/admin/cohorts_controller_spec.rb b/spec/controllers/admin/cohorts_controller_spec.rb
index 77a9c8eb223..ba5406f25ab 100644
--- a/spec/controllers/admin/cohorts_controller_spec.rb
+++ b/spec/controllers/admin/cohorts_controller_spec.rb
@@ -12,6 +12,6 @@ RSpec.describe Admin::CohortsController do
it 'redirects to Overview->Users' do
get :index
- expect(response).to redirect_to(admin_users_path(tab: 'cohorts'))
+ expect(response).to redirect_to(cohorts_admin_users_path)
end
end
diff --git a/spec/controllers/admin/dev_ops_report_controller_spec.rb b/spec/controllers/admin/dev_ops_report_controller_spec.rb
index 142db175a15..49e6c0f69bd 100644
--- a/spec/controllers/admin/dev_ops_report_controller_spec.rb
+++ b/spec/controllers/admin/dev_ops_report_controller_spec.rb
@@ -9,12 +9,6 @@ RSpec.describe Admin::DevOpsReportController do
end
end
- describe 'should_track_devops_score?' do
- it 'is always true' do
- expect(controller.should_track_devops_score?).to be_truthy
- end
- end
-
describe 'GET #show' do
context 'as admin' do
let(:user) { create(:admin) }
@@ -31,6 +25,8 @@ RSpec.describe Admin::DevOpsReportController do
it_behaves_like 'tracking unique visits', :show do
let(:target_id) { 'i_analytics_dev_ops_score' }
+
+ let(:request_params) { { tab: 'devops-score' } }
end
end
end
diff --git a/spec/controllers/admin/groups_controller_spec.rb b/spec/controllers/admin/groups_controller_spec.rb
index 8441a52b454..8e31ef12adf 100644
--- a/spec/controllers/admin/groups_controller_spec.rb
+++ b/spec/controllers/admin/groups_controller_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe Admin::GroupsController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to set_flash.to 'Users were successfully added.'
+ expect(controller).to set_flash.to 'Users were successfully added.'
expect(response).to redirect_to(admin_group_path(group))
expect(group.users).to include group_user
end
@@ -67,7 +67,7 @@ RSpec.describe Admin::GroupsController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to set_flash.to 'Users were successfully added.'
+ expect(controller).to set_flash.to 'Users were successfully added.'
expect(response).to redirect_to(admin_group_path(group))
end
@@ -78,7 +78,7 @@ RSpec.describe Admin::GroupsController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to set_flash.to 'No users specified.'
+ expect(controller).to set_flash.to 'No users specified.'
expect(response).to redirect_to(admin_group_path(group))
expect(group.users).not_to include group_user
end
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index e14619a9916..971f2f121aa 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Admin::IntegrationsController do
end
describe '#edit' do
- Service.available_services_names.each do |integration_name|
+ Integration.available_services_names.each do |integration_name|
context "#{integration_name}" do
it 'successfully displays the template' do
get :edit, params: { id: integration_name }
@@ -27,7 +27,7 @@ RSpec.describe Admin::IntegrationsController do
end
it 'returns 404' do
- get :edit, params: { id: Service.available_services_names.sample }
+ get :edit, params: { id: Integration.available_services_names.sample }
expect(response).to have_gitlab_http_status(:not_found)
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 45ea8949bf2..3984784f045 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -34,6 +34,17 @@ RSpec.describe Admin::RunnersController do
expect(response.body).to have_content('tag1')
expect(response.body).to have_content('tag2')
end
+
+ it 'paginates runners' do
+ stub_const("Admin::RunnersController::NUMBER_OF_RUNNERS_PER_PAGE", 1)
+
+ create(:ci_runner)
+
+ get :index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:runners).count).to be(1)
+ end
end
describe '#show' do
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index 6faec315eb6..722c9c322cc 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -30,9 +30,33 @@ RSpec.describe Admin::UsersController do
expect(assigns(:users).first.association(:authorized_projects)).to be_loaded
end
- it_behaves_like 'tracking unique visits', :index do
+ context 'pagination' do
+ context 'when number of users is over the pagination limit' do
+ before do
+ stub_const('Admin::UsersController::PAGINATION_WITH_COUNT_LIMIT', 5)
+ allow(Gitlab::Database::Count).to receive(:approximate_counts).with([User]).and_return({ User => 6 })
+ end
+
+ it 'marks the relation for pagination without counts' do
+ get :index
+
+ expect(assigns(:users)).to be_a(Kaminari::PaginatableWithoutCount)
+ end
+ end
+
+ context 'when number of users is below the pagination limit' do
+ it 'marks the relation for pagination with counts' do
+ get :index
+
+ expect(assigns(:users)).not_to be_a(Kaminari::PaginatableWithoutCount)
+ end
+ end
+ end
+ end
+
+ describe 'GET #cohorts' do
+ it_behaves_like 'tracking unique visits', :cohorts do
let(:target_id) { 'i_analytics_cohorts' }
- let(:request_params) { { tab: 'cohorts' } }
end
end
@@ -341,6 +365,56 @@ RSpec.describe Admin::UsersController do
end
end
+ describe 'PUT ban/:id' do
+ context 'when ban_user_feature_flag is enabled' do
+ it 'bans user' do
+ put :ban, params: { id: user.username }
+
+ user.reload
+ expect(user.banned?).to be_truthy
+ expect(flash[:notice]).to eq _('Successfully banned')
+ end
+
+ context 'when unsuccessful' do
+ let(:user) { create(:user, :blocked) }
+
+ it 'does not ban user' do
+ put :ban, params: { id: user.username }
+
+ user.reload
+ expect(user.banned?).to be_falsey
+ expect(flash[:alert]).to eq _('Error occurred. User was not banned')
+ end
+ end
+ end
+
+ context 'when ban_user_feature_flag is not enabled' do
+ before do
+ stub_feature_flags(ban_user_feature_flag: false)
+ end
+
+ it 'does not ban user, renders 404' do
+ put :ban, params: { id: user.username }
+
+ user.reload
+ expect(user.banned?).to be_falsey
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'PUT unban/:id' do
+ let(:banned_user) { create(:user, :banned) }
+
+ it 'unbans user' do
+ put :unban, params: { id: banned_user.username }
+
+ banned_user.reload
+ expect(banned_user.banned?).to be_falsey
+ expect(flash[:notice]).to eq _('Successfully unbanned')
+ end
+ end
+
describe 'PUT unlock/:id' do
before do
request.env["HTTP_REFERER"] = "/"
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 3d34db6c2c0..0235d7eb95a 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -725,7 +725,7 @@ RSpec.describe ApplicationController do
format.csv do
stream_csv_headers('test.csv')
- self.response_body = fixture_file_upload('spec/fixtures/csv_comma.csv')
+ self.response_body = Rack::Test::UploadedFile.new('spec/fixtures/csv_comma.csv')
end
end
end
@@ -1027,4 +1027,44 @@ RSpec.describe ApplicationController do
get :index
end
end
+
+ describe 'setting permissions-policy header' do
+ controller do
+ skip_before_action :authenticate_user!
+
+ def index
+ render html: 'It is a flock of sheep, not a floc of sheep.'
+ end
+ end
+
+ before do
+ routes.draw do
+ get 'index' => 'anonymous#index'
+ end
+ end
+
+ context 'with FloC enabled' do
+ before do
+ stub_application_setting floc_enabled: true
+ end
+
+ it 'does not set the Permissions-Policy header' do
+ get :index
+
+ expect(response.headers['Permissions-Policy']).to eq(nil)
+ end
+ end
+
+ context 'with FloC disabled' do
+ before do
+ stub_application_setting floc_enabled: false
+ end
+
+ it 'sets the Permissions-Policy header' do
+ get :index
+
+ expect(response.headers['Permissions-Policy']).to eq('interest-cohort=()')
+ end
+ end
+ end
end
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index d23f099e382..48000284264 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -49,6 +49,7 @@ RSpec.describe Boards::IssuesController do
create(:labeled_issue, project: project, labels: [development], due_date: Date.tomorrow)
create(:labeled_issue, project: project, labels: [development], assignees: [johndoe])
issue.subscribe(johndoe, project)
+ expect(Issue).to receive(:move_nulls_to_end)
list_issues user: user, board: board, list: list2
@@ -119,6 +120,18 @@ RSpec.describe Boards::IssuesController do
expect(query_count).to eq(1)
end
+
+ context 'when block_issue_repositioning feature flag is enabled' do
+ before do
+ stub_feature_flags(block_issue_repositioning: true)
+ end
+
+ it 'does not reposition issues with null position' do
+ expect(Issue).not_to receive(:move_nulls_to_end)
+
+ list_issues(user: user, board: group_board, list: list3)
+ end
+ end
end
context 'with invalid list id' do
diff --git a/spec/controllers/concerns/confirm_email_warning_spec.rb b/spec/controllers/concerns/confirm_email_warning_spec.rb
index 24ee6fb30d2..334c156e1ae 100644
--- a/spec/controllers/concerns/confirm_email_warning_spec.rb
+++ b/spec/controllers/concerns/confirm_email_warning_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe ConfirmEmailWarning do
RSpec::Matchers.define :set_confirm_warning_for do |email|
match do |response|
- expect(response).to set_flash.now[:warning].to include("Please check your email (#{email}) to verify that you own this address and unlock the power of CI/CD.")
+ expect(controller).to set_flash.now[:warning].to include("Please check your email (#{email}) to verify that you own this address and unlock the power of CI/CD.")
end
end
diff --git a/spec/controllers/concerns/graceful_timeout_handling_spec.rb b/spec/controllers/concerns/graceful_timeout_handling_spec.rb
index cece36f06b2..e496d12856b 100644
--- a/spec/controllers/concerns/graceful_timeout_handling_spec.rb
+++ b/spec/controllers/concerns/graceful_timeout_handling_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GracefulTimeoutHandling, type: :controller do
skip_before_action :authenticate_user!
def index
- raise ActiveRecord::QueryCanceled.new
+ raise ActiveRecord::QueryCanceled
end
end
diff --git a/spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb b/spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb
index 5c3b6e13ee3..ccd5570f2bd 100644
--- a/spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb
+++ b/spec/controllers/concerns/redirects_for_missing_path_on_tree_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe RedirectsForMissingPathOnTree, type: :controller do
get :fake, params: { project_id: project.id, ref: 'theref', file_path: long_file_path }
expect(response).to redirect_to project_tree_path(project, 'theref')
- expect(response.flash[:notice]).to eq(expected_message)
+ expect(controller).to set_flash[:notice].to eq(expected_message)
end
end
end
diff --git a/spec/controllers/concerns/renders_commits_spec.rb b/spec/controllers/concerns/renders_commits_spec.rb
index 7b241fc29af..5c918267f50 100644
--- a/spec/controllers/concerns/renders_commits_spec.rb
+++ b/spec/controllers/concerns/renders_commits_spec.rb
@@ -66,6 +66,7 @@ RSpec.describe RendersCommits do
expect do
subject.prepare_commits_for_rendering(merge_request.commits)
+ merge_request.commits.each(&:latest_pipeline)
end.not_to exceed_all_query_limit(control.count)
end
end
diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb
index 19655687028..b666f73110a 100644
--- a/spec/controllers/groups/group_members_controller_spec.rb
+++ b/spec/controllers/groups/group_members_controller_spec.rb
@@ -5,9 +5,8 @@ require 'spec_helper'
RSpec.describe Groups::GroupMembersController do
include ExternalAuthorizationServiceHelpers
- let(:user) { create(:user) }
- let(:group) { create(:group, :public) }
- let(:membership) { create(:group_member, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group, reload: true) { create(:group, :public) }
before do
travel_to DateTime.new(2019, 4, 1)
@@ -25,14 +24,22 @@ RSpec.describe Groups::GroupMembersController do
expect(response).to render_template(:index)
end
- context 'user with owner access' do
- let!(:invited) { create_list(:group_member, 3, :invited, group: group) }
+ context 'when user can manage members' do
+ let_it_be(:invited) { create_list(:group_member, 3, :invited, group: group) }
before do
group.add_owner(user)
sign_in(user)
end
+ it 'assigns max_access_for_group' do
+ allow(controller).to receive(:current_user).and_return(user)
+
+ get :index, params: { group_id: group }
+
+ expect(user.max_access_for_group[group.id]).to eq(Gitlab::Access::OWNER)
+ end
+
it 'assigns invited members' do
get :index, params: { group_id: group }
@@ -64,9 +71,22 @@ RSpec.describe Groups::GroupMembersController do
end
end
+ context 'when user cannot manage members' do
+ before do
+ sign_in(user)
+ end
+
+ it 'does not assign invited members or skip_groups', :aggregate_failures do
+ get :index, params: { group_id: group }
+
+ expect(assigns(:invited_members)).to be_nil
+ expect(assigns(:skip_groups)).to be_nil
+ end
+ end
+
context 'when user has owner access to subgroup' do
- let(:nested_group) { create(:group, parent: group) }
- let(:nested_group_user) { create(:user) }
+ let_it_be(:nested_group) { create(:group, parent: group) }
+ let_it_be(:nested_group_user) { create(:user) }
before do
group.add_owner(user)
@@ -95,7 +115,7 @@ RSpec.describe Groups::GroupMembersController do
end
describe 'POST create' do
- let(:group_user) { create(:user) }
+ let_it_be(:group_user) { create(:user) }
before do
sign_in(user)
@@ -130,7 +150,7 @@ RSpec.describe Groups::GroupMembersController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to set_flash.to 'Users were successfully added.'
+ expect(controller).to set_flash.to 'Users were successfully added.'
expect(response).to redirect_to(group_group_members_path(group))
expect(group.users).to include group_user
end
@@ -142,7 +162,7 @@ RSpec.describe Groups::GroupMembersController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to set_flash.to 'No users specified.'
+ expect(controller).to set_flash.to 'No users specified.'
expect(response).to redirect_to(group_group_members_path(group))
expect(group.users).not_to include group_user
end
@@ -180,7 +200,7 @@ RSpec.describe Groups::GroupMembersController do
it 'adds user to members' do
subject
- expect(response).to set_flash.to 'Users were successfully added.'
+ expect(controller).to set_flash.to 'Users were successfully added.'
expect(response).to redirect_to(group_group_members_path(group))
expect(group.users).to include group_user
end
@@ -189,7 +209,7 @@ RSpec.describe Groups::GroupMembersController do
end
describe 'PUT update' do
- let(:requester) { create(:group_member, :access_request, group: group) }
+ let_it_be(:requester) { create(:group_member, :access_request, group: group) }
before do
group.add_owner(user)
@@ -292,9 +312,9 @@ RSpec.describe Groups::GroupMembersController do
end
describe 'DELETE destroy' do
- let(:sub_group) { create(:group, parent: group) }
- let!(:member) { create(:group_member, :developer, group: group) }
- let!(:sub_member) { create(:group_member, :developer, group: sub_group, user: member.user) }
+ let_it_be(:sub_group) { create(:group, parent: group) }
+ let_it_be(:member) { create(:group_member, :developer, group: group) }
+ let_it_be(:sub_member) { create(:group_member, :developer, group: sub_group, user: member.user) }
before do
sign_in(user)
@@ -330,7 +350,7 @@ RSpec.describe Groups::GroupMembersController do
it '[HTML] removes user from members' do
delete :destroy, params: { group_id: group, id: member }
- expect(response).to set_flash.to 'User was successfully removed from group.'
+ expect(controller).to set_flash.to 'User was successfully removed from group.'
expect(response).to redirect_to(group_group_members_path(group))
expect(group.members).not_to include member
expect(sub_group.members).to include sub_member
@@ -339,7 +359,7 @@ RSpec.describe Groups::GroupMembersController do
it '[HTML] removes user from members including subgroups and projects' do
delete :destroy, params: { group_id: group, id: member, remove_sub_memberships: true }
- expect(response).to set_flash.to 'User was successfully removed from group and any subgroups and projects.'
+ expect(controller).to set_flash.to 'User was successfully removed from group and any subgroups and projects.'
expect(response).to redirect_to(group_group_members_path(group))
expect(group.members).not_to include member
expect(sub_group.members).not_to include sub_member
@@ -377,7 +397,7 @@ RSpec.describe Groups::GroupMembersController do
it 'removes user from members' do
delete :leave, params: { group_id: group }
- expect(response).to set_flash.to "You left the \"#{group.name}\" group."
+ expect(controller).to set_flash.to "You left the \"#{group.name}\" group."
expect(response).to redirect_to(dashboard_groups_path)
expect(group.users).not_to include user
end
@@ -403,6 +423,8 @@ RSpec.describe Groups::GroupMembersController do
end
context 'and is a requester' do
+ let(:group) { create(:group, :public) }
+
before do
group.request_access(user)
end
@@ -410,7 +432,7 @@ RSpec.describe Groups::GroupMembersController do
it 'removes user from members' do
delete :leave, params: { group_id: group }
- expect(response).to set_flash.to 'Your access request to the group has been withdrawn.'
+ expect(controller).to set_flash.to 'Your access request to the group has been withdrawn.'
expect(response).to redirect_to(group_path(group))
expect(group.requesters).to be_empty
expect(group.users).not_to include user
@@ -427,7 +449,7 @@ RSpec.describe Groups::GroupMembersController do
it 'creates a new GroupMember that is not a team member' do
post :request_access, params: { group_id: group }
- expect(response).to set_flash.to 'Your request for access has been queued for review.'
+ expect(controller).to set_flash.to 'Your request for access has been queued for review.'
expect(response).to redirect_to(group_path(group))
expect(group.requesters.exists?(user_id: user)).to be_truthy
expect(group.users).not_to include user
@@ -435,7 +457,7 @@ RSpec.describe Groups::GroupMembersController do
end
describe 'POST approve_access_request' do
- let(:member) { create(:group_member, :access_request, group: group) }
+ let_it_be(:member) { create(:group_member, :access_request, group: group) }
before do
sign_in(user)
@@ -479,6 +501,8 @@ RSpec.describe Groups::GroupMembersController do
end
context 'with external authorization enabled' do
+ let_it_be(:membership) { create(:group_member, group: group) }
+
before do
enable_external_authorization_service_check
group.add_owner(user)
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index d6da9a4e8d0..2f1c6c813cf 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -302,6 +302,9 @@ RSpec.describe Groups::RunnersController do
context 'when user is not an owner' do
before do
+ # Disable limit checking
+ allow(runner).to receive(:runner_scope).and_return(nil)
+
group.add_maintainer(user)
end
diff --git a/spec/controllers/groups/settings/ci_cd_controller_spec.rb b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
index 880d5fe8951..f225d798886 100644
--- a/spec/controllers/groups/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
@@ -32,6 +32,17 @@ RSpec.describe Groups::Settings::CiCdController do
expect(response).to render_template(:show)
expect(assigns(:group_runners)).to match_array([runner_group, runner_project_1, runner_project_2, runner_project_3])
end
+
+ it 'paginates runners' do
+ stub_const("Groups::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE", 1)
+
+ create(:ci_runner)
+
+ get :show, params: { group_id: group }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:group_runners).count).to be(1)
+ end
end
context 'when user is not owner' do
@@ -128,7 +139,7 @@ RSpec.describe Groups::Settings::CiCdController do
end
it 'returns a flash alert' do
- expect(response).to set_flash[:alert]
+ expect(controller).to set_flash[:alert]
.to eq("There was a problem updating Auto DevOps pipeline: [\"Error 1\"].")
end
end
@@ -137,7 +148,7 @@ RSpec.describe Groups::Settings::CiCdController do
it 'returns a flash notice' do
subject
- expect(response).to set_flash[:notice]
+ expect(controller).to set_flash[:notice]
.to eq('Auto DevOps pipeline was updated for the group')
end
end
@@ -209,7 +220,7 @@ RSpec.describe Groups::Settings::CiCdController do
end
it 'returns a flash alert' do
- expect(response).to set_flash[:alert]
+ expect(controller).to set_flash[:alert]
.to eq("There was a problem updating the pipeline settings: [\"Error 1\"].")
end
end
@@ -218,7 +229,7 @@ RSpec.describe Groups::Settings::CiCdController do
it 'returns a flash notice' do
subject
- expect(response).to set_flash[:notice]
+ expect(controller).to set_flash[:notice]
.to eq('Pipeline settings was updated for the group')
end
end
diff --git a/spec/controllers/groups/settings/integrations_controller_spec.rb b/spec/controllers/groups/settings/integrations_controller_spec.rb
index 3233e814184..63d99a1fab1 100644
--- a/spec/controllers/groups/settings/integrations_controller_spec.rb
+++ b/spec/controllers/groups/settings/integrations_controller_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Groups::Settings::IntegrationsController do
describe '#edit' do
context 'when user is not owner' do
it 'renders not_found' do
- get :edit, params: { group_id: group, id: Service.available_services_names(include_project_specific: false).sample }
+ get :edit, params: { group_id: group, id: Integration.available_services_names(include_project_specific: false).sample }
expect(response).to have_gitlab_http_status(:not_found)
end
@@ -47,7 +47,7 @@ RSpec.describe Groups::Settings::IntegrationsController do
group.add_owner(user)
end
- Service.available_services_names(include_project_specific: false).each do |integration_name|
+ Integration.available_services_names(include_project_specific: false).each do |integration_name|
context "#{integration_name}" do
it 'successfully displays the template' do
get :edit, params: { group_id: group, id: integration_name }
diff --git a/spec/controllers/groups/settings/repository_controller_spec.rb b/spec/controllers/groups/settings/repository_controller_spec.rb
index 14bbdc05282..cbf55218b94 100644
--- a/spec/controllers/groups/settings/repository_controller_spec.rb
+++ b/spec/controllers/groups/settings/repository_controller_spec.rb
@@ -59,6 +59,8 @@ RSpec.describe Groups::Settings::RepositoryController do
'username' => deploy_token_params[:username],
'expires_at' => Time.zone.parse(deploy_token_params[:expires_at]),
'token' => be_a(String),
+ 'expired' => false,
+ 'revoked' => false,
'scopes' => deploy_token_params.inject([]) do |scopes, kv|
key, value = kv
key.to_s.start_with?('read_') && value.to_i != 0 ? scopes << key.to_s : scopes
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index 5195f482084..6b94d186d5f 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -8,16 +8,18 @@ RSpec.describe InvitesController do
let(:raw_invite_token) { member.raw_invite_token }
let(:project_members) { member.source.users }
let(:md5_member_global_id) { Digest::MD5.hexdigest(member.to_global_id.to_s) }
- let(:params) { { id: raw_invite_token } }
+ let(:extra_params) { {} }
+ let(:params) { { id: raw_invite_token }.merge(extra_params) }
shared_examples 'invalid token' do
context 'when invite token is not valid' do
- let(:params) { { id: '_bogus_token_' } }
+ let(:raw_invite_token) { '_bogus_token_' }
- it 'renders the 404 page' do
+ it 'redirects to root' do
request
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to redirect_to(root_path)
+ expect(controller).to set_flash[:alert].to('The invitation can not be found with the provided invite token.')
end
end
end
@@ -25,6 +27,37 @@ RSpec.describe InvitesController do
describe 'GET #show' do
subject(:request) { get :show, params: params }
+ context 'when it is part of our invite email experiment' do
+ let(:extra_params) { { invite_type: 'initial_email' } }
+
+ it 'tracks the experiment' do
+ experiment = double(track: true)
+ allow(controller).to receive(:experiment).with('members/invite_email', actor: member).and_return(experiment)
+
+ request
+
+ expect(experiment).to have_received(:track).with(:join_clicked)
+ end
+
+ context 'when member does not exist' do
+ let(:raw_invite_token) { '_bogus_token_' }
+
+ it 'does not track the experiment' do
+ expect(controller).not_to receive(:experiment).with('members/invite_email', actor: member)
+
+ request
+ end
+ end
+ end
+
+ context 'when it is not part of our invite email experiment' do
+ it 'does not track via experiment' do
+ expect(controller).not_to receive(:experiment).with('members/invite_email', actor: member)
+
+ request
+ end
+ end
+
context 'when logged in' do
before do
sign_in(user)
@@ -51,32 +84,10 @@ RSpec.describe InvitesController do
end
it_behaves_like 'invalid token'
-
- context 'when invite comes from the initial email invite' do
- let(:params) { { id: raw_invite_token, invite_type: Members::InviteEmailExperiment::INVITE_TYPE } }
-
- it 'tracks via experiment', :aggregate_failures do
- experiment = double(track: true)
- allow(controller).to receive(:experiment).and_return(experiment)
-
- request
-
- expect(experiment).to have_received(:track).with(:opened)
- expect(experiment).to have_received(:track).with(:accepted)
- end
- end
-
- context 'when invite does not come from initial email invite' do
- it 'does not track via experiment' do
- expect(controller).not_to receive(:experiment)
-
- request
- end
- end
end
context 'when not logged in' do
- context 'when inviter is a member' do
+ context 'when invite token belongs to a valid member' do
context 'when instance allows sign up' do
it 'indicates an account can be created in notice' do
request
@@ -116,10 +127,62 @@ RSpec.describe InvitesController do
expect(flash[:notice]).to include('create an account or sign in')
end
- it 'is redirected to a new registration with invite email param' do
+ context 'when it is part of our invite email experiment', :experiment, :aggregate_failures do
+ let(:experience) { :control }
+
+ before do
+ stub_experiments(invite_signup_page_interaction: experience)
+ end
+
+ it 'sets originating_member_id session key' do
+ request
+
+ expect(session[:originating_member_id]).to eq(member.id)
+ end
+
+ context 'with control experience' do
+ it 'is redirected to a new registration with invite email param and flash message' do
+ request
+
+ expect(response).to redirect_to(new_user_registration_path(invite_email: member.invite_email))
+ expect(flash[:notice]).to eq 'To accept this invitation, create an account or sign in.'
+ end
+ end
+
+ context 'with candidate experience' do
+ let(:experience) { :candidate }
+
+ it 'is redirected to a new invite registration with invite email param and no flash message' do
+ request
+
+ expect(response).to redirect_to(new_users_sign_up_invite_path(invite_email: member.invite_email))
+ expect(flash[:notice]).to be_nil
+ end
+ end
+ end
+
+ it 'sets session keys for auto email confirmation on sign up' do
request
- expect(response).to redirect_to(new_user_registration_path(invite_email: member.invite_email))
+ expect(session[:invite_email]).to eq(member.invite_email)
+ end
+
+ context 'when it is part of our invite email experiment' do
+ let(:extra_params) { { invite_type: 'initial_email' } }
+
+ it 'sets session key for invite acceptance tracking on sign-up' do
+ request
+
+ expect(session[:originating_member_id]).to eq(member.id)
+ end
+ end
+
+ context 'when it is not part of our invite email experiment' do
+ it 'does not set the session key for invite acceptance tracking on sign-up' do
+ request
+
+ expect(session[:originating_member_id]).to be_nil
+ end
end
end
end
@@ -157,7 +220,7 @@ RSpec.describe InvitesController do
end
end
- context 'when inviter is not a member' do
+ context 'when invite token does not belong to a valid member' do
let(:params) { { id: '_bogus_token_' } }
it 'is redirected to a new session' do
@@ -177,25 +240,6 @@ RSpec.describe InvitesController do
subject(:request) { post :accept, params: params }
it_behaves_like 'invalid token'
-
- context 'when invite comes from the initial email invite' do
- it 'tracks via experiment' do
- experiment = double(track: true)
- allow(controller).to receive(:experiment).and_return(experiment)
-
- post :accept, params: params, session: { invite_type: Members::InviteEmailExperiment::INVITE_TYPE }
-
- expect(experiment).to have_received(:track).with(:accepted)
- end
- end
-
- context 'when invite does not come from initial email invite' do
- it 'does not track via experiment' do
- expect(controller).not_to receive(:experiment)
-
- request
- end
- end
end
describe 'POST #decline for link in UI' do
diff --git a/spec/controllers/oauth/authorizations_controller_spec.rb b/spec/controllers/oauth/authorizations_controller_spec.rb
index 21124299b25..5fc5cdfc9b9 100644
--- a/spec/controllers/oauth/authorizations_controller_spec.rb
+++ b/spec/controllers/oauth/authorizations_controller_spec.rb
@@ -73,39 +73,74 @@ RSpec.describe Oauth::AuthorizationsController do
include_examples 'OAuth Authorizations require confirmed user'
include_examples "Implicit grant can't be used in confidential application"
- context 'when the user is confirmed' do
- let(:confirmed_at) { 1.hour.ago }
+ context 'rendering of views based on the ownership of the application' do
+ shared_examples 'render views' do
+ render_views
- context 'without valid params' do
- it 'returns 200 code and renders error view' do
- get :new
+ it 'returns 200 and renders view with correct info', :aggregate_failures do
+ subject
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('doorkeeper/authorizations/error')
+ expect(response.body).to include(application.owner.name)
+ expect(response).to render_template('doorkeeper/authorizations/new')
end
end
- context 'with valid params' do
- render_views
+ subject { get :new, params: params }
- it 'returns 200 code and renders view' do
- subject
+ context 'when auth app owner is a user' do
+ context 'with valid params' do
+ it_behaves_like 'render views'
+ end
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('doorkeeper/authorizations/new')
+ context 'when auth app owner is a group' do
+ let(:group) { create(:group) }
+
+ context 'when auth app owner is a root group' do
+ let(:application) { create(:oauth_application, owner_id: group.id, owner_type: 'Namespace') }
+
+ it_behaves_like 'render views'
+ end
+
+ context 'when auth app owner is a subgroup' do
+ let(:subgroup) { create(:group, parent: group) }
+ let(:application) { create(:oauth_application, owner_id: subgroup.id, owner_type: 'Namespace') }
+
+ it_behaves_like 'render views'
end
+ end
- it 'deletes session.user_return_to and redirects when skip authorization' do
- application.update!(trusted: true)
- request.session['user_return_to'] = 'http://example.com'
+ context 'when there is no owner associated' do
+ let(:application) { create(:oauth_application, owner_id: nil, owner_type: nil) }
+ it 'renders view' do
subject
- expect(request.session['user_return_to']).to be_nil
- expect(response).to have_gitlab_http_status(:found)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('doorkeeper/authorizations/new')
end
end
end
+
+ context 'without valid params' do
+ it 'returns 200 code and renders error view' do
+ get :new
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('doorkeeper/authorizations/error')
+ end
+ end
+
+ it 'deletes session.user_return_to and redirects when skip authorization' do
+ application.update!(trusted: true)
+ request.session['user_return_to'] = 'http://example.com'
+
+ subject
+
+ expect(request.session['user_return_to']).to be_nil
+ expect(response).to have_gitlab_http_status(:found)
+ end
end
describe 'POST #create' do
diff --git a/spec/controllers/oauth/jira/authorizations_controller_spec.rb b/spec/controllers/oauth/jira/authorizations_controller_spec.rb
index 0b4a691d7ec..f4a335b30f4 100644
--- a/spec/controllers/oauth/jira/authorizations_controller_spec.rb
+++ b/spec/controllers/oauth/jira/authorizations_controller_spec.rb
@@ -5,10 +5,20 @@ require 'spec_helper'
RSpec.describe Oauth::Jira::AuthorizationsController do
describe 'GET new' do
it 'redirects to OAuth authorization with correct params' do
- get :new, params: { client_id: 'client-123', redirect_uri: 'http://example.com/' }
+ get :new, params: { client_id: 'client-123', scope: 'foo', redirect_uri: 'http://example.com/' }
expect(response).to redirect_to(oauth_authorization_url(client_id: 'client-123',
response_type: 'code',
+ scope: 'foo',
+ redirect_uri: oauth_jira_callback_url))
+ end
+
+ it 'replaces the GitHub "repo" scope with "api"' do
+ get :new, params: { client_id: 'client-123', scope: 'repo', redirect_uri: 'http://example.com/' }
+
+ expect(response).to redirect_to(oauth_authorization_url(client_id: 'client-123',
+ response_type: 'code',
+ scope: 'api',
redirect_uri: oauth_jira_callback_url))
end
end
diff --git a/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb b/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb
new file mode 100644
index 00000000000..3bb841c7c9f
--- /dev/null
+++ b/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ let(:params) { { namespace_id: group, project_id: project, value_stream_id: 'default' } }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET index' do
+ context 'when user is member of the project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'succeeds' do
+ get :index, params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'exposes the default stages' do
+ get :index, params: params
+
+ expect(json_response['stages'].size).to eq(Gitlab::Analytics::CycleAnalytics::DefaultStages.all.size)
+ end
+
+ context 'when list service fails' do
+ it 'renders 403' do
+ expect_next_instance_of(Analytics::CycleAnalytics::Stages::ListService) do |list_service|
+ expect(list_service).to receive(:allowed?).and_return(false)
+ end
+
+ get :index, params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when invalid value stream id is given' do
+ before do
+ params[:value_stream_id] = 1
+ end
+
+ it 'renders 404' do
+ get :index, params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user is not member of the project' do
+ it 'renders 404' do
+ get :index, params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/analytics/cycle_analytics/value_streams_controller_spec.rb b/spec/controllers/projects/analytics/cycle_analytics/value_streams_controller_spec.rb
new file mode 100644
index 00000000000..5b434eb2011
--- /dev/null
+++ b/spec/controllers/projects/analytics/cycle_analytics/value_streams_controller_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Analytics::CycleAnalytics::ValueStreamsController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ let(:params) { { namespace_id: group, project_id: project } }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET index' do
+ context 'when user is member of the project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'succeeds' do
+ get :index, params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'exposes the default value stream' do
+ get :index, params: params
+
+ expect(json_response.first['name']).to eq('default')
+ end
+ end
+
+ context 'when user is not member of the project' do
+ it 'renders 404' do
+ get :index, params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index c9a76049e19..b965feee645 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -7,96 +7,6 @@ RSpec.describe Projects::BlobController do
let(:project) { create(:project, :public, :repository) }
- describe "GET new" do
- context 'with no jobs' do
- let_it_be(:user) { create(:user) }
- let_it_be(:file_name) { '.gitlab-ci.yml' }
-
- def request
- get(:new, params: { namespace_id: project.namespace, project_id: project, id: 'master', file_name: file_name } )
- end
-
- before do
- project.add_maintainer(user)
- sign_in(user)
-
- stub_experiment(ci_syntax_templates_b: experiment_active)
- stub_experiment_for_subject(ci_syntax_templates_b: in_experiment_group)
- end
-
- context 'when the experiment is not active' do
- let(:experiment_active) { false }
- let(:in_experiment_group) { false }
-
- it 'does not record the experiment user' do
- expect(Experiment).not_to receive(:add_user)
-
- request
- end
- end
-
- context 'when the experiment is active' do
- let(:experiment_active) { true }
-
- context 'when the user is in the control group' do
- let(:in_experiment_group) { false }
-
- it 'records the experiment user in the control group' do
- expect(Experiment).to receive(:add_user)
- .with(:ci_syntax_templates_b, :control, user, namespace_id: project.namespace_id)
-
- request
- end
- end
-
- context 'when the user is in the experimental group' do
- let(:in_experiment_group) { true }
-
- it 'records the experiment user in the experimental group' do
- expect(Experiment).to receive(:add_user)
- .with(:ci_syntax_templates_b, :experimental, user, namespace_id: project.namespace_id)
-
- request
- end
-
- context 'when requesting a non default config file type' do
- let(:file_name) { '.non_default_ci_config' }
- let(:project) { create(:project, :public, :repository, ci_config_path: file_name) }
-
- it 'records the experiment user in the experimental group' do
- expect(Experiment).to receive(:add_user)
- .with(:ci_syntax_templates_b, :experimental, user, namespace_id: project.namespace_id)
-
- request
- end
- end
-
- context 'when requesting a different file type' do
- let(:file_name) { '.gitignore' }
-
- it 'does not record the experiment user' do
- expect(Experiment).not_to receive(:add_user)
-
- request
- end
- end
-
- context 'when the group is created longer than 90 days ago' do
- before do
- project.namespace.update_attribute(:created_at, 91.days.ago)
- end
-
- it 'does not record the experiment user' do
- expect(Experiment).not_to receive(:add_user)
-
- request
- end
- end
- end
- end
- end
- end
-
describe "GET show" do
def request
get(:show, params: { namespace_id: project.namespace, project_id: project, id: id })
@@ -554,11 +464,36 @@ RSpec.describe Projects::BlobController do
sign_in(user)
end
- it_behaves_like 'tracking unique hll events' do
- subject(:request) { post :create, params: default_params }
+ subject(:request) { post :create, params: default_params }
+ it_behaves_like 'tracking unique hll events' do
let(:target_id) { 'g_edit_by_sfe' }
let(:expected_type) { instance_of(Integer) }
end
+
+ it 'redirects to blob' do
+ request
+
+ expect(response).to redirect_to(project_blob_path(project, 'master/docs/EXAMPLE_FILE'))
+ end
+
+ context 'when code_quality_walkthrough param is present' do
+ let(:default_params) { super().merge(code_quality_walkthrough: true) }
+
+ it 'redirects to the pipelines page' do
+ request
+
+ expect(response).to redirect_to(project_pipelines_path(project, code_quality_walkthrough: true))
+ end
+
+ it 'creates an "commit_created" experiment tracking event' do
+ experiment = double(track: true)
+ expect(controller).to receive(:experiment).with(:code_quality_walkthrough, namespace: project.root_ancestor).and_return(experiment)
+
+ request
+
+ expect(experiment).to have_received(:track).with(:commit_created)
+ end
+ end
end
end
diff --git a/spec/controllers/projects/hooks_controller_spec.rb b/spec/controllers/projects/hooks_controller_spec.rb
index b9c008d2950..17baf38ef32 100644
--- a/spec/controllers/projects/hooks_controller_spec.rb
+++ b/spec/controllers/projects/hooks_controller_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe Projects::HooksController do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let(:user) { project.owner }
before do
- project.add_maintainer(user)
sign_in(user)
end
@@ -20,6 +20,56 @@ RSpec.describe Projects::HooksController do
end
end
+ describe '#edit' do
+ let_it_be(:hook) { create(:project_hook, project: project) }
+
+ let(:params) do
+ { namespace_id: project.namespace, project_id: project, id: hook.id }
+ end
+
+ render_views
+
+ it 'does not error if the hook cannot be found' do
+ get :edit, params: params.merge(id: non_existing_record_id)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'assigns hook_logs' do
+ get :edit, params: params
+
+ expect(assigns[:hook]).to be_present
+ expect(assigns[:hook_logs]).to be_empty
+ it_renders_correctly
+ end
+
+ it 'handles when logs are present' do
+ create_list(:web_hook_log, 3, web_hook: hook)
+
+ get :edit, params: params
+
+ expect(assigns[:hook]).to be_present
+ expect(assigns[:hook_logs].count).to eq 3
+ it_renders_correctly
+ end
+
+ it 'can paginate logs' do
+ create_list(:web_hook_log, 21, web_hook: hook)
+
+ get :edit, params: params.merge(page: 2)
+
+ expect(assigns[:hook]).to be_present
+ expect(assigns[:hook_logs].count).to eq 1
+ it_renders_correctly
+ end
+
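+ # Shared assertions: the edit view renders successfully along with the hook logs partial.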
+ def it_renders_correctly
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ expect(response).to render_template('projects/hook_logs/_index')
+ end
+ end
+
describe '#create' do
it 'sets all parameters' do
hook_params = {
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 3e016a5e8d2..059e7884d55 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -586,13 +586,15 @@ RSpec.describe Projects::IssuesController do
end
describe 'PUT #update' do
+ let(:issue_params) { { title: 'New title' } }
+
subject do
put :update,
params: {
namespace_id: project.namespace,
project_id: project,
id: issue.to_param,
- issue: { title: 'New title' }
+ issue: issue_params
},
format: :json
end
@@ -614,6 +616,17 @@ RSpec.describe Projects::IssuesController do
expect(issue.reload.title).to eq('New title')
end
+ context 'with issue_type param' do
+ let(:issue_params) { { issue_type: 'incident' } }
+
+ it 'permits the parameter' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(issue.reload.issue_type).to eql('incident')
+ end
+ end
+
context 'when the SpamVerdictService disallows' do
before do
stub_application_setting(recaptcha_enabled: true)
@@ -1704,7 +1717,7 @@ RSpec.describe Projects::IssuesController do
request_csv
expect(response).to redirect_to(project_issues_path(project))
- expect(response.flash[:notice]).to match(/\AYour CSV export has started/i)
+ expect(controller).to set_flash[:notice].to match(/\AYour CSV export has started/i)
end
end
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index 081927ea73c..776ed9774b1 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -201,7 +201,7 @@ RSpec.describe Projects::LabelsController do
context 'service raising InvalidRecord' do
before do
expect_any_instance_of(Labels::PromoteService).to receive(:execute) do |label|
- raise ActiveRecord::RecordInvalid.new(label_1)
+ raise ActiveRecord::RecordInvalid, label_1
end
end
diff --git a/spec/controllers/projects/mattermosts_controller_spec.rb b/spec/controllers/projects/mattermosts_controller_spec.rb
index 001f2564698..10bcee28f71 100644
--- a/spec/controllers/projects/mattermosts_controller_spec.rb
+++ b/spec/controllers/projects/mattermosts_controller_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Projects::MattermostsController do
it 'redirects to the new page' do
subject
- service = project.services.last
+ service = project.integrations.last
expect(subject).to redirect_to(edit_project_service_url(project, service))
end
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 50f8942d9d5..989f941caea 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -180,7 +180,8 @@ RSpec.describe Projects::MergeRequests::DiffsController do
start_version: nil,
start_sha: nil,
commit: nil,
- latest_diff: true
+ latest_diff: true,
+ only_context_commits: false
}
expect_next_instance_of(DiffsMetadataSerializer) do |instance|
@@ -203,7 +204,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end
it "correctly generates the right diff between versions" do
- MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
+ MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author).execute(merge_request)
expect_next_instance_of(CompareService) do |service|
expect(service).to receive(:execute).with(
@@ -261,7 +262,8 @@ RSpec.describe Projects::MergeRequests::DiffsController do
start_version: nil,
start_sha: nil,
commit: nil,
- latest_diff: true
+ latest_diff: true,
+ only_context_commits: false
}
expect_next_instance_of(DiffsMetadataSerializer) do |instance|
@@ -290,7 +292,8 @@ RSpec.describe Projects::MergeRequests::DiffsController do
start_version: nil,
start_sha: nil,
commit: merge_request.diff_head_commit,
- latest_diff: nil
+ latest_diff: nil,
+ only_context_commits: false
}
expect_next_instance_of(DiffsMetadataSerializer) do |instance|
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 337a4a19b2e..d4c52e1c7ca 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -531,7 +531,7 @@ RSpec.describe Projects::MergeRequestsController do
sha: merge_request.diff_head_sha,
merge_request: merge_request }
- expect_next_instance_of(MergeRequests::SquashService, project, user, expected_squash_params) do |squash_service|
+ expect_next_instance_of(MergeRequests::SquashService, project: project, current_user: user, params: expected_squash_params) do |squash_service|
expect(squash_service).to receive(:execute).and_return({
status: :success,
squash_sha: SecureRandom.hex(20)
@@ -1831,7 +1831,7 @@ RSpec.describe Projects::MergeRequestsController do
it 'calls MergeRequests::AssignIssuesService' do
expect(MergeRequests::AssignIssuesService).to receive(:new)
- .with(project, user, merge_request: merge_request)
+ .with(project: project, current_user: user, params: { merge_request: merge_request })
.and_return(double(execute: { count: 1 }))
post_assign_issues
@@ -2229,7 +2229,7 @@ RSpec.describe Projects::MergeRequestsController do
subject
expect(response).to redirect_to(project_merge_requests_path(project))
- expect(response.flash[:notice]).to match(/\AYour CSV export has started/i)
+ expect(controller).to set_flash[:notice].to match(/\AYour CSV export has started/i)
end
it 'enqueues an IssuableExportCsvWorker worker' do
diff --git a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
index 923581d9367..939366e5b0b 100644
--- a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
+++ b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Projects::PerformanceMonitoring::DashboardsController do
post :create, params: params
expect(response).to have_gitlab_http_status :created
- expect(response).to set_flash[:notice].to eq("Your dashboard has been copied. You can <a href=\"/-/ide/project/#{namespace.path}/#{project.name}/edit/#{branch_name}/-/.gitlab/dashboards/#{file_name}\">edit it here</a>.")
+ expect(controller).to set_flash[:notice].to eq("Your dashboard has been copied. You can <a href=\"/-/ide/project/#{namespace.path}/#{project.name}/edit/#{branch_name}/-/.gitlab/dashboards/#{file_name}\">edit it here</a>.")
expect(json_response).to eq('status' => 'success', 'dashboard' => { 'path' => ".gitlab/dashboards/#{file_name}" })
end
@@ -203,7 +203,7 @@ RSpec.describe Projects::PerformanceMonitoring::DashboardsController do
put :update, params: params
expect(response).to have_gitlab_http_status :created
- expect(response).to set_flash[:notice].to eq("Your dashboard has been updated. You can <a href=\"/-/ide/project/#{namespace.path}/#{project.name}/edit/#{branch_name}/-/.gitlab/dashboards/#{file_name}\">edit it here</a>.")
+ expect(controller).to set_flash[:notice].to eq("Your dashboard has been updated. You can <a href=\"/-/ide/project/#{namespace.path}/#{project.name}/edit/#{branch_name}/-/.gitlab/dashboards/#{file_name}\">edit it here</a>.")
expect(json_response).to eq('status' => 'success', 'dashboard' => { 'default' => false, 'display_name' => "custom_dashboard.yml", 'path' => ".gitlab/dashboards/#{file_name}", 'system_dashboard' => false })
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 4a1d01f0e82..0e6b5e84d85 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -288,6 +288,17 @@ RSpec.describe Projects::PipelinesController do
get :index, params: { namespace_id: project.namespace, project_id: project }
end
end
+
+ context 'code_quality_walkthrough experiment' do
+ it 'tracks the view', :experiment do
+ expect(experiment(:code_quality_walkthrough))
+ .to track(:view, property: project.root_ancestor.id.to_s)
+ .with_context(namespace: project.root_ancestor)
+ .on_next_instance
+
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+ end
+ end
end
describe 'GET #show' do
@@ -842,10 +853,7 @@ RSpec.describe Projects::PipelinesController do
end
describe 'POST retry.json' do
- let!(:pipeline) { create(:ci_pipeline, :failed, project: project) }
- let!(:build) { create(:ci_build, :failed, pipeline: pipeline) }
-
- before do
+ subject(:post_retry) do
post :retry, params: {
namespace_id: project.namespace,
project_id: project,
@@ -854,15 +862,41 @@ RSpec.describe Projects::PipelinesController do
format: :json
end
- it 'retries a pipeline without returning any content' do
+ let!(:pipeline) { create(:ci_pipeline, :failed, project: project) }
+ let!(:build) { create(:ci_build, :failed, pipeline: pipeline) }
+
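+ # Replace the retry worker with a class spy so the spec can assert it was enqueued without running it.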
+ let(:worker_spy) { class_spy(::Ci::RetryPipelineWorker) }
+
+ before do
+ stub_const('::Ci::RetryPipelineWorker', worker_spy)
+ end
+
+ it 'retries a pipeline in the background without returning any content' do
+ post_retry
+
expect(response).to have_gitlab_http_status(:no_content)
- expect(build.reload).to be_retried
+ expect(::Ci::RetryPipelineWorker).to have_received(:perform_async).with(pipeline.id, user.id)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(background_pipeline_retry_endpoint: false)
+ end
+
+ it 'retries the pipeline without returning any content' do
+ post_retry
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(build.reload).to be_retried
+ end
end
context 'when builds are disabled' do
let(:feature) { ProjectFeature::DISABLED }
it 'fails to retry pipeline' do
+ post_retry
+
expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -976,49 +1010,26 @@ RSpec.describe Projects::PipelinesController do
end
end
- context 'when junit_pipeline_screenshots_view is enabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: project)
- end
-
- context 'when test_report contains attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
+ context 'when test_report contains attachment and scope is with_attachment as a URL param' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
- it 'returns a test reports with attachment' do
- get_test_report_json(scope: 'with_attachment')
+ it 'returns test reports with attachment' do
+ get_test_report_json(scope: 'with_attachment')
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"]).to be_present
- expect(json_response["test_suites"].first["test_cases"].first).to include("attachment_url")
- end
- end
-
- context 'when test_report does not contain attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
-
- it 'returns a test reports with empty values' do
- get_test_report_json(scope: 'with_attachment')
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"]).to be_empty
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["test_suites"]).to be_present
+ expect(json_response["test_suites"].first["test_cases"].first).to include("attachment_url")
end
end
- context 'when junit_pipeline_screenshots_view is disabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: false)
- end
-
- context 'when test_report contains attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
+ context 'when test_report does not contain attachment and scope is with_attachment as a URL param' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
- it 'returns a test reports without attachment_url' do
- get_test_report_json(scope: 'with_attachment')
+ it 'returns test reports with empty values' do
+ get_test_report_json(scope: 'with_attachment')
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"].first["test_cases"].first).not_to include("attachment_url")
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["test_suites"]).to be_empty
end
end
@@ -1280,4 +1291,59 @@ RSpec.describe Projects::PipelinesController do
format: :json
end
end
+
+ describe 'GET downloadable_artifacts.json' do
+ context 'when pipeline is empty' do
+ let(:pipeline) { create(:ci_empty_pipeline) }
+
+ it 'returns status not_found' do
+ get_downloadable_artifacts_json
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when pipeline exists' do
+ context 'when pipeline does not have any downloadable artifacts' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'returns an empty array' do
+ get_downloadable_artifacts_json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['artifacts']).to be_empty
+ end
+ end
+
+ context 'when pipeline has downloadable artifacts' do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
+
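+ # One downloadable artifact comes from the codequality trait; the junit artifact added below brings the total to two.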
+ before do
+ create(:ci_build, name: 'rspec', pipeline: pipeline).tap do |build|
+ create(:ci_job_artifact, :junit, job: build)
+ end
+ end
+
+ it 'returns an array of artifacts' do
+ get_downloadable_artifacts_json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['artifacts']).to be_kind_of(Array)
+ expect(json_response['artifacts'].size).to eq(2)
+ end
+ end
+ end
+
+ private
+
+ def get_downloadable_artifacts_json
+ get :downloadable_artifacts,
+ params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ id: pipeline.id
+ },
+ format: :json
+ end
+ end
end
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 46a0fc8edb0..bb817fc94b2 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -199,7 +199,7 @@ RSpec.describe Projects::ProjectMembersController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to set_flash.to 'Users were successfully added.'
+ expect(controller).to set_flash.to 'Users were successfully added.'
expect(response).to redirect_to(project_project_members_path(project))
end
@@ -215,7 +215,7 @@ RSpec.describe Projects::ProjectMembersController do
access_level: Gitlab::Access::GUEST
}
- expect(response).to set_flash.to 'Message'
+ expect(controller).to set_flash.to 'Message'
expect(response).to redirect_to(project_project_members_path(project))
end
end
@@ -276,7 +276,7 @@ RSpec.describe Projects::ProjectMembersController do
it 'adds user to members' do
subject
- expect(response).to set_flash.to 'Users were successfully added.'
+ expect(controller).to set_flash.to 'Users were successfully added.'
expect(response).to redirect_to(project_project_members_path(project))
expect(project.users).to include project_user
end
@@ -489,7 +489,7 @@ RSpec.describe Projects::ProjectMembersController do
project_id: project
}
- expect(response).to set_flash.to "You left the \"#{project.human_name}\" project."
+ expect(controller).to set_flash.to "You left the \"#{project.human_name}\" project."
expect(response).to redirect_to(dashboard_projects_path)
expect(project.users).not_to include user
end
@@ -523,7 +523,7 @@ RSpec.describe Projects::ProjectMembersController do
project_id: project
}
- expect(response).to set_flash.to 'Your access request to the project has been withdrawn.'
+ expect(controller).to set_flash.to 'Your access request to the project has been withdrawn.'
expect(response).to redirect_to(project_path(project))
expect(project.requesters).to be_empty
expect(project.users).not_to include user
@@ -543,7 +543,7 @@ RSpec.describe Projects::ProjectMembersController do
project_id: project
}
- expect(response).to set_flash.to 'Your request for access has been queued for review.'
+ expect(controller).to set_flash.to 'Your request for access has been queued for review.'
expect(response).to redirect_to(
project_path(project)
)
@@ -639,7 +639,7 @@ RSpec.describe Projects::ProjectMembersController do
it 'imports source project members' do
expect(project.team_members).to include member
- expect(response).to set_flash.to 'Successfully imported'
+ expect(controller).to set_flash.to 'Successfully imported'
expect(response).to redirect_to(
project_project_members_path(project)
)
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index 3021ad42c9f..39b45a7133c 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -78,84 +78,40 @@ RSpec.describe Projects::RunnersController do
let(:group) { create(:group) }
let(:project) { create(:project, group: group) }
- context 'without feature flag' do
- before do
- stub_feature_flags(vueify_shared_runners_toggle: false)
- end
+ it 'toggles shared_runners_enabled when the group allows shared runners' do
+ project.update!(shared_runners_enabled: true)
- it 'toggles shared_runners_enabled when the group allows shared runners' do
- project.update!(shared_runners_enabled: true)
+ post :toggle_shared_runners, params: params
- post :toggle_shared_runners, params: params
+ project.reload
- project.reload
-
- expect(response).to have_gitlab_http_status(:found)
- expect(project.shared_runners_enabled).to eq(false)
- end
-
- it 'toggles shared_runners_enabled when the group disallows shared runners but allows overrides' do
- group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true)
- project.update!(shared_runners_enabled: false)
-
- post :toggle_shared_runners, params: params
-
- project.reload
-
- expect(response).to have_gitlab_http_status(:found)
- expect(project.shared_runners_enabled).to eq(true)
- end
-
- it 'does not enable if the group disallows shared runners' do
- group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false)
- project.update!(shared_runners_enabled: false)
-
- post :toggle_shared_runners, params: params
-
- project.reload
-
- expect(response).to have_gitlab_http_status(:found)
- expect(project.shared_runners_enabled).to eq(false)
- expect(flash[:alert]).to eq('Cannot enable shared runners because parent group does not allow it')
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(project.shared_runners_enabled).to eq(false)
end
- context 'with feature flag: vueify_shared_runners_toggle' do
- it 'toggles shared_runners_enabled when the group allows shared runners' do
- project.update!(shared_runners_enabled: true)
-
- post :toggle_shared_runners, params: params
-
- project.reload
+ it 'toggles shared_runners_enabled when the group disallows shared runners but allows overrides' do
+ group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true)
+ project.update!(shared_runners_enabled: false)
- expect(response).to have_gitlab_http_status(:ok)
- expect(project.shared_runners_enabled).to eq(false)
- end
+ post :toggle_shared_runners, params: params
- it 'toggles shared_runners_enabled when the group disallows shared runners but allows overrides' do
- group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true)
- project.update!(shared_runners_enabled: false)
+ project.reload
- post :toggle_shared_runners, params: params
-
- project.reload
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(project.shared_runners_enabled).to eq(true)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(project.shared_runners_enabled).to eq(true)
+ end
- it 'does not enable if the group disallows shared runners' do
- group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false)
- project.update!(shared_runners_enabled: false)
+ it 'does not enable if the group disallows shared runners' do
+ group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false)
+ project.update!(shared_runners_enabled: false)
- post :toggle_shared_runners, params: params
+ post :toggle_shared_runners, params: params
- project.reload
+ project.reload
- expect(response).to have_gitlab_http_status(:unauthorized)
- expect(project.shared_runners_enabled).to eq(false)
- expect(json_response['error']).to eq('Cannot enable shared runners because parent group does not allow it')
- end
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(project.shared_runners_enabled).to eq(false)
+ expect(json_response['error']).to eq('Cannot enable shared runners because parent group does not allow it')
end
end
end
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index 488a34b74df..d8fb3b226ed 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Projects::ServicesController do
describe '#test' do
context 'when can_test? returns false' do
it 'renders 404' do
- allow_any_instance_of(Service).to receive(:can_test?).and_return(false)
+ allow_any_instance_of(Integration).to receive(:can_test?).and_return(false)
put :test, params: project_params
@@ -271,7 +271,7 @@ RSpec.describe Projects::ServicesController do
expect(response).to redirect_to(edit_project_service_path(project, service))
expected_alert = "You can now manage your Prometheus settings on the <a href=\"#{project_settings_operations_path(project)}\">Operations</a> page. Fields on this page has been deprecated."
- expect(response).to set_flash.now[:alert].to(expected_alert)
+ expect(controller).to set_flash.now[:alert].to(expected_alert)
end
it 'does not modify service' do
@@ -317,7 +317,7 @@ RSpec.describe Projects::ServicesController do
it 'renders deprecation warning notice' do
expected_alert = "You can now manage your Prometheus settings on the <a href=\"#{project_settings_operations_path(project)}\">Operations</a> page. Fields on this page has been deprecated."
- expect(response).to set_flash.now[:alert].to(expected_alert)
+ expect(controller).to set_flash.now[:alert].to(expected_alert)
end
end
@@ -328,7 +328,7 @@ RSpec.describe Projects::ServicesController do
end
it 'does not render deprecation warning notice' do
- expect(response).not_to set_flash.now[:alert]
+ expect(controller).not_to set_flash.now[:alert]
end
end
end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index d953249c139..dc7066f6b61 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -14,6 +14,10 @@ RSpec.describe Projects::Settings::CiCdController do
end
describe 'GET show' do
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:other_project) { create(:project, group: group) }
+
it 'renders show with 200 status code' do
get :show, params: { namespace_id: project.namespace, project_id: project }
@@ -22,12 +26,9 @@ RSpec.describe Projects::Settings::CiCdController do
end
context 'with group runners' do
- let(:parent_group) { create(:group) }
- let(:group) { create(:group, parent: parent_group) }
- let(:group_runner) { create(:ci_runner, :group, groups: [group]) }
- let(:other_project) { create(:project, group: group) }
- let!(:project_runner) { create(:ci_runner, :project, projects: [other_project]) }
- let!(:shared_runner) { create(:ci_runner, :instance) }
+ let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group]) }
+ let_it_be(:project_runner) { create(:ci_runner, :project, projects: [other_project]) }
+ let_it_be(:shared_runner) { create(:ci_runner, :instance) }
it 'sets assignable project runners only' do
group.add_maintainer(user)
@@ -37,6 +38,33 @@ RSpec.describe Projects::Settings::CiCdController do
expect(assigns(:assignable_runners)).to contain_exactly(project_runner)
end
end
+
+ context 'prevents N+1 queries for tags' do
+ render_views
+
+ def show
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+ end
+
+ it 'has the same number of queries with one tag or with many tags', :request_store do
+ group.add_maintainer(user)
+
+ show # warmup
+
+ # with one tag
+ create(:ci_runner, :instance, tag_list: %w(shared_runner))
+ create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner))
+ create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner))
+ control = ActiveRecord::QueryRecorder.new { show }
+
+ # with several tags
+ create(:ci_runner, :instance, tag_list: %w(shared_runner tag2 tag3))
+ create(:ci_runner, :project, projects: [other_project], tag_list: %w(project_runner tag2 tag3))
+ create(:ci_runner, :group, groups: [group], tag_list: %w(group_runner tag2 tag3))
+
+ expect { show }.not_to exceed_query_limit(control)
+ end
+ end
end
describe '#reset_cache' do
@@ -134,7 +162,9 @@ RSpec.describe Projects::Settings::CiCdController do
context 'when the project repository is empty' do
it 'sets a notice flash' do
- expect(subject).to set_flash[:notice]
+ subject
+
+ expect(controller).to set_flash[:notice]
end
it 'does not queue a CreatePipelineWorker' do
@@ -150,7 +180,9 @@ RSpec.describe Projects::Settings::CiCdController do
it 'displays a toast message' do
allow(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args)
- expect(subject).to set_flash[:toast]
+ subject
+
+ expect(controller).to set_flash[:toast]
end
it 'queues a CreatePipelineWorker' do
@@ -211,7 +243,9 @@ RSpec.describe Projects::Settings::CiCdController do
let(:params) { { build_timeout_human_readable: '5m' } }
it 'set specified timeout' do
- expect(subject).to set_flash[:alert]
+ subject
+
+ expect(controller).to set_flash[:alert]
expect(response).to redirect_to(namespace_project_settings_ci_cd_path)
end
end
diff --git a/spec/controllers/projects/settings/repository_controller_spec.rb b/spec/controllers/projects/settings/repository_controller_spec.rb
index 394f1ff28f2..2bb93990c58 100644
--- a/spec/controllers/projects/settings/repository_controller_spec.rb
+++ b/spec/controllers/projects/settings/repository_controller_spec.rb
@@ -78,6 +78,8 @@ RSpec.describe Projects::Settings::RepositoryController do
'username' => deploy_token_params[:username],
'expires_at' => Time.zone.parse(deploy_token_params[:expires_at]),
'token' => be_a(String),
+ 'expired' => false,
+ 'revoked' => false,
'scopes' => deploy_token_params.inject([]) do |scopes, kv|
key, value = kv
key.to_s.start_with?('read_') && value.to_i != 0 ? scopes << key.to_s : scopes
diff --git a/spec/controllers/projects/static_site_editor_controller_spec.rb b/spec/controllers/projects/static_site_editor_controller_spec.rb
index 73b0e3bba69..26161b5fb5c 100644
--- a/spec/controllers/projects/static_site_editor_controller_spec.rb
+++ b/spec/controllers/projects/static_site_editor_controller_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Projects::StaticSiteEditorController do
it 'redirects to project page and flashes error message' do
expect(response).to redirect_to(project_path(project))
- expect(response).to set_flash[:alert].to('invalid')
+ expect(controller).to set_flash[:alert].to('invalid')
end
end
diff --git a/spec/controllers/registrations/experience_levels_controller_spec.rb b/spec/controllers/registrations/experience_levels_controller_spec.rb
index 79fa3f1474a..6b8ab3ec715 100644
--- a/spec/controllers/registrations/experience_levels_controller_spec.rb
+++ b/spec/controllers/registrations/experience_levels_controller_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Registrations::ExperienceLevelsController do
let(:learn_gitlab_available?) { true }
before do
- allow_next_instance_of(LearnGitlab) do |learn_gitlab|
+ allow_next_instance_of(LearnGitlab::Project) do |learn_gitlab|
allow(learn_gitlab).to receive(:available?).and_return(learn_gitlab_available?)
allow(learn_gitlab).to receive(:project).and_return(project)
allow(learn_gitlab).to receive(:board).and_return(issues_board)
@@ -136,7 +136,7 @@ RSpec.describe Registrations::ExperienceLevelsController do
let(:params) { super().merge(experience_level: :novice) }
before do
- allow_next(LearnGitlab).to receive(:available?).and_return(false)
+ allow_next(LearnGitlab::Project).to receive(:available?).and_return(false)
end
it 'does not add a BoardLabel' do
diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb
index 008259a8bfa..6d34b56df09 100644
--- a/spec/controllers/registrations/welcome_controller_spec.rb
+++ b/spec/controllers/registrations/welcome_controller_spec.rb
@@ -77,6 +77,30 @@ RSpec.describe Registrations::WelcomeController do
it { is_expected.to redirect_to(dashboard_projects_path)}
+ context 'when the new user already has an accepted group membership' do
+ let!(:member1) { create(:group_member, user: user) }
+
+ it 'redirects to the group activity page' do
+ expect(subject).to redirect_to(activity_group_path(member1.source))
+ end
+
+ context 'when the new user already has more than 1 accepted group membership' do
+ it 'redirects to the most recent membership group activity page' do
+ member2 = create(:group_member, user: user)
+
+ expect(subject).to redirect_to(activity_group_path(member2.source))
+ end
+ end
+
+ context 'when the member has an orphaned source at the time of the welcome' do
+ it 'redirects to the project dashboard page' do
+ member1.source.delete
+
+ expect(subject).to redirect_to(dashboard_projects_path)
+ end
+ end
+ end
+
context 'when the user opted in' do
let(:email_opted_in) { '1' }
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index aac7c10d878..ff73c0aafe8 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -20,10 +20,15 @@ RSpec.describe RegistrationsController do
end
describe '#create' do
- let(:base_user_params) { { first_name: 'first', last_name: 'last', username: 'new_username', email: 'new@user.com', password: 'Any_password' } }
- let(:user_params) { { user: base_user_params } }
+ let_it_be(:base_user_params) do
+ { first_name: 'first', last_name: 'last', username: 'new_username', email: 'new@user.com', password: 'Any_password' }
+ end
+
+ let_it_be(:user_params) { { user: base_user_params } }
- subject { post(:create, params: user_params) }
+ let(:session_params) { {} }
+
+ subject { post(:create, params: user_params, session: session_params) }
context '`blocked_pending_approval` state' do
context 'when the `require_admin_approval_after_user_signup` setting is turned on' do
@@ -148,6 +153,90 @@ RSpec.describe RegistrationsController do
expect { subject }.to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
expect(controller.current_user).to be_nil
end
+
+ context 'when registration is triggered from an accepted invite' do
+ context 'when it is part of our invite email experiment', :experiment do
+ let_it_be(:member) { create(:project_member, :invited, invite_email: user_params.dig(:user, :email)) }
+
+ let(:originating_member_id) { member.id }
+ let(:session_params) do
+ {
+ invite_email: user_params.dig(:user, :email),
+ originating_member_id: originating_member_id
+ }
+ end
+
+ context 'when a member exists for the session key value' do
+ it 'tracks the experiment' do
+ expect(experiment('members/invite_email')).to track(:accepted)
+ .with_context(actor: member)
+ .on_next_instance
+
+ subject
+ end
+ end
+
+ context 'when no member exists for the session key value' do
+ let(:originating_member_id) { -1 }
+
+ it 'does not track the experiment' do
+ expect(experiment('members/invite_email')).not_to track(:accepted)
+
+ subject
+ end
+ end
+ end
+
+ context 'when it is part of our invite_signup_page_interaction experiment', :experiment do
+ let_it_be(:member) { create(:project_member, :invited, invite_email: user_params.dig(:user, :email)) }
+
+ let(:originating_member_id) { member.id }
+ let(:session_params) do
+ {
+ invite_email: user_params.dig(:user, :email),
+ originating_member_id: originating_member_id
+ }
+ end
+
+ context 'when a member exists for the session key value' do
+ it 'tracks the experiment' do
+ expect(experiment(:invite_signup_page_interaction)).to track(:form_submission)
+ .with_context(actor: member)
+ .on_next_instance
+
+ subject
+ end
+ end
+
+ context 'when no member exists for the session key value' do
+ let(:originating_member_id) { -1 }
+
+ it 'does not track the experiment' do
+ expect(experiment(:invite_signup_page_interaction)).not_to track(:form_submission)
+
+ subject
+ end
+ end
+ end
+
+ context 'when invite email matches email used on registration' do
+ let(:session_params) { { invite_email: user_params.dig(:user, :email) } }
+
+ it 'signs the user in without sending a confirmation email', :aggregate_failures do
+ expect { subject }.not_to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ expect(controller.current_user).to be_confirmed
+ end
+ end
+
+ context 'when invite email does not match the email used on registration' do
+ let(:session_params) { { invite_email: 'bogus@email.com' } }
+
+ it 'does not authenticate the user and sends a confirmation email', :aggregate_failures do
+ expect { subject }.to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ expect(controller.current_user).to be_nil
+ end
+ end
+ end
end
context 'when soft email confirmation is enabled' do
@@ -161,6 +250,24 @@ RSpec.describe RegistrationsController do
expect(controller.current_user).to be_present
expect(response).to redirect_to(users_sign_up_welcome_path)
end
+
+ context 'when invite email matches email used on registration' do
+ let(:session_params) { { invite_email: user_params.dig(:user, :email) } }
+
+ it 'signs the user in without sending a confirmation email', :aggregate_failures do
+ expect { subject }.not_to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ expect(controller.current_user).to be_confirmed
+ end
+ end
+
+ context 'when invite email does not match the email used on registration' do
+ let(:session_params) { { invite_email: 'bogus@email.com' } }
+
+ it 'authenticates the user and sends a confirmation email without confirming', :aggregate_failures do
+ expect { subject }.to have_enqueued_mail(DeviseMailer, :confirmation_instructions)
+ expect(controller.current_user).not_to be_confirmed
+ end
+ end
end
end
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index abdafa2880a..c233e5b7c15 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -85,8 +85,7 @@ RSpec.describe SessionsController do
it 'does not authenticate user' do
post(:create, params: { user: { login: 'invalid', password: 'invalid' } })
- expect(response)
- .to set_flash.now[:alert].to(/Invalid login or password/)
+ expect(controller).to set_flash.now[:alert].to(/Invalid login or password/)
end
end
@@ -348,7 +347,7 @@ RSpec.describe SessionsController do
otp_user_id: user.id
)
- expect(response).to set_flash.now[:alert].to(/Invalid login or password/)
+ expect(controller).to set_flash.now[:alert].to(/Invalid login or password/)
end
end
@@ -396,7 +395,7 @@ RSpec.describe SessionsController do
end
it 'warns about invalid OTP code' do
- expect(response).to set_flash.now[:alert]
+ expect(controller).to set_flash.now[:alert]
.to(/Invalid two-factor code/)
end
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 908d5741709..4d10b979c69 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require Rails.root.join('ee', 'spec', 'db', 'schema_support') if Gitlab.ee?
RSpec.describe 'Database schema' do
- prepend_if_ee('EE::DB::SchemaSupport')
+ prepend_mod_with('DB::SchemaSupport')
let(:connection) { ActiveRecord::Base.connection }
let(:tables) { connection.tables }
diff --git a/spec/deprecation_toolkit_env.rb b/spec/deprecation_toolkit_env.rb
index fb70a8844a1..57d32b5423c 100644
--- a/spec/deprecation_toolkit_env.rb
+++ b/spec/deprecation_toolkit_env.rb
@@ -57,10 +57,9 @@ module DeprecationToolkitEnv
%w[
activerecord-6.0.3.6/lib/active_record/migration.rb
activesupport-6.0.3.6/lib/active_support/cache.rb
- carrierwave-1.3.1/lib/carrierwave/sanitized_file.rb
activerecord-6.0.3.6/lib/active_record/relation.rb
- selenium-webdriver-3.142.7/lib/selenium/webdriver/firefox/driver.rb
asciidoctor-2.0.12/lib/asciidoctor/extensions.rb
+ attr_encrypted-3.1.0/lib/attr_encrypted/adapters/active_record.rb
]
end
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 424a3af20a3..2ff16604c33 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -25,6 +25,12 @@ RSpec.describe ApplicationExperiment, :experiment do
described_class.new('namespaced/stub')
end
+ it "doesn't raise an exception without a defined control" do
+ # because we have a default behavior defined
+
+ expect { experiment('namespaced/stub') { } }.not_to raise_error
+ end
+
describe "enabled" do
before do
allow(subject).to receive(:enabled?).and_call_original
diff --git a/spec/experiments/concerns/project_commit_count_spec.rb b/spec/experiments/concerns/project_commit_count_spec.rb
new file mode 100644
index 00000000000..5616f167cb4
--- /dev/null
+++ b/spec/experiments/concerns/project_commit_count_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectCommitCount do
+ let(:klass) { Class.include(ProjectCommitCount) }
+ let(:instance) { klass.new }
+
+ describe '#commit_count_for' do
+ subject { instance.commit_count_for(project, default_count: 42, caller_info: :identifiable) }
+
+ let(:project) { create(:project, :repository) }
+
+ context 'when a root_ref exists' do
+ it 'returns commit count from GitalyClient' do
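+ # Stub only the count_commits Gitaly RPC; every other Gitaly call goes through unchanged.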
+ allow(Gitlab::GitalyClient).to receive(:call).and_call_original
+ allow(Gitlab::GitalyClient).to receive(:call).with(anything, :commit_service, :count_commits, anything, anything)
+ .and_return(double(count: 4))
+
+ expect(subject).to eq(4)
+ end
+ end
+
+ context 'when a root_ref does not exist' do
+ let(:project) { create(:project, :empty_repo) }
+
+ it 'returns the default_count' do
+ expect(subject).to eq(42)
+ end
+ end
+
+ it "handles exceptions by logging them with exception_details and returns the default_count" do
+ allow(Gitlab::GitalyClient).to receive(:call).and_call_original
+ allow(Gitlab::GitalyClient).to receive(:call).with(anything, :commit_service, :count_commits, anything, anything).and_raise(e = StandardError.new('_message_'))
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, caller_info: :identifiable)
+
+ expect(subject).to eq(42)
+ end
+ end
+end
diff --git a/spec/experiments/empty_repo_upload_experiment_spec.rb b/spec/experiments/empty_repo_upload_experiment_spec.rb
new file mode 100644
index 00000000000..10cbedbe8ba
--- /dev/null
+++ b/spec/experiments/empty_repo_upload_experiment_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe EmptyRepoUploadExperiment, :experiment do
+ subject { described_class.new(project: project) }
+
+ let(:project) { create(:project, :repository) }
+
+ describe '#track_initial_write' do
+ context 'when experiment is turned on' do
+ before do
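+ # Stubbing a variant (here :control) marks the experiment as enabled for this example.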
+ stub_experiments(empty_repo_upload: :control)
+ end
+
+ it "tracks an event for the first commit on a project" do
+ expect(subject).to receive(:commit_count_for).with(project, max_count: described_class::INITIAL_COMMIT_COUNT, experiment: 'empty_repo_upload').and_return(1)
+
+ expect(subject).to receive(:track).with(:initial_write, project: project).and_call_original
+
+ subject.track_initial_write
+ end
+
+ it "doesn't track an event for projects with a commit count more than 1" do
+ expect(subject).to receive(:commit_count_for).and_return(2)
+
+ expect(subject).not_to receive(:track)
+
+ subject.track_initial_write
+ end
+
+ it "doesn't track if the project is older" do
+ expect(project).to receive(:created_at).and_return(described_class::TRACKING_START_DATE - 1.minute)
+
+ expect(subject).not_to receive(:track)
+
+ subject.track_initial_write
+ end
+ end
+
+ context 'when experiment is turned off' do
+ it "doesn't track when we generally shouldn't" do
+ expect(subject).not_to receive(:track)
+
+ subject.track_initial_write
+ end
+ end
+ end
+end
diff --git a/spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb b/spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb
new file mode 100644
index 00000000000..d616672173e
--- /dev/null
+++ b/spec/experiments/in_product_guidance_environments_webide_experiment_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe InProductGuidanceEnvironmentsWebideExperiment, :experiment do
+ subject { described_class.new(project: project) }
+
+ let(:project) { create(:project, :repository) }
+
+ before do
+ stub_experiments(in_product_guidance_environments_webide: :candidate)
+ end
+
+ it 'excludes projects with environments' do
+ create(:environment, project: project)
+ expect(subject).to exclude(project: project)
+ end
+
+ it 'does not exclude projects without environments' do
+ expect(subject).not_to exclude(project: project)
+ end
+end
diff --git a/spec/experiments/members/invite_email_experiment_spec.rb b/spec/experiments/members/invite_email_experiment_spec.rb
index a9a269347e0..ac4c05e3058 100644
--- a/spec/experiments/members/invite_email_experiment_spec.rb
+++ b/spec/experiments/members/invite_email_experiment_spec.rb
@@ -11,6 +11,16 @@ RSpec.describe Members::InviteEmailExperiment, :clean_gitlab_redis_shared_state
allow(invite_email).to receive(:enabled?).and_return(true)
end
+ describe ".initial_invite_email?" do
+ it "is an initial invite email" do
+ expect(described_class.initial_invite_email?('initial_email')).to be(true)
+ end
+
+ it "is not an initial invite email" do
+ expect(described_class.initial_invite_email?('_bogus_')).to be(false)
+ end
+ end
+
describe "exclusions", :experiment do
it "excludes when created by is nil" do
expect(experiment('members/invite_email')).to exclude(actor: double(created_by: nil))
diff --git a/spec/experiments/new_project_readme_experiment_spec.rb b/spec/experiments/new_project_readme_experiment_spec.rb
index 87446394bff..e5ecc4662f6 100644
--- a/spec/experiments/new_project_readme_experiment_spec.rb
+++ b/spec/experiments/new_project_readme_experiment_spec.rb
@@ -29,24 +29,19 @@ RSpec.describe NewProjectReadmeExperiment, :experiment do
context "when tracking initial writes" do
let!(:project) { create(:project, :repository) }
- def stub_gitaly_count(count = 1)
- allow(Gitlab::GitalyClient).to receive(:call).and_call_original
- allow(Gitlab::GitalyClient).to receive(:call).with(anything, :commit_service, :count_commits, anything, anything)
- .and_return(double(count: count))
- end
-
before do
- stub_gitaly_count
+ stub_experiments(new_project_readme: :control)
end
it "tracks an event for the first commit on a project with a repository" do
+ expect(subject).to receive(:commit_count_for).with(project, default_count: described_class::INITIAL_WRITE_LIMIT, max_count: described_class::INITIAL_WRITE_LIMIT, experiment: 'new_project_readme').and_return(1)
expect(subject).to receive(:track).with(:write, property: project.created_at.to_s, value: 1).and_call_original
subject.track_initial_writes(project)
end
it "tracks an event for the second commit on a project with a repository" do
- stub_gitaly_count(2)
+ allow(subject).to receive(:commit_count_for).and_return(2)
expect(subject).to receive(:track).with(:write, property: project.created_at.to_s, value: 2).and_call_original
@@ -54,7 +49,7 @@ RSpec.describe NewProjectReadmeExperiment, :experiment do
end
it "doesn't track if the repository has more then 2 commits" do
- stub_gitaly_count(3)
+ allow(subject).to receive(:commit_count_for).and_return(3)
expect(subject).not_to receive(:track)
@@ -76,14 +71,5 @@ RSpec.describe NewProjectReadmeExperiment, :experiment do
subject.track_initial_writes(project)
end
-
- it "handles exceptions by logging them" do
- allow(Gitlab::GitalyClient).to receive(:call).with(anything, :commit_service, :count_commits, anything, anything)
- .and_raise(e = StandardError.new('_message_'))
-
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, experiment: 'new_project_readme')
-
- subject.track_initial_writes(project)
- end
end
end
diff --git a/spec/factories/alert_management/alerts.rb b/spec/factories/alert_management/alerts.rb
index ee1225b9542..f63a3c9f7f5 100644
--- a/spec/factories/alert_management/alerts.rb
+++ b/spec/factories/alert_management/alerts.rb
@@ -95,6 +95,10 @@ FactoryBot.define do
severity { 'unknown' }
end
+ trait :threat_monitoring do
+ domain { :threat_monitoring }
+ end
+
trait :prometheus do
monitoring_tool { Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus] }
payload do
@@ -109,6 +113,20 @@ FactoryBot.define do
end
end
+ trait :cilium do
+ monitoring_tool { Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:cilium] }
+ payload do
+ {
+ annotations: {
+ title: 'This is a cilium alert',
+ summary: 'Summary of the alert',
+ description: 'Description of the alert'
+ },
+ startsAt: started_at
+ }.with_indifferent_access
+ end
+ end
+
trait :all_fields do
with_issue
with_assignee
diff --git a/spec/factories/analytics/cycle_analytics/project_stages.rb b/spec/factories/analytics/cycle_analytics/project_stages.rb
index 3a481bd20fd..e673c4957b0 100644
--- a/spec/factories/analytics/cycle_analytics/project_stages.rb
+++ b/spec/factories/analytics/cycle_analytics/project_stages.rb
@@ -6,6 +6,7 @@ FactoryBot.define do
sequence(:name) { |n| "Stage ##{n}" }
hidden { false }
issue_stage
+ value_stream { association(:cycle_analytics_project_value_stream, project: project) }
trait :issue_stage do
start_event_identifier { Gitlab::Analytics::CycleAnalytics::StageEvents::IssueCreated.identifier }
diff --git a/spec/factories/analytics/cycle_analytics/project_value_streams.rb b/spec/factories/analytics/cycle_analytics/project_value_streams.rb
new file mode 100644
index 00000000000..45a6470b0aa
--- /dev/null
+++ b/spec/factories/analytics/cycle_analytics/project_value_streams.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :cycle_analytics_project_value_stream, class: 'Analytics::CycleAnalytics::ProjectValueStream' do
+ sequence(:name) { |n| "Value Stream ##{n}" }
+
+ project
+ end
+end
diff --git a/spec/factories/bulk_import/export_uploads.rb b/spec/factories/bulk_import/export_uploads.rb
new file mode 100644
index 00000000000..9f03498b9d9
--- /dev/null
+++ b/spec/factories/bulk_import/export_uploads.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :bulk_import_export_upload, class: 'BulkImports::ExportUpload' do
+ export { association(:bulk_import_export) }
+ end
+end
diff --git a/spec/factories/bulk_import/exports.rb b/spec/factories/bulk_import/exports.rb
new file mode 100644
index 00000000000..dd8831ce33a
--- /dev/null
+++ b/spec/factories/bulk_import/exports.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :bulk_import_export, class: 'BulkImports::Export', traits: %i[started] do
+ group
+ relation { 'labels' }
+
+ trait :started do
+ status { 0 }
+
+ sequence(:jid) { |n| "bulk_import_export_#{n}" }
+ end
+
+ trait :finished do
+ status { 1 }
+
+ sequence(:jid) { |n| "bulk_import_export_#{n}" }
+ end
+
+ trait :failed do
+ status { -1 }
+ end
+ end
+end
diff --git a/spec/factories/chat_names.rb b/spec/factories/chat_names.rb
index 73c885806f2..56567394bf5 100644
--- a/spec/factories/chat_names.rb
+++ b/spec/factories/chat_names.rb
@@ -2,8 +2,8 @@
FactoryBot.define do
factory :chat_name, class: 'ChatName' do
- user factory: :user
- service factory: :service
+ user
+ integration
team_id { 'T0001' }
team_domain { 'Awesome Team' }
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index b06d581d2c0..f99021ad223 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-include ActionDispatch::TestProcess
-
FactoryBot.define do
factory :ci_build, class: 'Ci::Build', parent: :ci_processable do
name { 'test' }
@@ -371,6 +369,12 @@ FactoryBot.define do
end
end
+ trait :codequality_reports_without_degradation do
+ after(:build) do |build|
+ build.job_artifacts << create(:ci_job_artifact, :codequality_without_errors, job: build)
+ end
+ end
+
trait :terraform_reports do
after(:build) do |build|
build.job_artifacts << create(:ci_job_artifact, :terraform, job: build)
@@ -427,7 +431,8 @@ FactoryBot.define do
name: 'Release $CI_COMMIT_SHA',
description: 'Created using the release-cli $EXTRA_DESCRIPTION',
tag_name: 'release-$CI_COMMIT_SHA',
- ref: '$CI_COMMIT_SHA'
+ ref: '$CI_COMMIT_SHA',
+ assets: { links: [{ name: 'asset1', url: 'https://example.com/assets/1' }] }
}
}
end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index bfd8506566b..17cd495e217 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-include ActionDispatch::TestProcess
-
FactoryBot.define do
factory :ci_job_artifact, class: 'Ci::JobArtifact' do
job factory: :ci_build
@@ -279,6 +277,16 @@ FactoryBot.define do
end
end
+ trait :sast_minimal do
+ file_type { :sast }
+ file_format { :raw }
+
+ after(:build) do |artifact, _|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/security_reports/master/gl-sast-report-minimal.json'), 'application/json')
+ end
+ end
+
trait :secret_detection do
file_type { :secret_detection }
file_format { :raw }
diff --git a/spec/factories/ci/pipeline_artifacts.rb b/spec/factories/ci/pipeline_artifacts.rb
index 3250c7abb4b..85277ce6fbf 100644
--- a/spec/factories/ci/pipeline_artifacts.rb
+++ b/spec/factories/ci/pipeline_artifacts.rb
@@ -13,6 +13,10 @@ FactoryBot.define do
Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json')
end
+ trait :unlocked do
+ association :pipeline, :unlocked, factory: :ci_pipeline
+ end
+
trait :checksummed do
verification_checksum { 'abc' }
end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 87b9a6c0e23..4fc7d945881 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -81,6 +81,10 @@ FactoryBot.define do
status { :failed }
end
+ trait :unlocked do
+ locked { Ci::Pipeline.lockeds[:unlocked] }
+ end
+
trait :protected do
add_attribute(:protected) { true }
end
@@ -237,7 +241,7 @@ FactoryBot.define do
trait :merged_result_pipeline do
detached_merge_request_pipeline
- sha { 'test-merge-sha'}
+ sha { 'mergeSha' }
ref { merge_request.merge_ref_path }
source_sha { merge_request.source_branch_sha }
target_sha { merge_request.target_branch_sha }
diff --git a/spec/factories/ci/runner_namespaces.rb b/spec/factories/ci/runner_namespaces.rb
new file mode 100644
index 00000000000..a5060d196ca
--- /dev/null
+++ b/spec/factories/ci/runner_namespaces.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_runner_namespace, class: 'Ci::RunnerNamespace' do
+ runner factory: [:ci_runner, :group]
+ group
+ end
+end
diff --git a/spec/factories/clusters/integrations/elastic_stack.rb b/spec/factories/clusters/integrations/elastic_stack.rb
new file mode 100644
index 00000000000..1ab3256845b
--- /dev/null
+++ b/spec/factories/clusters/integrations/elastic_stack.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :clusters_integrations_elastic_stack, class: 'Clusters::Integrations::ElasticStack' do
+ cluster factory: %i(cluster provided_by_gcp)
+ enabled { true }
+
+ trait :disabled do
+ enabled { false }
+ end
+ end
+end
diff --git a/spec/factories/gitlab/database/background_migration/batched_jobs.rb b/spec/factories/gitlab/database/background_migration/batched_jobs.rb
index 52bc04447da..cec20616f7f 100644
--- a/spec/factories/gitlab/database/background_migration/batched_jobs.rb
+++ b/spec/factories/gitlab/database/background_migration/batched_jobs.rb
@@ -8,5 +8,6 @@ FactoryBot.define do
max_value { 10 }
batch_size { 5 }
sub_batch_size { 1 }
+ pause_ms { 100 }
end
end
diff --git a/spec/factories/gitlab/database/background_migration/batched_migrations.rb b/spec/factories/gitlab/database/background_migration/batched_migrations.rb
index 49cbdc5a8fb..c03841d8c02 100644
--- a/spec/factories/gitlab/database/background_migration/batched_migrations.rb
+++ b/spec/factories/gitlab/database/background_migration/batched_migrations.rb
@@ -10,5 +10,6 @@ FactoryBot.define do
table_name { :events }
column_name { :id }
total_tuple_count { 10_000 }
+ pause_ms { 100 }
end
end
diff --git a/spec/factories/gitlab/jwt_token.rb b/spec/factories/gitlab/jwt_token.rb
new file mode 100644
index 00000000000..bc00c6a5ff4
--- /dev/null
+++ b/spec/factories/gitlab/jwt_token.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :jwt_token, class: 'Gitlab::JWTToken' do
+ skip_create
+
+ initialize_with { new }
+
+ trait :with_custom_payload do
+ transient do
+ custom_payload { {} }
+ end
+
+ after(:build) do |jwt, evaluator|
+ evaluator.custom_payload.each do |key, value|
+ jwt[key] = value
+ end
+ end
+ end
+ end
+end
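A minimal sketch (illustrative, not part of the patch) of the :with_custom_payload trait: custom_payload is the transient attribute declared above, and its keys are merged into the token in the after(:build) hook.

    # Illustrative sketch; the hash key and value here are arbitrary examples.
    jwt = build(:jwt_token, :with_custom_payload, custom_payload: { key: 'value' })

    expect(jwt[:key]).to eq('value')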
diff --git a/spec/factories/services_data.rb b/spec/factories/integration_data.rb
index 7b6a705c791..2541a3d2da3 100644
--- a/spec/factories/services_data.rb
+++ b/spec/factories/integration_data.rb
@@ -4,15 +4,15 @@
# The factories are used when creating integrations.
FactoryBot.define do
factory :jira_tracker_data do
- service
+ integration factory: :jira_service
end
factory :issue_tracker_data do
- service
+ integration
end
factory :open_project_tracker_data do
- service
+ integration factory: :open_project_service
url { 'http://openproject.example.com'}
token { 'supersecret' }
project_identifier_code { 'PRJ-1' }
diff --git a/spec/factories/services.rb b/spec/factories/integrations.rb
index 25ef75880bb..6bd6deb262a 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/integrations.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :service do
+ factory :integration, aliases: [:service] do
project
- type { 'Service' }
+ type { 'Integration' }
end
factory :custom_issue_tracker_service, class: 'CustomIssueTrackerService' do
@@ -12,7 +12,7 @@ FactoryBot.define do
issue_tracker
end
- factory :emails_on_push_service do
+ factory :emails_on_push_service, class: 'Integrations::EmailsOnPush' do
project
type { 'EmailsOnPushService' }
active { true }
@@ -65,21 +65,21 @@ FactoryBot.define do
deployment_type { 'cloud' }
end
- before(:create) do |service, evaluator|
+ after(:build) do |integration, evaluator|
if evaluator.create_data
- create(:jira_tracker_data, service: service,
- url: evaluator.url, api_url: evaluator.api_url,
- jira_issue_transition_automatic: evaluator.jira_issue_transition_automatic,
- jira_issue_transition_id: evaluator.jira_issue_transition_id,
- username: evaluator.username, password: evaluator.password, issues_enabled: evaluator.issues_enabled,
- project_key: evaluator.project_key, vulnerabilities_enabled: evaluator.vulnerabilities_enabled,
- vulnerabilities_issuetype: evaluator.vulnerabilities_issuetype, deployment_type: evaluator.deployment_type
+ integration.jira_tracker_data = build(:jira_tracker_data,
+ integration: integration, url: evaluator.url, api_url: evaluator.api_url,
+ jira_issue_transition_automatic: evaluator.jira_issue_transition_automatic,
+ jira_issue_transition_id: evaluator.jira_issue_transition_id,
+ username: evaluator.username, password: evaluator.password, issues_enabled: evaluator.issues_enabled,
+ project_key: evaluator.project_key, vulnerabilities_enabled: evaluator.vulnerabilities_enabled,
+ vulnerabilities_issuetype: evaluator.vulnerabilities_issuetype, deployment_type: evaluator.deployment_type
)
end
end
end
- factory :confluence_service do
+ factory :confluence_service, class: 'Integrations::Confluence' do
project
active { true }
confluence_url { 'https://example.atlassian.net/wiki' }
@@ -117,10 +117,11 @@ FactoryBot.define do
new_issue_url { 'http://new-issue.example.com' }
end
- before(:create) do |service, evaluator|
+ after(:build) do |integration, evaluator|
if evaluator.create_data
- create(:issue_tracker_data, service: service,
- project_url: evaluator.project_url, issues_url: evaluator.issues_url, new_issue_url: evaluator.new_issue_url
+ integration.issue_tracker_data = build(:issue_tracker_data,
+ integration: integration, project_url: evaluator.project_url,
+ issues_url: evaluator.issues_url, new_issue_url: evaluator.new_issue_url
)
end
end
@@ -145,9 +146,9 @@ FactoryBot.define do
project_identifier_code { 'PRJ-1' }
end
- before(:create) do |service, evaluator|
- create(:open_project_tracker_data, service: service,
- url: evaluator.url, api_url: evaluator.api_url, token: evaluator.token,
+ after(:build) do |integration, evaluator|
+ integration.open_project_tracker_data = build(:open_project_tracker_data,
+ integration: integration, url: evaluator.url, api_url: evaluator.api_url, token: evaluator.token,
closed_status_id: evaluator.closed_status_id, project_identifier_code: evaluator.project_identifier_code
)
end
@@ -159,12 +160,6 @@ FactoryBot.define do
password { 'my-secret-password' }
end
- factory :hipchat_service do
- project
- type { 'HipchatService' }
- token { 'test_token' }
- end
-
factory :slack_service do
project
active { true }
@@ -186,7 +181,7 @@ FactoryBot.define do
issue_tracker_data { nil }
create_data { false }
- after(:build) do |service|
+ after(:build) do
IssueTrackerService.skip_callback(:validation, :before, :handle_properties)
end
diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb
index 5c62de4d08d..2d52747dece 100644
--- a/spec/factories/issues.rb
+++ b/spec/factories/issues.rb
@@ -13,6 +13,14 @@ FactoryBot.define do
confidential { true }
end
+ trait :with_asc_relative_position do
+ sequence(:relative_position) { |n| n * 1000 }
+ end
+
+ trait :with_desc_relative_position do
+ sequence(:relative_position) { |n| -n * 1000 }
+ end
+
trait :opened do
state_id { Issue.available_states[:opened] }
end
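For context (illustrative only, not part of the patch), the new relative-position traits use FactoryBot sequences, so each successive issue gets a strictly larger (or smaller) relative_position:

    # Illustrative sketch; assumes a project is available to attach the issues to.
    issues = create_list(:issue, 3, :with_asc_relative_position, project: create(:project))
    positions = issues.map(&:relative_position)

    expect(positions).to eq(positions.sort)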
diff --git a/spec/factories/lfs_objects.rb b/spec/factories/lfs_objects.rb
index 35fc4db8519..59c6ea5f55a 100644
--- a/spec/factories/lfs_objects.rb
+++ b/spec/factories/lfs_objects.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-include ActionDispatch::TestProcess
-
FactoryBot.define do
factory :lfs_object do
sequence(:oid) { |n| "b68143e6463773b1b6c6fd009a76c32aeec041faff32ba2ed42fd7f708a%05x" % n }
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index fce44c2cee0..0aab41df90b 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -279,7 +279,7 @@ FactoryBot.define do
trait :with_merge_request_pipeline do
transient do
- merge_sha { 'test-merge-sha' }
+ merge_sha { 'mergesha' }
source_sha { source_branch_sha }
target_sha { target_branch_sha }
end
diff --git a/spec/factories/namespace_package_settings.rb b/spec/factories/namespace_package_settings.rb
index 875933ce84f..042808f042f 100644
--- a/spec/factories/namespace_package_settings.rb
+++ b/spec/factories/namespace_package_settings.rb
@@ -7,6 +7,9 @@ FactoryBot.define do
maven_duplicates_allowed { true }
maven_duplicate_exception_regex { 'SNAPSHOT' }
+ generic_duplicates_allowed { true }
+ generic_duplicate_exception_regex { 'foo' }
+
trait :group do
namespace { association(:group) }
end
diff --git a/spec/factories/namespaces.rb b/spec/factories/namespaces.rb
index f4b57369678..957ec88420d 100644
--- a/spec/factories/namespaces.rb
+++ b/spec/factories/namespaces.rb
@@ -5,22 +5,7 @@ FactoryBot.define do
sequence(:name) { |n| "namespace#{n}" }
path { name.downcase.gsub(/\s/, '_') }
- # This is a workaround to avoid the user creating another namespace via
- # User#ensure_namespace_correct. We should try to remove it and then
- # we could remove this workaround
- association :owner, factory: :user, strategy: :build
- before(:create) do |namespace|
- owner = namespace.owner
-
- if owner
- # We're changing the username here because we want to keep our path,
- # and User#ensure_namespace_correct would change the path based on
- # username, so we're forced to do this otherwise we'll need to change
- # a lot of existing tests.
- owner.username = namespace.path
- owner.namespace = namespace
- end
- end
+ owner { association(:user, strategy: :build, namespace: instance, username: path) }
trait :with_aggregation_schedule do
after(:create) do |namespace|
diff --git a/spec/factories/notes.rb b/spec/factories/notes.rb
index 299d08972b7..c15ec91d2ce 100644
--- a/spec/factories/notes.rb
+++ b/spec/factories/notes.rb
@@ -2,8 +2,6 @@
require_relative '../support/helpers/repo_helpers'
-include ActionDispatch::TestProcess
-
FactoryBot.define do
factory :note do
project
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
index 9edee735af9..a8020f396fd 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages.rb
@@ -94,6 +94,22 @@ FactoryBot.define do
end
end
+ factory :helm_package do
+ sequence(:name) { |n| "package-#{n}" }
+ sequence(:version) { |n| "v1.0.#{n}" }
+ package_type { :helm }
+
+ transient do
+ without_package_files { false }
+ end
+
+ after :create do |package, evaluator|
+ unless evaluator.without_package_files
+ create :helm_package_file, package: package
+ end
+ end
+ end
+
factory :npm_package do
sequence(:name) { |n| "@#{project.root_namespace.path}/package-#{n}"}
version { '1.0.0' }
@@ -113,6 +129,25 @@ FactoryBot.define do
end
end
+ factory :terraform_module_package do
+ sequence(:name) { |n| "module-#{n}/system" }
+ version { '1.0.0' }
+ package_type { :terraform_module }
+
+ after :create do |package|
+ create :package_file, :terraform_module, package: package
+ end
+
+ trait :with_build do
+ after :create do |package|
+ user = package.project.creator
+ pipeline = create(:ci_pipeline, user: user)
+ create(:ci_build, user: user, pipeline: pipeline)
+ create :package_build_info, package: package, pipeline: pipeline
+ end
+ end
+ end
+
factory :nuget_package do
sequence(:name) { |n| "NugetPackage#{n}"}
sequence(:version) { |n| "1.0.#{n}" }
diff --git a/spec/factories/packages/helm/file_metadatum.rb b/spec/factories/packages/helm/file_metadatum.rb
new file mode 100644
index 00000000000..e809f592546
--- /dev/null
+++ b/spec/factories/packages/helm/file_metadatum.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :helm_file_metadatum, class: 'Packages::Helm::FileMetadatum' do
+ package_file { association(:helm_package_file, without_loaded_metadatum: true) }
+ channel { 'stable' }
+ metadata { { 'name': package_file.package.name, 'version': package_file.package.version, 'apiVersion': 'v2' } }
+ end
+end
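A hedged sketch (not part of the patch) tying the Helm factories in this change together; creating a metadatum pulls in a :helm_package_file through the association declared above, whose file_name is built from the package name and version:

    # Illustrative sketch; 'beta' is an arbitrary example channel.
    metadatum = create(:helm_file_metadatum, channel: 'beta')

    expect(metadatum.channel).to eq('beta')
    expect(metadatum.package_file.file_name).to end_with('.tgz')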
diff --git a/spec/factories/packages/package_file.rb b/spec/factories/packages/package_file.rb
index 74400975670..e49e3f36635 100644
--- a/spec/factories/packages/package_file.rb
+++ b/spec/factories/packages/package_file.rb
@@ -201,10 +201,29 @@ FactoryBot.define do
end
end
+ factory :helm_package_file do
+ package { association(:helm_package, without_package_files: true) }
+ file_name { "#{package.name}-#{package.version}.tgz" }
+ file_fixture { "spec/fixtures/packages/helm/rook-ceph-v1.5.8.tgz" }
+
+ transient do
+ without_loaded_metadatum { false }
+ channel { 'stable' }
+ end
+
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :helm_file_metadatum, package_file: package_file, channel: evaluator.channel
+ end
+ end
+ end
+
trait(:jar) do
file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.jar' }
file_name { 'my-app-1.0-20180724.124855-1.jar' }
file_sha1 { '4f0bfa298744d505383fbb57c554d4f5c12d88b3' }
+ file_md5 { '0a7392d24f42f83068fa3767c5310052' }
+ file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
size { 100.kilobytes }
end
@@ -212,6 +231,8 @@ FactoryBot.define do
file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom' }
file_name { 'my-app-1.0-20180724.124855-1.pom' }
file_sha1 { '19c975abd49e5102ca6c74a619f21e0cf0351c57' }
+ file_md5 { '0a7392d24f42f83068fa3767c5310052' }
+ file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
size { 200.kilobytes }
end
@@ -219,6 +240,8 @@ FactoryBot.define do
file_fixture { 'spec/fixtures/packages/maven/maven-metadata.xml' }
file_name { 'maven-metadata.xml' }
file_sha1 { '42b1bdc80de64953b6876f5a8c644f20204011b0' }
+ file_md5 { '0a7392d24f42f83068fa3767c5310052' }
+ file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
size { 300.kilobytes }
end
@@ -231,6 +254,13 @@ FactoryBot.define do
size { 400.kilobytes }
end
+ trait(:terraform_module) do
+ file_fixture { 'spec/fixtures/packages/terraform_module/module-system-v1.0.0.tgz' }
+ file_name { 'module-system-v1.0.0.tgz' }
+ file_sha1 { 'abf850accb1947c0c0e3ef4b441b771bb5c9ae3c' }
+ size { 806.bytes }
+ end
+
trait(:nuget) do
package
file_fixture { 'spec/fixtures/packages/nuget/package.nupkg' }
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 80392a2fece..f4f1e1bcbda 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -194,7 +194,7 @@ FactoryBot.define do
filename,
content,
message: "Automatically created file #{filename}",
- branch_name: project.default_branch_or_master
+ branch_name: project.default_branch || 'master'
)
end
end
diff --git a/spec/factories/service_hooks.rb b/spec/factories/service_hooks.rb
index ff819f4f8d0..ea70d2fc433 100644
--- a/spec/factories/service_hooks.rb
+++ b/spec/factories/service_hooks.rb
@@ -3,6 +3,6 @@
FactoryBot.define do
factory :service_hook do
url { generate(:url) }
- service
+ integration
end
end
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 714f8451f39..2aa926e4dd8 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -32,8 +32,6 @@ FactoryBot.define do
create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
- create(:service, project: projects[0], type: 'AlertsService', active: true)
- create(:service, project: projects[1], type: 'AlertsService', active: false)
alert_bot_issues = create_list(:incident, 2, project: projects[0], author: User.alert_bot)
create_list(:incident, 2, project: projects[1], author: User.alert_bot)
issues = create_list(:issue, 4, project: projects[0])
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 9b5e4a981a0..476c57f2d80 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -27,6 +27,10 @@ FactoryBot.define do
after(:build) { |user, _| user.block_pending_approval! }
end
+ trait :banned do
+ after(:build) { |user, _| user.ban! }
+ end
+
trait :ldap_blocked do
after(:build) { |user, _| user.ldap_block! }
end
@@ -80,6 +84,12 @@ FactoryBot.define do
last_sign_in_ip { '127.0.0.1' }
end
+ trait :with_credit_card_validation do
+ after :create do |user|
+ create :credit_card_validation, user: user
+ end
+ end
+
trait :two_factor_via_otp do
before(:create) do |user|
user.otp_required_for_login = true
diff --git a/spec/factories/users/credit_card_validations.rb b/spec/factories/users/credit_card_validations.rb
new file mode 100644
index 00000000000..09940347708
--- /dev/null
+++ b/spec/factories/users/credit_card_validations.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :credit_card_validation, class: 'Users::CreditCardValidation' do
+ user
+
+ credit_card_validated_at { Time.current }
+ end
+end
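Finally, an illustrative sketch (not part of the patch) combining the new :with_credit_card_validation user trait with the factory above; it assumes User exposes a has_one credit_card_validation association:

    # Illustrative sketch; the credit_card_validation association name is assumed, as noted above.
    user = create(:user, :with_credit_card_validation)

    expect(user.credit_card_validation).to be_present
    expect(user.credit_card_validation.credit_card_validated_at).to be_present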
diff --git a/spec/features/action_cable_logging_spec.rb b/spec/features/action_cable_logging_spec.rb
index ce7c0e03aad..2e6ce93f7f7 100644
--- a/spec/features/action_cable_logging_spec.rb
+++ b/spec/features/action_cable_logging_spec.rb
@@ -22,11 +22,7 @@ RSpec.describe 'ActionCable logging', :js do
subscription_data = a_hash_including(
remote_ip: '127.0.0.1',
user_id: user.id,
- username: user.username,
- params: a_hash_including(
- project_path: project.full_path,
- iid: issue.iid.to_s
- )
+ username: user.username
)
expect(ActiveSupport::Notifications).to receive(:instrument).with('subscribe.action_cable', subscription_data)
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index 61e7efbc56c..603e757096f 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe 'Admin Appearance' do
expect_custom_sign_in_appearance(appearance)
end
- it 'preview new project page appearance' do
+ it 'preview new project page appearance', :js do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
@@ -86,10 +86,11 @@ RSpec.describe 'Admin Appearance' do
expect_custom_sign_in_appearance(appearance)
end
- it 'custom new project page' do
+ it 'custom new project page', :js do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
visit new_project_path
+ find('[data-qa-selector="blank_project_link"]').click
expect_custom_new_project_appearance(appearance)
end
diff --git a/spec/features/admin/admin_dev_ops_report_spec.rb b/spec/features/admin/admin_dev_ops_report_spec.rb
index a05fa0640d8..33f984af807 100644
--- a/spec/features/admin/admin_dev_ops_report_spec.rb
+++ b/spec/features/admin/admin_dev_ops_report_spec.rb
@@ -53,15 +53,13 @@ RSpec.describe 'DevOps Report page', :js do
end
context 'when there is data to display' do
- it 'shows numbers for each metric' do
+ it 'shows the DevOps Score app' do
stub_application_setting(usage_ping_enabled: true)
create(:dev_ops_report_metric)
visit admin_dev_ops_report_path
- expect(page).to have_content(
- 'Issues created per active user 1.2 You 9.3 Lead 13.3%'
- )
+ expect(page).to have_selector('[data-testid="devops-score-app"]')
end
end
end
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index e7634f4e020..f9673a8aa2f 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Admin Groups' do
include Select2Helper
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::Features::InviteMembersModalHelper
let(:internal) { Gitlab::VisibilityLevel::INTERNAL }
@@ -202,6 +203,7 @@ RSpec.describe 'Admin Groups' do
select2(Gitlab::Access::REPORTER, from: '#access_level')
end
click_button "Add users to group"
+
page.within ".group-users-list" do
expect(page).to have_content(user.name)
expect(page).to have_content('Reporter')
@@ -220,19 +222,13 @@ RSpec.describe 'Admin Groups' do
describe 'add admin himself to a group' do
before do
- stub_feature_flags(invite_members_group_modal: false)
group.add_user(:user, Gitlab::Access::OWNER)
end
it 'adds admin to a group as developer', :js do
visit group_group_members_path(group)
- page.within '.invite-users-form' do
- select2(current_user.id, from: '#user_ids', multiple: true)
- select 'Developer', from: 'access_level'
- end
-
- click_button 'Invite'
+ invite_member(current_user.name, role: 'Developer')
page.within members_table do
expect(page).to have_content(current_user.name)
diff --git a/spec/features/admin/admin_labels_spec.rb b/spec/features/admin/admin_labels_spec.rb
index 43fb1f31a0f..08d81906d9f 100644
--- a/spec/features/admin/admin_labels_spec.rb
+++ b/spec/features/admin/admin_labels_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'admin issues labels' do
it 'deletes all labels', :js do
page.within '.labels' do
- page.all('.js-remove-row').each do |remove|
+ page.all('.js-remove-label').each do |remove|
accept_confirm { remove.click }
wait_for_requests
end
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
index 8cfac5d8b99..664eb51e58f 100644
--- a/spec/features/admin/admin_mode/logout_spec.rb
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -8,37 +8,67 @@ RSpec.describe 'Admin Mode Logout', :js do
let(:user) { create(:admin) }
- before do
- stub_feature_flags(combined_menu: false)
+ shared_examples 'combined_menu: feature flag examples' do
+ before do
+ gitlab_sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user)
+ visit admin_root_path
+ end
- gitlab_sign_in(user)
- gitlab_enable_admin_mode_sign_in(user)
- visit admin_root_path
- end
+ it 'disable removes admin mode and redirects to root page' do
+ pending_on_combined_menu_flag
- it 'disable removes admin mode and redirects to root page' do
- gitlab_disable_admin_mode
+ gitlab_disable_admin_mode
- expect(current_path).to eq root_path
- expect(page).to have_link(href: new_admin_session_path)
- end
+ expect(current_path).to eq root_path
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+
+ it 'disable shows flash notice' do
+ pending_on_combined_menu_flag
+
+ gitlab_disable_admin_mode
+
+ expect(page).to have_selector('.flash-notice')
+ end
- it 'disable shows flash notice' do
- gitlab_disable_admin_mode
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
- expect(page).to have_selector('.flash-notice')
+ it 'disable removes admin mode and redirects to root page' do
+ pending_on_combined_menu_flag
+
+ gitlab_disable_admin_mode
+
+ expect(current_path).to eq root_path
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+ end
end
- context 'on a read-only instance' do
+ context 'with combined_menu: feature flag on' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { true }
+
before do
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ stub_feature_flags(combined_menu: true)
end
- it 'disable removes admin mode and redirects to root page' do
- gitlab_disable_admin_mode
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
- expect(current_path).to eq root_path
- expect(page).to have_link(href: new_admin_session_path)
+ context 'with combined_menu feature flag off' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { false }
+
+ before do
+ stub_feature_flags(combined_menu: false)
end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ def pending_on_combined_menu_flag
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56587' if needs_rewrite_for_combined_menu_flag_on
end
end
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
index 633de20c82d..4df035b13e8 100644
--- a/spec/features/admin/admin_mode_spec.rb
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -8,55 +8,41 @@ RSpec.describe 'Admin mode' do
let(:admin) { create(:admin) }
- before do
- stub_feature_flags(combined_menu: false)
-
- stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
- end
-
- context 'application setting :admin_mode is enabled', :request_store do
+ shared_examples 'combined_menu: feature flag examples' do
before do
- sign_in(admin)
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
- context 'when not in admin mode' do
- it 'has no leave admin mode button' do
- visit new_admin_session_path
-
- page.within('.navbar-sub-nav') do
- expect(page).not_to have_link(href: destroy_admin_session_path)
- end
+ context 'application setting :admin_mode is enabled', :request_store do
+ before do
+ sign_in(admin)
end
- it 'can open pages not in admin scope' do
- visit new_admin_session_path
+ context 'when not in admin mode' do
+ it 'has no leave admin mode button' do
+ visit new_admin_session_path
- page.within('.navbar-sub-nav') do
- find_all('a', text: 'Projects').first.click
+ page.within('.navbar-sub-nav') do
+ expect(page).not_to have_link(href: destroy_admin_session_path)
+ end
end
- expect(page).to have_current_path(dashboard_projects_path)
- end
-
- it 'is necessary to provide credentials again before opening pages in admin scope' do
- visit general_admin_application_settings_path # admin logged out because not in admin_mode
-
- expect(page).to have_current_path(new_admin_session_path)
- end
+ it 'can open pages not in admin scope' do
+ pending_on_combined_menu_flag
- it 'can enter admin mode' do
- visit new_admin_session_path
+ visit new_admin_session_path
- fill_in 'user_password', with: admin.password
+ page.within('.navbar-sub-nav') do
+ find_all('a', text: 'Projects').first.click
+ end
- click_button 'Enter Admin Mode'
+ expect(page).to have_current_path(dashboard_projects_path)
+ end
- expect(page).to have_current_path(admin_root_path)
- end
+ it 'is necessary to provide credentials again before opening pages in admin scope' do
+ visit general_admin_application_settings_path # admin logged out because not in admin_mode
- context 'on a read-only instance' do
- before do
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ expect(page).to have_current_path(new_admin_session_path)
end
it 'can enter admin mode' do
@@ -68,108 +54,161 @@ RSpec.describe 'Admin mode' do
expect(page).to have_current_path(admin_root_path)
end
- end
- end
- context 'when in admin_mode' do
- before do
- gitlab_enable_admin_mode_sign_in(admin)
- end
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
- it 'contains link to leave admin mode' do
- page.within('.navbar-sub-nav') do
- expect(page).to have_link(href: destroy_admin_session_path)
+ it 'can enter admin mode' do
+ visit new_admin_session_path
+
+ fill_in 'user_password', with: admin.password
+
+ click_button 'Enter Admin Mode'
+
+ expect(page).to have_current_path(admin_root_path)
+ end
end
end
- it 'can leave admin mode using main dashboard link', :js do
- page.within('.navbar-sub-nav') do
- click_on 'Leave Admin Mode'
+ context 'when in admin_mode' do
+ before do
+ gitlab_enable_admin_mode_sign_in(admin)
+ end
- expect(page).to have_link(href: new_admin_session_path)
+ it 'contains link to leave admin mode' do
+ pending_on_combined_menu_flag
+
+ page.within('.navbar-sub-nav') do
+ expect(page).to have_link(href: destroy_admin_session_path)
+ end
end
- end
- it 'can leave admin mode using dropdown menu on smaller screens', :js do
- resize_screen_xs
- visit root_dashboard_path
+ it 'can leave admin mode using main dashboard link', :js do
+ pending_on_combined_menu_flag
- find('.header-more').click
+ page.within('.navbar-sub-nav') do
+ click_on 'Leave Admin Mode'
- page.within '.navbar-sub-nav' do
- click_on 'Leave Admin Mode'
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+ end
+
+ it 'can leave admin mode using dropdown menu on smaller screens', :js do
+ pending_on_combined_menu_flag
+
+ resize_screen_xs
+ visit root_dashboard_path
find('.header-more').click
- expect(page).to have_link(href: new_admin_session_path)
- end
- end
+ page.within '.navbar-sub-nav' do
+ click_on 'Leave Admin Mode'
- it 'can open pages not in admin scope' do
- page.within('.navbar-sub-nav') do
- find_all('a', text: 'Projects').first.click
+ find('.header-more').click
- expect(page).to have_current_path(dashboard_projects_path)
+ expect(page).to have_link(href: new_admin_session_path)
+ end
end
- end
- context 'nav bar' do
- it 'shows admin dashboard links on bigger screen' do
- visit root_dashboard_path
+ it 'can open pages not in admin scope' do
+ pending_on_combined_menu_flag
- page.within '.navbar' do
- expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
- expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ page.within('.navbar-sub-nav') do
+ find_all('a', text: 'Projects').first.click
+
+ expect(page).to have_current_path(dashboard_projects_path)
end
end
- it 'relocates admin dashboard links to dropdown list on smaller screen', :js do
- resize_screen_xs
- visit root_dashboard_path
+ context 'nav bar' do
+ it 'shows admin dashboard links on bigger screen' do
+ pending_on_combined_menu_flag
- page.within '.navbar' do
- expect(page).not_to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
- expect(page).not_to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ visit root_dashboard_path
+
+ page.within '.navbar' do
+ expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
+ expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
end
- find('.header-more').click
+ it 'relocates admin dashboard links to dropdown list on smaller screen', :js do
+ pending_on_combined_menu_flag
+
+ resize_screen_xs
+ visit root_dashboard_path
- page.within '.navbar' do
- expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
- expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ page.within '.navbar' do
+ expect(page).not_to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
+
+ find('.header-more').click
+
+ page.within '.navbar' do
+ expect(page).to have_link(text: 'Admin Area', href: admin_root_path, visible: true)
+ expect(page).to have_link(text: 'Leave Admin Mode', href: destroy_admin_session_path, visible: true)
+ end
end
end
- end
- context 'on a read-only instance' do
- before do
- allow(Gitlab::Database).to receive(:read_only?).and_return(true)
- end
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
- it 'can leave admin mode', :js do
- page.within('.navbar-sub-nav') do
- click_on 'Leave Admin Mode'
+ it 'can leave admin mode', :js do
+ pending_on_combined_menu_flag
- expect(page).to have_link(href: new_admin_session_path)
+ page.within('.navbar-sub-nav') do
+ click_on 'Leave Admin Mode'
+
+ expect(page).to have_link(href: new_admin_session_path)
+ end
end
end
end
end
+
+ context 'application setting :admin_mode is disabled' do
+ before do
+ stub_application_setting(admin_mode: false)
+ sign_in(admin)
+ end
+
+ it 'shows no admin mode buttons in navbar' do
+ visit admin_root_path
+
+ page.within('.navbar-sub-nav') do
+ expect(page).not_to have_link(href: new_admin_session_path)
+ expect(page).not_to have_link(href: destroy_admin_session_path)
+ end
+ end
+ end
end
- context 'application setting :admin_mode is disabled' do
+ context 'with combined_menu: feature flag on' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { true }
+
before do
- stub_application_setting(admin_mode: false)
- sign_in(admin)
+ stub_feature_flags(combined_menu: true)
end
- it 'shows no admin mode buttons in navbar' do
- visit admin_root_path
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
- page.within('.navbar-sub-nav') do
- expect(page).not_to have_link(href: new_admin_session_path)
- expect(page).not_to have_link(href: destroy_admin_session_path)
- end
+ context 'with combined_menu feature flag off' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { false }
+
+ before do
+ stub_feature_flags(combined_menu: false)
end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ def pending_on_combined_menu_flag
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56587' if needs_rewrite_for_combined_menu_flag_on
end
end
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index bf280595ec7..cbbe9aa3b8b 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe "Admin::Projects" do
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::Features::InviteMembersModalHelper
include Select2Helper
let(:user) { create :user }
@@ -95,21 +96,27 @@ RSpec.describe "Admin::Projects" do
describe 'admin adds themselves to the project', :js do
before do
project.add_maintainer(user)
- stub_feature_flags(invite_members_group_modal: false)
end
it 'adds admin to the project as developer' do
visit project_project_members_path(project)
- page.within '.invite-users-form' do
- select2(current_user.id, from: '#user_ids', multiple: true)
- select 'Developer', from: 'access_level'
- end
-
- click_button 'Invite'
+ invite_member(current_user.name, role: 'Developer')
expect(find_member_row(current_user)).to have_content('Developer')
end
+
+ context 'with the invite_members_group_modal feature flag disabled' do
+ it 'adds admin to the project as developer' do
+ stub_feature_flags(invite_members_group_modal: false)
+
+ visit project_project_members_path(project)
+
+ add_member_using_form(current_user.id, role: 'Developer')
+
+ expect(find_member_row(current_user)).to have_content('Developer')
+ end
+ end
end
describe 'admin removes themselves from the project', :js do
@@ -134,4 +141,19 @@ RSpec.describe "Admin::Projects" do
expect(current_path).to match dashboard_projects_path
end
end
+
+ # temporary method for the form until the :invite_members_group_modal feature flag is
+ # enabled: https://gitlab.com/gitlab-org/gitlab/-/issues/247208
+ def add_member_using_form(id, role: 'Developer')
+ page.within '.invite-users-form' do
+ select2(id, from: '#user_ids', multiple: true)
+
+ fill_in 'expires_at', with: 5.days.from_now.to_date
+ find_field('expires_at').native.send_keys :enter
+
+ select(role, from: "access_level")
+
+ click_on 'Invite'
+ end
+ end
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 90ce865cc00..0a7113a5559 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -251,40 +251,62 @@ RSpec.describe 'Admin updates settings' do
end
end
- context 'when the Slack Notifications Service template is active' do
+ context 'when Service Templates are enabled' do
before do
- create(:service, :template, type: 'SlackService', active: true)
-
+ stub_feature_flags(disable_service_templates: false)
visit general_admin_application_settings_path
end
- it 'change Slack Notifications Service template settings', :js do
- first(:link, 'Service Templates').click
- click_link 'Slack notifications'
- fill_in 'Webhook', with: 'http://localhost'
- fill_in 'Username', with: 'test_user'
- fill_in 'service[push_channel]', with: '#test_channel'
- page.check('Notify only broken pipelines')
- page.select 'All branches', from: 'Branches to be notified'
+ it 'shows Service Templates link' do
+ expect(page).to have_link('Service Templates')
+ end
- check_all_events
- click_button 'Save changes'
+ context 'when the Slack Notifications Service template is active' do
+ before do
+ create(:service, :template, type: 'SlackService', active: true)
- expect(page).to have_content 'Application settings saved successfully'
+ visit general_admin_application_settings_path
+ end
- click_link 'Slack notifications'
+ it 'change Slack Notifications Service template settings', :js do
+ first(:link, 'Service Templates').click
+ click_link 'Slack notifications'
+ fill_in 'Webhook', with: 'http://localhost'
+ fill_in 'Username', with: 'test_user'
+ fill_in 'service[push_channel]', with: '#test_channel'
+ page.check('Notify only broken pipelines')
+ page.select 'All branches', from: 'Branches to be notified'
+ page.select 'Match any of the labels', from: 'Labels to be notified behavior'
+
+ check_all_events
+ click_button 'Save changes'
+
+ expect(page).to have_content 'Application settings saved successfully'
- expect(page.all('input[type=checkbox]')).to all(be_checked)
- expect(find_field('Webhook').value).to eq 'http://localhost'
- expect(find_field('Username').value).to eq 'test_user'
- expect(find('[name="service[push_channel]"]').value).to eq '#test_channel'
+ click_link 'Slack notifications'
+
+ expect(page.all('input[type=checkbox]')).to all(be_checked)
+ expect(find_field('Webhook').value).to eq 'http://localhost'
+ expect(find_field('Username').value).to eq 'test_user'
+ expect(find('[name="service[push_channel]"]').value).to eq '#test_channel'
+ end
+
+ it 'defaults Deployment events to false for chat notification template settings', :js do
+ first(:link, 'Service Templates').click
+ click_link 'Slack notifications'
+
+ expect(find_field('Deployment')).not_to be_checked
+ end
end
+ end
- it 'defaults Deployment events to false for chat notification template settings', :js do
- first(:link, 'Service Templates').click
- click_link 'Slack notifications'
+ context 'When Service templates are disabled' do
+ before do
+ stub_feature_flags(disable_service_templates: true)
+ end
- expect(find_field('Deployment')).not_to be_checked
+ it 'does not show Service Templates link' do
+ expect(page).not_to have_link('Service Templates')
end
end
@@ -424,7 +446,8 @@ RSpec.describe 'Admin updates settings' do
check 'Enable reCAPTCHA for login'
fill_in 'IPs per user', with: 15
check 'Enable Spam Check via external API endpoint'
- fill_in 'URL of the external Spam Check endpoint', with: 'https://www.example.com/spamcheck'
+ fill_in 'URL of the external Spam Check endpoint', with: 'grpc://www.example.com/spamcheck'
+ fill_in 'Spam Check API Key', with: 'SPAM_CHECK_API_KEY'
click_button 'Save changes'
end
@@ -433,7 +456,7 @@ RSpec.describe 'Admin updates settings' do
expect(current_settings.login_recaptcha_protection_enabled).to be true
expect(current_settings.unique_ips_limit_per_user).to eq(15)
expect(current_settings.spam_check_endpoint_enabled).to be true
- expect(current_settings.spam_check_endpoint_url).to eq 'https://www.example.com/spamcheck'
+ expect(current_settings.spam_check_endpoint_url).to eq 'grpc://www.example.com/spamcheck'
end
end
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index 4fc60d17886..6d5944002a1 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -10,61 +10,51 @@ RSpec.describe "Admin::Users" do
gitlab_enable_admin_mode_sign_in(current_user)
end
- describe 'Tabs', :js do
+ describe 'Tabs' do
let(:tabs_selector) { '.js-users-tabs' }
let(:active_tab_selector) { '.nav-link.active' }
- it 'does not add the tab param when the Users tab is selected' do
- visit admin_users_path
+ it 'links to the Users tab' do
+ visit cohorts_admin_users_path
within tabs_selector do
click_link 'Users'
+
+ expect(page).to have_selector active_tab_selector, text: 'Users'
end
expect(page).to have_current_path(admin_users_path)
end
- it 'adds the ?tab=cohorts param when the Cohorts tab is selected' do
+ it 'links to the Cohorts tab' do
visit admin_users_path
within tabs_selector do
click_link 'Cohorts'
+
+ expect(page).to have_selector active_tab_selector, text: 'Cohorts'
end
- expect(page).to have_current_path(admin_users_path(tab: 'cohorts'))
+ expect(page).to have_current_path(cohorts_admin_users_path)
+ expect(page).to have_selector active_tab_selector, text: 'Cohorts'
end
- it 'shows the cohorts tab when the tab param is set' do
+ it 'redirects legacy route' do
visit admin_users_path(tab: 'cohorts')
- within tabs_selector do
- expect(page).to have_selector active_tab_selector, text: 'Cohorts'
- end
+ expect(page).to have_current_path(cohorts_admin_users_path)
end
end
describe 'Cohorts tab content' do
- context 'with usage ping enabled' do
- it 'shows users count per month' do
- stub_application_setting(usage_ping_enabled: true)
+ it 'shows users count per month' do
+ stub_application_setting(usage_ping_enabled: false)
- create_list(:user, 2)
+ create_list(:user, 2)
- visit admin_users_path(tab: 'cohorts')
-
- expect(page).to have_content("#{Time.now.strftime('%b %Y')} 3 0")
- end
- end
-
- context 'with usage ping disabled' do
- it 'shows empty state', :js do
- stub_application_setting(usage_ping_enabled: false)
-
- visit admin_users_path(tab: 'cohorts')
+ visit admin_users_path(tab: 'cohorts')
- expect(page).to have_selector(".js-empty-state")
- expect(page).to have_content("Activate user activity analysis")
- end
+ expect(page).to have_content("#{Time.now.strftime('%b %Y')} 3 0")
end
end
end
diff --git a/spec/features/admin/services/admin_visits_service_templates_spec.rb b/spec/features/admin/services/admin_visits_service_templates_spec.rb
index 1fd8c8316e3..9d011b97f63 100644
--- a/spec/features/admin/services/admin_visits_service_templates_spec.rb
+++ b/spec/features/admin/services/admin_visits_service_templates_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Admin visits service templates' do
let(:admin) { create(:user, :admin) }
- let(:slack_service) { Service.for_template.find { |s| s.type == 'SlackService' } }
+ let(:slack_service) { Integration.for_template.find { |s| s.type == 'SlackService' } }
before do
sign_in(admin)
diff --git a/spec/features/admin/users/user_spec.rb b/spec/features/admin/users/user_spec.rb
index befa7bd338b..01341398135 100644
--- a/spec/features/admin/users/user_spec.rb
+++ b/spec/features/admin/users/user_spec.rb
@@ -4,18 +4,16 @@ require 'spec_helper'
RSpec.describe 'Admin::Users::User' do
let_it_be(:user) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
- let_it_be(:current_user) { create(:admin, last_activity_on: 5.days.ago) }
+ let_it_be(:current_user) { create(:admin) }
before do
sign_in(current_user)
gitlab_enable_admin_mode_sign_in(current_user)
- stub_feature_flags(vue_admin_users: false)
end
describe 'GET /admin/users/:id' do
it 'has user info', :aggregate_failures do
- visit admin_users_path
- click_link user.name
+ visit admin_user_path(user)
expect(page).to have_content(user.email)
expect(page).to have_content(user.name)
@@ -27,21 +25,6 @@ RSpec.describe 'Admin::Users::User' do
expect(page).to have_button('Delete user and contributions')
end
- context 'user pending approval' do
- it 'shows user info', :aggregate_failures do
- user = create(:user, :blocked_pending_approval)
-
- visit admin_users_path
- click_link 'Pending approval'
- click_link user.name
-
- expect(page).to have_content(user.name)
- expect(page).to have_content('Pending approval')
- expect(page).to have_link('Approve user')
- expect(page).to have_link('Reject request')
- end
- end
-
context 'when blocking/unblocking the user' do
it 'shows confirmation and allows blocking and unblocking', :js do
visit admin_user_path(user)
@@ -171,6 +154,8 @@ RSpec.describe 'Admin::Users::User' do
it 'logs in as the user when impersonate is clicked' do
subject
+ find('[data-qa-selector="user_menu"]').click
+
expect(page.find(:css, '[data-testid="user-profile-link"]')['data-user']).to eql(another_user.username)
end
@@ -205,6 +190,8 @@ RSpec.describe 'Admin::Users::User' do
it 'logs out of impersonated user back to original user' do
subject
+ find('[data-qa-selector="user_menu"]').click
+
expect(page.find(:css, '[data-testid="user-profile-link"]')['data-user']).to eq(current_user.username)
end
@@ -238,6 +225,8 @@ RSpec.describe 'Admin::Users::User' do
end
it 'shows when disabled' do
+ user.update!(otp_required_for_login: false)
+
visit admin_user_path(user)
expect_two_factor_status('Disabled')
@@ -251,7 +240,7 @@ RSpec.describe 'Admin::Users::User' do
end
describe 'Email verification status' do
- let!(:secondary_email) do
+ let_it_be(:secondary_email) do
create :email, email: 'secondary@example.com', user: user
end
@@ -274,99 +263,121 @@ RSpec.describe 'Admin::Users::User' do
expect(page).to have_content("#{secondary_email.email} Verified")
end
end
- end
-
- describe 'show user attributes' do
- it 'has expected attributes', :aggregate_failures do
- visit admin_users_path
- click_link user.name
+ describe 'show user identities' do
+ it 'shows user identities', :aggregate_failures do
+ visit admin_user_identities_path(user)
- expect(page).to have_content 'Account'
- expect(page).to have_content 'Personal projects limit'
+ expect(page).to have_content(user.name)
+ expect(page).to have_content('twitter')
+ end
end
- end
- describe 'remove users secondary email', :js do
- let!(:secondary_email) do
- create :email, email: 'secondary@example.com', user: user
+ describe 'update user identities' do
+ before do
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:providers).and_return([:twitter, :twitter_updated])
+ end
+
+ it 'modifies twitter identity', :aggregate_failures do
+ visit admin_user_identities_path(user)
+
+ find('.table').find(:link, 'Edit').click
+ fill_in 'identity_extern_uid', with: '654321'
+ select 'twitter_updated', from: 'identity_provider'
+ click_button 'Save changes'
+
+ expect(page).to have_content(user.name)
+ expect(page).to have_content('twitter_updated')
+ expect(page).to have_content('654321')
+ end
end
- it do
- visit admin_user_path(user.username)
+ describe 'remove users secondary email', :js do
+ let_it_be(:secondary_email) do
+ create :email, email: 'secondary@example.com', user: user
+ end
+
+ it do
+ visit admin_user_path(user.username)
- expect(page).to have_content("Secondary email: #{secondary_email.email}")
+ expect(page).to have_content("Secondary email: #{secondary_email.email}")
- accept_confirm { find("#remove_email_#{secondary_email.id}").click }
+ accept_confirm { find("#remove_email_#{secondary_email.id}").click }
- expect(page).not_to have_content(secondary_email.email)
+ expect(page).not_to have_content(secondary_email.email)
+ end
end
- end
- describe 'show user keys', :js do
- it do
- key1 = create(:key, user: user, title: 'ssh-rsa Key1', key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4FIEBXGi4bPU8kzxMefudPIJ08/gNprdNTaO9BR/ndy3+58s2HCTw2xCHcsuBmq+TsAqgEidVq4skpqoTMB+Uot5Uzp9z4764rc48dZiI661izoREoKnuRQSsRqUTHg5wrLzwxlQbl1MVfRWQpqiz/5KjBC7yLEb9AbusjnWBk8wvC1bQPQ1uLAauEA7d836tgaIsym9BrLsMVnR4P1boWD3Xp1B1T/ImJwAGHvRmP/ycIqmKdSpMdJXwxcb40efWVj0Ibbe7ii9eeoLdHACqevUZi6fwfbymdow+FeqlkPoHyGg3Cu4vD/D8+8cRc7mE/zGCWcQ15Var83Tczour Key1')
- key2 = create(:key, user: user, title: 'ssh-rsa Key2', key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDQSTWXhJAX/He+nG78MiRRRn7m0Pb0XbcgTxE0etArgoFoh9WtvDf36HG6tOSg/0UUNcp0dICsNAmhBKdncp6cIyPaXJTURPRAGvhI0/VDk4bi27bRnccGbJ/hDaUxZMLhhrzY0r22mjVf8PF6dvv5QUIQVm1/LeaWYsHHvLgiIjwrXirUZPnFrZw6VLREoBKG8uWvfSXw1L5eapmstqfsME8099oi+vWLR8MgEysZQmD28M73fgW4zek6LDQzKQyJx9nB+hJkKUDvcuziZjGmRFlNgSA2mguERwL1OXonD8WYUrBDGKroIvBT39zS5d9tQDnidEJZ9Y8gv5ViYP7x Key2')
+ describe 'remove user with identities' do
+ it 'removes user with twitter identity', :aggregate_failures do
+ visit admin_user_identities_path(user)
- visit admin_users_path
+ click_link 'Delete'
- click_link user.name
- click_link 'SSH keys'
+ expect(page).to have_content(user.name)
+ expect(page).not_to have_content('twitter')
+ end
+ end
- expect(page).to have_content(key1.title)
- expect(page).to have_content(key2.title)
+ describe 'show user keys', :js do
+ it do
+ key1 = create(:key, user: user, title: 'ssh-rsa Key1', key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4FIEBXGi4bPU8kzxMefudPIJ08/gNprdNTaO9BR/ndy3+58s2HCTw2xCHcsuBmq+TsAqgEidVq4skpqoTMB+Uot5Uzp9z4764rc48dZiI661izoREoKnuRQSsRqUTHg5wrLzwxlQbl1MVfRWQpqiz/5KjBC7yLEb9AbusjnWBk8wvC1bQPQ1uLAauEA7d836tgaIsym9BrLsMVnR4P1boWD3Xp1B1T/ImJwAGHvRmP/ycIqmKdSpMdJXwxcb40efWVj0Ibbe7ii9eeoLdHACqevUZi6fwfbymdow+FeqlkPoHyGg3Cu4vD/D8+8cRc7mE/zGCWcQ15Var83Tczour Key1')
+ key2 = create(:key, user: user, title: 'ssh-rsa Key2', key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDQSTWXhJAX/He+nG78MiRRRn7m0Pb0XbcgTxE0etArgoFoh9WtvDf36HG6tOSg/0UUNcp0dICsNAmhBKdncp6cIyPaXJTURPRAGvhI0/VDk4bi27bRnccGbJ/hDaUxZMLhhrzY0r22mjVf8PF6dvv5QUIQVm1/LeaWYsHHvLgiIjwrXirUZPnFrZw6VLREoBKG8uWvfSXw1L5eapmstqfsME8099oi+vWLR8MgEysZQmD28M73fgW4zek6LDQzKQyJx9nB+hJkKUDvcuziZjGmRFlNgSA2mguERwL1OXonD8WYUrBDGKroIvBT39zS5d9tQDnidEJZ9Y8gv5ViYP7x Key2')
- click_link key2.title
+ visit admin_user_path(user)
- expect(page).to have_content(key2.title)
- expect(page).to have_content(key2.key)
+ click_link 'SSH keys'
- click_button 'Delete'
+ expect(page).to have_content(key1.title)
+ expect(page).to have_content(key2.title)
- page.within('.modal') do
- page.click_button('Delete')
- end
+ click_link key2.title
- expect(page).not_to have_content(key2.title)
- end
- end
+ expect(page).to have_content(key2.title)
+ expect(page).to have_content(key2.key)
- describe 'show user identities' do
- it 'shows user identities', :aggregate_failures do
- visit admin_user_identities_path(user)
+ click_button 'Delete'
- expect(page).to have_content(user.name)
- expect(page).to have_content('twitter')
- end
- end
+ page.within('.modal') do
+ page.click_button('Delete')
+ end
- describe 'update user identities' do
- before do
- allow(Gitlab::Auth::OAuth::Provider).to receive(:providers).and_return([:twitter, :twitter_updated])
+ expect(page).not_to have_content(key2.title)
+ end
end
- it 'modifies twitter identity', :aggregate_failures do
- visit admin_user_identities_path(user)
-
- find('.table').find(:link, 'Edit').click
- fill_in 'identity_extern_uid', with: '654321'
- select 'twitter_updated', from: 'identity_provider'
- click_button 'Save changes'
+ describe 'show user attributes' do
+ it 'has expected attributes', :aggregate_failures do
+ visit admin_user_path(user)
- expect(page).to have_content(user.name)
- expect(page).to have_content('twitter_updated')
- expect(page).to have_content('654321')
+ expect(page).to have_content 'Account'
+ expect(page).to have_content 'Personal projects limit'
+ end
end
end
- describe 'remove user with identities' do
- it 'removes user with twitter identity', :aggregate_failures do
- visit admin_user_identities_path(user)
+ [true, false].each do |vue_admin_users|
+ context "with vue_admin_users feature flag set to #{vue_admin_users}", js: vue_admin_users do
+ before do
+ stub_feature_flags(vue_admin_users: vue_admin_users)
+ end
- click_link 'Delete'
+ describe 'GET /admin/users' do
+ context 'user pending approval' do
+ it 'shows user info', :aggregate_failures do
+ user = create(:user, :blocked_pending_approval)
- expect(page).to have_content(user.name)
- expect(page).not_to have_content('twitter')
+ visit admin_users_path
+ click_link 'Pending approval'
+ click_link user.name
+
+ expect(page).to have_content(user.name)
+ expect(page).to have_content('Pending approval')
+ expect(page).to have_link('Approve user')
+ expect(page).to have_link('Reject request')
+ end
+ end
+ end
end
end
end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index 9482b4f8603..d3931373ee3 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -3,298 +3,306 @@
require 'spec_helper'
RSpec.describe 'Admin::Users' do
- include Spec::Support::Helpers::Features::ResponsiveTableHelpers
-
let_it_be(:user, reload: true) { create(:omniauth_user, provider: 'twitter', extern_uid: '123456') }
- let_it_be(:current_user) { create(:admin, last_activity_on: 5.days.ago) }
+ let_it_be(:current_user) { create(:admin) }
before do
sign_in(current_user)
gitlab_enable_admin_mode_sign_in(current_user)
end
- describe 'GET /admin/users' do
- before do
- stub_feature_flags(vue_admin_users: false)
- visit admin_users_path
- end
+ [true, false].each do |vue_admin_users|
+ context "with vue_admin_users feature flag set to #{vue_admin_users}", js: vue_admin_users do
+ before do
+ stub_feature_flags(vue_admin_users: vue_admin_users)
+ end
- it "is ok" do
- expect(current_path).to eq(admin_users_path)
- end
+ describe 'GET /admin/users' do
+ before do
+ visit admin_users_path
+ end
- it "has users list" do
- expect(page).to have_content(current_user.email)
- expect(page).to have_content(current_user.name)
- expect(page).to have_content(current_user.created_at.strftime('%e %b, %Y'))
- expect(page).to have_content(current_user.last_activity_on.strftime('%e %b, %Y'))
- expect(page).to have_content(user.email)
- expect(page).to have_content(user.name)
- expect(page).to have_content('Projects')
- expect(page).to have_button('Block')
- expect(page).to have_button('Deactivate')
- expect(page).to have_button('Delete user')
- expect(page).to have_button('Delete user and contributions')
- end
+ it "is ok" do
+ expect(current_path).to eq(admin_users_path)
+ end
- describe 'view extra user information' do
- it 'shows the user popover on hover', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/11290' do
- expect(page).not_to have_selector('#__BV_popover_1__')
+ it "has users list" do
+ current_user.reload
- first_user_link = page.first('.js-user-link')
- first_user_link.hover
+ expect(page).to have_content(current_user.email)
+ expect(page).to have_content(current_user.name)
+ expect(page).to have_content(current_user.created_at.strftime('%e %b, %Y'))
+ expect(page).to have_content(user.email)
+ expect(page).to have_content(user.name)
+ expect(page).to have_content('Projects')
- expect(page).to have_selector('#__BV_popover_1__')
- end
- end
+ click_user_dropdown_toggle(user.id)
- context 'user project count' do
- before do
- project = create(:project)
- project.add_maintainer(current_user)
- end
+ expect(page).to have_button('Block')
+ expect(page).to have_button('Deactivate')
+ expect(page).to have_button('Delete user')
+ expect(page).to have_button('Delete user and contributions')
+ end
- it 'displays count of users projects' do
- visit admin_users_path
+ it 'clicking edit user takes us to edit page', :aggregate_failures do
+ page.within("[data-testid='user-actions-#{user.id}']") do
+ click_link 'Edit'
+ end
- expect(page.find("[data-testid='user-project-count-#{current_user.id}']").text).to eq("1")
- end
- end
+ expect(page).to have_content('Name')
+ expect(page).to have_content('Password')
+ end
- describe 'tabs' do
- it 'has multiple tabs to filter users' do
- expect(page).to have_link('Active', href: admin_users_path)
- expect(page).to have_link('Admins', href: admin_users_path(filter: 'admins'))
- expect(page).to have_link('2FA Enabled', href: admin_users_path(filter: 'two_factor_enabled'))
- expect(page).to have_link('2FA Disabled', href: admin_users_path(filter: 'two_factor_disabled'))
- expect(page).to have_link('External', href: admin_users_path(filter: 'external'))
- expect(page).to have_link('Blocked', href: admin_users_path(filter: 'blocked'))
- expect(page).to have_link('Deactivated', href: admin_users_path(filter: 'deactivated'))
- expect(page).to have_link('Without projects', href: admin_users_path(filter: 'wop'))
- end
+ describe 'view extra user information' do
+ it 'shows the user popover on hover', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/11290' do
+ expect(page).not_to have_selector('#__BV_popover_1__')
- context '`Pending approval` tab' do
- before do
- visit admin_users_path
- end
+ first_user_link = page.first('.js-user-link')
+ first_user_link.hover
- it 'shows the `Pending approval` tab' do
- expect(page).to have_link('Pending approval', href: admin_users_path(filter: 'blocked_pending_approval'))
+ expect(page).to have_selector('#__BV_popover_1__')
+ end
end
- end
- end
- describe 'search and sort' do
- before_all do
- create(:user, name: 'Foo Bar', last_activity_on: 3.days.ago)
- create(:user, name: 'Foo Baz', last_activity_on: 2.days.ago)
- create(:user, name: 'Dmitriy')
- end
+ context 'user project count' do
+ before do
+ project = create(:project)
+ project.add_maintainer(current_user)
+ end
- it 'searches users by name' do
- visit admin_users_path(search_query: 'Foo')
+ it 'displays count of users projects' do
+ visit admin_users_path
- expect(page).to have_content('Foo Bar')
- expect(page).to have_content('Foo Baz')
- expect(page).not_to have_content('Dmitriy')
- end
+ expect(page.find("[data-testid='user-project-count-#{current_user.id}']").text).to eq("1")
+ end
+ end
- it 'sorts users by name' do
- visit admin_users_path
+ describe 'tabs' do
+ it 'has multiple tabs to filter users' do
+ expect(page).to have_link('Active', href: admin_users_path)
+ expect(page).to have_link('Admins', href: admin_users_path(filter: 'admins'))
+ expect(page).to have_link('2FA Enabled', href: admin_users_path(filter: 'two_factor_enabled'))
+ expect(page).to have_link('2FA Disabled', href: admin_users_path(filter: 'two_factor_disabled'))
+ expect(page).to have_link('External', href: admin_users_path(filter: 'external'))
+ expect(page).to have_link('Blocked', href: admin_users_path(filter: 'blocked'))
+ expect(page).to have_link('Banned', href: admin_users_path(filter: 'banned'))
+ expect(page).to have_link('Deactivated', href: admin_users_path(filter: 'deactivated'))
+ expect(page).to have_link('Without projects', href: admin_users_path(filter: 'wop'))
+ end
+
+ context '`Pending approval` tab' do
+ before do
+ visit admin_users_path
+ end
+
+ it 'shows the `Pending approval` tab' do
+ expect(page).to have_link('Pending approval', href: admin_users_path(filter: 'blocked_pending_approval'))
+ end
+ end
+ end
- sort_by('Name')
+ describe 'search and sort' do
+ before_all do
+ create(:user, name: 'Foo Bar', last_activity_on: 3.days.ago)
+ create(:user, name: 'Foo Baz', last_activity_on: 2.days.ago)
+ create(:user, name: 'Dmitriy')
+ end
- expect(first_row.text).to include('Dmitriy')
- expect(second_row.text).to include('Foo Bar')
- end
+ it 'searches users by name' do
+ visit admin_users_path(search_query: 'Foo')
- it 'sorts search results only' do
- visit admin_users_path(search_query: 'Foo')
+ expect(page).to have_content('Foo Bar')
+ expect(page).to have_content('Foo Baz')
+ expect(page).not_to have_content('Dmitriy')
+ end
- sort_by('Name')
+ it 'sorts users by name' do
+ visit admin_users_path
- expect(page).not_to have_content('Dmitriy')
- expect(first_row.text).to include('Foo Bar')
- expect(second_row.text).to include('Foo Baz')
- end
+ sort_by('Name')
- it 'searches with respect of sorting' do
- visit admin_users_path(sort: 'Name')
+ expect(first_row.text).to include('Dmitriy')
+ expect(second_row.text).to include('Foo Bar')
+ end
- fill_in :search_query, with: 'Foo'
- click_button('Search users')
+ it 'sorts search results only' do
+ visit admin_users_path(search_query: 'Foo')
- expect(first_row.text).to include('Foo Bar')
- expect(second_row.text).to include('Foo Baz')
- end
+ sort_by('Name')
+ expect(page).not_to have_content('Dmitriy')
+ expect(first_row.text).to include('Foo Bar')
+ expect(second_row.text).to include('Foo Baz')
+ end
- it 'sorts users by recent last activity' do
- visit admin_users_path(search_query: 'Foo')
+ it 'searches with respect to sorting' do
+ visit admin_users_path(sort: 'Name')
- sort_by('Recent last activity')
+ fill_in :search_query, with: 'Foo'
+ click_button('Search users')
- expect(first_row.text).to include('Foo Baz')
- expect(second_row.text).to include('Foo Bar')
- end
+ expect(first_row.text).to include('Foo Bar')
+ expect(second_row.text).to include('Foo Baz')
+ end
- it 'sorts users by oldest last activity' do
- visit admin_users_path(search_query: 'Foo')
+ it 'sorts users by recent last activity' do
+ visit admin_users_path(search_query: 'Foo')
- sort_by('Oldest last activity')
+ sort_by('Recent last activity')
- expect(first_row.text).to include('Foo Bar')
- expect(second_row.text).to include('Foo Baz')
- end
- end
+ expect(first_row.text).to include('Foo Baz')
+ expect(second_row.text).to include('Foo Bar')
+ end
- describe 'Two-factor Authentication filters' do
- it 'counts users who have enabled 2FA' do
- create(:user, :two_factor)
+ it 'sorts users by oldest last activity' do
+ visit admin_users_path(search_query: 'Foo')
- visit admin_users_path
+ sort_by('Oldest last activity')
- page.within('.filter-two-factor-enabled small') do
- expect(page).to have_content('1')
+ expect(first_row.text).to include('Foo Bar')
+ expect(second_row.text).to include('Foo Baz')
+ end
end
- end
- it 'filters by users who have enabled 2FA' do
- user = create(:user, :two_factor)
+ describe 'Two-factor Authentication filters' do
+ it 'counts users who have enabled 2FA' do
+ create(:user, :two_factor)
- visit admin_users_path
- click_link '2FA Enabled'
+ visit admin_users_path
- expect(page).to have_content(user.email)
- end
+ page.within('.filter-two-factor-enabled small') do
+ expect(page).to have_content('1')
+ end
+ end
- it 'counts users who have not enabled 2FA' do
- visit admin_users_path
+ it 'filters by users who have enabled 2FA' do
+ user = create(:user, :two_factor)
- page.within('.filter-two-factor-disabled small') do
- expect(page).to have_content('2') # Including admin
- end
- end
+ visit admin_users_path
+ click_link '2FA Enabled'
- it 'filters by users who have not enabled 2FA' do
- visit admin_users_path
- click_link '2FA Disabled'
+ expect(page).to have_content(user.email)
+ end
- expect(page).to have_content(user.email)
- end
- end
+ it 'counts users who have not enabled 2FA' do
+ visit admin_users_path
- describe 'Pending approval filter' do
- it 'counts users who are pending approval' do
- create_list(:user, 2, :blocked_pending_approval)
+ page.within('.filter-two-factor-disabled small') do
+ expect(page).to have_content('2') # Including admin
+ end
+ end
- visit admin_users_path
+ it 'filters by users who have not enabled 2FA' do
+ visit admin_users_path
+ click_link '2FA Disabled'
- page.within('.filter-blocked-pending-approval small') do
- expect(page).to have_content('2')
+ expect(page).to have_content(user.email)
+ end
end
- end
- it 'filters by users who are pending approval' do
- user = create(:user, :blocked_pending_approval)
+ describe 'Pending approval filter' do
+ it 'counts users who are pending approval' do
+ create_list(:user, 2, :blocked_pending_approval)
- visit admin_users_path
- click_link 'Pending approval'
+ visit admin_users_path
- expect(page).to have_content(user.email)
- end
- end
+ page.within('.filter-blocked-pending-approval small') do
+ expect(page).to have_content('2')
+ end
+ end
- context 'when blocking/unblocking a user' do
- it 'shows confirmation and allows blocking and unblocking', :js do
- expect(page).to have_content(user.email)
+ it 'filters by users who are pending approval' do
+ user = create(:user, :blocked_pending_approval)
- click_action_in_user_dropdown(user.id, 'Block')
+ visit admin_users_path
+ click_link 'Pending approval'
- wait_for_requests
+ expect(page).to have_content(user.email)
+ end
+ end
- expect(page).to have_content('Block user')
- expect(page).to have_content('Blocking user has the following effects')
- expect(page).to have_content('User will not be able to login')
- expect(page).to have_content('Owned groups will be left')
+ context 'when blocking/unblocking a user' do
+ it 'shows confirmation and allows blocking and unblocking', :js do
+ expect(page).to have_content(user.email)
- find('.modal-footer button', text: 'Block').click
+ click_action_in_user_dropdown(user.id, 'Block')
- wait_for_requests
+ wait_for_requests
- expect(page).to have_content('Successfully blocked')
- expect(page).not_to have_content(user.email)
+ expect(page).to have_content('Block user')
+ expect(page).to have_content('Blocking user has the following effects')
+ expect(page).to have_content('User will not be able to login')
+ expect(page).to have_content('Owned groups will be left')
- click_link 'Blocked'
+ find('.modal-footer button', text: 'Block').click
- wait_for_requests
+ wait_for_requests
- expect(page).to have_content(user.email)
+ expect(page).to have_content('Successfully blocked')
+ expect(page).not_to have_content(user.email)
- click_action_in_user_dropdown(user.id, 'Unblock')
+ click_link 'Blocked'
- expect(page).to have_content('Unblock user')
- expect(page).to have_content('You can always block their account again if needed.')
+ wait_for_requests
- find('.modal-footer button', text: 'Unblock').click
+ expect(page).to have_content(user.email)
- wait_for_requests
+ click_action_in_user_dropdown(user.id, 'Unblock')
- expect(page).to have_content('Successfully unblocked')
- expect(page).not_to have_content(user.email)
- end
- end
+ expect(page).to have_content('Unblock user')
+ expect(page).to have_content('You can always block their account again if needed.')
- context 'when deactivating/re-activating a user' do
- it 'shows confirmation and allows deactivating and re-activating', :js do
- expect(page).to have_content(user.email)
+ find('.modal-footer button', text: 'Unblock').click
- click_action_in_user_dropdown(user.id, 'Deactivate')
+ wait_for_requests
- expect(page).to have_content('Deactivate user')
- expect(page).to have_content('Deactivating a user has the following effects')
- expect(page).to have_content('The user will be logged out')
- expect(page).to have_content('Personal projects, group and user history will be left intact')
+ expect(page).to have_content('Successfully unblocked')
+ expect(page).not_to have_content(user.email)
+ end
+ end
- find('.modal-footer button', text: 'Deactivate').click
+ context 'when deactivating/re-activating a user' do
+ it 'shows confirmation and allows deactivating and re-activating', :js do
+ expect(page).to have_content(user.email)
- wait_for_requests
+ click_action_in_user_dropdown(user.id, 'Deactivate')
- expect(page).to have_content('Successfully deactivated')
- expect(page).not_to have_content(user.email)
+ expect(page).to have_content('Deactivate user')
+ expect(page).to have_content('Deactivating a user has the following effects')
+ expect(page).to have_content('The user will be logged out')
+ expect(page).to have_content('Personal projects, group and user history will be left intact')
- click_link 'Deactivated'
+ find('.modal-footer button', text: 'Deactivate').click
- wait_for_requests
+ wait_for_requests
- expect(page).to have_content(user.email)
+ expect(page).to have_content('Successfully deactivated')
+ expect(page).not_to have_content(user.email)
- click_action_in_user_dropdown(user.id, 'Activate')
+ click_link 'Deactivated'
- expect(page).to have_content('Activate user')
- expect(page).to have_content('You can always deactivate their account again if needed.')
+ wait_for_requests
- find('.modal-footer button', text: 'Activate').click
+ expect(page).to have_content(user.email)
- wait_for_requests
+ click_action_in_user_dropdown(user.id, 'Activate')
- expect(page).to have_content('Successfully activated')
- expect(page).not_to have_content(user.email)
- end
- end
+ expect(page).to have_content('Activate user')
+ expect(page).to have_content('You can always deactivate their account again if needed.')
- def click_action_in_user_dropdown(user_id, action)
- find("[data-testid='user-action-button-#{user_id}']").click
+ find('.modal-footer button', text: 'Activate').click
- within find("[data-testid='user-action-dropdown-#{user_id}']") do
- find('li button', text: action).click
- end
+ wait_for_requests
- wait_for_requests
+ expect(page).to have_content('Successfully activated')
+ expect(page).not_to have_content(user.email)
+ end
+ end
+ end
end
end
describe 'GET /admin/users/new' do
- let(:user_username) { 'bang' }
+ let_it_be(:user_username) { 'bang' }
before do
visit new_admin_user_path
@@ -344,7 +352,7 @@ RSpec.describe 'Admin::Users' do
end
context 'username contains spaces' do
- let(:user_username) { 'Bing bang' }
+ let_it_be(:user_username) { 'Bing bang' }
it "doesn't create the user and shows an error message" do
expect { click_button 'Create user' }.to change {User.count}.by(0)
@@ -363,22 +371,6 @@ RSpec.describe 'Admin::Users' do
visit new_admin_user_path
end
- def expects_external_to_be_checked
- expect(find('#user_external')).to be_checked
- end
-
- def expects_external_to_be_unchecked
- expect(find('#user_external')).not_to be_checked
- end
-
- def expects_warning_to_be_hidden
- expect(find('#warning_external_automatically_set', visible: :all)[:class]).to include 'hidden'
- end
-
- def expects_warning_to_be_shown
- expect(find('#warning_external_automatically_set')[:class]).not_to include 'hidden'
- end
-
it 'automatically unchecks external for matching email' do
expects_external_to_be_checked
expects_warning_to_be_hidden
@@ -413,55 +405,22 @@ RSpec.describe 'Admin::Users' do
expect(new_user.external).to be_falsy
end
- end
- end
- end
-
- describe 'GET /admin/users/:id/edit' do
- before do
- stub_feature_flags(vue_admin_users: false)
- visit admin_users_path
- click_link "edit_user_#{user.id}"
- end
-
- it 'has user edit page' do
- expect(page).to have_content('Name')
- expect(page).to have_content('Password')
- end
-
- describe 'Update user' do
- before do
- fill_in 'user_name', with: 'Big Bang'
- fill_in 'user_email', with: 'bigbang@mail.com'
- fill_in 'user_password', with: 'AValidPassword1'
- fill_in 'user_password_confirmation', with: 'AValidPassword1'
- choose 'user_access_level_admin'
- click_button 'Save changes'
- end
-
- it 'shows page with new data' do
- expect(page).to have_content('bigbang@mail.com')
- expect(page).to have_content('Big Bang')
- end
- it 'changes user entry' do
- user.reload
- expect(user.name).to eq('Big Bang')
- expect(user.admin?).to be_truthy
- expect(user.password_expires_at).to be <= Time.now
- end
- end
+ def expects_external_to_be_checked
+ expect(find('#user_external')).to be_checked
+ end
- describe 'update username to non ascii char' do
- it do
- fill_in 'user_username', with: '\u3042\u3044'
- click_button('Save')
+ def expects_external_to_be_unchecked
+ expect(find('#user_external')).not_to be_checked
+ end
- page.within '#error_explanation' do
- expect(page).to have_content('Username')
+ def expects_warning_to_be_hidden
+ expect(find('#warning_external_automatically_set', visible: :all)[:class]).to include 'hidden'
end
- expect(page).to have_selector(%(form[action="/admin/users/#{user.username}"]))
+ def expects_warning_to_be_shown
+ expect(find('#warning_external_automatically_set')[:class]).not_to include 'hidden'
+ end
end
end
end
@@ -541,15 +500,108 @@ RSpec.describe 'Admin::Users' do
check_breadcrumb('Edit Identity')
end
+
+ def check_breadcrumb(content)
+ expect(find('.breadcrumbs-sub-title')).to have_content(content)
+ end
end
- def check_breadcrumb(content)
- expect(find('.breadcrumbs-sub-title')).to have_content(content)
+ describe 'GET /admin/users/:id/edit' do
+ before do
+ visit edit_admin_user_path(user)
+ end
+
+ describe 'Update user' do
+ before do
+ fill_in 'user_name', with: 'Big Bang'
+ fill_in 'user_email', with: 'bigbang@mail.com'
+ fill_in 'user_password', with: 'AValidPassword1'
+ fill_in 'user_password_confirmation', with: 'AValidPassword1'
+ choose 'user_access_level_admin'
+ click_button 'Save changes'
+ end
+
+ it 'shows page with new data' do
+ expect(page).to have_content('bigbang@mail.com')
+ expect(page).to have_content('Big Bang')
+ end
+
+ it 'changes user entry' do
+ user.reload
+ expect(user.name).to eq('Big Bang')
+ expect(user.admin?).to be_truthy
+ expect(user.password_expires_at).to be <= Time.now
+ end
+ end
+
+ describe 'update username to non ascii char' do
+ it do
+ fill_in 'user_username', with: '\u3042\u3044'
+ click_button('Save')
+
+ page.within '#error_explanation' do
+ expect(page).to have_content('Username')
+ end
+
+ expect(page).to have_selector(%(form[action="/admin/users/#{user.username}"]))
+ end
+ end
+ end
+
+ # TODO: Move to main GET /admin/users block once feature flag is removed. Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/290737
+ context 'with vue_admin_users feature flag enabled', :js do
+ before do
+ stub_feature_flags(vue_admin_users: true)
+ end
+
+ describe 'GET /admin/users' do
+ context 'user group count', :js do
+ before do
+ group = create(:group)
+ group.add_developer(current_user)
+ project = create(:project, group: create(:group))
+ project.add_reporter(current_user)
+ end
+
+ it 'displays count of the users authorized groups' do
+ visit admin_users_path
+
+ wait_for_requests
+
+ expect(page.find("[data-testid='user-group-count-#{current_user.id}']").text).to eq("2")
+ end
+ end
+ end
end
- def sort_by(text)
- page.within('.user-sort-dropdown') do
- click_link text
+ def click_user_dropdown_toggle(user_id)
+ page.within("[data-testid='user-actions-#{user_id}']") do
+ find("[data-testid='dropdown-toggle']").click
end
end
+
+ def first_row
+ page.all('[role="row"]')[1]
+ end
+
+ def second_row
+ page.all('[role="row"]')[2]
+ end
+
+ def sort_by(option)
+ page.within('.filtered-search-block') do
+ find('.dropdown-menu-toggle').click
+ click_link option
+ end
+ end
+
+ def click_action_in_user_dropdown(user_id, action)
+ click_user_dropdown_toggle(user_id)
+
+ within find("[data-testid='user-actions-#{user_id}']") do
+ find('li button', text: action).click
+ end
+
+ wait_for_requests
+ end
end
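
The admin users spec above also swaps `let(:user_username)` for `let_it_be(:user_username)`. As a rough, illustrative sketch of the difference (assuming test-prof's `let_it_be` and FactoryBot are loaded, as they are in GitLab's spec suite; the describe block and factories below are placeholders, not code from this patch):

# Illustrative sketch only -- not taken from the patch above.
# Assumes test-prof (`let_it_be`) and FactoryBot, as wired up in spec_helper.
require 'spec_helper'

RSpec.describe 'let vs let_it_be' do
  let(:per_example_user) { create(:user) }   # rebuilt for every example
  let_it_be(:shared_user) { create(:user) }  # created once for the whole group

  it 'gets a fresh record in each example' do
    expect(per_example_user).to be_persisted
  end

  it 'reuses the record created before the first example' do
    expect(shared_user).to be_persisted
  end
end

For a plain string value such as 'bang' the behavioural difference is negligible; the switch mainly keeps the declaration consistent with the other `let_it_be` definitions in the file.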
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index ab544022bff..5d9bb8d8087 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -119,46 +119,21 @@ RSpec.describe 'Project issue boards', :js do
end
context 'search list negation queries' do
- context 'with the NOT queries feature flag disabled' do
- before do
- stub_feature_flags(not_issuable_queries: false)
-
- visit_project_board_path_without_query_limit(project, board)
- end
-
- it 'does not have the != option' do
- find('.filtered-search').set('label:')
-
- wait_for_requests
- within('#js-dropdown-operator') do
- tokens = all(:css, 'li.filter-dropdown-item')
- expect(tokens.count).to eq(1)
- button = tokens[0].find('button')
- expect(button).to have_content('=')
- expect(button).not_to have_content('!=')
- end
- end
+ before do
+ visit_project_board_path_without_query_limit(project, board)
end
- context 'with the NOT queries feature flag enabled' do
- before do
- stub_feature_flags(not_issuable_queries: true)
-
- visit_project_board_path_without_query_limit(project, board)
- end
-
- it 'does not have the != option' do
- find('.filtered-search').set('label:')
+ it 'does not have the != option' do
+ find('.filtered-search').set('label:')
- wait_for_requests
- within('#js-dropdown-operator') do
- tokens = all(:css, 'li.filter-dropdown-item')
- expect(tokens.count).to eq(2)
- button = tokens[0].find('button')
- expect(button).to have_content('=')
- button = tokens[1].find('button')
- expect(button).to have_content('!=')
- end
+ wait_for_requests
+ within('#js-dropdown-operator') do
+ tokens = all(:css, 'li.filter-dropdown-item')
+ expect(tokens.count).to eq(2)
+ button = tokens[0].find('button')
+ expect(button).to have_content('=')
+ button = tokens[1].find('button')
+ expect(button).to have_content('!=')
end
end
end
diff --git a/spec/features/boards/new_issue_spec.rb b/spec/features/boards/new_issue_spec.rb
index 20ae569322c..129d03d17f3 100644
--- a/spec/features/boards/new_issue_spec.rb
+++ b/spec/features/boards/new_issue_spec.rb
@@ -10,6 +10,9 @@ RSpec.describe 'Issue Boards new issue', :js do
let_it_be(:list) { create(:list, board: board, label: label, position: 0) }
let_it_be(:user) { create(:user) }
+ let(:board_list_header) { first('[data-testid="board-list-header"]') }
+ let(:project_select_dropdown) { find('[data-testid="project-select-dropdown"]') }
+
context 'authorized user' do
before do
project.add_maintainer(user)
@@ -24,18 +27,18 @@ RSpec.describe 'Issue Boards new issue', :js do
end
it 'displays new issue button' do
- expect(first('.board')).to have_selector('.issue-count-badge-add-button', count: 1)
+ expect(first('.board')).to have_button('New issue', count: 1)
end
it 'does not display new issue button in closed list' do
page.within('.board:nth-child(3)') do
- expect(page).not_to have_selector('.issue-count-badge-add-button')
+ expect(page).not_to have_button('New issue')
end
end
it 'shows form when clicking button' do
page.within(first('.board')) do
- find('.issue-count-badge-add-button').click
+ click_button 'New issue'
expect(page).to have_selector('.board-new-issue-form')
end
@@ -43,7 +46,7 @@ RSpec.describe 'Issue Boards new issue', :js do
it 'hides form when clicking cancel' do
page.within(first('.board')) do
- find('.issue-count-badge-add-button').click
+ click_button 'New issue'
expect(page).to have_selector('.board-new-issue-form')
@@ -55,7 +58,7 @@ RSpec.describe 'Issue Boards new issue', :js do
it 'creates new issue' do
page.within(first('.board')) do
- find('.issue-count-badge-add-button').click
+ click_button 'New issue'
end
page.within(first('.board-new-issue-form')) do
@@ -80,7 +83,7 @@ RSpec.describe 'Issue Boards new issue', :js do
# TODO https://gitlab.com/gitlab-org/gitlab/-/issues/323446
xit 'shows sidebar when creating new issue' do
page.within(first('.board')) do
- find('.issue-count-badge-add-button').click
+ click_button 'New issue'
end
page.within(first('.board-new-issue-form')) do
@@ -95,7 +98,7 @@ RSpec.describe 'Issue Boards new issue', :js do
it 'successfully loads labels to be added to newly created issue' do
page.within(first('.board')) do
- find('.issue-count-badge-add-button').click
+ click_button 'New issue'
end
page.within(first('.board-new-issue-form')) do
@@ -109,12 +112,12 @@ RSpec.describe 'Issue Boards new issue', :js do
find('.board-card').click
end
- page.within(first('[data-testid="issue-boards-sidebar"]')) do
- find('.labels [data-testid="edit-button"]').click
+ page.within('[data-testid="sidebar-labels"]') do
+ click_button 'Edit'
wait_for_requests
- expect(page).to have_selector('.labels-select-contents-list .dropdown-content li a')
+ expect(page).to have_content 'Label 1'
end
end
end
@@ -126,70 +129,94 @@ RSpec.describe 'Issue Boards new issue', :js do
end
it 'displays new issue button in open list' do
- expect(first('.board')).to have_selector('.issue-count-badge-add-button', count: 1)
+ expect(first('.board')).to have_button('New issue', count: 1)
end
it 'does not display new issue button in label list' do
page.within('.board:nth-child(2)') do
- expect(page).not_to have_selector('.issue-count-badge-add-button')
+ expect(page).not_to have_button('New issue')
end
end
end
context 'group boards' do
let_it_be(:group) { create(:group, :public) }
- let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:project) { create(:project, namespace: group, name: "root project") }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subproject1) { create(:project, group: subgroup, name: "sub project1") }
+ let_it_be(:subproject2) { create(:project, group: subgroup, name: "sub project2") }
let_it_be(:group_board) { create(:board, group: group) }
let_it_be(:project_label) { create(:label, project: project, name: 'label') }
let_it_be(:list) { create(:list, board: group_board, label: project_label, position: 0) }
context 'for unauthorized users' do
- context 'when backlog does not exist' do
- before do
- sign_in(user)
- visit group_board_path(group, group_board)
- wait_for_requests
- end
+ before do
+ visit group_board_path(group, group_board)
+ wait_for_requests
+ end
+ context 'when backlog does not exist' do
it 'does not display new issue button in label list' do
page.within('.board.is-draggable') do
- expect(page).not_to have_selector('.issue-count-badge-add-button')
+ expect(page).not_to have_button('New issue')
end
end
end
context 'when backlog list already exists' do
- let!(:backlog_list) { create(:backlog_list, board: group_board) }
-
- before do
- sign_in(user)
- visit group_board_path(group, group_board)
- wait_for_requests
- end
+ let_it_be(:backlog_list) { create(:backlog_list, board: group_board) }
it 'displays new issue button in open list' do
- expect(first('.board')).to have_selector('.issue-count-badge-add-button', count: 1)
+ expect(first('.board')).to have_button('New issue', count: 1)
end
it 'does not display new issue button in label list' do
page.within('.board.is-draggable') do
- expect(page).not_to have_selector('.issue-count-badge-add-button')
+ expect(page).not_to have_button('New issue')
end
end
end
end
context 'for authorized users' do
- it 'display new issue button in label list' do
- project = create(:project, namespace: group)
+ before do
project.add_reporter(user)
+ subproject1.add_reporter(user)
sign_in(user)
visit group_board_path(group, group_board)
wait_for_requests
+ end
+
+ context 'when backlog does not exist' do
+ it 'displays new issue button in label list' do
+ expect(board_list_header).to have_button('New issue')
+ end
+ end
+
+ context 'project select dropdown' do
+ let_it_be(:backlog_list) { create(:backlog_list, board: group_board) }
+
+ before do
+ page.within(board_list_header) do
+ click_button 'New issue'
+ end
+
+ project_select_dropdown.click
+
+ wait_for_requests
+ end
+
+ it 'lists a project which is a direct descendant of the top-level group' do
+ expect(project_select_dropdown).to have_button("root project")
+ end
+
+ it 'lists a project that belongs to a subgroup' do
+ expect(project_select_dropdown).to have_button("sub project1")
+ end
- page.within('.board.is-draggable') do
- expect(page).to have_selector('.issue-count-badge-add-button')
+ it "does not list projects to which user doesn't have access" do
+ expect(project_select_dropdown).not_to have_button("sub project2")
end
end
end
diff --git a/spec/features/boards/sidebar_assignee_spec.rb b/spec/features/boards/sidebar_assignee_spec.rb
index e938612163f..d6adefea6e3 100644
--- a/spec/features/boards/sidebar_assignee_spec.rb
+++ b/spec/features/boards/sidebar_assignee_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
let(:card) { find('.board:nth-child(2)').first('.board-card') }
before do
+ stub_licensed_features(multiple_issue_assignees: false)
+
project.add_maintainer(user)
sign_in(user)
@@ -27,10 +29,12 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
end
context 'assignee' do
+ let(:assignees_widget) { '[data-testid="issue-boards-sidebar"] [data-testid="assignees-widget"]' }
+
it 'updates the issues assignee' do
click_card(card)
- page.within('.assignee') do
+ page.within(assignees_widget) do
click_button('Edit')
wait_for_requests
@@ -41,12 +45,11 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
first('.gl-avatar-labeled').click
end
- click_button('Apply')
- wait_for_requests
-
expect(page).to have_content(assignee)
end
+ wait_for_requests
+
expect(card).to have_selector('.avatar')
end
@@ -54,7 +57,7 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
card_two = find('.board:nth-child(2)').find('.board-card:nth-child(2)')
click_card(card_two)
- page.within('.assignee') do
+ page.within(assignees_widget) do
click_button('Edit')
wait_for_requests
@@ -63,9 +66,6 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
find('[data-testid="unassign"]').click
end
- click_button('Apply')
- wait_for_requests
-
expect(page).to have_content('None')
end
@@ -75,7 +75,7 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
it 'assigns to current user' do
click_card(card)
- page.within(find('.assignee')) do
+ page.within(assignees_widget) do
expect(page).to have_content('None')
click_button 'assign yourself'
@@ -91,7 +91,7 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
it 'updates assignee dropdown' do
click_card(card)
- page.within('.assignee') do
+ page.within(assignees_widget) do
click_button('Edit')
wait_for_requests
@@ -102,9 +102,6 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
first('.gl-avatar-labeled').click
end
- click_button('Apply')
- wait_for_requests
-
expect(page).to have_content(assignee)
end
@@ -112,7 +109,7 @@ RSpec.describe 'Project issue boards sidebar assignee', :js do
find('.board-card:nth-child(2)').click
end
- page.within('.assignee') do
+ page.within(assignees_widget) do
click_button('Edit')
expect(find('.dropdown-menu')).to have_selector('.gl-new-dropdown-item-check-icon')
diff --git a/spec/features/boards/sidebar_labels_in_namespaces_spec.rb b/spec/features/boards/sidebar_labels_in_namespaces_spec.rb
new file mode 100644
index 00000000000..8395a0b33c0
--- /dev/null
+++ b/spec/features/boards/sidebar_labels_in_namespaces_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Issue boards sidebar labels select', :js do
+ include BoardHelpers
+
+ include_context 'labels from nested groups and projects'
+
+ let(:card) { find('.board:nth-child(1)').first('[data-testid="board_card"]') }
+
+ context 'group boards' do
+ context 'in the top-level group board' do
+ let_it_be(:group_board) { create(:board, group: group) }
+ let_it_be(:board_list) { create(:backlog_list, board: group_board) }
+
+ before do
+ load_board group_board_path(group, group_board)
+ end
+
+ context 'selecting an issue from a direct descendant project' do
+ let_it_be(:project_issue) { create(:issue, project: project) }
+
+ include_examples 'an issue from a direct descendant project is selected'
+ end
+
+ context "selecting an issue from a subgroup's project" do
+ let_it_be(:subproject_issue) { create(:issue, project: subproject) }
+
+ include_examples "an issue from a subgroup's project is selected"
+ end
+ end
+ end
+end
diff --git a/spec/features/boards/sub_group_project_spec.rb b/spec/features/boards/sub_group_project_spec.rb
deleted file mode 100644
index bde5f061a67..00000000000
--- a/spec/features/boards/sub_group_project_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Sub-group project issue boards', :js do
- let(:group) { create(:group) }
- let(:nested_group_1) { create(:group, parent: group) }
- let(:project) { create(:project, group: nested_group_1) }
- let(:board) { create(:board, project: project) }
- let(:label) { create(:label, project: project) }
- let(:user) { create(:user) }
- let!(:list1) { create(:list, board: board, label: label, position: 0) }
- let!(:issue) { create(:labeled_issue, project: project, labels: [label]) }
-
- before do
- project.add_maintainer(user)
-
- sign_in(user)
-
- visit project_board_path(project, board)
- wait_for_requests
- end
-
- # TODO https://gitlab.com/gitlab-org/gitlab/-/issues/324290
- xit 'creates new label from sidebar' do
- find('.board-card').click
-
- page.within '.labels' do
- click_link 'Edit'
- click_link 'Create project label'
- end
-
- page.within '.dropdown-new-label' do
- fill_in 'new_label_name', with: 'test label'
- first('.suggest-colors-dropdown a').click
-
- click_button 'Create'
-
- wait_for_requests
- end
-
- page.within '.labels' do
- expect(page).to have_link 'test label'
- end
- end
-end
diff --git a/spec/features/boards/user_visits_board_spec.rb b/spec/features/boards/user_visits_board_spec.rb
new file mode 100644
index 00000000000..7fe32557d6a
--- /dev/null
+++ b/spec/features/boards/user_visits_board_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User visits issue boards', :js do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { create_default(:group, :public) }
+ let_it_be(:project) { create_default(:project, :public, group: group) }
+
+ # TODO use 'let' when rspec-parameterized supports it.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/329746
+ label_name1 = 'foobar'
+ label_name2 = 'in dev'
+ assignee_username = 'root'
+ issue_with_label1 = "issue with label1"
+ issue_with_label2 = "issue with label2"
+ issue_with_assignee = "issue with assignee"
+ issue_with_milestone = "issue with milestone"
+ issue_with_all_filters = "issue with all filters"
+
+ let_it_be(:label1) { create(:group_label, group: group, name: label_name1) }
+ let_it_be(:label2) { create(:group_label, group: group, name: label_name2) }
+ let_it_be(:assignee) { create_default(:group_member, :maintainer, user: create(:user, username: assignee_username), group: group).user }
+ let_it_be(:milestone) { create_default(:milestone, project: project, start_date: Date.today - 1, due_date: 7.days.from_now) }
+
+ before_all do
+ create_default(:issue, project: project, title: issue_with_label1, labels: [label1])
+ create_default(:issue, project: project, title: issue_with_label2, labels: [label2])
+ create_default(:issue, project: project, title: issue_with_assignee, assignees: [assignee])
+ create_default(:issue, project: project, title: issue_with_milestone, milestone: milestone)
+ create_default(:issue, project: project, title: issue_with_all_filters, labels: [label1, label2], assignees: [assignee], milestone: milestone)
+ end
+
+ shared_examples "visiting board path with search params" do
+ where(:params, :expected_issues) do
+ { "label_name" => [label_name1] } | [issue_with_label1, issue_with_all_filters]
+ { "label_name" => [label_name2] } | [issue_with_label2, issue_with_all_filters]
+ { "label_name" => [label_name1, label_name2] } | [issue_with_all_filters]
+ { "assignee_username" => assignee_username } | [issue_with_assignee, issue_with_all_filters]
+ { "milestone_title" => '#started' } | [issue_with_milestone, issue_with_all_filters]
+ { "label_name" => [label_name1, label_name2], "assignee_username" => assignee_username } | [issue_with_all_filters]
+ end
+
+ with_them do
+ before do
+ visit board_path
+
+ wait_for_requests
+ end
+
+ it 'displays all issues satisfying filter params and correctly sets url params' do
+ expect(page).to have_current_path(board_path)
+
+ page.assert_selector('[data-testid="board_card"]', count: expected_issues.length)
+ expected_issues.each { |issue_title| expect(page).to have_link issue_title }
+ end
+ end
+ end
+
+ context "project boards" do
+ let_it_be(:board) { create_default(:board, project: project) }
+ let_it_be(:backlog_list) { create_default(:backlog_list, board: board) }
+
+ let(:board_path) { project_boards_path(project, params) }
+
+ include_examples "visiting board path with search params"
+ end
+
+ context "group boards" do
+ let_it_be(:board) { create_default(:board, group: group) }
+ let_it_be(:backlog_list) { create_default(:backlog_list, board: board) }
+
+ let(:board_path) { group_boards_path(group, params) }
+
+ include_examples 'visiting board path with search params'
+ end
+end
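
The new `user_visits_board_spec.rb` above drives its assertions through `RSpec::Parameterized::TableSyntax`. A minimal, self-contained sketch of that `where`/`with_them` pattern (the describe block and arithmetic are placeholders, not code from this patch):

# Illustrative sketch only -- not taken from the patch above.
# Assumes the rspec-parameterized gem is installed.
require 'rspec-parameterized'

RSpec.describe 'table syntax sketch' do
  using RSpec::Parameterized::TableSyntax

  # Each row becomes one generated example with :a, :b and :expected_sum bound.
  where(:a, :b, :expected_sum) do
    1 | 2 | 3
    2 | 2 | 4
    5 | 5 | 10
  end

  with_them do
    it 'adds the inputs' do
      expect(a + b).to eq(expected_sum)
    end
  end
end

The "TODO use 'let' when rspec-parameterized supports it" comment in the spec points at the same limitation: values referenced inside the `where` table have to be plain local variables evaluated when the group is defined, not `let` definitions.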
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 0b73226268d..1281d890ef7 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -146,7 +146,7 @@ RSpec.describe 'Contributions Calendar', :js do
describe '1 issue creation calendar activity' do
before do
- Issues::CreateService.new(contributed_project, user, issue_params).execute
+ Issues::CreateService.new(project: contributed_project, current_user: user, params: issue_params).execute
end
it_behaves_like 'a day with activity', contribution_count: 1
@@ -181,7 +181,7 @@ RSpec.describe 'Contributions Calendar', :js do
push_code_contribution
travel_to(Date.yesterday) do
- Issues::CreateService.new(contributed_project, user, issue_params).execute
+ Issues::CreateService.new(project: contributed_project, current_user: user, params: issue_params).execute
end
end
include_context 'visit user page'
diff --git a/spec/features/dashboard/active_tab_spec.rb b/spec/features/dashboard/active_tab_spec.rb
index 3a532cb4161..a1fb0beda70 100644
--- a/spec/features/dashboard/active_tab_spec.rb
+++ b/spec/features/dashboard/active_tab_spec.rb
@@ -3,30 +3,56 @@
require 'spec_helper'
RSpec.describe 'Dashboard Active Tab', :js do
- before do
- stub_feature_flags(combined_menu: false)
+ shared_examples 'combined_menu: feature flag examples' do
+ before do
+ sign_in(create(:user))
+ end
- sign_in(create(:user))
- end
+ shared_examples 'page has active tab' do |title|
+ it "#{title} tab" do
+ pending_on_combined_menu_flag
+
+ subject
- shared_examples 'page has active tab' do |title|
- it "#{title} tab" do
- subject
+ expect(page).to have_selector('.navbar-sub-nav li.active', count: 1)
+ expect(find('.navbar-sub-nav li.active')).to have_content(title)
+ end
+ end
+
+ context 'on dashboard projects' do
+ it_behaves_like 'page has active tab', 'Projects' do
+ subject { visit dashboard_projects_path }
+ end
+ end
- expect(page).to have_selector('.navbar-sub-nav li.active', count: 1)
- expect(find('.navbar-sub-nav li.active')).to have_content(title)
+ context 'on dashboard groups' do
+ it_behaves_like 'page has active tab', 'Groups' do
+ subject { visit dashboard_groups_path }
+ end
end
end
- context 'on dashboard projects' do
- it_behaves_like 'page has active tab', 'Projects' do
- subject { visit dashboard_projects_path }
+ context 'with combined_menu: feature flag on' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { true }
+
+ before do
+ stub_feature_flags(combined_menu: true)
end
+
+ it_behaves_like 'combined_menu: feature flag examples'
end
- context 'on dashboard groups' do
- it_behaves_like 'page has active tab', 'Groups' do
- subject { visit dashboard_groups_path }
+ context 'with combined_menu feature flag off' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { false }
+
+ before do
+ stub_feature_flags(combined_menu: false)
end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ def pending_on_combined_menu_flag
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56587' if needs_rewrite_for_combined_menu_flag_on
end
end
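
Several dashboard and navigation specs in this patch (`active_tab_spec.rb` above, plus the group-dashboard, shortcuts and frequently-visited specs below) are reshaped the same way: the existing examples move into a `shared_examples` block that is exercised twice, once with the `combined_menu` feature flag stubbed on (where each example is marked pending) and once with it off. A condensed sketch of that shape, with a placeholder example body, selector and pending message rather than code lifted from the patch:

# Condensed sketch of the combined_menu pattern used above; the example body,
# selector and pending message are placeholders, not code from this patch.
RSpec.describe 'some navigation spec', :js do
  shared_examples 'combined_menu: feature flag examples' do
    it 'still renders the top navigation' do
      pending_on_combined_menu_flag

      visit root_path
      expect(page).to have_selector('.navbar')
    end
  end

  context 'with combined_menu: feature flag on' do
    let(:needs_rewrite_for_combined_menu_flag_on) { true }

    before do
      stub_feature_flags(combined_menu: true)
    end

    it_behaves_like 'combined_menu: feature flag examples'
  end

  context 'with combined_menu feature flag off' do
    let(:needs_rewrite_for_combined_menu_flag_on) { false }

    before do
      stub_feature_flags(combined_menu: false)
    end

    it_behaves_like 'combined_menu: feature flag examples'
  end

  def pending_on_combined_menu_flag
    pending 'rewrite for the combined menu' if needs_rewrite_for_combined_menu_flag_on
  end
end

This keeps both flag states running in CI while the flag-on behaviour is still being rewritten.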
diff --git a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
index 179d9d09905..0620f819332 100644
--- a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
+++ b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
@@ -7,36 +7,64 @@ RSpec.describe 'The group dashboard' do
let(:user) { create(:user) }
- before do
- stub_feature_flags(combined_menu: false)
+ shared_examples 'combined_menu: feature flag examples' do
+ before do
+ sign_in user
+ end
- sign_in user
- end
+ describe 'The top navigation' do
+ it 'has all the expected links' do
+ pending_on_combined_menu_flag
- describe 'The top navigation' do
- it 'has all the expected links' do
- visit dashboard_groups_path
+ visit dashboard_groups_path
- within('.navbar') do
- expect(page).to have_button('Projects')
- expect(page).to have_button('Groups')
- expect(page).to have_link('Activity')
- expect(page).to have_link('Milestones')
- expect(page).to have_link('Snippets')
+ within('.navbar') do
+ expect(page).to have_button('Projects')
+ expect(page).to have_button('Groups')
+ expect(page).to have_link('Activity')
+ expect(page).to have_link('Milestones')
+ expect(page).to have_link('Snippets')
+ end
end
- end
- it 'hides some links when an external authorization service is enabled' do
- enable_external_authorization_service_check
- visit dashboard_groups_path
+ it 'hides some links when an external authorization service is enabled' do
+ pending_on_combined_menu_flag
+
+ enable_external_authorization_service_check
+ visit dashboard_groups_path
- within('.navbar') do
- expect(page).to have_button('Projects')
- expect(page).to have_button('Groups')
- expect(page).not_to have_link('Activity')
- expect(page).not_to have_link('Milestones')
- expect(page).to have_link('Snippets')
+ within('.navbar') do
+ expect(page).to have_button('Projects')
+ expect(page).to have_button('Groups')
+ expect(page).not_to have_link('Activity')
+ expect(page).not_to have_link('Milestones')
+ expect(page).to have_link('Snippets')
+ end
end
end
end
+
+ context 'with combined_menu: feature flag on' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { true }
+
+ before do
+ stub_feature_flags(combined_menu: true)
+ end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ context 'with combined_menu feature flag off' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { false }
+
+ before do
+ stub_feature_flags(combined_menu: false)
+ end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ def pending_on_combined_menu_flag
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56587' if needs_rewrite_for_combined_menu_flag_on
+ end
end
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index e96a60b2ab2..5f60832dbc9 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -3,71 +3,97 @@
require 'spec_helper'
RSpec.describe 'Dashboard shortcuts', :js do
- before do
- stub_feature_flags(combined_menu: false)
- end
+ shared_examples 'combined_menu: feature flag examples' do
+ context 'logged in' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
- context 'logged in' do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ visit root_dashboard_path
+ end
- before do
- project.add_developer(user)
- sign_in(user)
- visit root_dashboard_path
- end
+ it 'navigates to tabs' do
+ pending_on_combined_menu_flag
- it 'navigate to tabs' do
- find('body').send_keys([:shift, 'I'])
+ find('body').send_keys([:shift, 'I'])
- check_page_title('Issues')
+ check_page_title('Issues')
- find('body').send_keys([:shift, 'M'])
+ find('body').send_keys([:shift, 'M'])
- check_page_title('Merge requests')
+ check_page_title('Merge requests')
- find('body').send_keys([:shift, 'T'])
+ find('body').send_keys([:shift, 'T'])
- check_page_title('To-Do List')
+ check_page_title('To-Do List')
- find('body').send_keys([:shift, 'G'])
+ find('body').send_keys([:shift, 'G'])
- check_page_title('Groups')
+ check_page_title('Groups')
- find('body').send_keys([:shift, 'P'])
+ find('body').send_keys([:shift, 'P'])
- check_page_title('Projects')
+ check_page_title('Projects')
- find('body').send_keys([:shift, 'A'])
+ find('body').send_keys([:shift, 'A'])
- check_page_title('Activity')
+ check_page_title('Activity')
+ end
end
- end
- context 'logged out' do
- before do
- visit explore_root_path
+ context 'logged out' do
+ before do
+ visit explore_root_path
+ end
+
+ it 'navigates to tabs' do
+ pending_on_combined_menu_flag
+
+ find('body').send_keys([:shift, 'G'])
+
+ find('.nothing-here-block')
+ expect(page).to have_content('No public groups')
+
+ find('body').send_keys([:shift, 'S'])
+
+ find('.nothing-here-block')
+ expect(page).to have_content('No snippets found')
+
+ find('body').send_keys([:shift, 'P'])
+
+ find('.nothing-here-block')
+ expect(page).to have_content('Explore public groups to find projects to contribute to.')
+ end
end
- it 'navigate to tabs' do
- find('body').send_keys([:shift, 'G'])
+ def check_page_title(title)
+ expect(find('.page-title')).to have_content(title)
+ end
+ end
- find('.nothing-here-block')
- expect(page).to have_content('No public groups')
+ context 'with combined_menu: feature flag on' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { true }
- find('body').send_keys([:shift, 'S'])
+ before do
+ stub_feature_flags(combined_menu: true)
+ end
- find('.nothing-here-block')
- expect(page).to have_content('No snippets found')
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
- find('body').send_keys([:shift, 'P'])
+ context 'with combined_menu feature flag off' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { false }
- find('.nothing-here-block')
- expect(page).to have_content('Explore public groups to find projects to contribute to.')
+ before do
+ stub_feature_flags(combined_menu: false)
end
+
+ it_behaves_like 'combined_menu: feature flag examples'
end
- def check_page_title(title)
- expect(find('.page-title')).to have_content(title)
+ def pending_on_combined_menu_flag
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56587' if needs_rewrite_for_combined_menu_flag_on
end
end
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 0b4fed55f11..0bc6cc9c017 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe 'Dashboard Todos' do
end
it 'has not "All done" message' do
- expect(page).not_to have_selector('.todos-all-done')
+ expect(page).not_to have_selector('.empty-state')
end
end
diff --git a/spec/features/frequently_visited_projects_and_groups_spec.rb b/spec/features/frequently_visited_projects_and_groups_spec.rb
index 6c25afdf6d4..9110c7ad65a 100644
--- a/spec/features/frequently_visited_projects_and_groups_spec.rb
+++ b/spec/features/frequently_visited_projects_and_groups_spec.rb
@@ -5,45 +5,73 @@ require 'spec_helper'
RSpec.describe 'Frequently visited items', :js do
let_it_be(:user) { create(:user) }
- before do
- stub_feature_flags(combined_menu: false)
+ shared_examples 'combined_menu: feature flag examples' do
+ before do
+ sign_in(user)
+ end
- sign_in(user)
- end
+ context 'for projects' do
+ let_it_be(:project) { create(:project, :public) }
- context 'for projects' do
- let_it_be(:project) { create(:project, :public) }
+ it 'increments localStorage counter when visiting the project' do
+ pending_on_combined_menu_flag
- it 'increments localStorage counter when visiting the project' do
- visit project_path(project)
+ visit project_path(project)
- frequent_projects = nil
+ frequent_projects = nil
- wait_for('localStorage frequent-projects') do
- frequent_projects = page.evaluate_script("localStorage['#{user.username}/frequent-projects']")
+ wait_for('localStorage frequent-projects') do
+ frequent_projects = page.evaluate_script("localStorage['#{user.username}/frequent-projects']")
- frequent_projects.present?
- end
+ frequent_projects.present?
+ end
- expect(Gitlab::Json.parse(frequent_projects)).to contain_exactly(a_hash_including('id' => project.id, 'frequency' => 1))
+ expect(Gitlab::Json.parse(frequent_projects)).to contain_exactly(a_hash_including('id' => project.id, 'frequency' => 1))
+ end
end
- end
- context 'for groups' do
- let_it_be(:group) { create(:group, :public) }
+ context 'for groups' do
+ let_it_be(:group) { create(:group, :public) }
- it 'increments localStorage counter when visiting the group' do
- visit group_path(group)
+ it 'increments localStorage counter when visiting the group' do
+ pending_on_combined_menu_flag
- frequent_groups = nil
+ visit group_path(group)
- wait_for('localStorage frequent-groups') do
- frequent_groups = page.evaluate_script("localStorage['#{user.username}/frequent-groups']")
+ frequent_groups = nil
- frequent_groups.present?
+ wait_for('localStorage frequent-groups') do
+ frequent_groups = page.evaluate_script("localStorage['#{user.username}/frequent-groups']")
+
+ frequent_groups.present?
+ end
+
+ expect(Gitlab::Json.parse(frequent_groups)).to contain_exactly(a_hash_including('id' => group.id, 'frequency' => 1))
end
+ end
+ end
- expect(Gitlab::Json.parse(frequent_groups)).to contain_exactly(a_hash_including('id' => group.id, 'frequency' => 1))
+ context 'with combined_menu: feature flag on' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { true }
+
+ before do
+ stub_feature_flags(combined_menu: true)
end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ context 'with combined_menu feature flag off' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { false }
+
+ before do
+ stub_feature_flags(combined_menu: false)
+ end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ def pending_on_combined_menu_flag
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56587' if needs_rewrite_for_combined_menu_flag_on
end
end
diff --git a/spec/features/groups/group_page_with_external_authorization_service_spec.rb b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
index 187d878472e..59a7feb813b 100644
--- a/spec/features/groups/group_page_with_external_authorization_service_spec.rb
+++ b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
@@ -15,8 +15,7 @@ RSpec.describe 'The group page' do
def expect_all_sidebar_links
within('.nav-sidebar') do
- expect(page).to have_link('Group overview')
- expect(page).to have_link('Details')
+ expect(page).to have_link('Group information')
expect(page).to have_link('Activity')
expect(page).to have_link('Issues')
expect(page).to have_link('Merge requests')
@@ -44,8 +43,7 @@ RSpec.describe 'The group page' do
visit group_path(group)
within('.nav-sidebar') do
- expect(page).to have_link('Group overview')
- expect(page).to have_link('Details')
+ expect(page).to have_link('Group information')
expect(page).not_to have_link('Activity')
expect(page).not_to have_link('Contribution')
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index b0d2f90145f..21b39d2da46 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -217,7 +217,7 @@ RSpec.describe 'Group issues page' do
it 'first pagination item is active' do
page.within('.gl-pagination') do
- expect(find('.active')).to have_content('1')
+ expect(find('li.active')).to have_content('1')
end
end
end
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index e9bbe9de3c9..40cd54c1e33 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Groups > Members > Manage groups', :js do
include Select2Helper
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::Features::InviteMembersModalHelper
let_it_be(:user) { create(:user) }
@@ -12,18 +13,43 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
sign_in(user)
end
- context 'when group link does not exist' do
- let_it_be(:group) { create(:group) }
- let_it_be(:group_to_add) { create(:group) }
-
+ context 'with invite_members_group_modal disabled' do
before do
stub_feature_flags(invite_members_group_modal: false)
- group.add_owner(user)
- visit group_group_members_path(group)
end
- it 'add group to group' do
- add_group(group_to_add.id, 'Reporter')
+ context 'when group link does not exist' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_to_add) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ group_to_add.add_owner(user)
+ visit group_group_members_path(group)
+ end
+
+ it 'can share a group with group' do
+ add_group(group_to_add.id, 'Reporter')
+
+ click_groups_tab
+
+ page.within(first_row) do
+ expect(page).to have_content(group_to_add.name)
+ expect(page).to have_content('Reporter')
+ end
+ end
+ end
+ end
+
+ context 'when group link does not exist' do
+ it 'can share a group with group' do
+ group = create(:group)
+ group_to_add = create(:group)
+ group.add_owner(user)
+ group_to_add.add_owner(user)
+
+ visit group_group_members_path(group)
+ invite_group(group_to_add.name, role: 'Reporter')
click_groups_tab
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 3b637a10abe..c5e6479ec51 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -5,13 +5,13 @@ require 'spec_helper'
RSpec.describe 'Groups > Members > Manage members' do
include Select2Helper
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::Features::InviteMembersModalHelper
let(:user1) { create(:user, name: 'John Doe') }
let(:user2) { create(:user, name: 'Mary Jane') }
let(:group) { create(:group) }
before do
- stub_feature_flags(invite_members_group_modal: false)
sign_in(user1)
end
@@ -26,16 +26,28 @@ RSpec.describe 'Groups > Members > Manage members' do
end
end
- context 'when Invite Members modal is enabled' do
- before do
- stub_feature_flags(invite_members_group_modal: true)
+ shared_examples 'does not include either invite modal or either invite form' do
+ it 'does not include either of the invite members or invite group modal buttons' do
+ expect(page).not_to have_selector '.js-invite-members-modal'
+ expect(page).not_to have_selector '.js-invite-group-modal'
end
+ it 'does not include either of the invite users or invite group forms' do
+ expect(page).not_to have_selector '.invite-users-form'
+ expect(page).not_to have_selector '.invite-group-form'
+ end
+ end
+
+ context 'when Invite Members modal is enabled' do
it_behaves_like 'includes the correct Invite link', '.js-invite-members-trigger', '.invite-users-form'
it_behaves_like 'includes the correct Invite link', '.js-invite-group-trigger', '.invite-group-form'
end
context 'when Invite Members modal is disabled' do
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ end
+
it_behaves_like 'includes the correct Invite link', '.invite-users-form', '.js-invite-members-trigger'
it_behaves_like 'includes the correct Invite link', '.invite-group-form', '.js-invite-group-trigger'
end
@@ -59,7 +71,7 @@ RSpec.describe 'Groups > Members > Manage members' do
visit group_group_members_path(group)
- add_user(user2.id, 'Reporter')
+ invite_member(user2.name, role: 'Reporter')
page.within(second_row) do
expect(page).to have_content(user2.name)
@@ -73,21 +85,46 @@ RSpec.describe 'Groups > Members > Manage members' do
visit group_group_members_path(group)
- find('.select2-container').click
- select_input = find('.select2-input')
+ click_on 'Invite members'
+ fill_in 'Select members or type email addresses', with: '@gitlab.com'
- select_input.send_keys('@gitlab.com')
wait_for_requests
expect(page).to have_content('No matches found')
- select_input.native.clear
- select_input.send_keys('undisclosed_email@gitlab.com')
+ fill_in 'Select members or type email addresses', with: 'undisclosed_email@gitlab.com'
wait_for_requests
expect(page).to have_content("Jane 'invisible' Doe")
end
+ context 'when Invite Members modal is disabled' do
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ end
+
+ it 'does not disclose email addresses', :js do
+ group.add_owner(user1)
+ create(:user, email: 'undisclosed_email@gitlab.com', name: "Jane 'invisible' Doe")
+
+ visit group_group_members_path(group)
+
+ find('.select2-container').click
+ select_input = find('.select2-input')
+
+ select_input.send_keys('@gitlab.com')
+ wait_for_requests
+
+ expect(page).to have_content('No matches found')
+
+ select_input.native.clear
+ select_input.send_keys('undisclosed_email@gitlab.com')
+ wait_for_requests
+
+ expect(page).to have_content("Jane 'invisible' Doe")
+ end
+ end
+
it 'remove user from group', :js do
group.add_owner(user1)
group.add_developer(user2)
@@ -115,7 +152,7 @@ RSpec.describe 'Groups > Members > Manage members' do
visit group_group_members_path(group)
- add_user(user1.id, 'Reporter')
+ invite_member(user1.name, role: 'Reporter')
page.within(first_row) do
expect(page).to have_content(user1.name)
@@ -128,7 +165,7 @@ RSpec.describe 'Groups > Members > Manage members' do
visit group_group_members_path(group)
- add_user('test@example.com', 'Reporter')
+ invite_member('test@example.com', role: 'Reporter')
expect(page).to have_link 'Invited'
click_link 'Invited'
@@ -140,29 +177,46 @@ RSpec.describe 'Groups > Members > Manage members' do
end
end
- it 'guest can not manage other users', :js do
- group.add_guest(user1)
- group.add_developer(user2)
+ context 'as a guest', :js do
+ before do
+ group.add_guest(user1)
+ group.add_developer(user2)
- visit group_group_members_path(group)
+ visit group_group_members_path(group)
+ end
- expect(page).not_to have_selector '.invite-users-form'
- expect(page).not_to have_selector '.invite-group-form'
+ it_behaves_like 'does not include either invite modal or either invite form'
- page.within(second_row) do
- # Can not modify user2 role
- expect(page).not_to have_button 'Developer'
+ it 'does not include a button on the members page list to manage or remove the existing member', :js do
+ page.within(second_row) do
+ # Can not modify user2 role
+ expect(page).not_to have_button 'Developer'
- # Can not remove user2
- expect(page).not_to have_selector 'button[title="Remove member"]'
+ # Can not remove user2
+ expect(page).not_to have_selector 'button[title="Remove member"]'
+ end
end
end
- def add_user(id, role)
- page.within ".invite-users-form" do
- select2(id, from: "#user_ids", multiple: true)
- select(role, from: "access_level")
- click_button "Invite"
+ context 'as a guest when the :invite_members_group_modal feature flag is disabled', :js do
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ group.add_guest(user1)
+ group.add_developer(user2)
+
+ visit group_group_members_path(group)
+ end
+
+ it_behaves_like 'does not include either invite modal or either invite form'
+
+ it 'does not include a button on the members page list to manage or remove the existing member', :js do
+ page.within(second_row) do
+ # Can not modify user2 role
+ expect(page).not_to have_button 'Developer'
+
+ # Can not remove user2
+ expect(page).not_to have_selector 'button[title="Remove member"]'
+ end
end
end
end
diff --git a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
index d31a7977f66..ddf3c6d8f9b 100644
--- a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js do
- include Select2Helper
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::Features::InviteMembersModalHelper
let_it_be(:user1) { create(:user, name: 'John Doe') }
let_it_be(:group) { create(:group) }
@@ -12,7 +12,6 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js
let(:new_member) { create(:user, name: 'Mary Jane') }
before do
- stub_feature_flags(invite_members_group_modal: false)
group.add_owner(user1)
sign_in(user1)
end
@@ -20,14 +19,7 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js
it 'expiration date is displayed in the members list' do
visit group_group_members_path(group)
- page.within invite_users_form do
- select2(new_member.id, from: '#user_ids', multiple: true)
-
- fill_in 'expires_at', with: 5.days.from_now.to_date
- find_field('expires_at').native.send_keys :enter
-
- click_on 'Invite'
- end
+ invite_member(new_member.name, role: 'Guest', expires_at: 5.days.from_now.to_date)
page.within second_row do
expect(page).to have_content(/in \d days/)
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index 1d9ac5ee1e9..c51ee250331 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -54,11 +54,11 @@ RSpec.describe 'Group milestones' do
expect(find('.start_date')).to have_content(Date.today.at_beginning_of_month.strftime('%b %-d, %Y'))
end
- it 'description input does not support autocomplete' do
+ it 'description input supports autocomplete' do
description = find('.note-textarea')
description.native.send_keys('!')
- expect(page).not_to have_selector('.atwho-view')
+ expect(page).to have_selector('.atwho-view')
end
end
diff --git a/spec/features/groups/milestones/gfm_autocomplete_spec.rb b/spec/features/groups/milestones/gfm_autocomplete_spec.rb
new file mode 100644
index 00000000000..85a14123294
--- /dev/null
+++ b/spec/features/groups/milestones/gfm_autocomplete_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'GFM autocomplete', :js do
+ let_it_be(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') }
+ let_it_be(:group) { create(:group, name: 'Ancestor') }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:issue) { create(:issue, project: project, assignees: [user], title: 'My special issue') }
+ let_it_be(:label) { create(:group_label, group: group, title: 'special+') }
+ let_it_be(:milestone) { create(:milestone, resource_parent: group, title: "group milestone") }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
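+ # Each entity's reference prefix ('@' user/group, '~' label, '%' milestone, '#' issue, '!' merge request) should surface a matching entry in the atwho dropdown.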
+ shared_examples 'displays autocomplete menu for all entities' do
+ it 'autocompletes all available entities' do
+ fill_in 'Description', with: User.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(group.name)
+
+ fill_in 'Description', with: Label.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(label.title)
+
+ fill_in 'Description', with: Milestone.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(milestone.title)
+
+ fill_in 'Description', with: Issue.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(issue.title)
+
+ fill_in 'Description', with: MergeRequest.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(merge_request.title)
+ end
+ end
+
+ before_all do
+ group.add_maintainer(user)
+ end
+
+ describe 'new milestone page' do
+ before do
+ sign_in(user)
+ visit new_group_milestone_path(group)
+
+ wait_for_requests
+ end
+
+ it_behaves_like 'displays autocomplete menu for all entities'
+ end
+
+ describe 'update milestone page' do
+ before do
+ sign_in(user)
+ visit edit_group_milestone_path(group, milestone)
+
+ wait_for_requests
+ end
+
+ it_behaves_like 'displays autocomplete menu for all entities'
+ end
+
+ private
+
+ def find_autocomplete_menu
+ find('.atwho-view ul', visible: true)
+ end
+
+ def expect_autocomplete_entry(entry)
+ page.within('.atwho-container') do
+ expect(page).to have_content(entry)
+ end
+ end
+end
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 021b1af54d4..b46d4dae87a 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -13,21 +13,10 @@ RSpec.describe 'Group navbar' do
let(:structure) do
[
- {
- nav_item: _('Group overview'),
- nav_sub_items: [
- _('Details'),
- _('Activity')
- ]
- },
+ group_information_nav_item,
{
nav_item: _('Issues'),
- nav_sub_items: [
- _('List'),
- _('Board'),
- _('Labels'),
- _('Milestones')
- ]
+ nav_sub_items: issues_nav_items
},
{
nav_item: _('Merge requests'),
@@ -40,11 +29,12 @@ RSpec.describe 'Group navbar' do
nav_sub_items: []
},
(analytics_nav_item if Gitlab.ee?),
- {
- nav_item: _('Members'),
- nav_sub_items: []
- }
- ]
+ members_nav_item
+ ].compact
+ end
+
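+ # The default structure omits the Members nav item (nil entries are stripped by `.compact`); the ':sidebar_refactor is disabled' context below overrides this.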
+ let(:members_nav_item) do
+ nil
end
before do
@@ -87,4 +77,40 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ let(:group_information_nav_item) do
+ {
+ nav_item: _('Group overview'),
+ nav_sub_items: [
+ _('Details'),
+ _('Activity')
+ ]
+ }
+ end
+
+ let(:members_nav_item) do
+ {
+ nav_item: _('Members'),
+ nav_sub_items: []
+ }
+ end
+
+ let(:issues_nav_items) do
+ [
+ _('List'),
+ _('Board'),
+ _('Labels'),
+ _('Milestones')
+ ]
+ end
+
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+
+ visit group_path(group)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
end
diff --git a/spec/features/groups/settings/packages_and_registries_spec.rb b/spec/features/groups/settings/packages_and_registries_spec.rb
index 45ea77e3868..551a0bc5375 100644
--- a/spec/features/groups/settings/packages_and_registries_spec.rb
+++ b/spec/features/groups/settings/packages_and_registries_spec.rb
@@ -66,28 +66,31 @@ RSpec.describe 'Group Packages & Registries settings' do
it 'automatically saves changes to the server', :js do
visit_settings_page
- expect(page).to have_content('Allow duplicates')
+ within '[data-testid="maven-settings"]' do
+ expect(page).to have_content('Allow duplicates')
- find('.gl-toggle').click
+ find('.gl-toggle').click
- expect(page).to have_content('Do not allow duplicates')
+ expect(page).to have_content('Do not allow duplicates')
- visit_settings_page
+ visit_settings_page
- expect(page).to have_content('Do not allow duplicates')
+ expect(page).to have_content('Do not allow duplicates')
+ end
end
it 'shows an error on wrong regex', :js do
visit_settings_page
- expect(page).to have_content('Allow duplicates')
-
- find('.gl-toggle').click
+ within '[data-testid="maven-settings"]' do
+ expect(page).to have_content('Allow duplicates')
- expect(page).to have_content('Do not allow duplicates')
+ find('.gl-toggle').click
- fill_in 'Exceptions', with: ')'
+ expect(page).to have_content('Do not allow duplicates')
+ fill_in 'Exceptions', with: ')'
+ end
# simulate blur event
find('body').click
@@ -98,11 +101,13 @@ RSpec.describe 'Group Packages & Registries settings' do
it 'works correctly', :js do
visit_sub_group_settings_page
- expect(page).to have_content('Allow duplicates')
+ within '[data-testid="maven-settings"]' do
+ expect(page).to have_content('Allow duplicates')
- find('.gl-toggle').click
+ find('.gl-toggle').click
- expect(page).to have_content('Do not allow duplicates')
+ expect(page).to have_content('Do not allow duplicates')
+ end
end
end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 33d2ac50628..bcccadf7710 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Group' do
- let_it_be(:user) { create(:user) }
+ let(:user) { create(:user) }
before do
sign_in(user)
@@ -368,21 +368,21 @@ RSpec.describe 'Group' do
expect(page).to have_content(nested_group.name)
expect(page).to have_content(project.name)
- expect(page).to have_link('Group overview')
+ expect(page).to have_link('Group information')
end
- it 'renders subgroup page with the text "Subgroup overview"' do
+ it 'renders subgroup page with the text "Subgroup information"' do
visit group_path(nested_group)
wait_for_requests
- expect(page).to have_link('Subgroup overview')
+ expect(page).to have_link('Subgroup information')
end
- it 'renders project page with the text "Project overview"' do
+ it 'renders project page with the text "Project information"' do
visit project_path(project)
wait_for_requests
- expect(page).to have_link('Project overview')
+ expect(page).to have_link('Project information')
end
end
@@ -439,6 +439,35 @@ RSpec.describe 'Group' do
end
end
+ describe 'new_repo experiment' do
+ let_it_be(:group) { create_default(:group) }
+
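+ # The candidate variant renames the header dropdown entry to "New project/repository"; control keeps plain "New project".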
+ it 'when in candidate renders "project/repository"' do
+ stub_experiments(new_repo: :candidate)
+
+ visit group_path(group)
+
+ find('li.header-new.dropdown').click
+
+ page.within('li.header-new.dropdown') do
+ expect(page).to have_selector('a', text: 'New project/repository')
+ end
+ end
+
+ it 'when in control renders "project"' do
+ stub_experiments(new_repo: :control)
+
+ visit group_path(group)
+
+ find('li.header-new.dropdown').click
+
+ page.within('li.header-new.dropdown') do
+ expect(page).to have_selector('a', text: 'New project')
+ expect(page).to have_no_selector('a', text: 'New project/repository')
+ end
+ end
+ end
+
def remove_with_confirm(button_text, confirm_with)
click_button button_text
fill_in 'confirm_name_input', with: confirm_with
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index e9960802378..a72cf033d61 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -3,18 +3,16 @@
require 'spec_helper'
RSpec.describe 'Group or Project invitations', :aggregate_failures do
- let(:user) { create(:user, email: 'user@example.com') }
- let(:owner) { create(:user, name: 'John Doe') }
- let(:group) { create(:group, name: 'Owned') }
- let(:project) { create(:project, :repository, namespace: group) }
+ let_it_be(:owner) { create(:user, name: 'John Doe') }
+ let_it_be(:group) { create(:group, name: 'Owned') }
+ let_it_be(:project) { create(:project, :repository, namespace: group) }
+
let(:group_invite) { group.group_members.invite.last }
before do
stub_application_setting(require_admin_approval_after_user_signup: false)
project.add_maintainer(owner)
group.add_owner(owner)
- group.add_developer('user@example.com', owner)
- group_invite.generate_invite_token!
end
def confirm_email(new_user)
@@ -23,13 +21,13 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
visit user_confirmation_path(confirmation_token: new_user_token)
end
- def fill_in_sign_up_form(new_user)
+ def fill_in_sign_up_form(new_user, submit_button_text = 'Register')
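+ # submit_button_text is overridable because the invite_signup_page_interaction candidate flow submits with 'Continue' rather than 'Register'.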
fill_in 'new_user_first_name', with: new_user.first_name
fill_in 'new_user_last_name', with: new_user.last_name
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
fill_in 'new_user_password', with: new_user.password
- click_button 'Register'
+ click_button submit_button_text
end
def fill_in_sign_in_form(user)
@@ -44,46 +42,128 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
click_button 'Get started!'
end
- context 'when signed out' do
+ context 'when inviting a registered user' do
+ let(:invite_email) { 'user@example.com' }
+
before do
- visit invite_path(group_invite.raw_invite_token)
+ group.add_developer(invite_email, owner)
+ group_invite.generate_invite_token!
end
- it 'renders sign in page with sign in notice' do
- expect(current_path).to eq(new_user_registration_path)
- expect(page).to have_content('To accept this invitation, create an account or sign in')
- end
+ context 'when signed out' do
+ context 'when analyzing the redirects and forms from invite link click' do
+ before do
+ visit invite_path(group_invite.raw_invite_token)
+ end
- it 'pre-fills the "Username or email" field on the sign in box with the invite_email from the invite' do
- click_link 'Sign in'
+ it 'renders sign up page with sign up notice' do
+ expect(current_path).to eq(new_user_registration_path)
+ expect(page).to have_content('To accept this invitation, create an account or sign in')
+ end
- expect(find_field('Username or email').value).to eq(group_invite.invite_email)
- end
+ it 'pre-fills the "Username or email" field on the sign in box with the invite_email from the invite' do
+ click_link 'Sign in'
- it 'pre-fills the Email field on the sign up box with the invite_email from the invite' do
- expect(find_field('Email').value).to eq(group_invite.invite_email)
- end
+ expect(find_field('Username or email').value).to eq(group_invite.invite_email)
+ end
- it 'sign in, grants access and redirects to group page' do
- click_link 'Sign in'
+ it 'pre-fills the Email field on the sign up box with the invite_email from the invite' do
+ expect(find_field('Email').value).to eq(group_invite.invite_email)
+ end
+ end
- fill_in_sign_in_form(user)
+ context 'when invite is sent before account is created - ldap or social sign in for manual acceptance edge case' do
+ let(:user) { create(:user, email: 'user@example.com') }
- expect(current_path).to eq(group_path(group))
- expect(page).to have_content('You have been granted Developer access to group Owned.')
- end
- end
+ context 'when invite clicked and not signed in' do
+ before do
+ visit invite_path(group_invite.raw_invite_token)
+ end
- context 'when signed in as an existing member' do
- before do
- sign_in(owner)
- end
+ it 'signs in, grants access and redirects to the group activity page' do
+ click_link 'Sign in'
- it 'shows message user already a member' do
- visit invite_path(group_invite.raw_invite_token)
+ fill_in_sign_in_form(user)
+
+ expect(current_path).to eq(activity_group_path(group))
+ end
+ end
+
+ context 'when signed in and an invite link is clicked' do
+ context 'when an invite email is a secondary email for the user' do
+ let(:invite_email) { 'user_secondary@example.com' }
+
+ before do
+ sign_in(user)
+ visit invite_path(group_invite.raw_invite_token)
+ end
+
+ it 'sends user to the invite url and allows them to decline' do
+ expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ expect(page).to have_content("Note that this invitation was sent to #{invite_email}")
+ expect(page).to have_content("but you are signed in as #{user.to_reference} with email #{user.email}")
+
+ click_link('Decline')
+
+ expect(page).to have_content('You have declined the invitation')
+ expect(current_path).to eq(dashboard_projects_path)
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+
+ it 'sends user to the invite url and allows them to accept' do
+ expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ expect(page).to have_content("Note that this invitation was sent to #{invite_email}")
+ expect(page).to have_content("but you are signed in as #{user.to_reference} with email #{user.email}")
+
+ click_link('Accept invitation')
+
+ expect(page).to have_content('You have been granted')
+ expect(current_path).to eq(activity_group_path(group))
+ end
+ end
+
+ context 'when user is an existing member' do
+ before do
+ sign_in(owner)
+ visit invite_path(group_invite.raw_invite_token)
+ end
+
+ it 'shows message user already a member' do
+ expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ expect(page).to have_link(owner.name, href: user_url(owner))
+ expect(page).to have_content('However, you are already a member of this group.')
+ end
+ end
+ end
+
+ context 'when declining the invitation from invitation reminder email' do
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ visit decline_invite_path(group_invite.raw_invite_token)
+ end
+
+ it 'declines application and redirects to dashboard' do
+ expect(current_path).to eq(dashboard_projects_path)
+ expect(page).to have_content('You have declined the invitation to join group Owned.')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+ end
+
+ context 'when signed out with signup onboarding' do
+ before do
+ visit decline_invite_path(group_invite.raw_invite_token)
+ end
- expect(page).to have_link(owner.name, href: user_url(owner))
- expect(page).to have_content('However, you are already a member of this group.')
+ it 'declines application and shows the decline page' do
+ expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
+ expect(page).not_to have_content('You have declined the invitation to join')
+ expect(page).to have_content('You successfully declined the invitation')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+ end
+ end
+ end
end
end
@@ -91,12 +171,15 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
let(:new_user) { build_stubbed(:user) }
let(:invite_email) { new_user.email }
let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email, created_by: owner) }
- let!(:project_invite) { create(:project_member, :invited, project: project, invite_email: invite_email) }
+ let(:send_email_confirmation) { true }
+
+ before do
+ stub_application_setting(send_user_confirmation_email: send_email_confirmation)
+ end
context 'when registering using invitation email' do
before do
- stub_application_setting(send_user_confirmation_email: send_email_confirmation)
- visit invite_path(group_invite.raw_invite_token)
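+ # invite_type flags this visit as coming from the invite email (presumably so the members/invite_email experiment can attribute the acceptance)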
+ visit invite_path(group_invite.raw_invite_token, invite_type: Members::InviteEmailExperiment::INVITE_TYPE)
end
context 'with admin approval required enabled' do
@@ -104,8 +187,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
stub_application_setting(require_admin_approval_after_user_signup: true)
end
- let(:send_email_confirmation) { true }
-
it 'does not sign the user in' do
fill_in_sign_up_form(new_user)
@@ -117,79 +198,42 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
context 'email confirmation disabled' do
let(:send_email_confirmation) { false }
- it 'signs up and redirects to the dashboard page with all the projects/groups invitations automatically accepted' do
+ it 'signs up and redirects to the most recent membership activity page with all the projects/groups invitations automatically accepted' do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(dashboard_projects_path)
- expect(page).to have_content(project.full_name)
-
- visit group_path(group)
-
- expect(page).to have_content(group.full_name)
+ expect(current_path).to eq(activity_group_path(group))
+ expect(page).to have_content('You have been granted Owner access to group Owned.')
end
context 'the user sign-up using a different email address' do
let(:invite_email) { build_stubbed(:user).email }
- it 'signs up and redirects to the invitation page' do
+ it 'signs up and redirects to the activity page' do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ expect(current_path).to eq(activity_group_path(group))
end
end
end
context 'email confirmation enabled' do
- let(:send_email_confirmation) { true }
-
- context 'when soft email confirmation is not enabled' do
- before do
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
- end
+ context 'with members/invite_email experiment', :experiment do
+ it 'tracks the accepted invite' do
+ expect(experiment('members/invite_email')).to track(:accepted)
+ .with_context(actor: group_invite)
+ .on_next_instance
- it 'signs up and redirects to root page with all the project/groups invitation automatically accepted' do
fill_in_sign_up_form(new_user)
- confirm_email(new_user)
- fill_in_sign_in_form(new_user)
- fill_in_welcome_form
-
- expect(current_path).to eq(root_path)
- expect(page).to have_content(project.full_name)
-
- visit group_path(group)
-
- expect(page).to have_content(group.full_name)
end
end
- context 'when soft email confirmation is enabled' do
- before do
- allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
- end
-
- it 'signs up and redirects to root page with all the project/groups invitation automatically accepted' do
- fill_in_sign_up_form(new_user)
- fill_in_welcome_form
- confirm_email(new_user)
-
- expect(current_path).to eq(root_path)
- expect(page).to have_content(project.full_name)
-
- visit group_path(group)
-
- expect(page).to have_content(group.full_name)
- end
- end
-
- it "doesn't accept invitations until the user confirms their email" do
+ it 'signs up and redirects to the group activity page with all the projects/groups invitations automatically accepted' do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- sign_in(owner)
- visit project_project_members_path(project)
- expect(page).to have_content 'Invited'
+ expect(current_path).to eq(activity_group_path(group))
end
context 'the user sign-up using a different email address' do
@@ -201,13 +245,13 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
allow(User).to receive(:allow_unconfirmed_access_for).and_return 0
end
- it 'signs up and redirects to the invitation page' do
+ it 'signs up and redirects to the group activity page' do
fill_in_sign_up_form(new_user)
confirm_email(new_user)
fill_in_sign_in_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ expect(current_path).to eq(activity_group_path(group))
end
end
@@ -217,78 +261,75 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
allow(User).to receive(:allow_unconfirmed_access_for).and_return 2.days
end
- it 'signs up and redirects to the invitation page' do
+ it 'signs up and redirects to the group activity page' do
fill_in_sign_up_form(new_user)
fill_in_welcome_form
- expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
+ expect(current_path).to eq(activity_group_path(group))
end
end
end
end
end
- context 'when declining the invitation' do
- let(:send_email_confirmation) { true }
+ context 'with invite_signup_page_interaction experiment on', :experiment do
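+ # Control lands on the standard registration page; the candidate (on .com) lands on the invite-specific sign-up page.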
+ context 'with control experience' do
+ before do
+ stub_experiments(invite_signup_page_interaction: :control)
+ end
- context 'as an existing user' do
- let(:group_invite) { create(:group_member, user: user, group: group, created_by: owner) }
+ it 'lands on the invite sign up page and tracks the view and form submission' do
+ expect(experiment(:invite_signup_page_interaction)).to track(:view)
+ .with_context(actor: group_invite)
+ .on_next_instance
- context 'when signed in' do
- before do
- sign_in(user)
- visit decline_invite_path(group_invite.raw_invite_token)
- end
+ visit invite_path(group_invite.raw_invite_token)
- it 'declines application and redirects to dashboard' do
- expect(current_path).to eq(dashboard_projects_path)
- expect(page).to have_content('You have declined the invitation to join group Owned.')
- expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
- end
- end
+ expect(current_path).to eq(new_user_registration_path)
- context 'when signed out' do
- before do
- visit decline_invite_path(group_invite.raw_invite_token)
- end
+ expect(experiment(:invite_signup_page_interaction)).to track(:form_submission)
+ .with_context(actor: group_invite)
+ .on_next_instance
- it 'declines application and redirects to sign in page' do
- expect(current_path).to eq(new_user_session_path)
- expect(page).to have_content('You have declined the invitation to join group Owned.')
- expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
- end
+ fill_in_sign_up_form(new_user, 'Register')
+
+ expect(current_path).to eq(users_sign_up_welcome_path)
end
end
- context 'as a non-existing user' do
+ context 'with candidate experience on .com' do
before do
- visit decline_invite_path(group_invite.raw_invite_token)
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ stub_experiments(invite_signup_page_interaction: :candidate)
end
- it 'declines application and shows a decline page' do
- expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
- expect(page).to have_content('You successfully declined the invitation')
- expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
- end
- end
- end
+ it 'lands on the invite sign up page and tracks the view and form submission' do
+ expect(experiment(:invite_signup_page_interaction)).to track(:view)
+ .with_context(actor: group_invite)
+ .on_next_instance
- context 'when accepting the invitation' do
- let(:send_email_confirmation) { true }
+ visit invite_path(group_invite.raw_invite_token)
- before do
- sign_in(user)
- visit invite_path(group_invite.raw_invite_token)
- end
+ expect(current_path).to eq(new_users_sign_up_invite_path)
+
+ expect(experiment(:invite_signup_page_interaction)).to track(:form_submission)
+ .with_context(actor: group_invite)
+ .on_next_instance
+
+ fill_in_sign_up_form(new_user, 'Continue')
- it 'grants access and redirects to group page' do
- expect(group.users.include?(user)).to be false
+ expect(current_path).to eq(users_sign_up_welcome_path)
+ end
+ end
+ end
- page.click_link 'Accept invitation'
+ context 'when declining the invitation from invitation reminder email' do
+ it 'declines application and shows a decline page' do
+ visit decline_invite_path(group_invite.raw_invite_token)
- expect(current_path).to eq(group_path(group))
- expect(page).to have_content('You have been granted Owner access to group Owned.')
- expect(group.users.include?(user)).to be true
+ expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
+ expect(page).to have_content('You successfully declined the invitation')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
end
diff --git a/spec/features/issuables/sorting_list_spec.rb b/spec/features/issuables/sorting_list_spec.rb
index d065e96885c..6e07c6ffed2 100644
--- a/spec/features/issuables/sorting_list_spec.rb
+++ b/spec/features/issuables/sorting_list_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe 'Sort Issuable List' do
it 'is "last updated"' do
visit_merge_requests_with_state(project, 'merged')
- expect(find('.filter-dropdown-container')).to have_content('Last updated')
+ expect(page).to have_button 'Last updated'
expect(first_merge_request).to include(last_updated_issuable.title)
expect(last_merge_request).to include(first_updated_issuable.title)
end
@@ -69,7 +69,7 @@ RSpec.describe 'Sort Issuable List' do
it 'is "last updated"' do
visit_merge_requests_with_state(project, 'closed')
- expect(find('.filter-dropdown-container')).to have_content('Last updated')
+ expect(page).to have_button 'Last updated'
expect(first_merge_request).to include(last_updated_issuable.title)
expect(last_merge_request).to include(first_updated_issuable.title)
end
@@ -81,7 +81,7 @@ RSpec.describe 'Sort Issuable List' do
it 'is "created date"' do
visit_merge_requests_with_state(project, 'all')
- expect(find('.filter-dropdown-container')).to have_content('Created date')
+ expect(page).to have_button 'Created date'
expect(first_merge_request).to include(last_created_issuable.title)
expect(last_merge_request).to include(first_created_issuable.title)
end
@@ -94,15 +94,13 @@ RSpec.describe 'Sort Issuable List' do
it 'supports sorting in asc and desc order' do
visit_merge_requests_with_state(project, 'open')
- page.within('.filter-dropdown-container') do
- click_button('Created date')
- click_link('Last updated')
- end
+ click_button('Created date')
+ click_link('Last updated')
expect(first_merge_request).to include(last_updated_issuable.title)
expect(last_merge_request).to include(first_updated_issuable.title)
- find('.filter-dropdown-container .rspec-reverse-sort').click
+ click_on 'Sort direction'
expect(first_merge_request).to include(first_updated_issuable.title)
expect(last_merge_request).to include(last_updated_issuable.title)
@@ -133,7 +131,7 @@ RSpec.describe 'Sort Issuable List' do
it 'is "created date"' do
visit_issues project
- expect(find('.filter-dropdown-container')).to have_content('Created date')
+ expect(page).to have_button 'Created date'
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -145,7 +143,7 @@ RSpec.describe 'Sort Issuable List' do
it 'is "created date"' do
visit_issues_with_state(project, 'opened')
- expect(find('.filter-dropdown-container')).to have_content('Created date')
+ expect(page).to have_button 'Created date'
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -157,7 +155,7 @@ RSpec.describe 'Sort Issuable List' do
it 'is "last updated"' do
visit_issues_with_state(project, 'closed')
- expect(find('.filter-dropdown-container')).to have_content('Last updated')
+ expect(page).to have_button 'Last updated'
expect(first_issue).to include(last_updated_issuable.title)
expect(last_issue).to include(first_updated_issuable.title)
end
@@ -169,7 +167,7 @@ RSpec.describe 'Sort Issuable List' do
it 'is "created date"' do
visit_issues_with_state(project, 'all')
- expect(find('.filter-dropdown-container')).to have_content('Created date')
+ expect(page).to have_button 'Created date'
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -183,7 +181,7 @@ RSpec.describe 'Sort Issuable List' do
end
it 'shows the sort order as created date' do
- expect(find('.filter-dropdown-container')).to have_content('Created date')
+ expect(page).to have_button 'Created date'
expect(first_issue).to include(last_created_issuable.title)
expect(last_issue).to include(first_created_issuable.title)
end
@@ -196,15 +194,17 @@ RSpec.describe 'Sort Issuable List' do
it 'supports sorting in asc and desc order' do
visit_issues_with_state(project, 'opened')
- page.within('.filter-dropdown-container') do
- click_button('Created date')
- click_link('Last updated')
- end
+ click_button('Created date')
+ click_on('Last updated')
+
+ wait_for_requests
expect(first_issue).to include(last_updated_issuable.title)
expect(last_issue).to include(first_updated_issuable.title)
- find('.filter-dropdown-container .rspec-reverse-sort').click
+ click_on 'Sort direction'
+
+ wait_for_requests
expect(first_issue).to include(first_updated_issuable.title)
expect(last_issue).to include(last_updated_issuable.title)
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index 34d78880991..a4c0a84af7d 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
end
it 'shows a warning that the merge request contains unresolved threads' do
- expect(page).to have_content 'Before this can be merged,'
+ expect(page).to have_content 'all threads must be resolved'
end
it 'has a link to resolve all threads by creating an issue' do
diff --git a/spec/features/issues/filtered_search/filter_issues_spec.rb b/spec/features/issues/filtered_search/filter_issues_spec.rb
index 4f4584e7dce..88a7b890daa 100644
--- a/spec/features/issues/filtered_search/filter_issues_spec.rb
+++ b/spec/features/issues/filtered_search/filter_issues_spec.rb
@@ -79,26 +79,6 @@ RSpec.describe 'Filter issues', :js do
expect_filtered_search_input(search_term)
end
- context 'with the NOT queries feature flag disabled' do
- before do
- stub_feature_flags(not_issuable_queries: false)
- visit project_issues_path(project)
- end
-
- it 'does not have the != option' do
- input_filtered_search("label:", submit: false, extra_space: false)
-
- wait_for_requests
- within('#js-dropdown-operator') do
- tokens = all(:css, 'li.filter-dropdown-item')
- expect(tokens.count).to eq(1)
- button = tokens[0].find('button')
- expect(button).to have_content('=')
- expect(button).not_to have_content('!=')
- end
- end
- end
-
describe 'filter issues by author' do
context 'only author' do
it 'filters issues by searched author' do
@@ -350,7 +330,7 @@ RSpec.describe 'Filter issues', :js do
context 'issue label clicked' do
it 'filters and displays in search bar' do
- find('[data-qa-selector="issuable-label"]', text: multiple_words_label.title).click
+ click_link multiple_words_label.title
expect_issues_list_count(1)
expect_tokens([label_token("\"#{multiple_words_label.title}\"")])
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index 04b4caa52fe..d147476f1ab 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -5,17 +5,14 @@ require 'spec_helper'
RSpec.describe 'Issue Sidebar' do
include MobileHelpers
- let(:group) { create(:group, :nested) }
- let(:project) { create(:project, :public, namespace: group) }
- let!(:user) { create(:user) }
- let!(:label) { create(:label, project: project, title: 'bug') }
- let(:issue) { create(:labeled_issue, project: project, labels: [label]) }
- let!(:xss_label) { create(:label, project: project, title: '&lt;script&gt;alert("xss");&lt;&#x2F;script&gt;') }
- let!(:milestone_expired) { create(:milestone, project: project, due_date: 5.days.ago) }
- let!(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') }
- let!(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) }
- let!(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) }
- let!(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) }
+ let_it_be(:group) { create(:group, :nested) }
+ let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:label) { create(:label, project: project, title: 'bug') }
+ let_it_be(:issue) { create(:labeled_issue, project: project, labels: [label]) }
+ let_it_be(:mock_date) { Date.today.at_beginning_of_month + 2.days }
+ let_it_be(:issue_with_due_date) { create(:issue, project: project, due_date: mock_date) }
+ let_it_be(:xss_label) { create(:label, project: project, title: '&lt;script&gt;alert("xss");&lt;&#x2F;script&gt;') }
before do
stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
@@ -130,30 +127,7 @@ RSpec.describe 'Issue Sidebar' do
end
end
- context 'when invite_members_version_b experiment is enabled' do
- before do
- stub_experiment_for_subject(invite_members_version_b: true)
- end
-
- it 'shows a link for inviting members and follows through to modal' do
- project.add_developer(user)
- visit_issue(project, issue2)
-
- open_assignees_dropdown
-
- page.within '.dropdown-menu-user' do
- expect(page).to have_link('Invite members', href: '#')
- expect(page).to have_selector('[data-track-event="click_invite_members_version_b"]')
- expect(page).to have_selector('[data-track-label="edit_assignee"]')
- end
-
- click_link 'Invite members'
-
- expect(page).to have_content("Oops, this feature isn't ready yet")
- end
- end
-
- context 'when invite_members_version_b experiment is disabled' do
+ context 'when user cannot invite members in assignee dropdown' do
it 'shows author in assignee dropdown and no invite link' do
project.add_developer(user)
visit_issue(project, issue2)
@@ -212,7 +186,8 @@ RSpec.describe 'Issue Sidebar' do
click_link user2.name
end
- find('.js-right-sidebar').click
+ find('.participants').click
+ wait_for_requests
open_assignees_dropdown
@@ -226,7 +201,31 @@ RSpec.describe 'Issue Sidebar' do
end
end
- context 'as a allowed user' do
+ context 'due date widget', :js do
+ let(:due_date_value) { find('[data-testid="due-date"] [data-testid="sidebar-date-value"]') }
+
+ context 'when no due date exists' do
+ before do
+ visit_issue(project, issue)
+ end
+
+ it "displays 'None'" do
+ expect(due_date_value.text).to have_content 'None'
+ end
+ end
+
+ context 'when due date exists' do
+ before do
+ visit_issue(project, issue_with_due_date)
+ end
+
+ it "displays the due date" do
+ expect(due_date_value.text).to have_content mock_date.strftime('%b %-d, %Y')
+ end
+ end
+ end
+
+ context 'as an allowed user' do
before do
project.add_developer(user)
visit_issue(project, issue)
@@ -260,6 +259,12 @@ RSpec.describe 'Issue Sidebar' do
end
context 'editing issue milestone', :js do
+ let_it_be(:milestone_expired) { create(:milestone, project: project, due_date: 5.days.ago) }
+ let_it_be(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') }
+ let_it_be(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) }
+ let_it_be(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) }
+ let_it_be(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) }
+
before do
page.within('.block.milestone > .title') do
click_on 'Edit'
@@ -448,6 +453,8 @@ RSpec.describe 'Issue Sidebar' do
def visit_issue(project, issue)
visit project_issue_path(project, issue)
+
+ wait_for_requests
end
def open_issue_sidebar
diff --git a/spec/features/issues/resource_label_events_spec.rb b/spec/features/issues/resource_label_events_spec.rb
index 8faec85f3df..33edf2f0b63 100644
--- a/spec/features/issues/resource_label_events_spec.rb
+++ b/spec/features/issues/resource_label_events_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'List issue resource label events', :js do
labels.each { |label| click_link label }
- click_on 'Edit'
+ send_keys(:escape)
wait_for_requests
end
end
diff --git a/spec/features/issues/service_desk_spec.rb b/spec/features/issues/service_desk_spec.rb
index 75ea8c14f7f..0a879fdd4d4 100644
--- a/spec/features/issues/service_desk_spec.rb
+++ b/spec/features/issues/service_desk_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe 'Service Desk Issue Tracker', :js do
let_it_be(:support_bot) { User.support_bot }
before do
+ stub_feature_flags(vue_issuables_list: true)
+
# The following two conditions equate to Gitlab::ServiceDesk.supported == true
allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true)
allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(true)
@@ -21,7 +23,7 @@ RSpec.describe 'Service Desk Issue Tracker', :js do
before do
visit project_path(project)
find('.sidebar-top-level-items .shortcuts-issues').click
- find('.sidebar-sub-level-items a[title="Service Desk"]').click
+ find('.sidebar-sub-level-items a', text: 'Service Desk').click
end
it 'can navigate to the service desk from link in the sidebar' do
diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb
index 461030d3176..70d7deadec3 100644
--- a/spec/features/issues/spam_issues_spec.rb
+++ b/spec/features/issues/spam_issues_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe 'New issue', :js do
Gitlab::CurrentSettings.update!(
akismet_enabled: true,
akismet_api_key: 'testkey',
+ spam_check_api_key: 'testkey',
recaptcha_enabled: true,
recaptcha_site_key: 'test site key',
recaptcha_private_key: 'test private key'
diff --git a/spec/features/issues/bulk_assignment_labels_spec.rb b/spec/features/issues/user_bulk_edits_issues_labels_spec.rb
index 80bf964e2ee..97df2d0208b 100644
--- a/spec/features/issues/bulk_assignment_labels_spec.rb
+++ b/spec/features/issues/user_bulk_edits_issues_labels_spec.rb
@@ -20,17 +20,13 @@ RSpec.describe 'Issues > Labels bulk assignment' do
end
context 'sidebar' do
- before do
- enable_bulk_update
- end
-
it 'is present when bulk edit is enabled' do
- expect(page).to have_css('.issuable-sidebar')
+ enable_bulk_update
+ expect(page).to have_css 'aside[aria-label="Bulk update"]'
end
it 'is not present when bulk edit is disabled' do
- disable_bulk_update
- expect(page).not_to have_css('.issuable-sidebar')
+ expect(page).not_to have_css 'aside[aria-label="Bulk update"]'
end
end
@@ -42,7 +38,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
context 'a label' do
context 'to all issues' do
before do
- check 'check-all-issues'
+ check 'Select all'
open_labels_dropdown ['bug']
update_issues
end
@@ -57,8 +53,8 @@ RSpec.describe 'Issues > Labels bulk assignment' do
context 'to some issues' do
before do
- check "selected_issue_#{issue1.id}"
- check "selected_issue_#{issue2.id}"
+ check issue1.title
+ check issue2.title
open_labels_dropdown ['bug']
update_issues
end
@@ -73,7 +69,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
context 'to an issue' do
before do
- check "selected_issue_#{issue1.id}"
+ check issue1.title
open_labels_dropdown ['bug']
update_issues
end
@@ -89,7 +85,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
context 'to an issue by selecting the label first' do
before do
open_labels_dropdown ['bug']
- check "selected_issue_#{issue1.id}"
+ check issue1.title
update_issues
end
@@ -105,7 +101,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
context 'multiple labels' do
context 'to all issues' do
before do
- check 'check-all-issues'
+ check 'Select all'
open_labels_dropdown %w(bug feature)
update_issues
end
@@ -120,7 +116,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
context 'to a issue' do
before do
- check "selected_issue_#{issue1.id}"
+ check issue1.title
open_labels_dropdown %w(bug feature)
update_issues
end
@@ -141,7 +137,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
issue2.labels << feature
enable_bulk_update
- check 'check-all-issues'
+ check 'Select all'
open_labels_dropdown ['bug']
update_issues
@@ -162,7 +158,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
issue2.labels << feature
enable_bulk_update
- check 'check-all-issues'
+ check 'Select all'
unmark_labels_in_dropdown %w(bug feature)
update_issues
end
@@ -229,7 +225,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
expect(find("#issue_#{issue1.id}")).to have_content 'bug'
expect(find("#issue_#{issue2.id}")).to have_content 'feature'
- check 'check-all-issues'
+ check 'Select all'
open_milestone_dropdown(['First Release'])
update_issues
@@ -250,7 +246,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
it 'keeps existing label and new label is present' do
expect(find("#issue_#{issue1.id}")).to have_content 'bug'
- check 'check-all-issues'
+ check 'Select all'
open_milestone_dropdown ['First Release']
open_labels_dropdown ['feature']
update_issues
@@ -277,7 +273,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
expect(find("#issue_#{issue1.id}")).to have_content 'bug'
expect(find("#issue_#{issue2.id}")).to have_content 'feature'
- check 'check-all-issues'
+ check 'Select all'
open_milestone_dropdown ['First Release']
unmark_labels_in_dropdown ['feature']
@@ -309,7 +305,7 @@ RSpec.describe 'Issues > Labels bulk assignment' do
expect(find("#issue_#{issue2.id}")).to have_content 'feature'
expect(find("#issue_#{issue2.id}")).to have_content 'First Release'
- check 'check-all-issues'
+ check 'Select all'
open_milestone_dropdown(['No milestone'])
update_issues
@@ -369,31 +365,31 @@ RSpec.describe 'Issues > Labels bulk assignment' do
end
it 'applies label from filtered results' do
- check 'check-all-issues'
+ check 'Select all'
- page.within('.issues-bulk-update') do
+ within('aside[aria-label="Bulk update"]') do
click_button 'Select labels'
wait_for_requests
- expect(find('.dropdown-menu-labels li', text: 'bug')).to have_css('.is-active')
- expect(find('.dropdown-menu-labels li', text: 'feature')).to have_css('.is-indeterminate')
+ expect(page).to have_link 'bug', class: 'is-active'
+ expect(page).to have_link 'feature', class: 'is-indeterminate'
click_link 'bug'
- find('.dropdown-input-field', visible: true).set('wontfix')
+ fill_in 'Search', with: 'wontfix'
click_link 'wontfix'
end
update_issues
- page.within '.issues-holder' do
- expect(find("#issue_#{issue1.id}")).not_to have_content 'bug'
- expect(find("#issue_#{issue1.id}")).to have_content 'feature'
- expect(find("#issue_#{issue1.id}")).to have_content 'wontfix'
+ first_issue = find("#issue_#{issue1.id}")
+ expect(first_issue).not_to have_content 'bug'
+ expect(first_issue).to have_content 'feature'
+ expect(first_issue).to have_content 'wontfix'
- expect(find("#issue_#{issue2.id}")).not_to have_content 'bug'
- expect(find("#issue_#{issue2.id}")).not_to have_content 'feature'
- expect(find("#issue_#{issue2.id}")).to have_content 'wontfix'
- end
+ second_issue = find("#issue_#{issue2.id}")
+ expect(second_issue).not_to have_content 'bug'
+ expect(second_issue).not_to have_content 'feature'
+ expect(second_issue).to have_content 'wontfix'
end
end
end
@@ -408,24 +404,22 @@ RSpec.describe 'Issues > Labels bulk assignment' do
context 'cannot bulk assign labels' do
it do
expect(page).not_to have_button 'Edit issues'
- expect(page).not_to have_css '.check-all-issues'
- expect(page).not_to have_css '.issue-check'
+ expect(page).not_to have_unchecked_field 'Select all'
+ expect(page).not_to have_unchecked_field issue1.title
end
end
end
def open_milestone_dropdown(items = [])
- page.within('.issues-bulk-update') do
- click_button 'Select milestone'
- wait_for_requests
- items.map do |item|
- click_link item
- end
+ click_button 'Select milestone'
+ wait_for_requests
+ items.map do |item|
+ click_link item
end
end
def open_labels_dropdown(items = [], unmark = false)
- page.within('.issues-bulk-update') do
+ within('aside[aria-label="Bulk update"]') do
click_button 'Select labels'
wait_for_requests
items.map do |item|
@@ -446,12 +440,10 @@ RSpec.describe 'Issues > Labels bulk assignment' do
end
def check_issue(issue, uncheck = false)
- page.within('.issues-list') do
- if uncheck
- uncheck "selected_issue_#{issue.id}"
- else
- check "selected_issue_#{issue.id}"
- end
+ if uncheck
+ uncheck issue.title
+ else
+ check issue.title
end
end
@@ -460,12 +452,13 @@ RSpec.describe 'Issues > Labels bulk assignment' do
end
def update_issues
- find('.update-selected-issues').click
+ click_button 'Update all'
wait_for_requests
end
def enable_bulk_update
visit project_issues_path(project)
+ wait_for_requests
click_button 'Edit issues'
end
diff --git a/spec/features/issues/update_issues_spec.rb b/spec/features/issues/user_bulk_edits_issues_spec.rb
index eb78e4e2456..e34c16e27ba 100644
--- a/spec/features/issues/update_issues_spec.rb
+++ b/spec/features/issues/user_bulk_edits_issues_spec.rb
@@ -17,10 +17,10 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
visit project_issues_path(project)
click_button 'Edit issues'
- find('#check-all-issues').click
- find('.js-issue-status').click
+ check 'Select all'
+ click_button 'Select status'
+ click_link 'Closed'
- find('.dropdown-menu-status a', text: 'Closed').click
click_update_issues_button
expect(page).to have_selector('.issue', count: 0)
end
@@ -30,10 +30,10 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
visit project_issues_path(project, state: 'closed')
click_button 'Edit issues'
- find('#check-all-issues').click
- find('.js-issue-status').click
+ check 'Select all'
+ click_button 'Select status'
+ click_link 'Open'
- find('.dropdown-menu-status a', text: 'Open').click
click_update_issues_button
expect(page).to have_selector('.issue', count: 0)
end
@@ -44,10 +44,10 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
visit project_issues_path(project)
click_button 'Edit issues'
- find('#check-all-issues').click
+ check 'Select all'
click_update_assignee_button
+ click_link user.username
- find('.dropdown-menu-user-link', text: user.username).click
click_update_issues_button
page.within('.issue .controls') do
@@ -59,13 +59,15 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
create_assigned
visit project_issues_path(project)
+ expect(find('.issue:first-of-type')).to have_link "Assigned to #{user.name}"
+
click_button 'Edit issues'
- find('#check-all-issues').click
+ check 'Select all'
click_update_assignee_button
-
click_link 'Unassigned'
click_update_issues_button
- expect(find('.issue:first-child .controls')).not_to have_css('.author-link')
+
+ expect(find('.issue:first-of-type')).not_to have_link "Assigned to #{user.name}"
end
end
@@ -76,10 +78,9 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
visit project_issues_path(project)
click_button 'Edit issues'
- find('#check-all-issues').click
- find('.issues-bulk-update .js-milestone-select').click
-
- find('.dropdown-menu-milestone a', text: milestone.title).click
+ check 'Select all'
+ click_button 'Select milestone'
+ click_link milestone.title
click_update_issues_button
expect(page.find('.issue')).to have_content milestone.title
@@ -91,16 +92,15 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
wait_for_requests
- expect(first('.issue')).to have_content milestone.title
+ expect(find('.issue:first-of-type')).to have_text milestone.title
click_button 'Edit issues'
- find('#check-all-issues').click
- find('.issues-bulk-update .js-milestone-select').click
-
- find('.dropdown-menu-milestone a', text: "No milestone").click
+ check 'Select all'
+ click_button 'Select milestone'
+ click_link 'No milestone'
click_update_issues_button
- expect(find('.issue:first-child')).not_to have_content milestone.title
+ expect(find('.issue:first-of-type')).not_to have_text milestone.title
end
end
@@ -117,12 +117,12 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
end
def click_update_assignee_button
- find('.js-update-assignee').click
+ click_button 'Select assignee'
wait_for_requests
end
def click_update_issues_button
- find('.update-selected-issues').click
+ click_button 'Update all'
wait_for_requests
end
end
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index 1bbb96ff479..cb4a5a32762 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -406,6 +406,12 @@ RSpec.describe "Issues > User edits issue", :js do
end
context 'update due date' do
+ before do
+ # Due date widget uses GraphQL and needs to wait for requests to come back
+ # The date picker won't be rendered before requests complete
+ wait_for_requests
+ end
+
it 'adds due date to issue' do
date = Date.today.at_beginning_of_month + 2.days
@@ -417,7 +423,7 @@ RSpec.describe "Issues > User edits issue", :js do
wait_for_requests
- expect(find('[data-testid="sidebar-duedate-value"]').text).to have_content date.strftime('%b %-d, %Y')
+ expect(find('[data-testid="sidebar-date-value"]').text).to have_content date.strftime('%b %-d, %Y')
end
end
diff --git a/spec/features/issues/user_interacts_with_awards_spec.rb b/spec/features/issues/user_interacts_with_awards_spec.rb
index e862f7030c0..bbb7e8a028d 100644
--- a/spec/features/issues/user_interacts_with_awards_spec.rb
+++ b/spec/features/issues/user_interacts_with_awards_spec.rb
@@ -5,10 +5,6 @@ require 'spec_helper'
RSpec.describe 'User interacts with awards' do
let(:user) { create(:user) }
- before do
- stub_feature_flags(improved_emoji_picker: false)
- end
-
describe 'User interacts with awards in an issue', :js do
let(:issue) { create(:issue, project: project)}
let(:project) { create(:project) }
@@ -55,29 +51,24 @@ RSpec.describe 'User interacts with awards' do
it 'toggles a custom award emoji' do
page.within('.awards') do
- page.find('.js-add-award').click
+ page.find('.add-reaction-button').click
end
- page.find('.emoji-menu.is-visible')
-
- expect(page).to have_selector('.js-emoji-menu-search')
- expect(page.evaluate_script("document.activeElement.classList.contains('js-emoji-menu-search')")).to eq(true)
-
- page.within('.emoji-menu-content') do
- emoji_button = page.first('.js-emoji-btn')
+ page.within('.emoji-picker') do
+ emoji_button = page.first('gl-emoji[data-name="8ball"]')
emoji_button.hover
emoji_button.click
end
page.within('.awards') do
- expect(page).to have_selector('.js-emoji-btn')
- expect(page.find('.js-emoji-btn.active .js-counter')).to have_content('1')
- expect(page).to have_css(".js-emoji-btn.active[title='You']")
+ expect(page).to have_selector('[data-testid="award-button"]')
+ expect(page.find('[data-testid="award-button"].is-active .js-counter')).to have_content('1')
+ expect(page).to have_css('[data-testid="award-button"].is-active[title="You"]')
expect do
- page.find('.js-emoji-btn.active').click
+ page.find('[data-testid="award-button"].is-active').click
wait_for_requests
- end.to change { page.all('.award-control.js-emoji-btn').size }.from(3).to(2)
+ end.to change { page.all('[data-testid="award-button"]').size }.from(3).to(2)
end
end
@@ -212,31 +203,25 @@ RSpec.describe 'User interacts with awards' do
end
it 'adds award to issue' do
- first('.js-emoji-btn').click
+ first('[data-testid="award-button"]').click
- expect(page).to have_selector('.js-emoji-btn.active')
- expect(first('.js-emoji-btn')).to have_content '1'
+ expect(page).to have_selector('[data-testid="award-button"].is-active')
+ expect(first('[data-testid="award-button"]')).to have_content '1'
visit project_issue_path(project, issue)
- expect(first('.js-emoji-btn')).to have_content '1'
+ expect(first('[data-testid="award-button"]')).to have_content '1'
end
it 'removes award from issue' do
- first('.js-emoji-btn').click
- find('.js-emoji-btn.active').click
+ first('[data-testid="award-button"]').click
+ find('[data-testid="award-button"].is-active').click
- expect(first('.js-emoji-btn')).to have_content '0'
+ expect(first('[data-testid="award-button"]')).to have_content '0'
visit project_issue_path(project, issue)
- expect(first('.js-emoji-btn')).to have_content '0'
- end
-
- it 'only has one menu on the page' do
- first('.js-add-award').click
-
- expect(page).to have_selector('.emoji-menu', count: 1)
+ expect(first('[data-testid="award-button"]')).to have_content '0'
end
end
@@ -311,7 +296,7 @@ RSpec.describe 'User interacts with awards' do
end
context 'execute /award quick action' do
- it 'toggles the emoji award on noteable', :js do
+ xit 'toggles the emoji award on noteable', :js do
execute_quick_action('/award :100:')
expect(find(noteable_award_counter)).to have_text("1")
@@ -330,7 +315,7 @@ RSpec.describe 'User interacts with awards' do
end
it 'has disabled emoji button' do
- expect(first('.award-control')[:class]).to have_text('disabled')
+ expect(first('[data-testid="award-button"]')[:class]).to have_text('disabled')
end
end
@@ -356,7 +341,7 @@ RSpec.describe 'User interacts with awards' do
end
def noteable_award_counter
- ".awards .active"
+ ".awards .is-active"
end
def toggle_smiley_emoji(status)
diff --git a/spec/features/issues/user_toggles_subscription_spec.rb b/spec/features/issues/user_toggles_subscription_spec.rb
index d91c187c840..35f4b415463 100644
--- a/spec/features/issues/user_toggles_subscription_spec.rb
+++ b/spec/features/issues/user_toggles_subscription_spec.rb
@@ -32,8 +32,8 @@ RSpec.describe "User toggles subscription", :js do
let(:project) { create(:project_empty_repo, :public, emails_disabled: true) }
it 'is disabled' do
- expect(page).to have_content('Notifications have been disabled by the project or group owner')
- expect(page).not_to have_selector('[data-testid="subscription-toggle"]')
+ expect(page).to have_content('Disabled by project owner')
+ expect(page).to have_button('Notifications', class: 'is-disabled')
end
end
end
diff --git a/spec/features/markdown/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb
index c9dc764f93b..c700f878df6 100644
--- a/spec/features/markdown/copy_as_gfm_spec.rb
+++ b/spec/features/markdown/copy_as_gfm_spec.rb
@@ -426,8 +426,8 @@ RSpec.describe 'Copy as GFM', :js do
html = <<~HTML
<div class="md-suggestion">
- <div class="md-suggestion-header border-bottom-0 mt-2 qa-suggestion-diff-header js-suggestion-diff-header">
- <div class="qa-suggestion-diff-header js-suggestion-diff-header font-weight-bold">
+ <div class="md-suggestion-header border-bottom-0 mt-2 js-suggestion-diff-header">
+ <div class="js-suggestion-diff-header font-weight-bold">
Suggested change
<a href="/gitlab/help/user/discussions/index.md#suggest-changes" aria-label="Help" class="js-help-btn">
<svg aria-hidden="true" class="s16 ic-question-o link-highlight">
diff --git a/spec/features/markdown/math_spec.rb b/spec/features/markdown/math_spec.rb
index 441cff7045f..fa23fac2f96 100644
--- a/spec/features/markdown/math_spec.rb
+++ b/spec/features/markdown/math_spec.rb
@@ -13,14 +13,24 @@ RSpec.describe 'Math rendering', :js do
```math
a^2+b^2=c^2
```
+
+ This math is aligned
+
+ ```math
+ \\begin{align*}
+ a&=b+c \\\\
+ d+e&=f
+ \\end{align*}
+ ```
MATH
issue = create(:issue, project: project, description: description)
visit project_issue_path(project, issue)
- expect(page).to have_selector('.katex .mord.mathdefault', text: 'b')
- expect(page).to have_selector('.katex-display .mord.mathdefault', text: 'b')
+ expect(page).to have_selector('.katex .mord.mathnormal', text: 'b')
+ expect(page).to have_selector('.katex-display .mord.mathnormal', text: 'b')
+ expect(page).to have_selector('.katex-display .mtable .col-align-l .mord.mathnormal', text: 'f')
end
it 'only renders non XSS links' do
@@ -35,7 +45,9 @@ RSpec.describe 'Math rendering', :js do
visit project_issue_path(project, issue)
page.within '.description > .md' do
- expect(page).to have_selector('.katex-error')
+ # unfortunately there is no class selector for KaTeX's "unsupported command"
+ # formatting so we must match the style attribute
+ expect(page).to have_selector('.katex-html .mord[style*="color:"][style*="#cc0000"]', text: '\href')
expect(page).to have_selector('.katex-html a', text: 'Gitlab')
end
end
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index 19680a827bf..5b11d9cb919 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
expect(page).to have_css('.review-bar-component')
- expect(find('.review-bar-content .btn-success')).to have_content('1')
+ expect(find('.review-bar-content .btn-confirm')).to have_content('1')
end
it 'publishes review' do
@@ -157,7 +157,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
expect(find('.new .draft-note-component')).to have_content('Line is wrong')
expect(find('.old .draft-note-component')).to have_content('Another wrong line')
- expect(find('.review-bar-content .btn-success')).to have_content('2')
+ expect(find('.review-bar-content .btn-confirm')).to have_content('2')
end
end
diff --git a/spec/features/merge_request/user_creates_merge_request_spec.rb b/spec/features/merge_request/user_creates_merge_request_spec.rb
index 119cf31098c..617aceae54c 100644
--- a/spec/features/merge_request/user_creates_merge_request_spec.rb
+++ b/spec/features/merge_request/user_creates_merge_request_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe "User creates a merge request", :js do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:title) { "Some feature" }
before do
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index 63b463a2c5f..9e1b0135932 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
context 'when enabled after it was previously canceled' do
before do
click_button "Merge when pipeline succeeds"
- click_link "Cancel automatic merge"
+ click_link "Cancel"
wait_for_requests
@@ -87,7 +87,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
before do
merge_request.merge_params['force_remove_source_branch'] = '0'
merge_request.save!
- click_link "Cancel automatic merge"
+ click_link "Cancel"
end
it_behaves_like 'Merge when pipeline succeeds activator'
@@ -114,7 +114,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
end
it 'allows to cancel the automatic merge' do
- click_link "Cancel automatic merge"
+ click_link "Cancel"
expect(page).to have_button "Merge when pipeline succeeds"
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index a6dfae72912..83d9388914b 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Merge request > User posts notes', :js do
include NoteInteractionHelpers
let_it_be(:project) { create(:project, :repository) }
+
let(:user) { project.creator }
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project)
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index 1b1152897fc..d9e3bfd6a9c 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe 'Merge request > User resolves conflicts', :js do
end
it "renders bad name without xss issues" do
- expect(find('.resolve-conflicts-form .resolve-info')).to have_content(bad_branch_name)
+ expect(find('[data-testid="resolve-info"]')).to have_content(bad_branch_name)
end
end
end
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index 9a3f97a0943..73e628bda98 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -131,7 +131,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
expect(page).not_to have_selector('.diffs .diff-file .notes_holder')
end
- it 'shows resolved thread when toggled' do
+ it 'shows resolved thread when toggled', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/327439' do
find('.diff-comment-avatar-holders').click
expect(find('.diffs .diff-file .notes_holder')).to be_visible
diff --git a/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb b/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
index 78c1b2a718e..35be21a646e 100644
--- a/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
+++ b/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Merge request > User cherry-picks', :js do
context 'Viewing a merged merge request' do
before do
- service = MergeRequests::MergeService.new(project, user, sha: merge_request.diff_head_sha)
+ service = MergeRequests::MergeService.new(project: project, current_user: user, params: { sha: merge_request.diff_head_sha })
perform_enqueued_jobs do
service.execute(merge_request)
diff --git a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
index ac38b2b854c..e250837f398 100644
--- a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Merge request > User sees merge button depending on unresolved t
context 'with unresolved threads' do
it 'does not allow to merge' do
expect(page).not_to have_button 'Merge'
- expect(page).to have_content('Before this can be merged,')
+ expect(page).to have_content('all threads must be resolved')
end
end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 733b5a97fea..85eb956033b 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -147,7 +147,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when detached merge request pipeline is pending' do
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
- expect(page).to have_link('Cancel automatic merge')
+ expect(page).to have_link('Cancel')
end
end
@@ -174,7 +174,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
- expect(page).to have_link('Cancel automatic merge')
+ expect(page).to have_link('Cancel')
end
end
end
@@ -369,7 +369,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when detached merge request pipeline is pending' do
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
- expect(page).to have_link('Cancel automatic merge')
+ expect(page).to have_link('Cancel')
end
end
@@ -395,7 +395,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'waits the head pipeline' do
expect(page).to have_content('to be merged automatically when the pipeline succeeds')
- expect(page).to have_link('Cancel automatic merge')
+ expect(page).to have_link('Cancel')
end
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 0cb4107c21d..d9b5ec17a4a 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -445,7 +445,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
wait_for_requests
expect(page).not_to have_button('Merge')
- expect(page).to have_content('This merge request is in the process of being merged')
+ expect(page).to have_content('Merging!')
end
end
diff --git a/spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb b/spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb
index cbd68025b50..a764dd97878 100644
--- a/spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Merge request > User sees MR from deleted forked project', :js d
end
before do
- MergeRequests::MergeService.new(project, user).execute(merge_request)
+ MergeRequests::MergeService.new(project: project, current_user: user).execute(merge_request)
forked_project.destroy!
sign_in(user)
visit project_merge_request_path(project, merge_request)
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index a5047c8d550..2d8fe10b987 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -239,7 +239,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
threads << Thread.new do
Sidekiq::Worker.skipping_transaction_check do
- @merge_request = MergeRequests::CreateService.new(project, user, merge_request_params).execute
+ @merge_request = MergeRequests::CreateService.new(project: project, current_user: user, params: merge_request_params).execute
end
end
diff --git a/spec/features/merge_request/user_sees_wip_help_message_spec.rb b/spec/features/merge_request/user_sees_wip_help_message_spec.rb
index 204df5b3995..0a6a3d82ee0 100644
--- a/spec/features/merge_request/user_sees_wip_help_message_spec.rb
+++ b/spec/features/merge_request/user_sees_wip_help_message_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe 'Merge request > User sees draft help message' do
'It looks like you have some draft commits in this branch'
)
expect(page).to have_text(
- "Start the title with Draft: or WIP: to prevent a merge request that is a \
+ "Start the title with Draft: to prevent a merge request that is a \
work in progress from being merged before it's ready."
)
end
diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb
index f1b44010f63..073706cf9d8 100644
--- a/spec/features/merge_request/user_views_open_merge_request_spec.rb
+++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe 'User views an open merge request' do
it 'does not show diverged commits count' do
page.within('.mr-source-target') do
- expect(page).not_to have_content(/([0-9]+ commit[s]? behind)/)
+ expect(page).not_to have_content(/([0-9]+ commits? behind)/)
end
end
end
diff --git a/spec/features/merge_requests/user_mass_updates_spec.rb b/spec/features/merge_requests/user_mass_updates_spec.rb
index 179bf84a729..0fe69c5ca5b 100644
--- a/spec/features/merge_requests/user_mass_updates_spec.rb
+++ b/spec/features/merge_requests/user_mass_updates_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe 'Merge requests > User mass updates', :js do
click_button 'Edit merge requests'
- expect(page).not_to have_css('.js-issue-status')
+ expect(page).not_to have_button 'Select status'
end
end
@@ -57,9 +57,7 @@ RSpec.describe 'Merge requests > User mass updates', :js do
it 'updates merge request with assignee' do
change_assignee(user.name)
- page.within('.merge-request .controls') do
- expect(find('.author-link')["title"]).to have_content(user.name)
- end
+ expect(find('.merge-request')).to have_link "Assigned to #{user.name}"
end
end
@@ -72,7 +70,7 @@ RSpec.describe 'Merge requests > User mass updates', :js do
it 'removes assignee from the merge request' do
change_assignee('Unassigned')
- expect(find('.merge-request .controls')).not_to have_css('.author-link')
+ expect(find('.merge-request')).not_to have_link "Assigned to #{user.name}"
end
end
end
@@ -109,35 +107,33 @@ RSpec.describe 'Merge requests > User mass updates', :js do
def change_status(text)
click_button 'Edit merge requests'
- find('#check-all-issues').click
- find('.js-issue-status').click
- find('.dropdown-menu-status a', text: text).click
+ check 'Select all'
+ click_button 'Select status'
+ click_link text
click_update_merge_requests_button
end
def change_assignee(text)
click_button 'Edit merge requests'
- find('#check-all-issues').click
- find('.js-update-assignee').click
- wait_for_requests
-
- page.within '.dropdown-menu-user' do
+ check 'Select all'
+ within 'aside[aria-label="Bulk update"]' do
+ click_button 'Select assignee'
+ wait_for_requests
click_link text
end
-
click_update_merge_requests_button
end
def change_milestone(text)
click_button 'Edit merge requests'
- find('#check-all-issues').click
- find('.issues-bulk-update .js-milestone-select').click
- find('.dropdown-menu-milestone a', text: text).click
+ check 'Select all'
+ click_button 'Select milestone'
+ click_link text
click_update_merge_requests_button
end
def click_update_merge_requests_button
- find('.update-selected-issues').click
+ click_button 'Update all'
wait_for_requests
end
end
diff --git a/spec/features/monitor_sidebar_link_spec.rb b/spec/features/monitor_sidebar_link_spec.rb
new file mode 100644
index 00000000000..bb5e581a034
--- /dev/null
+++ b/spec/features/monitor_sidebar_link_spec.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Monitor dropdown sidebar', :aggregate_failures do
+ let_it_be_with_reload(:project) { create(:project, :internal, :repository) }
+
+ let(:user) { create(:user) }
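+ # Defaults; the contexts below override `access_level` and `role` to exercise each permission level.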
+ let(:access_level) { ProjectFeature::PUBLIC }
+ let(:role) { nil }
+
+ before do
+ project.add_role(user, role) if role
+ project.project_feature.update_attribute(:operations_access_level, access_level)
+
+ sign_in(user)
+ visit project_issues_path(project)
+ end
+
+ shared_examples 'shows Monitor menu based on the access level' do
+ context 'when operations project feature is PRIVATE' do
+ let(:access_level) { ProjectFeature::PRIVATE }
+
+ it 'shows the `Monitor` menu' do
+ expect(page).to have_selector('a.shortcuts-monitor', text: 'Monitor')
+ end
+ end
+
+ context 'when operations project feature is DISABLED' do
+ let(:access_level) { ProjectFeature::DISABLED }
+
+ it 'does not show the `Monitor` menu' do
+ expect(page).not_to have_selector('a.shortcuts-monitor')
+ end
+ end
+ end
+
+ context 'user is not a member' do
+ it 'has the correct `Monitor` menu items', :aggregate_failures do
+ expect(page).to have_selector('a.shortcuts-monitor', text: 'Monitor')
+ expect(page).to have_link('Incidents', href: project_incidents_path(project))
+ expect(page).to have_link('Environments', href: project_environments_path(project))
+
+ expect(page).not_to have_link('Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
+ expect(page).not_to have_link('Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).not_to have_link('Product Analytics', href: project_product_analytics_path(project))
+ expect(page).not_to have_link('Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link('Logs', href: project_logs_path(project))
+ expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
+ end
+
+ context 'when operations project feature is PRIVATE' do
+ let(:access_level) { ProjectFeature::PRIVATE }
+
+ it 'does not show the `Monitor` menu' do
+ expect(page).not_to have_selector('a.shortcuts-monitor')
+ end
+ end
+
+ context 'when operations project feature is DISABLED' do
+ let(:access_level) { ProjectFeature::DISABLED }
+
+ it 'does not show the `Monitor` menu' do
+ expect(page).not_to have_selector('a.shortcuts-monitor')
+ end
+ end
+ end
+
+ context 'user has guest role' do
+ let(:role) { :guest }
+
+ it 'has the correct `Monitor` menu items' do
+ expect(page).to have_selector('a.shortcuts-monitor', text: 'Monitor')
+ expect(page).to have_link('Incidents', href: project_incidents_path(project))
+ expect(page).to have_link('Environments', href: project_environments_path(project))
+
+ expect(page).not_to have_link('Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
+ expect(page).not_to have_link('Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).not_to have_link('Product Analytics', href: project_product_analytics_path(project))
+ expect(page).not_to have_link('Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link('Logs', href: project_logs_path(project))
+ expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
+ end
+
+ it_behaves_like 'shows Monitor menu based on the access level'
+ end
+
+ context 'user has reporter role' do
+ let(:role) { :reporter }
+
+ it 'has the correct `Monitor` menu items' do
+ expect(page).to have_link('Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link('Incidents', href: project_incidents_path(project))
+ expect(page).to have_link('Environments', href: project_environments_path(project))
+ expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
+
+ expect(page).not_to have_link('Alerts', href: project_alert_management_index_path(project))
+ expect(page).not_to have_link('Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link('Logs', href: project_logs_path(project))
+ expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
+ end
+
+ it_behaves_like 'shows Monitor menu based on the access level'
+ end
+
+ context 'user has developer role' do
+ let(:role) { :developer }
+
+ it 'has the correct `Monitor` menu items' do
+ expect(page).to have_link('Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link('Alerts', href: project_alert_management_index_path(project))
+ expect(page).to have_link('Incidents', href: project_incidents_path(project))
+ expect(page).to have_link('Environments', href: project_environments_path(project))
+ expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
+ expect(page).to have_link('Logs', href: project_logs_path(project))
+
+ expect(page).not_to have_link('Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link('Kubernetes', href: project_clusters_path(project))
+ end
+
+ it_behaves_like 'shows Monitor menu based on the access level'
+ end
+
+ context 'user has maintainer role' do
+ let(:role) { :maintainer }
+
+ it 'has the correct `Monitor` menu items' do
+ expect(page).to have_link('Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link('Alerts', href: project_alert_management_index_path(project))
+ expect(page).to have_link('Incidents', href: project_incidents_path(project))
+ expect(page).to have_link('Environments', href: project_environments_path(project))
+ expect(page).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link('Product Analytics', href: project_product_analytics_path(project))
+ expect(page).to have_link('Serverless', href: project_serverless_functions_path(project))
+ expect(page).to have_link('Logs', href: project_logs_path(project))
+ expect(page).to have_link('Kubernetes', href: project_clusters_path(project))
+ end
+
+ it_behaves_like 'shows Monitor menu based on the access level'
+ end
+end
diff --git a/spec/features/operations_sidebar_link_spec.rb b/spec/features/operations_sidebar_link_spec.rb
deleted file mode 100644
index 798f9092db0..00000000000
--- a/spec/features/operations_sidebar_link_spec.rb
+++ /dev/null
@@ -1,144 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Operations dropdown sidebar', :aggregate_failures do
- let_it_be_with_reload(:project) { create(:project, :internal, :repository) }
- let(:user) { create(:user) }
- let(:access_level) { ProjectFeature::PUBLIC }
- let(:role) { nil }
-
- before do
- project.add_role(user, role) if role
- project.project_feature.update_attribute(:operations_access_level, access_level)
-
- sign_in(user)
- visit project_issues_path(project)
- end
-
- shared_examples 'shows Operations menu based on the access level' do
- context 'when operations project feature is PRIVATE' do
- let(:access_level) { ProjectFeature::PRIVATE }
-
- it 'shows the `Operations` menu' do
- expect(page).to have_selector('a.shortcuts-operations', text: 'Operations')
- end
- end
-
- context 'when operations project feature is DISABLED' do
- let(:access_level) { ProjectFeature::DISABLED }
-
- it 'does not show the `Operations` menu' do
- expect(page).not_to have_selector('a.shortcuts-operations')
- end
- end
- end
-
- context 'user is not a member' do
- it 'has the correct `Operations` menu items', :aggregate_failures do
- expect(page).to have_selector('a.shortcuts-operations', text: 'Operations')
- expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
- expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
-
- expect(page).not_to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
- expect(page).not_to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
- expect(page).not_to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
- expect(page).not_to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
- expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
- expect(page).not_to have_link(title: 'Logs', href: project_logs_path(project))
- expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
- end
-
- context 'when operations project feature is PRIVATE' do
- let(:access_level) { ProjectFeature::PRIVATE }
-
- it 'does not show the `Operations` menu' do
- expect(page).not_to have_selector('a.shortcuts-operations')
- end
- end
-
- context 'when operations project feature is DISABLED' do
- let(:access_level) { ProjectFeature::DISABLED }
-
- it 'does not show the `Operations` menu' do
- expect(page).not_to have_selector('a.shortcuts-operations')
- end
- end
- end
-
- context 'user has guest role' do
- let(:role) { :guest }
-
- it 'has the correct `Operations` menu items' do
- expect(page).to have_selector('a.shortcuts-operations', text: 'Operations')
- expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
- expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
-
- expect(page).not_to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
- expect(page).not_to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
- expect(page).not_to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
- expect(page).not_to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
- expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
- expect(page).not_to have_link(title: 'Logs', href: project_logs_path(project))
- expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
- end
-
- it_behaves_like 'shows Operations menu based on the access level'
- end
-
- context 'user has reporter role' do
- let(:role) { :reporter }
-
- it 'has the correct `Operations` menu items' do
- expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
- expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
- expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
- expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
- expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
-
- expect(page).not_to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
- expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
- expect(page).not_to have_link(title: 'Logs', href: project_logs_path(project))
- expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
- end
-
- it_behaves_like 'shows Operations menu based on the access level'
- end
-
- context 'user has developer role' do
- let(:role) { :developer }
-
- it 'has the correct `Operations` menu items' do
- expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
- expect(page).to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
- expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
- expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
- expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
- expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
- expect(page).to have_link(title: 'Logs', href: project_logs_path(project))
-
- expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
- expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
- end
-
- it_behaves_like 'shows Operations menu based on the access level'
- end
-
- context 'user has maintainer role' do
- let(:role) { :maintainer }
-
- it 'has the correct `Operations` menu items' do
- expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
- expect(page).to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
- expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
- expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
- expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
- expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
- expect(page).to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
- expect(page).to have_link(title: 'Logs', href: project_logs_path(project))
- expect(page).to have_link(title: 'Kubernetes', href: project_clusters_path(project))
- end
-
- it_behaves_like 'shows Operations menu based on the access level'
- end
-end
diff --git a/spec/features/populate_new_pipeline_vars_with_params_spec.rb b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
index 37fea5331a3..937f99558ad 100644
--- a/spec/features/populate_new_pipeline_vars_with_params_spec.rb
+++ b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe "Populate new pipeline CI variables with url params", :js do
let(:page_path) { new_project_pipeline_path(project) }
before do
- stub_feature_flags(new_pipeline_form: false)
sign_in(user)
project.add_maintainer(user)
@@ -16,18 +15,18 @@ RSpec.describe "Populate new pipeline CI variables with url params", :js do
end
it "var[key1]=value1 populates env_var variable correctly" do
- page.within('.ci-variable-list .js-row:nth-child(1)') do
- expect(find('.js-ci-variable-input-variable-type').value).to eq('env_var')
- expect(find('.js-ci-variable-input-key').value).to eq('key1')
- expect(find('.js-ci-variable-input-value').text).to eq('value1')
+ page.within(all("[data-testid='ci-variable-row']")[0]) do
+ expect(find("[data-testid='pipeline-form-ci-variable-type']").value).to eq('env_var')
+ expect(find("[data-testid='pipeline-form-ci-variable-key']").value).to eq('key1')
+ expect(find("[data-testid='pipeline-form-ci-variable-value']").value).to eq('value1')
end
end
it "file_var[key2]=value2 populates file variable correctly" do
- page.within('.ci-variable-list .js-row:nth-child(2)') do
- expect(find('.js-ci-variable-input-variable-type').value).to eq('file')
- expect(find('.js-ci-variable-input-key').value).to eq('key2')
- expect(find('.js-ci-variable-input-value').text).to eq('value2')
+ page.within(all("[data-testid='ci-variable-row']")[1]) do
+ expect(find("[data-testid='pipeline-form-ci-variable-type']").value).to eq('file')
+ expect(find("[data-testid='pipeline-form-ci-variable-key']").value).to eq('key2')
+ expect(find("[data-testid='pipeline-form-ci-variable-value']").value).to eq('value2')
end
end
end
diff --git a/spec/features/profiles/chat_names_spec.rb b/spec/features/profiles/chat_names_spec.rb
index ca888018cad..6270fa7347d 100644
--- a/spec/features/profiles/chat_names_spec.rb
+++ b/spec/features/profiles/chat_names_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Profile > Chat' do
let(:user) { create(:user) }
- let(:service) { create(:service) }
+ let(:integration) { create(:service) }
before do
sign_in(user)
@@ -15,7 +15,7 @@ RSpec.describe 'Profile > Chat' do
{ team_id: 'T00', team_domain: 'my_chat_team', user_id: 'U01', user_name: 'my_chat_user' }
end
- let!(:authorize_url) { ChatNames::AuthorizeUserService.new(service, params).execute }
+ let!(:authorize_url) { ChatNames::AuthorizeUserService.new(integration, params).execute }
let(:authorize_path) { URI.parse(authorize_url).request_uri }
before do
@@ -60,7 +60,7 @@ RSpec.describe 'Profile > Chat' do
end
describe 'visits chat accounts' do
- let!(:chat_name) { create(:chat_name, user: user, service: service) }
+ let!(:chat_name) { create(:chat_name, user: user, integration: integration) }
before do
visit profile_chat_names_path
diff --git a/spec/features/profiles/user_edit_preferences_spec.rb b/spec/features/profiles/user_edit_preferences_spec.rb
index 3129e4bd952..c724de04043 100644
--- a/spec/features/profiles/user_edit_preferences_spec.rb
+++ b/spec/features/profiles/user_edit_preferences_spec.rb
@@ -1,10 +1,16 @@
# frozen_string_literal: true
+
require 'spec_helper'
RSpec.describe 'User edit preferences profile', :js do
+ include StubLanguagesTranslationPercentage
+
+ # Empty value doesn't change the levels
+ let(:language_percentage_levels) { nil }
let(:user) { create(:user) }
before do
+ stub_languages_translation_percentage(language_percentage_levels)
stub_feature_flags(user_time_settings: true)
sign_in(user)
visit(profile_preferences_path)
@@ -63,17 +69,4 @@ RSpec.describe 'User edit preferences profile', :js do
expect(page).to have_content('Failed to save preferences.')
end
end
-
- describe 'User language' do
- let(:user) { create(:user, preferred_language: :es) }
-
- it 'shows the user preferred language by default' do
- expect(page).to have_select(
- 'user[preferred_language]',
- selected: 'Spanish - español',
- options: Gitlab::I18n.selectable_locales.values,
- visible: :all
- )
- end
- end
end
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index 57f7c7878e3..dddca15ae24 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -232,17 +232,6 @@ RSpec.describe 'User edit profile' do
expect(page.find('.issuable-assignees')).to have_content("#{user.name} (Busy)")
end
end
-
- context 'with set_user_availability_status feature flag disabled' do
- before do
- stub_feature_flags(set_user_availability_status: false)
- visit root_path(user)
- end
-
- it 'does not display the availability checkbox' do
- expect(page).not_to have_css('[data-testid="user-availability-checkbox"]')
- end
- end
end
context 'user menu' do
@@ -487,19 +476,6 @@ RSpec.describe 'User edit profile' do
expect(first_note).not_to have_css('.user-status-emoji')
end
end
-
- context 'with set_user_availability_status feature flag disabled' do
- before do
- stub_feature_flags(set_user_availability_status: false)
- visit root_path(user)
- end
-
- it 'does not display the availability checkbox' do
- open_user_status_modal
-
- expect(page).not_to have_css('[data-testid="user-availability-checkbox"]')
- end
- end
end
context 'User time preferences', :js do
diff --git a/spec/features/profiles/user_visits_notifications_tab_spec.rb b/spec/features/profiles/user_visits_notifications_tab_spec.rb
index 939e791c75d..e960cc76219 100644
--- a/spec/features/profiles/user_visits_notifications_tab_spec.rb
+++ b/spec/features/profiles/user_visits_notifications_tab_spec.rb
@@ -12,6 +12,14 @@ RSpec.describe 'User visits the notifications tab', :js do
visit(profile_notifications_path)
end
+ it 'turns on the receive product marketing emails setting' do
+ expect(page).to have_content('Notifications')
+
+ expect do
+ check 'Receive product marketing emails'
+ end.to change { user.reload.email_opted_in }.to(true)
+ end
+
it 'changes the project notifications setting' do
expect(page).to have_content('Notifications')
diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb
index 327d8133411..62565eaabe1 100644
--- a/spec/features/project_variables_spec.rb
+++ b/spec/features/project_variables_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'Project variables', :js do
it_behaves_like 'variable list'
it 'adds a new variable with an environment scope' do
- click_button('Add Variable')
+ click_button('Add variable')
page.within('#add-ci-variable') do
find('[data-qa-selector="ci_variable_key_field"] input').set('akey')
diff --git a/spec/features/projects/active_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb
index 9de43e7d18c..96a321037a9 100644
--- a/spec/features/projects/active_tabs_spec.rb
+++ b/spec/features/projects/active_tabs_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe 'Project active tab' do
- let(:user) { create :user }
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.owner }
before do
- project.add_maintainer(user)
sign_in(user)
end
@@ -18,15 +18,28 @@ RSpec.describe 'Project active tab' do
end
context 'on project Home' do
- before do
- visit project_path(project)
+ context 'when feature flag :sidebar_refactor is enabled' do
+ before do
+ visit project_path(project)
+ end
+
+ it_behaves_like 'page has active tab', 'Project'
end
- it_behaves_like 'page has active tab', 'Project'
- it_behaves_like 'page has active sub tab', 'Details'
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+
+ visit project_path(project)
+ end
+
+ it_behaves_like 'page has active tab', 'Project'
+ it_behaves_like 'page has active sub tab', 'Details'
+ end
context 'on project Home/Activity' do
before do
+ visit project_path(project)
click_tab('Activity')
end
@@ -56,20 +69,37 @@ RSpec.describe 'Project active tab' do
end
context 'on project Issues' do
+ let(:feature_flag_value) { true }
+
before do
+ stub_feature_flags(sidebar_refactor: feature_flag_value)
+
visit project_issues_path(project)
end
it_behaves_like 'page has active tab', 'Issues'
- %w(Milestones Labels).each do |sub_menu|
- context "on project Issues/#{sub_menu}" do
- before do
- click_tab(sub_menu)
- end
+ context "on project Issues/Milestones" do
+ before do
+ click_tab('Milestones')
+ end
- it_behaves_like 'page has active tab', 'Issues'
- it_behaves_like 'page has active sub tab', sub_menu
+ it_behaves_like 'page has active tab', 'Issues'
+ it_behaves_like 'page has active sub tab', 'Milestones'
+ end
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_value) { false }
+
+ %w(Milestones Labels).each do |sub_menu|
+ context "on project Issues/#{sub_menu}" do
+ before do
+ click_tab(sub_menu)
+ end
+
+ it_behaves_like 'page has active tab', 'Issues'
+ it_behaves_like 'page has active sub tab', sub_menu
+ end
end
end
end
diff --git a/spec/features/projects/badges/pipeline_badge_spec.rb b/spec/features/projects/badges/pipeline_badge_spec.rb
index c24ab5c4058..bfc924b5d9b 100644
--- a/spec/features/projects/badges/pipeline_badge_spec.rb
+++ b/spec/features/projects/badges/pipeline_badge_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Pipeline Badge' do
let_it_be(:project) { create(:project, :repository, :public) }
+
let(:ref) { project.default_branch }
context 'when the project has a pipeline' do
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index 7c564d76f70..3598aa2f423 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -119,6 +119,81 @@ RSpec.describe 'File blob', :js do
end
end
+ context 'when switching ref' do
+ def switch_ref_to(ref_name)
+ first('.qa-branches-select').click
+
+ page.within '.project-refs-form' do
+ click_link ref_name
+ wait_for_requests
+ end
+ end
+
+ it 'displays single highlighted line number of different ref' do
+ visit_blob('files/js/application.js', anchor: 'L1')
+
+ switch_ref_to('feature')
+
+ page.within '.blob-content' do
+ expect(find_by_id('LC1')[:class]).to include("hll")
+ end
+ end
+
+ it 'displays multiple highlighted line numbers of different ref' do
+ visit_blob('files/js/application.js', anchor: 'L1-3')
+
+ switch_ref_to('feature')
+
+ page.within '.blob-content' do
+ expect(find_by_id('LC1')[:class]).to include("hll")
+ expect(find_by_id('LC2')[:class]).to include("hll")
+ expect(find_by_id('LC3')[:class]).to include("hll")
+ end
+ end
+
+ it 'displays no highlighted line number of different ref' do
+ Files::UpdateService.new(
+ project,
+ project.owner,
+ commit_message: 'Update',
+ start_branch: 'feature',
+ branch_name: 'feature',
+ file_path: 'files/js/application.js',
+ file_content: 'new content'
+ ).execute
+
+ project.commit('feature').diffs.diff_files.first
+
+ visit_blob('files/js/application.js', anchor: 'L3')
+ switch_ref_to('feature')
+
+ page.within '.blob-content' do
+ expect(page).not_to have_css('.hll')
+ end
+ end
+
+ context 'successfully change ref of similar name' do
+ before do
+ project.repository.create_branch('dev')
+ project.repository.create_branch('development')
+ end
+
+ it 'switches ref from longer to shorter ref name' do
+ visit_blob('files/js/application.js', ref: 'development')
+ switch_ref_to('dev')
+
+ expect(page.find('.file-title-name').text).to eq('application.js')
+ end
+
+ it 'switches ref from shorter to longer ref name' do
+ visit_blob('files/js/application.js', ref: 'dev')
+ switch_ref_to('development')
+
+ expect(page.find('.file-title-name').text).to eq('application.js')
+ end
+ end
+ end
+
context 'visiting with a line number anchor' do
before do
visit_blob('files/markdown/ruby-style-guide.md', anchor: 'L1')
diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb
index bebb4bb679b..53994ec018e 100644
--- a/spec/features/projects/branches/user_deletes_branch_spec.rb
+++ b/spec/features/projects/branches/user_deletes_branch_spec.rb
@@ -4,6 +4,7 @@ require "spec_helper"
RSpec.describe "User deletes branch", :js do
let_it_be(:user) { create(:user) }
+
let(:project) { create(:project, :repository) }
before do
diff --git a/spec/features/projects/commit/builds_spec.rb b/spec/features/projects/commit/builds_spec.rb
index 00ec9d49a10..7b10f72006f 100644
--- a/spec/features/projects/commit/builds_spec.rb
+++ b/spec/features/projects/commit/builds_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'project commit pipelines', :js do
wait_for_requests
page.within('.merge-request-info') do
- expect(page).not_to have_selector '.spinner'
+ expect(page).not_to have_selector '.gl-spinner'
expect(page).to have_content 'No related merge requests found'
end
end
diff --git a/spec/features/projects/commit/cherry_pick_spec.rb b/spec/features/projects/commit/cherry_pick_spec.rb
index cd944436228..fce9fa4fb62 100644
--- a/spec/features/projects/commit/cherry_pick_spec.rb
+++ b/spec/features/projects/commit/cherry_pick_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Cherry-pick Commits', :js do
let_it_be(:user) { create(:user) }
let_it_be(:sha) { '7d3b0f7cff5f37573aea97cebfd5692ea1689924' }
+
let!(:project) { create_default(:project, :repository, namespace: user.namespace) }
let(:master_pickable_commit) { project.commit(sha) }
diff --git a/spec/features/projects/commit/user_comments_on_commit_spec.rb b/spec/features/projects/commit/user_comments_on_commit_spec.rb
index 0fa4975bb25..6997c2d8338 100644
--- a/spec/features/projects/commit/user_comments_on_commit_spec.rb
+++ b/spec/features/projects/commit/user_comments_on_commit_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe "User comments on commit", :js do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:comment_text) { "XML attached" }
before_all do
diff --git a/spec/features/projects/commit/user_reverts_commit_spec.rb b/spec/features/projects/commit/user_reverts_commit_spec.rb
index ad327b86aa7..1c6cf5eb258 100644
--- a/spec/features/projects/commit/user_reverts_commit_spec.rb
+++ b/spec/features/projects/commit/user_reverts_commit_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'User reverts a commit', :js do
include RepoHelpers
let_it_be(:user) { create(:user) }
+
let!(:project) { create_default(:project, :repository, namespace: user.namespace) }
before do
diff --git a/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb b/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
index 89ff2f4b26d..cc3c70e66ce 100644
--- a/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
+++ b/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Project > Commit > View user status' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:commit_author) { create(:user, email: sample_commit.author_email) }
before do
diff --git a/spec/features/projects/compare_spec.rb b/spec/features/projects/compare_spec.rb
index 64e9968061c..bc3ef2af9b0 100644
--- a/spec/features/projects/compare_spec.rb
+++ b/spec/features/projects/compare_spec.rb
@@ -118,6 +118,34 @@ RSpec.describe "Compare", :js do
end
end
end
+
+ context "pagination" do
+ before do
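+ # Limit the compare view to one changed file per page so pagination appears for this fixture.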
+ stub_const("Projects::CompareController::COMMIT_DIFFS_PER_PAGE", 1)
+ end
+
+ it "shows an adjusted count for changed files on this page" do
+ visit project_compare_index_path(project, from: "feature", to: "master")
+
+ click_button('Compare')
+
+ expect(page).to have_content("Showing 1 changed file")
+ end
+
+ it "shows commits list only on the first page" do
+ visit project_compare_index_path(project, from: "feature", to: "master")
+ click_button('Compare')
+
+ expect(page).to have_content 'Commits (29)'
+
+ # go to the second page
+ within(".files .gl-pagination") do
+ click_on("2")
+ end
+
+ expect(page).not_to have_content 'Commits (29)'
+ end
+ end
end
describe "tags" do
diff --git a/spec/features/projects/confluence/user_views_confluence_page_spec.rb b/spec/features/projects/confluence/user_views_confluence_page_spec.rb
index d39c97291db..7ec724ed55d 100644
--- a/spec/features/projects/confluence/user_views_confluence_page_spec.rb
+++ b/spec/features/projects/confluence/user_views_confluence_page_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'User views the Confluence page' do
let_it_be(:user) { create(:user) }
+
let(:project) { create(:project, :public) }
before do
diff --git a/spec/features/projects/deploy_keys_spec.rb b/spec/features/projects/deploy_keys_spec.rb
index 6218578cac6..bf705cf875b 100644
--- a/spec/features/projects/deploy_keys_spec.rb
+++ b/spec/features/projects/deploy_keys_spec.rb
@@ -19,10 +19,11 @@ RSpec.describe 'Project deploy keys', :js do
it 'removes association between project and deploy key' do
visit project_settings_repository_path(project)
- page.within(find('.qa-deploy-keys-settings')) do
+ page.within(find('.rspec-deploy-keys-settings')) do
expect(page).to have_selector('.deploy-key', count: 1)
- accept_confirm { find('[data-testid="remove-icon"]').click }
+ click_button 'Remove'
+ click_button 'Remove deploy key'
wait_for_requests
diff --git a/spec/features/projects/diffs/diff_show_spec.rb b/spec/features/projects/diffs/diff_show_spec.rb
index 747277e2562..e47f36c4b7a 100644
--- a/spec/features/projects/diffs/diff_show_spec.rb
+++ b/spec/features/projects/diffs/diff_show_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Diff file viewer', :js do
+RSpec.describe 'Diff file viewer', :js, :with_clean_rails_cache do
let(:project) { create(:project, :public, :repository) }
def visit_commit(sha, anchor: nil)
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index ab82a4750d3..363fe8c35fe 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -54,17 +54,30 @@ RSpec.describe 'Edit Project Settings' do
end
context 'When external issue tracker is enabled and issues disabled on project settings' do
- it 'hides issues tab and show labels tab' do
+ before do
project.issues_enabled = false
project.save!
allow_next_instance_of(Project) do |instance|
allow(instance).to receive(:external_issue_tracker).and_return(JiraService.new)
end
+ end
+ it 'hides issues tab' do
visit project_path(project)
expect(page).not_to have_selector('.shortcuts-issues')
- expect(page).to have_selector('.shortcuts-labels')
+ expect(page).not_to have_selector('.shortcuts-labels')
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'hides issues tab and show labels tab' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ visit project_path(project)
+
+ expect(page).not_to have_selector('.shortcuts-issues')
+ expect(page).to have_selector('.shortcuts-labels')
+ end
end
end
diff --git a/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb
deleted file mode 100644
index cd796d45aba..00000000000
--- a/spec/features/projects/files/gitlab_ci_syntax_yml_dropdown_spec.rb
+++ /dev/null
@@ -1,69 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file' do
- include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
-
- let_it_be(:namespace) { create(:namespace) }
- let(:project) { create(:project, :repository, namespace: namespace) }
-
- before do
- sign_in project.owner
- stub_experiment(ci_syntax_templates_b: experiment_active)
- stub_experiment_for_subject(ci_syntax_templates_b: in_experiment_group)
-
- visit project_new_blob_path(project, 'master', file_name: '.gitlab-ci.yml')
- end
-
- context 'when experiment is not active' do
- let(:experiment_active) { false }
- let(:in_experiment_group) { false }
-
- it 'does not show the "Learn CI/CD syntax" template dropdown' do
- expect(page).not_to have_css('.gitlab-ci-syntax-yml-selector')
- end
- end
-
- context 'when experiment is active' do
- let(:experiment_active) { true }
-
- context 'when the user is in the control group' do
- let(:in_experiment_group) { false }
-
- it 'does not show the "Learn CI/CD syntax" template dropdown' do
- expect(page).not_to have_css('.gitlab-ci-syntax-yml-selector')
- end
- end
-
- context 'when the user is in the experimental group' do
- let(:in_experiment_group) { true }
-
- it 'allows the user to pick a "Learn CI/CD syntax" template from the dropdown', :js do
- expect(page).to have_css('.gitlab-ci-syntax-yml-selector')
-
- find('.js-gitlab-ci-syntax-yml-selector').click
-
- wait_for_requests
-
- within '.gitlab-ci-syntax-yml-selector' do
- find('.dropdown-input-field').set('Artifacts example')
- find('.dropdown-content .is-focused', text: 'Artifacts example').click
- end
-
- wait_for_requests
-
- expect(page).to have_css('.gitlab-ci-syntax-yml-selector .dropdown-toggle-text', text: 'Learn CI/CD syntax')
- expect(editor_get_value).to have_content('You can use artifacts to pass data to jobs in later stages.')
- end
-
- context 'when the group is created longer than 90 days ago' do
- let(:namespace) { create(:namespace, created_at: 91.days.ago) }
-
- it 'does not show the "Learn CI/CD syntax" template dropdown' do
- expect(page).not_to have_css('.gitlab-ci-syntax-yml-selector')
- end
- end
- end
- end
-end
diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb
index c18ff9ddbbc..453cc14c267 100644
--- a/spec/features/projects/files/user_edits_files_spec.rb
+++ b/spec/features/projects/files/user_edits_files_spec.rb
@@ -131,8 +131,8 @@ RSpec.describe 'Projects > Files > User edits files', :js do
expect(page).to have_selector(:link_or_button, 'Fork')
expect(page).to have_selector(:link_or_button, 'Cancel')
expect(page).to have_content(
- "You're not allowed to edit files in this project directly. "\
- "Please fork this project, make your changes there, and submit a merge request."
+ "You can’t edit files directly in this project. "\
+ "Fork this project and submit a merge request with your changes."
)
end
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 2b7ea70fe5a..9a6d1961a02 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Project fork' do
include ProjectForksHelper
let(:user) { create(:user) }
- let(:project) { create(:project, :public, :repository) }
+ let(:project) { create(:project, :public, :repository, description: 'some description') }
before do
sign_in(user)
@@ -228,7 +228,7 @@ RSpec.describe 'Project fork' do
click_link 'Fork'
page.within('.fork-thumbnail-container') do
- expect(page).to have_css('div.identicon')
+ expect(page).to have_css('span.identicon')
end
end
diff --git a/spec/features/projects/graph_spec.rb b/spec/features/projects/graph_spec.rb
index 72df84bf905..7e039a087c7 100644
--- a/spec/features/projects/graph_spec.rb
+++ b/spec/features/projects/graph_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe 'Project Graph', :js do
expect(page).to have_content 'Last week'
expect(page).to have_content 'Last month'
expect(page).to have_content 'Last year'
- expect(page).to have_content 'Duration for the last 30 commits'
+ expect(page).to have_content 'Pipeline durations for the last 30 commits'
end
end
end
diff --git a/spec/features/projects/services/user_activates_asana_spec.rb b/spec/features/projects/integrations/user_activates_asana_spec.rb
index cf2290383e8..cf2290383e8 100644
--- a/spec/features/projects/services/user_activates_asana_spec.rb
+++ b/spec/features/projects/integrations/user_activates_asana_spec.rb
diff --git a/spec/features/projects/services/user_activates_assembla_spec.rb b/spec/features/projects/integrations/user_activates_assembla_spec.rb
index 63cc424a641..63cc424a641 100644
--- a/spec/features/projects/services/user_activates_assembla_spec.rb
+++ b/spec/features/projects/integrations/user_activates_assembla_spec.rb
diff --git a/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb b/spec/features/projects/integrations/user_activates_atlassian_bamboo_ci_spec.rb
index 91db375be3a..91db375be3a 100644
--- a/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb
+++ b/spec/features/projects/integrations/user_activates_atlassian_bamboo_ci_spec.rb
diff --git a/spec/features/projects/issues/design_management/user_views_design_images_spec.rb b/spec/features/projects/issues/design_management/user_views_design_images_spec.rb
index 4a4c33cb881..c3aefe05f75 100644
--- a/spec/features/projects/issues/design_management/user_views_design_images_spec.rb
+++ b/spec/features/projects/issues/design_management/user_views_design_images_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Users views raw design image files' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:design) { create(:design, :with_file, issue: issue, versions_count: 2) }
+
let(:newest_version) { design.versions.ordered.first }
let(:oldest_version) { design.versions.ordered.last }
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 18a6ad12240..a1416f3f563 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
visit project_job_path(project, job)
wait_for_requests
- expect(page).to have_selector('.build-job.active')
+ expect(page).to have_selector('[data-testid="active-job"]')
end
end
@@ -255,7 +255,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
it 'renders escaped tooltip name' do
- page.find('.active.build-job a').hover
+ page.find('[data-testid="active-job"]').hover
expect(page).to have_content('<img src=x onerror=alert(document.domain)> - passed')
end
end
diff --git a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
index 0a373b0d51a..4a25e28a14e 100644
--- a/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
+++ b/spec/features/projects/labels/issues_sorted_by_priority_spec.rb
@@ -33,14 +33,14 @@ RSpec.describe 'Issue prioritization' do
sign_in user
visit project_issues_path(project, sort: 'label_priority')
+ wait_for_requests
+
# Ensure we are indicating that issues are sorted by priority
- expect(page).to have_selector('.dropdown', text: 'Label priority')
+ expect(page).to have_button 'Label priority'
- page.within('.issues-holder') do
- issue_titles = all('.issues-list .issue-title-text').map(&:text)
+ issue_titles = all('.issues-list .issue-title-text').map(&:text)
- expect(issue_titles).to eq(%w(issue_4 issue_3 issue_5 issue_2 issue_1))
- end
+ expect(issue_titles).to eq(%w(issue_4 issue_3 issue_5 issue_2 issue_1))
end
end
@@ -72,15 +72,15 @@ RSpec.describe 'Issue prioritization' do
sign_in user
visit project_issues_path(project, sort: 'label_priority')
- expect(page).to have_selector('.dropdown', text: 'Label priority')
+ wait_for_requests
+
+ expect(page).to have_button 'Label priority'
- page.within('.issues-holder') do
- issue_titles = all('.issues-list .issue-title-text').map(&:text)
+ issue_titles = all('.issues-list .issue-title-text').map(&:text)
- expect(issue_titles[0..1]).to contain_exactly('issue_5', 'issue_8')
- expect(issue_titles[2..4]).to contain_exactly('issue_1', 'issue_3', 'issue_7')
- expect(issue_titles[5..-1]).to eq(%w(issue_2 issue_4 issue_6))
- end
+ expect(issue_titles[0..1]).to contain_exactly('issue_5', 'issue_8')
+ expect(issue_titles[2..4]).to contain_exactly('issue_1', 'issue_3', 'issue_7')
+ expect(issue_titles[5..-1]).to eq(%w(issue_2 issue_4 issue_6))
end
end
end
diff --git a/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb b/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb
index 11aa53fd963..6f98883a412 100644
--- a/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb
+++ b/spec/features/projects/labels/user_sees_links_to_issuables_spec.rb
@@ -51,6 +51,7 @@ RSpec.describe 'Projects > Labels > User sees links to issuables' do
context 'with a group label' do
let_it_be(:group) { create(:group) }
+
let(:label) { create(:group_label, group: group, title: 'bug') }
context 'when merge requests and issues are enabled for the project' do
diff --git a/spec/features/projects/labels/user_views_labels_spec.rb b/spec/features/projects/labels/user_views_labels_spec.rb
index da8520ca8fb..7a6942b6259 100644
--- a/spec/features/projects/labels/user_views_labels_spec.rb
+++ b/spec/features/projects/labels/user_views_labels_spec.rb
@@ -5,6 +5,7 @@ require "spec_helper"
RSpec.describe "User views labels" do
let_it_be(:project) { create(:project_empty_repo, :public) }
let_it_be(:user) { create(:user) }
+
let(:label_titles) { %w[bug enhancement feature] }
let!(:prioritized_label) { create(:label, project: project, title: 'prioritized-label-name', priority: 1) }
diff --git a/spec/features/projects/members/invite_group_spec.rb b/spec/features/projects/members/invite_group_spec.rb
index 83ba2533a73..4caf3e947c7 100644
--- a/spec/features/projects/members/invite_group_spec.rb
+++ b/spec/features/projects/members/invite_group_spec.rb
@@ -6,25 +6,48 @@ RSpec.describe 'Project > Members > Invite group', :js do
include Select2Helper
include ActionView::Helpers::DateHelper
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::Features::InviteMembersModalHelper
let(:maintainer) { create(:user) }
- before do
- stub_feature_flags(invite_members_group_modal: false)
+ using RSpec::Parameterized::TableSyntax
+
+ where(:invite_members_group_modal_enabled, :expected_invite_group_selector) do
+ true | 'button[data-qa-selector="invite_a_group_button"]'
+ false | '#invite-group-tab'
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(invite_members_group_modal: invite_members_group_modal_enabled)
+ end
+
+ it 'displays either the invite group button or the form with tabs based on the feature flag' do
+ project = create(:project, namespace: create(:group))
+
+ project.add_maintainer(maintainer)
+ sign_in(maintainer)
+
+ visit project_project_members_path(project)
+
+ expect(page).to have_selector(expected_invite_group_selector)
+ end
end
describe 'Share with group lock' do
+ let(:invite_group_selector) { 'button[data-qa-selector="invite_a_group_button"]' }
+
shared_examples 'the project can be shared with groups' do
- it 'the "Invite group" tab exists' do
+ it 'the "Invite a group" button exists' do
visit project_project_members_path(project)
- expect(page).to have_selector('#invite-group-tab')
+ expect(page).to have_selector(invite_group_selector)
end
end
shared_examples 'the project cannot be shared with groups' do
- it 'the "Invite group" tab does not exist' do
+ it 'the "Invite a group" button does not exist' do
visit project_project_members_path(project)
- expect(page).not_to have_selector('#invite-group-tab')
+ expect(page).not_to have_selector(invite_group_selector)
end
end
@@ -41,7 +64,9 @@ RSpec.describe 'Project > Members > Invite group', :js do
context 'when the group has "Share with group lock" disabled' do
it_behaves_like 'the project can be shared with groups'
- it 'the project can be shared with another group' do
+ it 'the project can be shared with another group when the feature flag invite_members_group_modal is disabled' do
+ stub_feature_flags(invite_members_group_modal: false)
+
visit project_project_members_path(project)
expect(page).not_to have_link 'Groups'
@@ -56,6 +81,22 @@ RSpec.describe 'Project > Members > Invite group', :js do
expect(members_table).to have_content(group_to_share_with.name)
end
+
+ it 'the project can be shared with another group when the feature flag invite_members_group_modal is enabled' do
+ stub_feature_flags(invite_members_group_modal: true)
+
+ visit project_project_members_path(project)
+
+ expect(page).not_to have_link 'Groups'
+
+ invite_group(group_to_share_with.name)
+
+ visit project_project_members_path(project)
+
+ click_link 'Groups'
+
+ expect(members_table).to have_content(group_to_share_with.name)
+ end
end
context 'when the group has "Share with group lock" enabled' do
@@ -127,13 +168,7 @@ RSpec.describe 'Project > Members > Invite group', :js do
visit project_project_members_path(project)
- click_on 'invite-group-tab'
-
- select2 group.id, from: '#link_group_id'
-
- fill_in 'expires_at_groups', with: 5.days.from_now.strftime('%Y-%m-%d')
- click_on 'invite-group-tab'
- find('.btn-confirm').click
+ invite_group(group.name, role: 'Guest', expires_at: 5.days.from_now)
end
it 'the group link shows the expiration time with a warning class' do
@@ -149,29 +184,23 @@ RSpec.describe 'Project > Members > Invite group', :js do
context 'with multiple groups to choose from' do
let(:project) { create(:project) }
- before do
+ it 'includes multiple groups' do
project.add_maintainer(maintainer)
sign_in(maintainer)
- create(:group).add_owner(maintainer)
- create(:group).add_owner(maintainer)
+ group1 = create(:group)
+ group1.add_owner(maintainer)
+ group2 = create(:group)
+ group2.add_owner(maintainer)
visit project_project_members_path(project)
- click_link 'Invite group'
+ click_on 'Invite a group'
+ click_on 'Select a group'
+ wait_for_requests
- find('.ajax-groups-select.select2-container')
-
- execute_script 'GROUP_SELECT_PER_PAGE = 1;'
- open_select2 '#link_group_id'
- end
-
- it 'infinitely scrolls' do
- expect(find('.select2-drop .select2-results')).to have_selector('.select2-result', count: 1)
-
- scroll_select2_to_bottom('.select2-drop .select2-results:visible')
-
- expect(find('.select2-drop .select2-results')).to have_selector('.select2-result', count: 2)
+ expect(page).to have_button(group1.name)
+ expect(page).to have_button(group2.name)
end
end
@@ -188,16 +217,19 @@ RSpec.describe 'Project > Members > Invite group', :js do
group_to_share_with.add_maintainer(maintainer)
end
- it 'the groups dropdown does not show ancestors' do
+ # This behavior should be changed to exclude the ancestor and project
+ # group from the options once the issue is fixed for the modal:
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/329835
+ it 'the groups dropdown does show ancestors and the project group' do
visit project_project_members_path(project)
- click_on 'invite-group-tab'
- click_link 'Search for a group'
+ click_on 'Invite a group'
+ click_on 'Select a group'
+ wait_for_requests
- page.within '.select2-drop' do
- expect(page).to have_content(group_to_share_with.name)
- expect(page).not_to have_content(group.name)
- end
+ expect(page).to have_button(group_to_share_with.name)
+ expect(page).to have_button(group.name)
+ expect(page).to have_button(nested_group.name)
end
end
end
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index 384b8ae9929..f1fc579bb8a 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'Project members list', :js do
- include Select2Helper
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::Features::InviteMembersModalHelper
let(:user1) { create(:user, name: 'John Doe') }
let(:user2) { create(:user, name: 'Mary Jane') }
@@ -12,8 +12,6 @@ RSpec.describe 'Project members list', :js do
let(:project) { create(:project, :internal, namespace: group) }
before do
- stub_feature_flags(invite_members_group_modal: true)
-
sign_in(user1)
group.add_owner(user1)
end
@@ -52,7 +50,7 @@ RSpec.describe 'Project members list', :js do
it 'add user to project' do
visit_members_page
- add_user(user2.name, 'Reporter')
+ invite_member(user2.name, role: 'Reporter')
page.within find_member_row(user2) do
expect(page).to have_button('Reporter')
@@ -100,7 +98,7 @@ RSpec.describe 'Project members list', :js do
it 'invite user to project' do
visit_members_page
- add_user('test@example.com', 'Reporter')
+ invite_member('test@example.com', role: 'Reporter')
click_link 'Invited'
@@ -171,25 +169,6 @@ RSpec.describe 'Project members list', :js do
private
- def add_user(id, role)
- click_on 'Invite members'
-
- page.within '#invite-members-modal' do
- fill_in 'Select members or type email addresses', with: id
-
- wait_for_requests
- click_button id
-
- click_button 'Guest'
- wait_for_requests
- click_button role
-
- click_button 'Invite'
- end
-
- page.refresh
- end
-
def visit_members_page
visit project_project_members_path(project)
end
diff --git a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
index d22097a2f6f..c1b14cf60e7 100644
--- a/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/projects/members/master_adds_member_with_expiration_date_spec.rb
@@ -3,12 +3,13 @@
require 'spec_helper'
RSpec.describe 'Projects > Members > Maintainer adds member with expiration date', :js do
- include Select2Helper
include ActiveSupport::Testing::TimeHelpers
include Spec::Support::Helpers::Features::MembersHelpers
+ include Spec::Support::Helpers::Features::InviteMembersModalHelper
let_it_be(:maintainer) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:new_member) { create(:user) }
before do
@@ -19,18 +20,9 @@ RSpec.describe 'Projects > Members > Maintainer adds member with expiration date
end
it 'expiration date is displayed in the members list' do
- stub_feature_flags(invite_members_group_modal: false)
-
visit project_project_members_path(project)
- page.within '.invite-users-form' do
- select2(new_member.id, from: '#user_ids', multiple: true)
-
- fill_in 'expires_at', with: 5.days.from_now.to_date
- find_field('expires_at').native.send_keys :enter
-
- click_on 'Invite'
- end
+ invite_member(new_member.name, role: 'Guest', expires_at: 5.days.from_now.to_date)
page.within find_member_row(new_member) do
expect(page).to have_content(/in \d days/)
diff --git a/spec/features/projects/merge_request_button_spec.rb b/spec/features/projects/merge_request_button_spec.rb
index 93bbabcc3f8..335ae6794b7 100644
--- a/spec/features/projects/merge_request_button_spec.rb
+++ b/spec/features/projects/merge_request_button_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Merge Request button' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
+
let(:forked_project) { fork_project(project, user, repository: true) }
shared_examples 'Merge request button only shown when allowed' do
diff --git a/spec/features/projects/milestones/gfm_autocomplete_spec.rb b/spec/features/projects/milestones/gfm_autocomplete_spec.rb
new file mode 100644
index 00000000000..547a5d11dec
--- /dev/null
+++ b/spec/features/projects/milestones/gfm_autocomplete_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'GFM autocomplete', :js do
+ let_it_be(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') }
+ let_it_be(:group) { create(:group, name: 'Ancestor') }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:issue) { create(:issue, project: project, assignees: [user], title: 'My special issue') }
+ let_it_be(:label) { create(:label, project: project, title: 'special+') }
+ let_it_be(:milestone) { create(:milestone, resource_parent: project, title: "project milestone") }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ shared_examples 'displays autocomplete menu for all entities' do
+ it 'autocompletes all available entities' do
+ fill_in 'Description', with: User.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(user.name)
+
+ fill_in 'Description', with: Label.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(label.title)
+
+ fill_in 'Description', with: Milestone.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(milestone.title)
+
+ fill_in 'Description', with: Issue.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(issue.title)
+
+ fill_in 'Description', with: MergeRequest.reference_prefix
+ wait_for_requests
+ expect(find_autocomplete_menu).to be_visible
+ expect_autocomplete_entry(merge_request.title)
+ end
+ end
+
+ before_all do
+ group.add_maintainer(user)
+ end
+
+ describe 'new milestone page' do
+ before do
+ sign_in(user)
+ visit new_project_milestone_path(project)
+
+ wait_for_requests
+ end
+
+ it_behaves_like 'displays autocomplete menu for all entities'
+ end
+
+ describe 'update milestone page' do
+ before do
+ sign_in(user)
+ visit edit_project_milestone_path(project, milestone)
+
+ wait_for_requests
+ end
+
+ it_behaves_like 'displays autocomplete menu for all entities'
+ end
+
+ private
+
+ def find_autocomplete_menu
+ find('.atwho-view ul', visible: true)
+ end
+
+ def expect_autocomplete_entry(entry)
+ page.within('.atwho-container') do
+ expect(page).to have_content(entry)
+ end
+ end
+end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 7dc3ee63669..ee5bf99fd75 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -8,62 +8,168 @@ RSpec.describe 'Project navbar' do
include_context 'project navbar structure'
- let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
- before do
- insert_package_nav(_('Operations'))
- insert_infrastructure_registry_nav
- stub_config(registry: { enabled: false })
+ let(:user) { project.owner }
- project.add_maintainer(user)
+ before do
sign_in(user)
end
- it_behaves_like 'verified navigation bar' do
+ context 'when sidebar refactor feature flag is disabled' do
before do
- visit project_path(project)
+ stub_feature_flags(sidebar_refactor: false)
+ insert_package_nav(_('Operations'))
+ insert_infrastructure_registry_nav
+
+ insert_after_sub_nav_item(
+ _('Boards'),
+ within: _('Issues'),
+ new_sub_nav_item_name: _('Labels')
+ )
+
+ insert_after_nav_item(
+ _('Snippets'),
+ new_nav_item: {
+ nav_item: _('Members'),
+ nav_sub_items: []
+ }
+ )
+
+ stub_config(registry: { enabled: false })
end
- end
- context 'when value stream is available' do
- before do
- visit project_path(project)
+ it_behaves_like 'verified navigation bar' do
+ before do
+ visit project_path(project)
+ end
end
- it 'redirects to value stream when Analytics item is clicked' do
- page.within('.sidebar-top-level-items') do
- find('[data-qa-selector=analytics_anchor]').click
+ context 'when value stream is available' do
+ before do
+ visit project_path(project)
end
- wait_for_requests
+ it 'redirects to value stream when Analytics item is clicked' do
+ page.within('.sidebar-top-level-items') do
+ find('.shortcuts-analytics').click
+ end
+
+ wait_for_requests
- expect(page).to have_current_path(project_cycle_analytics_path(project))
+ expect(page).to have_current_path(project_cycle_analytics_path(project))
+ end
end
- end
- context 'when pages are available' do
- before do
- stub_config(pages: { enabled: true })
+ context 'when pages are available' do
+ before do
+ stub_config(pages: { enabled: true })
- insert_after_sub_nav_item(
- _('Operations'),
- within: _('Settings'),
- new_sub_nav_item_name: _('Pages')
- )
+ insert_after_sub_nav_item(
+ _('Operations'),
+ within: _('Settings'),
+ new_sub_nav_item_name: _('Pages')
+ )
- visit project_path(project)
+ visit project_path(project)
+ end
+
+ it_behaves_like 'verified navigation bar'
end
- it_behaves_like 'verified navigation bar'
+ context 'when container registry is available' do
+ before do
+ stub_config(registry: { enabled: true })
+
+ insert_container_nav
+
+ visit project_path(project)
+ end
+
+ it_behaves_like 'verified navigation bar'
+ end
end
- context 'when container registry is available' do
+ context 'when sidebar refactor feature flag is enabled' do
+ let(:monitor_nav_item) do
+ {
+ nav_item: _('Monitor'),
+ nav_sub_items: monitor_menu_items
+ }
+ end
+
+ let(:monitor_menu_items) do
+ [
+ _('Metrics'),
+ _('Logs'),
+ _('Tracing'),
+ _('Error Tracking'),
+ _('Alerts'),
+ _('Incidents'),
+ _('Product Analytics')
+ ]
+ end
+
+ let(:project_information_nav_item) do
+ {
+ nav_item: _('Project information'),
+ nav_sub_items: [
+ _('Activity'),
+ _('Labels'),
+ _('Members')
+ ]
+ }
+ end
+
+ let(:settings_menu_items) do
+ [
+ _('General'),
+ _('Integrations'),
+ _('Webhooks'),
+ _('Access Tokens'),
+ _('Repository'),
+ _('CI/CD'),
+ _('Monitor')
+ ]
+ end
+
before do
+ stub_feature_flags(sidebar_refactor: true)
stub_config(registry: { enabled: true })
-
+ insert_package_nav(_('Monitor'))
+ insert_infrastructure_registry_nav
insert_container_nav
+ insert_after_sub_nav_item(
+ _('Monitor'),
+ within: _('Settings'),
+ new_sub_nav_item_name: _('Packages & Registries')
+ )
+
+ insert_after_nav_item(
+ _('Monitor'),
+ new_nav_item: {
+ nav_item: _('Infrastructure'),
+ nav_sub_items: [
+ _('Kubernetes clusters'),
+ _('Serverless platform'),
+ _('Terraform')
+ ]
+ }
+ )
+
+ insert_after_nav_item(
+ _('Security & Compliance'),
+ new_nav_item: {
+ nav_item: _('Deployments'),
+ nav_sub_items: [
+ _('Feature Flags'),
+ _('Environments'),
+ _('Releases')
+ ]
+ }
+ )
+
visit project_path(project)
end
diff --git a/spec/features/projects/new_project_from_template_spec.rb b/spec/features/projects/new_project_from_template_spec.rb
new file mode 100644
index 00000000000..1c8647d859a
--- /dev/null
+++ b/spec/features/projects/new_project_from_template_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'New project from template', :js do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+
+ visit new_project_path
+ end
+
+ context 'create from template' do
+ before do
+ page.find('a[href="#create_from_template"]').click
+ wait_for_requests
+ end
+
+ it 'shows template tabs' do
+ page.within('#create-from-template-pane') do
+ expect(page).to have_link('Built-in', href: '#built-in')
+ end
+ end
+ end
+end
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 7119039d5ff..a1523f9eb08 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -5,50 +5,48 @@ require 'spec_helper'
RSpec.describe 'New project', :js do
include Select2Helper
- context 'as a user' do
- let(:user) { create(:user) }
+ shared_examples 'combined_menu: feature flag examples' do
+ context 'as a user' do
+ let(:user) { create(:user) }
- before do
- sign_in(user)
- end
-
- context 'new repo experiment', :experiment do
- it 'when in control renders "project"' do
- stub_experiments(new_repo: :control)
+ before do
+ sign_in(user)
+ end
- visit new_project_path
+ context 'new repo experiment', :experiment do
+ it 'when in control renders "project"' do
+ stub_experiments(new_repo: :control)
- find('li.header-new.dropdown').click
+ visit new_project_path
- page.within('li.header-new.dropdown') do
- expect(page).to have_selector('a', text: 'New project')
- expect(page).to have_no_selector('a', text: 'New project/repository')
- end
+ find('li.header-new.dropdown').click
- expect(page).to have_selector('.blank-state-title', text: 'Create blank project')
- expect(page).to have_no_selector('.blank-state-title', text: 'Create blank project/repository')
- end
+ page.within('li.header-new.dropdown') do
+ expect(page).to have_selector('a', text: 'New project')
+ expect(page).to have_no_selector('a', text: 'New project/repository')
+ end
- it 'when in candidate renders "project/repository"' do
- stub_experiments(new_repo: :candidate)
+ expect(page).to have_selector('h3', text: 'Create blank project')
+ expect(page).to have_no_selector('h3', text: 'Create blank project/repository')
+ end
- visit new_project_path
+ it 'when in candidate renders "project/repository"' do
+ stub_experiments(new_repo: :candidate)
- find('li.header-new.dropdown').click
+ visit new_project_path
- page.within('li.header-new.dropdown') do
- expect(page).to have_selector('a', text: 'New project/repository')
- end
+ find('li.header-new.dropdown').click
- expect(page).to have_selector('.blank-state-title', text: 'Create blank project/repository')
- end
+ page.within('li.header-new.dropdown') do
+ expect(page).to have_selector('a', text: 'New project/repository')
+ end
- context 'with combined_menu feature disabled' do
- before do
- stub_feature_flags(combined_menu: false)
+ expect(page).to have_selector('h3', text: 'Create blank project/repository')
end
it 'when in control it renders "project" in the new projects dropdown' do
+ pending_on_combined_menu_flag
+
stub_experiments(new_repo: :control)
visit new_project_path
@@ -64,6 +62,8 @@ RSpec.describe 'New project', :js do
end
it 'when in candidate it renders "project/repository" in the new projects dropdown' do
+ pending_on_combined_menu_flag
+
stub_experiments(new_repo: :candidate)
visit new_project_path
@@ -76,337 +76,373 @@ RSpec.describe 'New project', :js do
end
end
end
- end
- it 'shows a message if multiple levels are restricted' do
- Gitlab::CurrentSettings.update!(
- restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
- )
+ it 'shows a message if multiple levels are restricted' do
+ Gitlab::CurrentSettings.update!(
+ restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
+ )
- visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
-
- expect(page).to have_content 'Other visibility settings have been disabled by the administrator.'
- end
+ visit new_project_path
+ find('[data-qa-selector="blank_project_link"]').click
- it 'shows a message if all levels are restricted' do
- Gitlab::CurrentSettings.update!(
- restricted_visibility_levels: Gitlab::VisibilityLevel.values
- )
+ expect(page).to have_content 'Other visibility settings have been disabled by the administrator.'
+ end
- visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ it 'shows a message if all levels are restricted' do
+ Gitlab::CurrentSettings.update!(
+ restricted_visibility_levels: Gitlab::VisibilityLevel.values
+ )
- expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
- end
- end
-
- context 'as an admin' do
- let(:user) { create(:admin) }
+ visit new_project_path
+ find('[data-qa-selector="blank_project_link"]').click
- before do
- sign_in(user)
+ expect(page).to have_content 'Visibility settings have been disabled by the administrator.'
+ end
end
- it 'shows "New project" page', :js do
- visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
-
- expect(page).to have_content('Project name')
- expect(page).to have_content('Project URL')
- expect(page).to have_content('Project slug')
-
- click_link('New project')
- find('[data-qa-selector="import_project_link"]').click
+ context 'as an admin' do
+ let(:user) { create(:admin) }
- expect(page).to have_link('GitHub')
- expect(page).to have_link('Bitbucket')
- expect(page).to have_link('GitLab.com')
- expect(page).to have_button('Repo by URL')
- expect(page).to have_link('GitLab export')
- end
-
- describe 'manifest import option' do
before do
+ sign_in(user)
+ end
+
+ it 'shows "New project" page', :js do
visit new_project_path
+ find('[data-qa-selector="blank_project_link"]').click
- find('[data-qa-selector="import_project_link"]').click
- end
+ expect(page).to have_content('Project name')
+ expect(page).to have_content('Project URL')
+ expect(page).to have_content('Project slug')
- it { expect(page).to have_link('Manifest file') }
- end
+ click_link('New project')
+ find('[data-qa-selector="import_project_link"]').click
- context 'Visibility level selector', :js do
- Gitlab::VisibilityLevel.options.each do |key, level|
- it "sets selector to #{key}" do
- stub_application_setting(default_project_visibility: level)
+ expect(page).to have_link('GitHub')
+ expect(page).to have_link('Bitbucket')
+ expect(page).to have_link('GitLab.com')
+ expect(page).to have_button('Repo by URL')
+ expect(page).to have_link('GitLab export')
+ end
+ describe 'manifest import option' do
+ before do
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
- page.within('#blank-project-pane') do
- expect(find_field("project_visibility_level_#{level}")).to be_checked
- end
- end
- it "saves visibility level #{level} on validation error" do
- visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-selector="import_project_link"]').click
+ end
- choose(key)
- click_button('Create project')
- page.within('#blank-project-pane') do
- expect(find_field("project_visibility_level_#{level}")).to be_checked
- end
+ it 'has Manifest file' do
+ expect(page).to have_link('Manifest file')
end
end
- context 'when group visibility is private but default is internal' do
- let_it_be(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
+ context 'Visibility level selector', :js do
+ Gitlab::VisibilityLevel.options.each do |key, level|
+ it "sets selector to #{key}" do
+ stub_application_setting(default_project_visibility: level)
- before do
- stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
- end
+ visit new_project_path
+ find('[data-qa-selector="blank_project_link"]').click
+ page.within('#blank-project-pane') do
+ expect(find_field("project_visibility_level_#{level}")).to be_checked
+ end
+ end
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'has private selected' do
- visit new_project_path(namespace_id: group.id)
+ it "saves visibility level #{level} on validation error" do
+ visit new_project_path
find('[data-qa-selector="blank_project_link"]').click
+ choose(key)
+ click_button('Create project')
page.within('#blank-project-pane') do
- expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
+ expect(find_field("project_visibility_level_#{level}")).to be_checked
end
end
end
- context 'when admin mode is disabled' do
- it 'is not allowed' do
- visit new_project_path(namespace_id: group.id)
+ context 'when group visibility is private but default is internal' do
+ let_it_be(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
- expect(page).to have_content('Not Found')
+ before do
+ stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
end
- end
- end
- context 'when group visibility is public but user requests private' do
- let_it_be(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'has private selected' do
+ visit new_project_path(namespace_id: group.id)
+ find('[data-qa-selector="blank_project_link"]').click
- before do
- stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
- end
+ page.within('#blank-project-pane') do
+ expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
+ end
+ end
+ end
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'has private selected' do
- visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
- find('[data-qa-selector="blank_project_link"]').click
+ context 'when admin mode is disabled' do
+ it 'is not allowed' do
+ visit new_project_path(namespace_id: group.id)
- page.within('#blank-project-pane') do
- expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
+ expect(page).to have_content('Not Found')
end
end
end
- context 'when admin mode is disabled' do
- it 'is not allowed' do
- visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
+ context 'when group visibility is public but user requests private' do
+ let_it_be(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
- expect(page).to have_content('Not Found')
+ before do
+ stub_application_setting(default_project_visibility: Gitlab::VisibilityLevel::INTERNAL)
end
- end
- end
- end
- context 'Readme selector' do
- it 'shows the initialize with Readme checkbox on "Blank project" tab' do
- visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'has private selected' do
+ visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
+ find('[data-qa-selector="blank_project_link"]').click
- expect(page).to have_css('input#project_initialize_with_readme')
- expect(page).to have_content('Initialize repository with a README')
- end
+ page.within('#blank-project-pane') do
+ expect(find_field("project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).to be_checked
+ end
+ end
+ end
- it 'does not show the initialize with Readme checkbox on "Create from template" tab' do
- visit new_project_path
- find('[data-qa-selector="create_from_template_link"]').click
- first('.choose-template').click
+ context 'when admin mode is disabled' do
+ it 'is not allowed' do
+ visit new_project_path(namespace_id: group.id, project: { visibility_level: Gitlab::VisibilityLevel::PRIVATE })
- page.within '.project-fields-form' do
- expect(page).not_to have_css('input#project_initialize_with_readme')
- expect(page).not_to have_content('Initialize repository with a README')
+ expect(page).to have_content('Not Found')
+ end
+ end
end
end
- it 'does not show the initialize with Readme checkbox on "Import project" tab' do
- visit new_project_path
- find('[data-qa-selector="import_project_link"]').click
- first('.js-import-git-toggle-button').click
+ context 'Readme selector' do
+ it 'shows the initialize with Readme checkbox on "Blank project" tab' do
+ visit new_project_path
+ find('[data-qa-selector="blank_project_link"]').click
- page.within '.toggle-import-form' do
- expect(page).not_to have_css('input#project_initialize_with_readme')
- expect(page).not_to have_content('Initialize repository with a README')
+ expect(page).to have_css('input#project_initialize_with_readme')
+ expect(page).to have_content('Initialize repository with a README')
end
- end
- end
- context 'Namespace selector' do
- context 'with user namespace' do
- before do
+ it 'does not show the initialize with Readme checkbox on "Create from template" tab' do
visit new_project_path
- find('[data-qa-selector="blank_project_link"]').click
+ find('[data-qa-selector="create_from_template_link"]').click
+ first('.choose-template').click
+
+ page.within '.project-fields-form' do
+ expect(page).not_to have_css('input#project_initialize_with_readme')
+ expect(page).not_to have_content('Initialize repository with a README')
+ end
end
- it 'selects the user namespace' do
- page.within('#blank-project-pane') do
- expect(page).to have_select('project[namespace_id]', visible: false, selected: user.username)
+ it 'does not show the initialize with Readme checkbox on "Import project" tab' do
+ visit new_project_path
+ find('[data-qa-selector="import_project_link"]').click
+ first('.js-import-git-toggle-button').click
+
+ page.within '#import-project-pane' do
+ expect(page).not_to have_css('input#project_initialize_with_readme')
+ expect(page).not_to have_content('Initialize repository with a README')
end
end
end
- context 'with group namespace' do
- let(:group) { create(:group, :private) }
+ context 'Namespace selector' do
+ context 'with user namespace' do
+ before do
+ visit new_project_path
+ find('[data-qa-selector="blank_project_link"]').click
+ end
- before do
- group.add_owner(user)
- visit new_project_path(namespace_id: group.id)
- find('[data-qa-selector="blank_project_link"]').click
+ it 'selects the user namespace' do
+ page.within('#blank-project-pane') do
+ expect(page).to have_select('project[namespace_id]', visible: false, selected: user.username)
+ end
+ end
end
- it 'selects the group namespace' do
- page.within('#blank-project-pane') do
- expect(page).to have_select('project[namespace_id]', visible: false, selected: group.name)
+ context 'with group namespace' do
+ let(:group) { create(:group, :private) }
+
+ before do
+ group.add_owner(user)
+ visit new_project_path(namespace_id: group.id)
+ find('[data-qa-selector="blank_project_link"]').click
+ end
+
+ it 'selects the group namespace' do
+ page.within('#blank-project-pane') do
+ expect(page).to have_select('project[namespace_id]', visible: false, selected: group.name)
+ end
end
end
- end
- context 'with subgroup namespace' do
- let(:group) { create(:group) }
- let(:subgroup) { create(:group, parent: group) }
+ context 'with subgroup namespace' do
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, parent: group) }
- before do
- group.add_maintainer(user)
- visit new_project_path(namespace_id: subgroup.id)
- find('[data-qa-selector="blank_project_link"]').click
- end
+ before do
+ group.add_maintainer(user)
+ visit new_project_path(namespace_id: subgroup.id)
+ find('[data-qa-selector="blank_project_link"]').click
+ end
- it 'selects the group namespace' do
- page.within('#blank-project-pane') do
- expect(page).to have_select('project[namespace_id]', visible: false, selected: subgroup.full_path)
+ it 'selects the group namespace' do
+ page.within('#blank-project-pane') do
+ expect(page).to have_select('project[namespace_id]', visible: false, selected: subgroup.full_path)
+ end
end
end
- end
- context 'when changing namespaces dynamically', :js do
- let(:public_group) { create(:group, :public) }
- let(:internal_group) { create(:group, :internal) }
- let(:private_group) { create(:group, :private) }
+ context 'when changing namespaces dynamically', :js do
+ let(:public_group) { create(:group, :public) }
+ let(:internal_group) { create(:group, :internal) }
+ let(:private_group) { create(:group, :private) }
- before do
- public_group.add_owner(user)
- internal_group.add_owner(user)
- private_group.add_owner(user)
- visit new_project_path(namespace_id: public_group.id)
- find('[data-qa-selector="blank_project_link"]').click
- end
+ before do
+ public_group.add_owner(user)
+ internal_group.add_owner(user)
+ private_group.add_owner(user)
+ visit new_project_path(namespace_id: public_group.id)
+ find('[data-qa-selector="blank_project_link"]').click
+ end
- it 'enables the correct visibility options' do
- select2(user.namespace_id, from: '#project_namespace_id')
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
-
- select2(public_group.id, from: '#project_namespace_id')
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
-
- select2(internal_group.id, from: '#project_namespace_id')
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
-
- select2(private_group.id, from: '#project_namespace_id')
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).to be_disabled
- expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
+ it 'enables the correct visibility options' do
+ select2(user.namespace_id, from: '#project_namespace_id')
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
+
+ select2(public_group.id, from: '#project_namespace_id')
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).not_to be_disabled
+
+ select2(internal_group.id, from: '#project_namespace_id')
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
+
+ select2(private_group.id, from: '#project_namespace_id')
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PRIVATE}")).not_to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::INTERNAL}")).to be_disabled
+ expect(find("#project_visibility_level_#{Gitlab::VisibilityLevel::PUBLIC}")).to be_disabled
+ end
end
end
- end
- context 'Import project options', :js do
- before do
- visit new_project_path
- find('[data-qa-selector="import_project_link"]').click
- end
-
- context 'from git repository url, "Repo by URL"' do
+ context 'Import project options', :js do
before do
- first('.js-import-git-toggle-button').click
+ visit new_project_path
+ find('[data-qa-selector="import_project_link"]').click
end
- it 'does not autocomplete sensitive git repo URL' do
- autocomplete = find('#project_import_url')['autocomplete']
+ context 'from git repository url, "Repo by URL"' do
+ before do
+ first('.js-import-git-toggle-button').click
+ end
- expect(autocomplete).to eq('off')
- end
+ it 'does not autocomplete sensitive git repo URL' do
+ autocomplete = find('#project_import_url')['autocomplete']
- it 'shows import instructions' do
- git_import_instructions = first('.js-toggle-content')
+ expect(autocomplete).to eq('off')
+ end
- expect(git_import_instructions).to be_visible
- expect(git_import_instructions).to have_content 'Git repository URL'
- end
+ it 'shows import instructions' do
+ git_import_instructions = first('.js-toggle-content')
- it 'keeps "Import project" tab open after form validation error' do
- collision_project = create(:project, name: 'test-name-collision', namespace: user.namespace)
+ expect(git_import_instructions).to be_visible
+ expect(git_import_instructions).to have_content 'Git repository URL'
+ end
- fill_in 'project_import_url', with: collision_project.http_url_to_repo
- fill_in 'project_name', with: collision_project.name
+ it 'reports error if repo URL does not end with .git' do
+ fill_in 'project_import_url', with: 'http://foo/bar'
+ fill_in 'project_name', with: 'import-project-without-git-suffix'
+ fill_in 'project_path', with: 'import-project-without-git-suffix'
- click_on 'Create project'
+ click_button 'Create project'
- expect(page).to have_css('#import-project-pane.active')
- expect(page).not_to have_css('.toggle-import-form.hide')
- end
- end
+ expect(page).to have_text('Please provide a valid URL ending with .git')
+ end
- context 'from GitHub' do
- before do
- first('.js-import-github').click
- end
+ it 'keeps "Import project" tab open after form validation error' do
+ collision_project = create(:project, name: 'test-name-collision', namespace: user.namespace)
+
+ fill_in 'project_import_url', with: collision_project.http_url_to_repo
+ fill_in 'project_name', with: collision_project.name
+
+ click_on 'Create project'
- it 'shows import instructions' do
- expect(page).to have_content('Authenticate with GitHub')
- expect(current_path).to eq new_import_github_path
+ expect(page).to have_css('#import-project-pane.active')
+ expect(page).not_to have_css('.toggle-import-form.hide')
+ end
end
- end
- context 'from manifest file' do
- before do
- first('.import_manifest').click
+ context 'from GitHub' do
+ before do
+ first('.js-import-github').click
+ end
+
+ it 'shows import instructions' do
+ expect(page).to have_content('Authenticate with GitHub')
+ expect(current_path).to eq new_import_github_path
+ end
end
- it 'shows import instructions' do
- expect(page).to have_content('Manifest file import')
- expect(current_path).to eq new_import_manifest_path
+ context 'from manifest file' do
+ before do
+ first('.import_manifest').click
+ end
+
+ it 'shows import instructions' do
+ expect(page).to have_content('Manifest file import')
+ expect(current_path).to eq new_import_manifest_path
+ end
end
end
- end
- context 'Namespace selector' do
- context 'with group with DEVELOPER_MAINTAINER_PROJECT_ACCESS project_creation_level' do
- let(:group) { create(:group, project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
+ context 'Namespace selector' do
+ context 'with group with DEVELOPER_MAINTAINER_PROJECT_ACCESS project_creation_level' do
+ let(:group) { create(:group, project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
- before do
- group.add_developer(user)
- visit new_project_path(namespace_id: group.id)
- find('[data-qa-selector="blank_project_link"]').click
- end
+ before do
+ group.add_developer(user)
+ visit new_project_path(namespace_id: group.id)
+ find('[data-qa-selector="blank_project_link"]').click
+ end
- it 'selects the group namespace' do
- page.within('#blank-project-pane') do
- expect(page).to have_select('project[namespace_id]', visible: false, selected: group.full_path)
+ it 'selects the group namespace' do
+ page.within('#blank-project-pane') do
+ expect(page).to have_select('project[namespace_id]', visible: false, selected: group.full_path)
+ end
end
end
end
end
end
+
+ context 'with combined_menu: feature flag on' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { true }
+
+ before do
+ stub_feature_flags(combined_menu: true)
+ end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ context 'with combined_menu: feature flag off' do
+ let(:needs_rewrite_for_combined_menu_flag_on) { false }
+
+ before do
+ stub_feature_flags(combined_menu: false)
+ end
+
+ it_behaves_like 'combined_menu: feature flag examples'
+ end
+
+ def pending_on_combined_menu_flag
+ pending 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56587' if needs_rewrite_for_combined_menu_flag_on
+ end
end
diff --git a/spec/features/projects/pages/user_adds_domain_spec.rb b/spec/features/projects/pages/user_adds_domain_spec.rb
index 24c9edb79e5..de9effe3dc7 100644
--- a/spec/features/projects/pages/user_adds_domain_spec.rb
+++ b/spec/features/projects/pages/user_adds_domain_spec.rb
@@ -5,6 +5,7 @@ RSpec.describe 'User adds pages domain', :js do
include LetsEncryptHelpers
let_it_be(:project) { create(:project, pages_https_only: false) }
+
let(:user) { create(:user) }
before do
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 4a0581bb5cf..70dc0bd04e8 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -739,6 +739,7 @@ RSpec.describe 'Pipeline', :js do
context 'when build requires resource', :sidekiq_inline do
let_it_be(:project) { create(:project, :repository) }
+
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:resource_group) { create(:ci_resource_group, project: project) }
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index e375bc10dbf..f1672af1019 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -457,22 +457,8 @@ RSpec.describe 'Pipelines', :js do
visit_project_pipelines
end
- it 'has artifacts' do
- expect(page).to have_selector('.build-artifacts')
- end
-
- it 'has artifacts download dropdown' do
- find('.js-pipeline-dropdown-download').click
-
- expect(page).to have_link(with_artifacts.file_type)
- end
-
- it 'has download attribute on download links' do
- find('.js-pipeline-dropdown-download').click
- expect(page).to have_selector('a', text: 'Download')
- page.all('.build-artifacts a', text: 'Download').each do |link|
- expect(link[:download]).to eq ''
- end
+ it 'has artifacts dropdown' do
+ expect(page).to have_selector('[data-testid="pipeline-multi-actions-dropdown"]')
end
end
@@ -488,7 +474,7 @@ RSpec.describe 'Pipelines', :js do
visit_project_pipelines
end
- it { expect(page).not_to have_selector('.build-artifacts') }
+ it { expect(page).not_to have_selector('[data-testid="artifact-item"]') }
end
context 'without artifacts' do
@@ -503,7 +489,7 @@ RSpec.describe 'Pipelines', :js do
visit_project_pipelines
end
- it { expect(page).not_to have_selector('.build-artifacts') }
+ it { expect(page).not_to have_selector('[data-testid="artifact-item"]') }
end
context 'with trace artifact' do
@@ -514,7 +500,7 @@ RSpec.describe 'Pipelines', :js do
end
it 'does not show trace artifact as artifacts' do
- expect(page).not_to have_selector('.build-artifacts')
+ expect(page).not_to have_selector('[data-testid="artifact-item"]')
end
end
end
@@ -657,26 +643,28 @@ RSpec.describe 'Pipelines', :js do
let(:project) { create(:project, :repository) }
before do
- stub_feature_flags(new_pipeline_form: false)
visit new_project_pipeline_path(project)
end
context 'for valid commit', :js do
before do
click_button project.default_branch
+ wait_for_requests
- page.within '.dropdown-menu' do
- click_link 'master'
- end
+ find('p', text: 'master').click
+ wait_for_requests
end
- context 'with gitlab-ci.yml' do
+ context 'with gitlab-ci.yml', :js do
before do
stub_ci_pipeline_to_return_yaml_file
end
it 'creates a new pipeline' do
- expect { click_on 'Run pipeline' }
+ expect do
+ click_on 'Run pipeline'
+ wait_for_requests
+ end
.to change { Ci::Pipeline.count }.by(1)
expect(Ci::Pipeline.last).to be_web
@@ -684,12 +672,15 @@ RSpec.describe 'Pipelines', :js do
context 'when variables are specified' do
it 'creates a new pipeline with variables' do
- page.within '.ci-variable-row-body' do
- fill_in "Input variable key", with: "key_name"
- fill_in "Input variable value", with: "value"
+ page.within(find("[data-testid='ci-variable-row']")) do
+ find("[data-testid='pipeline-form-ci-variable-key']").set('key_name')
+ find("[data-testid='pipeline-form-ci-variable-value']").set('value')
end
- expect { click_on 'Run pipeline' }
+ expect do
+ click_on 'Run pipeline'
+ wait_for_requests
+ end
.to change { Ci::Pipeline.count }.by(1)
expect(Ci::Pipeline.last.variables.map { |var| var.slice(:key, :secret_value) })
@@ -701,19 +692,17 @@ RSpec.describe 'Pipelines', :js do
context 'without gitlab-ci.yml' do
before do
click_on 'Run pipeline'
+ wait_for_requests
end
it { expect(page).to have_content('Missing CI config file') }
it 'creates a pipeline after first request failed and a valid gitlab-ci.yml file is available when trying again' do
- click_button project.default_branch
-
stub_ci_pipeline_to_return_yaml_file
- page.within '.dropdown-menu' do
- click_link 'master'
+ expect do
+ click_on 'Run pipeline'
+ wait_for_requests
end
-
- expect { click_on 'Run pipeline' }
.to change { Ci::Pipeline.count }.by(1)
end
end
@@ -760,14 +749,13 @@ RSpec.describe 'Pipelines', :js do
let(:project) { create(:project, :repository) }
before do
- stub_feature_flags(new_pipeline_form: false)
visit new_project_pipeline_path(project)
end
describe 'new pipeline page' do
it 'has field to add a new pipeline' do
- expect(page).to have_selector('.js-branch-select')
- expect(find('.js-branch-select')).to have_content project.default_branch
+ expect(page).to have_selector('[data-testid="ref-select"]')
+ expect(find('[data-testid="ref-select"]')).to have_content project.default_branch
expect(page).to have_content('Run for')
end
end
@@ -776,10 +764,10 @@ RSpec.describe 'Pipelines', :js do
it 'shows filtered pipelines', :js do
click_button project.default_branch
- page.within '.dropdown-menu' do
- find('.dropdown-input-field').native.send_keys('fix')
+ page.within '[data-testid="ref-select"]' do
+ find('[data-testid="search-refs"]').native.send_keys('fix')
- page.within '.dropdown-content' do
+ page.within '.gl-new-dropdown-contents' do
expect(page).to have_content('fix')
end
end
diff --git a/spec/features/projects/product_analytics/events_spec.rb b/spec/features/projects/product_analytics/events_spec.rb
index 12f1c4d291a..05d12e12acb 100644
--- a/spec/features/projects/product_analytics/events_spec.rb
+++ b/spec/features/projects/product_analytics/events_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Product Analytics > Events' do
let_it_be(:project) { create(:project_empty_repo) }
let_it_be(:user) { create(:user) }
+
let(:event) { create(:product_analytics_event, project: project) }
before do
diff --git a/spec/features/projects/releases/user_views_releases_spec.rb b/spec/features/projects/releases/user_views_releases_spec.rb
index aabbc8cea7b..d8a55fc7f3b 100644
--- a/spec/features/projects/releases/user_views_releases_spec.rb
+++ b/spec/features/projects/releases/user_views_releases_spec.rb
@@ -19,143 +19,129 @@ RSpec.describe 'User views releases', :js do
project.add_guest(guest)
end
- shared_examples 'releases page' do
- context('when the user is a maintainer') do
- before do
- sign_in(maintainer)
+ context('when the user is a maintainer') do
+ before do
+ sign_in(maintainer)
+
+ visit project_releases_path(project)
+ end
- visit project_releases_path(project)
+ it 'sees the release' do
+ page.within("##{release_v1.tag}") do
+ expect(page).to have_content(release_v1.name)
+ expect(page).to have_content(release_v1.tag)
+ expect(page).not_to have_content('Upcoming Release')
end
+ end
- it 'sees the release' do
- page.within("##{release_v1.tag}") do
- expect(page).to have_content(release_v1.name)
- expect(page).to have_content(release_v1.tag)
- expect(page).not_to have_content('Upcoming Release')
+ context 'when there is a link as an asset' do
+ let!(:release_link) { create(:release_link, release: release_v1, url: url ) }
+ let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
+ let(:direct_asset_link) { Gitlab::Routing.url_helpers.project_release_url(project, release_v1) << "/downloads#{release_link.filepath}" }
+
+ it 'sees the link' do
+ page.within("##{release_v1.tag} .js-assets-list") do
+ expect(page).to have_link release_link.name, href: direct_asset_link
+ expect(page).not_to have_css('[data-testid="external-link-indicator"]')
end
end
- context 'when there is a link as an asset' do
- let!(:release_link) { create(:release_link, release: release_v1, url: url ) }
+ context 'when there is a link redirect' do
+ let!(:release_link) { create(:release_link, release: release_v1, name: 'linux-amd64 binaries', filepath: '/binaries/linux-amd64', url: url) }
let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
- let(:direct_asset_link) { Gitlab::Routing.url_helpers.project_release_url(project, release_v1) << "/downloads#{release_link.filepath}" }
- it 'sees the link' do
+ it 'sees the link', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/329301' do
page.within("##{release_v1.tag} .js-assets-list") do
expect(page).to have_link release_link.name, href: direct_asset_link
expect(page).not_to have_css('[data-testid="external-link-indicator"]')
end
end
+ end
- context 'when there is a link redirect' do
- let!(:release_link) { create(:release_link, release: release_v1, name: 'linux-amd64 binaries', filepath: '/binaries/linux-amd64', url: url) }
- let(:url) { "#{project.web_url}/-/jobs/1/artifacts/download" }
-
- it 'sees the link' do
- page.within("##{release_v1.tag} .js-assets-list") do
- expect(page).to have_link release_link.name, href: direct_asset_link
- expect(page).not_to have_css('[data-testid="external-link-indicator"]')
- end
- end
- end
-
- context 'when url points to external resource' do
- let(:url) { 'http://google.com/download' }
+ context 'when url points to external resource' do
+ let(:url) { 'http://google.com/download' }
- it 'sees that the link is external resource' do
- page.within("##{release_v1.tag} .js-assets-list") do
- expect(page).to have_css('[data-testid="external-link-indicator"]')
- end
+ it 'sees that the link is external resource', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/329302' do
+ page.within("##{release_v1.tag} .js-assets-list") do
+ expect(page).to have_css('[data-testid="external-link-indicator"]')
end
end
end
+ end
- context 'with an upcoming release' do
- it 'sees the upcoming tag' do
- page.within("##{release_v3.tag}") do
- expect(page).to have_content('Upcoming Release')
- end
+ context 'with an upcoming release' do
+ it 'sees the upcoming tag' do
+ page.within("##{release_v3.tag}") do
+ expect(page).to have_content('Upcoming Release')
end
end
+ end
- context 'with a tag containing a slash' do
- it 'sees the release' do
- page.within("##{release_v2.tag.parameterize}") do
- expect(page).to have_content(release_v2.name)
- expect(page).to have_content(release_v2.tag)
- end
+ context 'with a tag containing a slash' do
+ it 'sees the release' do
+ page.within("##{release_v2.tag.parameterize}") do
+ expect(page).to have_content(release_v2.name)
+ expect(page).to have_content(release_v2.tag)
end
end
+ end
- context 'sorting' do
- def sort_page(by:, direction:)
- within '[data-testid="releases-sort"]' do
- find('.dropdown-toggle').click
-
- click_button(by, class: 'dropdown-item')
-
- find('.sorting-direction-button').click if direction == :ascending
- end
- end
-
- shared_examples 'releases sort order' do
- it "sorts the releases #{description}" do
- card_titles = page.all('.release-block .card-title', minimum: expected_releases.count)
-
- card_titles.each_with_index do |title, index|
- expect(title).to have_content(expected_releases[index].name)
- end
- end
- end
+ context 'sorting' do
+ def sort_page(by:, direction:)
+ within '[data-testid="releases-sort"]' do
+ find('.dropdown-toggle').click
- context "when the page is sorted by the default sort order" do
- let(:expected_releases) { [release_v3, release_v2, release_v1] }
+ click_button(by, class: 'dropdown-item')
- it_behaves_like 'releases sort order'
+ find('.sorting-direction-button').click if direction == :ascending
end
+ end
- context "when the page is sorted by created_at ascending " do
- let(:expected_releases) { [release_v2, release_v1, release_v3] }
+ shared_examples 'releases sort order' do
+ it "sorts the releases #{description}" do
+ card_titles = page.all('.release-block .card-title', minimum: expected_releases.count)
- before do
- sort_page by: 'Created date', direction: :ascending
+ card_titles.each_with_index do |title, index|
+ expect(title).to have_content(expected_releases[index].name)
end
-
- it_behaves_like 'releases sort order'
end
end
- end
- context('when the user is a guest') do
- before do
- sign_in(guest)
- end
+ context "when the page is sorted by the default sort order" do
+ let(:expected_releases) { [release_v3, release_v2, release_v1] }
- it 'renders release info except for Git-related data' do
- visit project_releases_path(project)
+ it_behaves_like 'releases sort order'
+ end
- within('.release-block', match: :first) do
- expect(page).to have_content(release_v3.description)
+ context "when the page is sorted by created_at ascending " do
+ let(:expected_releases) { [release_v2, release_v1, release_v3] }
- # The following properties (sometimes) include Git info,
- # so they are not rendered for Guest users
- expect(page).not_to have_content(release_v3.name)
- expect(page).not_to have_content(release_v3.tag)
- expect(page).not_to have_content(release_v3.commit.short_id)
+ before do
+ sort_page by: 'Created date', direction: :ascending
end
+
+ it_behaves_like 'releases sort order'
end
end
end
- context 'when the graphql_releases_page feature flag is enabled' do
- it_behaves_like 'releases page'
- end
-
- context 'when the graphql_releases_page feature flag is disabled' do
+ context('when the user is a guest') do
before do
- stub_feature_flags(graphql_releases_page: false)
+ sign_in(guest)
end
- it_behaves_like 'releases page'
+ it 'renders release info except for Git-related data' do
+ visit project_releases_path(project)
+
+ within('.release-block', match: :first) do
+ expect(page).to have_content(release_v3.description)
+
+ # The following properties (sometimes) include Git info,
+ # so they are not rendered for Guest users
+ expect(page).not_to have_content(release_v3.name)
+ expect(page).not_to have_content(release_v3.tag)
+ expect(page).not_to have_content(release_v3.commit.short_id)
+ end
+ end
end
end
diff --git a/spec/features/projects/services/user_activates_issue_tracker_spec.rb b/spec/features/projects/services/user_activates_issue_tracker_spec.rb
index 1aec8883395..019d50a497b 100644
--- a/spec/features/projects/services/user_activates_issue_tracker_spec.rb
+++ b/spec/features/projects/services/user_activates_issue_tracker_spec.rb
@@ -87,6 +87,6 @@ RSpec.describe 'User activates issue tracker', :js do
it_behaves_like 'external issue tracker activation', tracker: 'Redmine'
it_behaves_like 'external issue tracker activation', tracker: 'YouTrack', skip_new_issue_url: true
it_behaves_like 'external issue tracker activation', tracker: 'Bugzilla'
- it_behaves_like 'external issue tracker activation', tracker: 'Custom Issue Tracker'
+ it_behaves_like 'external issue tracker activation', tracker: 'Custom issue tracker'
it_behaves_like 'external issue tracker activation', tracker: 'EWM', skip_test: true
end
diff --git a/spec/features/projects/settings/access_tokens_spec.rb b/spec/features/projects/settings/access_tokens_spec.rb
index 8083c851bb7..76d5d7308d1 100644
--- a/spec/features/projects/settings/access_tokens_spec.rb
+++ b/spec/features/projects/settings/access_tokens_spec.rb
@@ -99,7 +99,7 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
visit project_settings_access_tokens_path(personal_project)
expect(page).to have_selector('#new_project_access_token')
- expect(page).to have_text('You can generate an access token scoped to this project for each application to use the GitLab API.')
+ expect(page).to have_text('Generate project access tokens scoped to this project for your applications that need access to the GitLab API.')
end
end
diff --git a/spec/features/projects/settings/operations_settings_spec.rb b/spec/features/projects/settings/monitor_settings_spec.rb
index ca976997142..64138e0aeca 100644
--- a/spec/features/projects/settings/operations_settings_spec.rb
+++ b/spec/features/projects/settings/monitor_settings_spec.rb
@@ -3,25 +3,35 @@
require 'spec_helper'
RSpec.describe 'Projects > Settings > For a forked project', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository, create_templates: :issue) }
- let(:role) { :maintainer }
+ let_it_be(:project) { create(:project, :repository, create_templates: :issue) }
+
+  let(:user) { project.owner }
before do
sign_in(user)
- project.add_role(user, role)
end
- describe 'Sidebar > Operations' do
- it 'renders the settings link in the sidebar' do
+ describe 'Sidebar > Monitor' do
+ it 'renders the menu in the sidebar' do
visit project_path(project)
wait_for_requests
- expect(page).to have_selector('a[title="Operations"]', visible: false)
+ expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Monitor"]', text: 'Monitor', visible: false)
+ end
+
+ context 'when feature flag sidebar_refactor is disabled' do
+ it 'renders the menu "Operations" in the sidebar' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ visit project_path(project)
+ wait_for_requests
+
+ expect(page).to have_selector('.sidebar-sub-level-items a[aria-label="Operations"]', text: 'Operations', visible: false)
+ end
end
end
- describe 'Settings > Operations' do
+ describe 'Settings > Monitor' do
describe 'Incidents' do
let(:create_issue) { 'Create an incident. Incidents are created for each alert triggered.' }
let(:send_email) { 'Send a single email notification to Owners and Maintainers for new alerts.' }
diff --git a/spec/features/projects/settings/packages_settings_spec.rb b/spec/features/projects/settings/packages_settings_spec.rb
index 0b40cbee582..62f31fd027b 100644
--- a/spec/features/projects/settings/packages_settings_spec.rb
+++ b/spec/features/projects/settings/packages_settings_spec.rb
@@ -3,36 +3,32 @@
require 'spec_helper'
RSpec.describe 'Projects > Settings > Packages', :js do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let(:user) { project.owner }
before do
sign_in(user)
- project.add_maintainer(user)
+
+ stub_config(packages: { enabled: packages_enabled })
+
+ visit edit_project_path(project)
end
context 'Packages enabled in config' do
- before do
- allow(Gitlab.config.packages).to receive(:enabled).and_return(true)
- end
+ let(:packages_enabled) { true }
it 'displays the packages toggle button' do
- visit edit_project_path(project)
-
- expect(page).to have_content('Packages')
+ expect(page).to have_button('Packages', class: 'gl-toggle')
expect(page).to have_selector('input[name="project[packages_enabled]"] + button', visible: true)
end
end
context 'Packages disabled in config' do
- before do
- allow(Gitlab.config.packages).to receive(:enabled).and_return(false)
- end
+ let(:packages_enabled) { false }
it 'does not show up in UI' do
- visit edit_project_path(project)
-
- expect(page).not_to have_content('Packages')
+ expect(page).not_to have_button('Packages', class: 'gl-toggle')
end
end
end
diff --git a/spec/features/projects/settings/project_settings_spec.rb b/spec/features/projects/settings/project_settings_spec.rb
index cd1c9ecde9c..71b319d192c 100644
--- a/spec/features/projects/settings/project_settings_spec.rb
+++ b/spec/features/projects/settings/project_settings_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Projects settings' do
let_it_be(:project) { create(:project) }
+
let(:user) { project.owner }
let(:panel) { find('.general-settings', match: :first) }
let(:button) { panel.find('.btn.gl-button.js-settings-toggle') }
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index bc60cdd2f8e..6a2769d11fd 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -11,105 +11,125 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
let(:container_registry_enabled) { true }
let(:container_registry_enabled_on_project) { true }
- subject { visit project_settings_ci_cd_path(project) }
+ shared_examples 'an expiration policy form' do
+ before do
+ project.update!(container_registry_enabled: container_registry_enabled_on_project)
+ project.container_expiration_policy.update!(enabled: true)
- before do
- project.update!(container_registry_enabled: container_registry_enabled_on_project)
- project.container_expiration_policy.update!(enabled: true)
+ sign_in(user)
+ stub_container_registry_config(enabled: container_registry_enabled)
+ end
- sign_in(user)
- stub_container_registry_config(enabled: container_registry_enabled)
- end
+ context 'as owner' do
+ it 'shows available section' do
+ subject
- context 'as owner' do
- it 'shows available section' do
- subject
+ settings_block = find('#js-registry-policies')
+ expect(settings_block).to have_text 'Clean up image tags'
+ end
- settings_block = find('#js-registry-policies')
- expect(settings_block).to have_text 'Clean up image tags'
- end
+    it 'saves the cleanup policy when the form is submitted' do
+ subject
- it 'saves cleanup policy submit the form' do
- subject
+ within '#js-registry-policies' do
+ select('Every day', from: 'Run cleanup')
+ select('50 tags per image name', from: 'Keep the most recent:')
+ fill_in('Keep tags matching:', with: 'stable')
+ select('7 days', from: 'Remove tags older than:')
+ fill_in('Remove tags matching:', with: '.*-production')
+
+        submit_button = find('[data-testid="save-button"]')
+ expect(submit_button).not_to be_disabled
+ submit_button.click
+ end
- within '#js-registry-policies' do
- select('Every day', from: 'Run cleanup')
- select('50 tags per image name', from: 'Keep the most recent:')
- fill_in('Keep tags matching:', with: 'stable')
- select('7 days', from: 'Remove tags older than:')
- fill_in('Remove tags matching:', with: '.*-production')
+ expect(find('.gl-toast')).to have_content('Cleanup policy successfully saved.')
+ end
- submit_button = find('[data-testid="save-button"')
- expect(submit_button).not_to be_disabled
- submit_button.click
+    it 'does not save the cleanup policy when the form is submitted with an invalid regex' do
+ subject
+
+ within '#js-registry-policies' do
+ fill_in('Remove tags matching:', with: '*-production')
+
+        submit_button = find('[data-testid="save-button"]')
+ expect(submit_button).not_to be_disabled
+ submit_button.click
+ end
+
+ expect(find('.gl-toast')).to have_content('Something went wrong while updating the cleanup policy.')
end
- toast = find('.gl-toast')
- expect(toast).to have_content('Cleanup policy successfully saved.')
end
- it 'does not save cleanup policy submit form with invalid regex' do
- subject
+ context 'with a project without expiration policy' do
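+      # Rows: historic-entries application setting | per-project historic entry feature flag | expected UI state.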
+ where(:application_setting, :feature_flag, :result) do
+ true | true | :available_section
+ true | false | :available_section
+ false | true | :available_section
+ false | false | :disabled_message
+ end
- within '#js-registry-policies' do
- fill_in('Remove tags matching:', with: '*-production')
+ with_them do
+ before do
+ project.container_expiration_policy.destroy!
+ stub_feature_flags(container_expiration_policies_historic_entry: false)
+ stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
+ stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
+ end
- submit_button = find('[data-testid="save-button"')
- expect(submit_button).not_to be_disabled
- submit_button.click
+ it 'displays the expected result' do
+ subject
+
+ within '#js-registry-policies' do
+ case result
+ when :available_section
+ expect(find('[data-testid="enable-toggle"]')).to have_content('Disabled - Tags will not be automatically deleted.')
+ when :disabled_message
+ expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
+ end
+ end
+ end
end
- toast = find('.gl-toast')
- expect(toast).to have_content('Something went wrong while updating the cleanup policy.')
end
- end
- context 'with a project without expiration policy' do
- where(:application_setting, :feature_flag, :result) do
- true | true | :available_section
- true | false | :available_section
- false | true | :available_section
- false | false | :disabled_message
- end
+ context 'when registry is disabled' do
+ let(:container_registry_enabled) { false }
+
+    it 'does not exist' do
+ subject
- with_them do
- before do
- project.container_expiration_policy.destroy!
- stub_feature_flags(container_expiration_policies_historic_entry: false)
- stub_application_setting(container_expiration_policies_enable_historic_entries: application_setting)
- stub_feature_flags(container_expiration_policies_historic_entry: project) if feature_flag
+ expect(page).not_to have_selector('#js-registry-policies')
end
+ end
- it 'displays the expected result' do
+ context 'when container registry is disabled on project' do
+ let(:container_registry_enabled_on_project) { false }
+
+    it 'does not exist' do
subject
- within '#js-registry-policies' do
- case result
- when :available_section
- expect(find('[data-testid="enable-toggle"]')).to have_content('Disabled - Tags will not be automatically deleted.')
- when :disabled_message
- expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
- end
- end
+ expect(page).not_to have_selector('#js-registry-policies')
end
end
end
- context 'when registry is disabled' do
- let(:container_registry_enabled) { false }
+ context 'with sidebar feature flag off' do
+ subject { visit project_settings_ci_cd_path(project) }
- it 'does not exists' do
- subject
-
- expect(page).not_to have_selector('#js-registry-policies')
+ before do
+ stub_feature_flags(sidebar_refactor: false)
end
- end
- context 'when container registry is disabled on project' do
- let(:container_registry_enabled_on_project) { false }
+ it_behaves_like 'an expiration policy form'
+ end
- it 'does not exists' do
- subject
+ context 'with sidebar feature flag on' do
+ subject { visit project_settings_packages_and_registries_path(project) }
- expect(page).not_to have_selector('#js-registry-policies')
+ before do
+ stub_feature_flags(sidebar_refactor: true)
end
+
+ it_behaves_like 'an expiration policy form'
end
end
diff --git a/spec/features/projects/settings/repository_settings_spec.rb b/spec/features/projects/settings/repository_settings_spec.rb
index 2f257d299d8..f420a8a76b9 100644
--- a/spec/features/projects/settings/repository_settings_spec.rb
+++ b/spec/features/projects/settings/repository_settings_spec.rb
@@ -42,6 +42,7 @@ RSpec.describe 'Projects > Settings > Repository settings' do
context 'Deploy Keys', :js do
let_it_be(:private_deploy_key) { create(:deploy_key, title: 'private_deploy_key', public: false) }
let_it_be(:public_deploy_key) { create(:another_deploy_key, title: 'public_deploy_key', public: true) }
+
let(:new_ssh_key) { attributes_for(:key)[:key] }
it 'get list of keys' do
@@ -116,7 +117,8 @@ RSpec.describe 'Projects > Settings > Repository settings' do
project.deploy_keys << private_deploy_key
visit project_settings_repository_path(project)
- accept_confirm { find('.deploy-key', text: private_deploy_key.title).find('[data-testid="remove-icon"]').click }
+ click_button 'Remove'
+ click_button 'Remove deploy key'
expect(page).not_to have_content(private_deploy_key.title)
end
diff --git a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
index ebda5c9ff59..bf90e86c263 100644
--- a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
+++ b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
@@ -163,7 +163,8 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do
click_on('Save changes')
end
- find('.flash-notice')
+ wait_for_requests
+
radio = find_field('project_project_setting_attributes_squash_option_default_on')
expect(radio).to be_checked
@@ -178,7 +179,8 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do
click_on('Save changes')
end
- find('.flash-notice')
+ wait_for_requests
+
radio = find_field('project_project_setting_attributes_squash_option_always')
expect(radio).to be_checked
@@ -193,7 +195,8 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do
click_on('Save changes')
end
- find('.flash-notice')
+ wait_for_requests
+
radio = find_field('project_project_setting_attributes_squash_option_never')
expect(radio).to be_checked
@@ -220,7 +223,8 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do
click_on('Save changes')
end
- find('.flash-notice')
+ wait_for_requests
+
radio = find_field('project_project_setting_attributes_mr_default_target_self_true')
expect(radio).to be_checked
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
index b237e7e8ce7..be4b6d6b82d 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -38,16 +38,12 @@ RSpec.describe 'Projects > Settings > User manages project members' do
end
it 'imports a team from another project', :js do
- stub_feature_flags(invite_members_group_modal: false)
-
project2.add_maintainer(user)
project2.add_reporter(user_mike)
visit(project_project_members_path(project))
- page.within('.invite-users-form') do
- click_link('Import')
- end
+ click_link('Import a project')
select2(project2.id, from: '#source_project_id')
click_button('Import project members')
@@ -55,6 +51,28 @@ RSpec.describe 'Projects > Settings > User manages project members' do
expect(find_member_row(user_mike)).to have_content('Reporter')
end
+  describe 'when the :invite_members_group_modal feature flag is disabled' do
+ before do
+ stub_feature_flags(invite_members_group_modal: false)
+ end
+
+ it 'imports a team from another project', :js do
+ project2.add_maintainer(user)
+ project2.add_reporter(user_mike)
+
+ visit(project_project_members_path(project))
+
+ page.within('.invite-users-form') do
+ click_link('Import')
+ end
+
+ select2(project2.id, from: '#source_project_id')
+ click_button('Import project members')
+
+ expect(find_member_row(user_mike)).to have_content('Reporter')
+ end
+ end
+
it 'shows all members of project shared group', :js do
group.add_owner(user)
group.add_developer(user_dmitriy)
diff --git a/spec/features/projects/snippets/user_views_snippets_spec.rb b/spec/features/projects/snippets/user_views_snippets_spec.rb
index bc8cba1dc31..40539b43ed5 100644
--- a/spec/features/projects/snippets/user_views_snippets_spec.rb
+++ b/spec/features/projects/snippets/user_views_snippets_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Projects > Snippets > User views snippets' do
let_it_be(:project) { create(:project) }
+
let(:user) { create(:user) }
def visit_project_snippets
diff --git a/spec/features/projects/user_changes_project_visibility_spec.rb b/spec/features/projects/user_changes_project_visibility_spec.rb
index 6935ad4be02..39b8cddd005 100644
--- a/spec/features/projects/user_changes_project_visibility_spec.rb
+++ b/spec/features/projects/user_changes_project_visibility_spec.rb
@@ -28,7 +28,9 @@ RSpec.describe 'User changes public project visibility', :js do
click_button 'Reduce project visibility'
end
- expect(page).to have_text("Project '#{project.name}' was successfully updated")
+ wait_for_requests
+
+ expect(project.reload).to be_private
end
end
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index ff6217d02a7..e2498928fa0 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -198,7 +198,7 @@ RSpec.describe 'Projects > User sees sidebar' do
expect(page).to have_content 'Project'
expect(page).to have_content 'Issues'
expect(page).to have_content 'Wiki'
- expect(page).to have_content 'Operations'
+ expect(page).to have_content 'Monitor'
expect(page).not_to have_content 'Repository'
expect(page).not_to have_content 'CI/CD'
diff --git a/spec/features/projects/user_sees_user_popover_spec.rb b/spec/features/projects/user_sees_user_popover_spec.rb
index e357824a533..db451578ff8 100644
--- a/spec/features/projects/user_sees_user_popover_spec.rb
+++ b/spec/features/projects/user_sees_user_popover_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'User sees user popover', :js do
include Spec::Support::Helpers::Features::NotesHelpers
let_it_be(:project) { create(:project, :repository) }
+
let(:user) { project.creator }
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project)
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
index b6fde19e0d4..1350ecf6e75 100644
--- a/spec/features/projects/user_uses_shortcuts_spec.rb
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -68,14 +68,27 @@ RSpec.describe 'User uses shortcuts', :js do
end
context 'when navigating to the Project pages' do
- it 'redirects to the details page' do
+ it 'redirects to the project page' do
visit project_issues_path(project)
find('body').native.send_key('g')
find('body').native.send_key('p')
expect(page).to have_active_navigation('Project')
- expect(page).to have_active_sub_navigation('Details')
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'redirects to the details page' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ visit project_issues_path(project)
+
+ find('body').native.send_key('g')
+ find('body').native.send_key('p')
+
+ expect(page).to have_active_navigation('Project')
+ expect(page).to have_active_sub_navigation('Details')
+ end
end
it 'redirects to the activity page' do
@@ -165,28 +178,62 @@ RSpec.describe 'User uses shortcuts', :js do
end
end
- context 'when navigating to the Operations pages' do
+ context 'when navigating to the Deployments page' do
+ it 'redirects to the Environments page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('e')
+
+ expect(page).to have_active_navigation('Deployments')
+ expect(page).to have_active_sub_navigation('Environments')
+ end
+ end
+
+ context 'when navigating to the Monitor pages' do
it 'redirects to the Metrics page' do
find('body').native.send_key('g')
find('body').native.send_key('l')
- expect(page).to have_active_navigation('Operations')
+ expect(page).to have_active_navigation('Monitor')
expect(page).to have_active_sub_navigation('Metrics')
end
- it 'redirects to the Environments page' do
- find('body').native.send_key('g')
- find('body').native.send_key('e')
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
- expect(page).to have_active_navigation('Operations')
- expect(page).to have_active_sub_navigation('Environments')
+ it 'redirects to the Operations page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('l')
+
+ expect(page).to have_active_navigation('Operations')
+ expect(page).to have_active_sub_navigation('Metrics')
+ end
+
+ it 'redirects to the Kubernetes page with active Operations' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('k')
+
+ expect(page).to have_active_navigation('Operations')
+ expect(page).to have_active_sub_navigation('Kubernetes')
+ end
+
+ it 'redirects to the Environments page' do
+ find('body').native.send_key('g')
+ find('body').native.send_key('e')
+
+ expect(page).to have_active_navigation('Operations')
+ expect(page).to have_active_sub_navigation('Environments')
+ end
end
+ end
+ context 'when navigating to the Infrastructure pages' do
it 'redirects to the Kubernetes page' do
find('body').native.send_key('g')
find('body').native.send_key('k')
- expect(page).to have_active_navigation('Operations')
+ expect(page).to have_active_navigation('Infrastructure')
expect(page).to have_active_sub_navigation('Kubernetes')
end
end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index acfb7c2602a..b61a769185e 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -160,50 +160,92 @@ RSpec.describe 'Runners' do
end
end
- context 'when application settings have shared_runners_text' do
- let(:shared_runners_text) { 'custom **shared** runners description' }
- let(:shared_runners_html) { 'custom shared runners description' }
+ context 'shared runner text' do
+ context 'when application settings have no shared_runners_text' do
+ it 'user sees default shared runners description' do
+ visit project_runners_path(project)
- before do
- stub_application_setting(shared_runners_text: shared_runners_text)
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).to have_content('The same shared runner executes code from multiple projects')
+ end
+ end
end
- it 'user sees shared runners description' do
- visit project_runners_path(project)
+ context 'when application settings have shared_runners_text' do
+ let(:shared_runners_text) { 'custom **shared** runners description' }
+ let(:shared_runners_html) { 'custom shared runners description' }
+
+ before do
+ stub_application_setting(shared_runners_text: shared_runners_text)
+ end
+
+ it 'user sees shared runners description' do
+ visit project_runners_path(project)
- expect(page.find('.shared-runners-description')).to have_content(shared_runners_html)
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).not_to have_content('The same shared runner executes code from multiple projects')
+ expect(page).to have_content(shared_runners_html)
+ end
+ end
end
- end
- end
- context 'when a project has disabled shared_runners' do
- let(:project) { create(:project, shared_runners_enabled: false) }
+ context 'when application settings have an unsafe link in shared_runners_text' do
+ let(:shared_runners_text) { '<a href="javascript:alert(\'xss\')">link</a>' }
- context 'when feature flag: vueify_shared_runners_toggle is disabled' do
- before do
- stub_feature_flags(vueify_shared_runners_toggle: false)
- project.add_maintainer(user)
+ before do
+ stub_application_setting(shared_runners_text: shared_runners_text)
+ end
+
+ it 'user sees no link' do
+ visit project_runners_path(project)
+
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).to have_content('link')
+ expect(page).not_to have_link('link')
+ end
+ end
end
- it 'user enables shared runners' do
- visit project_runners_path(project)
+ context 'when application settings have an unsafe image in shared_runners_text' do
+ let(:shared_runners_text) { '<img src="404.png" onerror="alert(\'xss\')"/>' }
+
+ before do
+ stub_application_setting(shared_runners_text: shared_runners_text)
+ end
- click_on 'Enable shared runners'
+ it 'user sees image safely' do
+ visit project_runners_path(project)
- expect(page.find('.shared-runners-description')).to have_content('Disable shared runners')
- expect(page).not_to have_selector('#toggle-shared-runners-form')
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).to have_css('img')
+ expect(page).not_to have_css('img[onerror]')
+ end
+ end
end
end
+ end
+
+ context 'enable shared runners in project settings', :js do
+ before do
+ project.add_maintainer(user)
+
+ visit project_runners_path(project)
+ end
- context 'when feature flag: vueify_shared_runners_toggle is enabled' do
- before do
- project.add_maintainer(user)
+ context 'when a project has enabled shared_runners' do
+ let(:project) { create(:project, shared_runners_enabled: true) }
+
+ it 'shared runners toggle is on' do
+ expect(page).to have_selector('[data-testid="toggle-shared-runners"]')
+ expect(page).to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
end
+ end
- it 'user enables shared runners' do
- visit project_runners_path(project)
+ context 'when a project has disabled shared_runners' do
+ let(:project) { create(:project, shared_runners_enabled: false) }
- expect(page).to have_selector('#toggle-shared-runners-form')
+ it 'shared runners toggle is off' do
+ expect(page).not_to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
end
end
end
diff --git a/spec/features/snippets/spam_snippets_spec.rb b/spec/features/snippets/spam_snippets_spec.rb
index 54a56ac962c..3748a916780 100644
--- a/spec/features/snippets/spam_snippets_spec.rb
+++ b/spec/features/snippets/spam_snippets_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe 'snippet editor with spam', skip: "Will be handled in https://git
Gitlab::CurrentSettings.update!(
akismet_enabled: true,
akismet_api_key: 'testkey',
+ spam_check_api_key: 'testkey',
recaptcha_enabled: true,
recaptcha_site_key: 'test site key',
recaptcha_private_key: 'test private key'
diff --git a/spec/features/unsubscribe_links_spec.rb b/spec/features/unsubscribe_links_spec.rb
index 966d90ab16b..b2d0f29808c 100644
--- a/spec/features/unsubscribe_links_spec.rb
+++ b/spec/features/unsubscribe_links_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Unsubscribe links', :sidekiq_might_not_need_inline do
let(:author) { create(:user) }
let(:project) { create(:project, :public) }
let(:params) { { title: 'A bug!', description: 'Fix it!', assignees: [recipient] } }
- let(:issue) { Issues::CreateService.new(project, author, params).execute }
+ let(:issue) { Issues::CreateService.new(project: project, current_user: author, params: params).execute }
let(:mail) { ActionMailer::Base.deliveries.last }
let(:body) { Capybara::Node::Simple.new(mail.default_part_body.to_s) }
diff --git a/spec/features/user_can_display_performance_bar_spec.rb b/spec/features/user_can_display_performance_bar_spec.rb
index b8f41925156..14b5964686f 100644
--- a/spec/features/user_can_display_performance_bar_spec.rb
+++ b/spec/features/user_can_display_performance_bar_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'User can display performance bar', :js do
end
end
- let(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
before do
allow(GitlabPerformanceBarStatsWorker).to receive(:perform_in)
@@ -123,4 +123,38 @@ RSpec.describe 'User can display performance bar', :js do
end
end
end
+
+ context 'flamegraphs' do
+ let_it_be(:user) { create(:user) }
+
+ before_all do
+ group.add_guest(user)
+ end
+
+ context 'when user has access' do
+ before do
+ stub_application_setting(performance_bar_allowed_group_id: group.id)
+
+ Warden.on_next_request do |proxy|
+ proxy.set_user(user)
+ end
+ end
+
+ it 'renders flamegraph when requested' do
+ visit root_path(performance_bar: 'flamegraph')
+
+ page.within_frame 'speedscope-iframe' do
+ expect(page).to have_content('Flamegraph for /')
+ end
+ end
+ end
+
+ context 'when user does not have access' do
+ it 'renders the original page' do
+ visit root_path(performance_bar: 'flamegraph')
+
+ expect(page).not_to have_selector('iframe#speedscope-iframe')
+ end
+ end
+ end
end
diff --git a/spec/features/users/add_email_to_existing_account_spec.rb b/spec/features/users/add_email_to_existing_account_spec.rb
index 9130b96b0e3..cf78fc4587f 100644
--- a/spec/features/users/add_email_to_existing_account_spec.rb
+++ b/spec/features/users/add_email_to_existing_account_spec.rb
@@ -4,13 +4,25 @@ require 'spec_helper'
RSpec.describe 'AdditionalEmailToExistingAccount' do
describe 'add secondary email associated with account' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:email) { create(:email, user: user) }
- it 'verifies confirmation of additional email' do
+ before do
sign_in(user)
+ end
+
+ it 'verifies confirmation of additional email' do
+ visit email_confirmation_path(confirmation_token: email.confirmation_token)
+
+ expect(page).to have_content 'Your email address has been successfully confirmed.'
+ end
+
+ it 'accepts any pending invites for an email confirmation' do
+ member = create(:group_member, :invited, invite_email: email.email)
- email = create(:email, user: user)
visit email_confirmation_path(confirmation_token: email.confirmation_token)
+
+ expect(member.reload.user).to eq(user)
expect(page).to have_content 'Your email address has been successfully confirmed.'
end
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 5f70517224e..17a6abb99e0 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -57,6 +57,12 @@ RSpec.describe 'Signup' do
fill_in 'new_user_password', with: new_user.password
end
+ def confirm_email
+ new_user_token = User.find_by_email(new_user.email).confirmation_token
+
+ visit user_confirmation_path(confirmation_token: new_user_token)
+ end
+
describe 'username validation', :js do
before do
visit new_user_registration_path
@@ -191,7 +197,7 @@ RSpec.describe 'Signup' do
stub_feature_flags(soft_email_confirmation: false)
end
- it 'creates the user account and sends a confirmation email' do
+ it 'creates the user account and sends a confirmation email, and pre-fills email address after confirming' do
visit new_user_registration_path
fill_in_signup_form
@@ -199,6 +205,10 @@ RSpec.describe 'Signup' do
expect { click_button 'Register' }.to change { User.count }.by(1)
expect(current_path).to eq users_almost_there_path
expect(page).to have_content('Please check your email to confirm your account')
+
+ confirm_email
+
+ expect(find_field('Username or email').value).to eq(new_user.email)
end
end
diff --git a/spec/features/users/user_browses_projects_on_user_page_spec.rb b/spec/features/users/user_browses_projects_on_user_page_spec.rb
index 7d05b2ae27a..ded90be3924 100644
--- a/spec/features/users/user_browses_projects_on_user_page_spec.rb
+++ b/spec/features/users/user_browses_projects_on_user_page_spec.rb
@@ -125,7 +125,7 @@ RSpec.describe 'Users > User browses projects on user page', :js do
end
before do
- Issues::CreateService.new(contributed_project, user, { title: 'Bug in old browser' }).execute
+ Issues::CreateService.new(project: contributed_project, current_user: user, params: { title: 'Bug in old browser' }).execute
event = create(:push_event, project: contributed_project, author: user)
create(:push_event_payload, event: event, commit_count: 3)
end
diff --git a/spec/features/whats_new_spec.rb b/spec/features/whats_new_spec.rb
index 55b96361f03..2938ea1b1e8 100644
--- a/spec/features/whats_new_spec.rb
+++ b/spec/features/whats_new_spec.rb
@@ -34,6 +34,24 @@ RSpec.describe "renders a `whats new` dropdown item" do
sign_in(user)
end
+ it 'renders dropdown item when feature enabled' do
+ Gitlab::CurrentSettings.update!(whats_new_variant: ApplicationSetting.whats_new_variants[:all_tiers])
+
+ visit root_dashboard_path
+ find('.header-help-dropdown-toggle').click
+
+ expect(page).to have_button(text: "What's new")
+ end
+
+ it 'does not render dropdown item when feature disabled' do
+ Gitlab::CurrentSettings.update!(whats_new_variant: ApplicationSetting.whats_new_variants[:disabled])
+
+ visit root_dashboard_path
+ find('.header-help-dropdown-toggle').click
+
+ expect(page).not_to have_button(text: "What's new")
+ end
+
it 'shows notification dot and count and removes it once viewed' do
visit root_dashboard_path
diff --git a/spec/finders/analytics/cycle_analytics/stage_finder_spec.rb b/spec/finders/analytics/cycle_analytics/stage_finder_spec.rb
new file mode 100644
index 00000000000..0275205028a
--- /dev/null
+++ b/spec/finders/analytics/cycle_analytics/stage_finder_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Analytics::CycleAnalytics::StageFinder do
+ let(:project) { build(:project) }
+
+ let(:stage_id) { { id: Gitlab::Analytics::CycleAnalytics::DefaultStages.names.first } }
+
+ subject { described_class.new(parent: project, stage_id: stage_id[:id]).execute }
+
+  context 'when the in-memory default stage is found by name' do
+ it { expect(subject).not_to be_persisted }
+ it { expect(subject.name).to eq(stage_id[:id]) }
+ end
+
+ context 'when in-memory default stage cannot be found' do
+ before do
+ stage_id[:id] = 'unknown_default_stage'
+ end
+
+ it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+end
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index d4795d786bc..4df026f2f5f 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -25,10 +25,12 @@ RSpec.describe Ci::RunnersFinder do
end
context 'filter by status' do
- it 'calls the corresponding scope on Ci::Runner' do
- expect(Ci::Runner).to receive(:paused).and_call_original
+ Ci::Runner::AVAILABLE_STATUSES.each do |status|
+ it "calls the corresponding :#{status} scope on Ci::Runner" do
+ expect(Ci::Runner).to receive(status.to_sym).and_call_original
- described_class.new(current_user: admin, params: { status_status: 'paused' }).execute
+ described_class.new(current_user: admin, params: { status_status: status }).execute
+ end
end
end
@@ -70,17 +72,6 @@ RSpec.describe Ci::RunnersFinder do
end
end
- context 'paginate' do
- it 'returns the runners for the specified page' do
- stub_const('Ci::RunnersFinder::NUMBER_OF_RUNNERS_PER_PAGE', 1)
- runner1 = create :ci_runner, created_at: '2018-07-12 07:00'
- runner2 = create :ci_runner, created_at: '2018-07-12 08:00'
-
- expect(described_class.new(current_user: admin, params: { page: 1 }).execute).to eq [runner2]
- expect(described_class.new(current_user: admin, params: { page: 2 }).execute).to eq [runner1]
- end
- end
-
context 'non admin user' do
it 'returns no runners' do
user = create :user
@@ -170,38 +161,6 @@ RSpec.describe Ci::RunnersFinder do
end
end
- context 'paginate' do
- using RSpec::Parameterized::TableSyntax
-
- let(:runners) do
- [[runner_project_7, runner_project_6, runner_project_5],
- [runner_project_4, runner_project_3, runner_project_2],
- [runner_project_1, runner_sub_group_4, runner_sub_group_3],
- [runner_sub_group_2, runner_sub_group_1, runner_group]]
- end
-
- where(:page, :index) do
- 1 | 0
- 2 | 1
- 3 | 2
- 4 | 3
- end
-
- before do
- stub_const('Ci::RunnersFinder::NUMBER_OF_RUNNERS_PER_PAGE', 3)
-
- group.add_owner(user)
- end
-
- with_them do
- let(:params) { { page: page } }
-
- it 'returns the runners for the specified page' do
- expect(subject).to eq(runners[index])
- end
- end
- end
-
context 'filter by search term' do
let(:params) { { search: 'runner_project_search' } }
diff --git a/spec/finders/concerns/packages/finder_helper_spec.rb b/spec/finders/concerns/packages/finder_helper_spec.rb
index c1740ee1796..bad4c482bc6 100644
--- a/spec/finders/concerns/packages/finder_helper_spec.rb
+++ b/spec/finders/concerns/packages/finder_helper_spec.rb
@@ -3,6 +3,30 @@
require 'spec_helper'
RSpec.describe ::Packages::FinderHelper do
+ describe '#packages_for_project' do
+ let_it_be_with_reload(:project1) { create(:project) }
+ let_it_be(:package1) { create(:package, project: project1) }
+ let_it_be(:package2) { create(:package, :error, project: project1) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:package3) { create(:package, project: project2) }
+
+ let(:finder_class) do
+ Class.new do
+ include ::Packages::FinderHelper
+
+ def execute(project1)
+ packages_for_project(project1)
+ end
+ end
+ end
+
+ let(:finder) { finder_class.new }
+
+ subject { finder.execute(project1) }
+
+    it { is_expected.to eq [package1] }
+ end
+
describe '#packages_visible_to_user' do
using RSpec::Parameterized::TableSyntax
@@ -12,6 +36,7 @@ RSpec.describe ::Packages::FinderHelper do
let_it_be_with_reload(:subgroup) { create(:group, parent: group) }
let_it_be_with_reload(:project2) { create(:project, namespace: subgroup) }
let_it_be(:package2) { create(:package, project: project2) }
+ let_it_be(:package3) { create(:package, :error, project: project2) }
let(:finder_class) do
Class.new do
diff --git a/spec/finders/deploy_tokens/tokens_finder_spec.rb b/spec/finders/deploy_tokens/tokens_finder_spec.rb
new file mode 100644
index 00000000000..7f19c5bf11b
--- /dev/null
+++ b/spec/finders/deploy_tokens/tokens_finder_spec.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DeployTokens::TokensFinder do
+ include AdminModeHelper
+
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: user.id) }
+ let_it_be(:group) { create(:group) }
+
+ let!(:project_deploy_token) { create(:deploy_token, projects: [project]) }
+ let!(:revoked_project_deploy_token) { create(:deploy_token, projects: [project], revoked: true) }
+ let!(:expired_project_deploy_token) { create(:deploy_token, projects: [project], expires_at: '1988-01-11T04:33:04-0600') }
+ let!(:group_deploy_token) { create(:deploy_token, :group, groups: [group]) }
+ let!(:revoked_group_deploy_token) { create(:deploy_token, :group, groups: [group], revoked: true) }
+ let!(:expired_group_deploy_token) { create(:deploy_token, :group, groups: [group], expires_at: '1988-01-11T04:33:04-0600') }
+
+ describe "#execute" do
+ let(:params) { {} }
+
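+    # The finder's second argument is the scope: :all (admin only), a Project, or a Group; nil raises ArgumentError.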
+ context 'when scope is :all' do
+ subject { described_class.new(admin, :all, params).execute }
+
+ before do
+ enable_admin_mode!(admin)
+ end
+
+ it 'returns all deploy tokens' do
+ expect(subject.size).to eq(6)
+ is_expected.to match_array([
+ project_deploy_token,
+ revoked_project_deploy_token,
+ expired_project_deploy_token,
+ group_deploy_token,
+ revoked_group_deploy_token,
+ expired_group_deploy_token
+ ])
+ end
+
+ context 'and active filter is applied' do
+ let(:params) { { active: true } }
+
+ it 'returns only active tokens' do
+ is_expected.to match_array([
+ project_deploy_token,
+ group_deploy_token
+ ])
+ end
+ end
+
+ context 'but user is not an admin' do
+ subject { described_class.new(user, :all, params).execute }
+
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { subject }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
+ end
+
+ context 'when scope is a Project' do
+ subject { described_class.new(user, project, params).execute }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns all deploy tokens for the project' do
+ is_expected.to match_array([
+ project_deploy_token,
+ revoked_project_deploy_token,
+ expired_project_deploy_token
+ ])
+ end
+
+ context 'and active filter is applied' do
+ let(:params) { { active: true } }
+
+ it 'returns only active tokens for the project' do
+ is_expected.to match_array([project_deploy_token])
+ end
+ end
+
+ context 'but user is not a member' do
+ subject { described_class.new(other_user, :all, params).execute }
+
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { subject }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
+ end
+
+ context 'when scope is a Group' do
+ subject { described_class.new(user, group, params).execute }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'returns all deploy tokens for the group' do
+ is_expected.to match_array([
+ group_deploy_token,
+ revoked_group_deploy_token,
+ expired_group_deploy_token
+ ])
+ end
+
+ context 'and active filter is applied' do
+ let(:params) { { active: true } }
+
+ it 'returns only active tokens for the group' do
+ is_expected.to match_array([group_deploy_token])
+ end
+ end
+
+ context 'but user is not a member' do
+ subject { described_class.new(other_user, :all, params).execute }
+
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { subject }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
+ end
+
+ context 'when scope is nil' do
+ subject { described_class.new(user, nil, params).execute }
+
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+end
diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb
index 0f659fa1dab..b294f1117f5 100644
--- a/spec/finders/deployments_finder_spec.rb
+++ b/spec/finders/deployments_finder_spec.rb
@@ -5,6 +5,58 @@ require 'spec_helper'
RSpec.describe DeploymentsFinder do
subject { described_class.new(params).execute }
+ describe "validation" do
+ context 'when both updated_at and finished_at filters are specified' do
+ let(:params) { { updated_before: 1.day.ago, finished_before: 1.day.ago } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ described_class::InefficientQueryError,
+ 'Both `updated_at` filter and `finished_at` filter can not be specified')
+ end
+ end
+
+ context 'when updated_at filter and id sorting' do
+ let(:params) { { updated_before: 1.day.ago, order_by: :id } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ described_class::InefficientQueryError,
+ '`updated_at` filter and `updated_at` sorting must be paired')
+ end
+ end
+
+ context 'when finished_at filter and id sorting' do
+ let(:params) { { finished_before: 1.day.ago, order_by: :id } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ described_class::InefficientQueryError,
+ '`finished_at` filter and `finished_at` sorting must be paired')
+ end
+ end
+
+ context 'when finished_at filter with failed status filter' do
+ let(:params) { { finished_before: 1.day.ago, order_by: :finished_at, status: :failed } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ described_class::InefficientQueryError,
+ '`finished_at` filter must be combined with `success` status filter.')
+ end
+ end
+
+ context 'when environment filter with non-project scope' do
+ let(:params) { { environment: 'production' } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ described_class::InefficientQueryError,
+ '`environment` filter must be combined with `project` scope.')
+ end
+ end
+ end
+
describe "#execute" do
context 'when project or group is missing' do
let(:params) { {} }
@@ -20,13 +72,24 @@ RSpec.describe DeploymentsFinder do
describe 'filtering' do
context 'when updated_at filters are specified' do
- let(:params) { { **base_params, updated_before: 1.day.ago, updated_after: 3.days.ago } }
- let!(:deployment_1) { create(:deployment, :success, project: project, updated_at: 2.days.ago) }
- let!(:deployment_2) { create(:deployment, :success, project: project, updated_at: 4.days.ago) }
- let!(:deployment_3) { create(:deployment, :success, project: project, updated_at: 1.hour.ago) }
+ let_it_be(:deployment_1) { create(:deployment, :success, project: project, updated_at: 48.hours.ago) }
+ let_it_be(:deployment_2) { create(:deployment, :success, project: project, updated_at: 47.hours.ago) }
+ let_it_be(:deployment_3) { create(:deployment, :success, project: project, updated_at: 4.days.ago) }
+ let_it_be(:deployment_4) { create(:deployment, :success, project: project, updated_at: 1.hour.ago) }
+ let(:params) { { **base_params, updated_before: 1.day.ago, updated_after: 3.days.ago, order_by: :updated_at } }
it 'returns deployments with matched updated_at' do
- is_expected.to match_array([deployment_1])
+ is_expected.to match_array([deployment_2, deployment_1])
+ end
+
+ context 'when deployments_finder_implicitly_enforce_ordering_for_updated_at_filter feature flag is disabled' do
+ before do
+ stub_feature_flags(deployments_finder_implicitly_enforce_ordering_for_updated_at_filter: false)
+ end
+
+ it 'returns deployments with matched updated_at' do
+ is_expected.to match_array([deployment_1, deployment_2])
+ end
end
end
@@ -72,30 +135,34 @@ RSpec.describe DeploymentsFinder do
let(:params) { { **base_params, order_by: order_by, sort: sort } }
- let!(:deployment_1) { create(:deployment, :success, project: project, iid: 11, ref: 'master', created_at: 2.days.ago, updated_at: Time.now, finished_at: Time.now) }
- let!(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'feature', created_at: 1.day.ago, updated_at: 2.hours.ago, finished_at: 2.hours.ago) }
- let!(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'video', created_at: Time.now, updated_at: 1.hour.ago, finished_at: 1.hour.ago) }
+ let!(:deployment_1) { create(:deployment, :success, project: project, ref: 'master', created_at: 2.days.ago, updated_at: Time.now, finished_at: Time.now) }
+ let!(:deployment_2) { create(:deployment, :success, project: project, ref: 'feature', created_at: 1.day.ago, updated_at: 2.hours.ago, finished_at: 2.hours.ago) }
+ let!(:deployment_3) { create(:deployment, :success, project: project, ref: 'video', created_at: Time.now, updated_at: 1.hour.ago, finished_at: 1.hour.ago) }
where(:order_by, :sort, :ordered_deployments) do
'created_at' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
'created_at' | 'desc' | [:deployment_3, :deployment_2, :deployment_1]
'id' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
'id' | 'desc' | [:deployment_3, :deployment_2, :deployment_1]
- 'iid' | 'asc' | [:deployment_3, :deployment_1, :deployment_2]
- 'iid' | 'desc' | [:deployment_2, :deployment_1, :deployment_3]
+ 'iid' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
+ 'iid' | 'desc' | [:deployment_3, :deployment_2, :deployment_1]
'ref' | 'asc' | [:deployment_2, :deployment_1, :deployment_3]
'ref' | 'desc' | [:deployment_3, :deployment_1, :deployment_2]
- 'updated_at' | 'asc' | [:deployment_2, :deployment_3, :deployment_1]
- 'updated_at' | 'desc' | [:deployment_1, :deployment_3, :deployment_2]
- 'finished_at' | 'asc' | [:deployment_2, :deployment_3, :deployment_1]
- 'finished_at' | 'desc' | [:deployment_1, :deployment_3, :deployment_2]
+ 'updated_at' | 'asc' | described_class::InefficientQueryError
+ 'updated_at' | 'desc' | described_class::InefficientQueryError
+ 'finished_at' | 'asc' | described_class::InefficientQueryError
+ 'finished_at' | 'desc' | described_class::InefficientQueryError
'invalid' | 'asc' | [:deployment_1, :deployment_2, :deployment_3]
- 'iid' | 'err' | [:deployment_3, :deployment_1, :deployment_2]
+ 'iid' | 'err' | [:deployment_1, :deployment_2, :deployment_3]
end
with_them do
it 'returns the deployments ordered' do
- expect(subject).to eq(ordered_deployments.map { |name| public_send(name) })
+ if ordered_deployments == described_class::InefficientQueryError
+ expect { subject }.to raise_error(described_class::InefficientQueryError)
+ else
+ expect(subject).to eq(ordered_deployments.map { |name| public_send(name) })
+ end
end
end
end
@@ -112,8 +179,20 @@ RSpec.describe DeploymentsFinder do
end
end
- describe 'tie-breaker for `finished_at` sorting' do
- let(:params) { { **base_params, order_by: 'updated_at', sort: 'asc' } }
+ describe 'transform `iid` sorting to `id` sorting' do
+ let(:params) { { **base_params, order_by: 'iid', sort: 'asc' } }
+
+ it 'sorts by only one column' do
+ expect(subject.order_values.size).to eq(1)
+ end
+
+ it 'sorts by `id`' do
+ expect(subject.order_values.first.to_sql).to eq(Deployment.arel_table[:id].asc.to_sql)
+ end
+ end
+
+ describe 'tie-breaker for `updated_at` sorting' do
+ let(:params) { { **base_params, updated_after: 1.day.ago, order_by: 'updated_at', sort: 'asc' } }
it 'sorts by two columns' do
expect(subject.order_values.size).to eq(2)
@@ -122,17 +201,62 @@ RSpec.describe DeploymentsFinder do
it 'adds `id` sorting as the second order column' do
order_value = subject.order_values[1]
- expect(order_value.to_sql).to eq(Deployment.arel_table[:id].desc.to_sql)
+ expect(order_value.to_sql).to eq(Deployment.arel_table[:id].asc.to_sql)
end
- it 'uses the `id DESC` as tie-breaker when ordering' do
+ it 'uses the `id ASC` as tie-breaker when ordering' do
updated_at = Time.now
deployment_1 = create(:deployment, :success, project: project, updated_at: updated_at)
deployment_2 = create(:deployment, :success, project: project, updated_at: updated_at)
deployment_3 = create(:deployment, :success, project: project, updated_at: updated_at)
- expect(subject).to eq([deployment_3, deployment_2, deployment_1])
+ expect(subject).to eq([deployment_1, deployment_2, deployment_3])
+ end
+
+ context 'when sort direction is desc' do
+ let(:params) { { **base_params, updated_after: 1.day.ago, order_by: 'updated_at', sort: 'desc' } }
+
+ it 'uses the `id DESC` as tie-breaker when ordering' do
+ updated_at = Time.now
+
+ deployment_1 = create(:deployment, :success, project: project, updated_at: updated_at)
+ deployment_2 = create(:deployment, :success, project: project, updated_at: updated_at)
+ deployment_3 = create(:deployment, :success, project: project, updated_at: updated_at)
+
+ expect(subject).to eq([deployment_3, deployment_2, deployment_1])
+ end
+ end
+ end
+
+  describe 'enforcing `updated_at` sorting when filtering by `updated_at`' do
+ let(:params) { { **base_params, updated_before: 1.day.ago, order_by: 'id', sort: 'asc' } }
+
+ before do
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ end
+
+    it 'sorts by two columns' do
+ expect(subject.order_values.size).to eq(2)
+ end
+
+ it 'sorts by `updated_at`' do
+ expect(subject.order_values.first.to_sql).to eq(Deployment.arel_table[:updated_at].asc.to_sql)
+ expect(subject.order_values.second.to_sql).to eq(Deployment.arel_table[:id].asc.to_sql)
+ end
+
+ context 'when deployments_finder_implicitly_enforce_ordering_for_updated_at_filter feature flag is disabled' do
+ before do
+ stub_feature_flags(deployments_finder_implicitly_enforce_ordering_for_updated_at_filter: false)
+ end
+
+ it 'sorts by only one column' do
+ expect(subject.order_values.size).to eq(1)
+ end
+
+ it 'sorts by `id`' do
+ expect(subject.order_values.first.to_sql).to eq(Deployment.arel_table[:id].asc.to_sql)
+ end
end
end
@@ -142,23 +266,76 @@ RSpec.describe DeploymentsFinder do
let!(:deployment_3) { create(:deployment, :success, project: project, finished_at: 5.hours.ago) }
context 'when filtering by finished_after and finished_before' do
- let(:params) { { **base_params, finished_after: 3.days.ago, finished_before: 1.day.ago } }
+ let(:params) { { **base_params, finished_after: 3.days.ago, finished_before: 1.day.ago, status: :success, order_by: :finished_at } }
it { is_expected.to match_array([deployment_1]) }
end
context 'when the finished_before parameter is missing' do
- let(:params) { { **base_params, finished_after: 3.days.ago } }
+ let(:params) { { **base_params, finished_after: 3.days.ago, status: :success, order_by: :finished_at } }
it { is_expected.to match_array([deployment_1, deployment_3]) }
end
context 'when finished_after is missing' do
- let(:params) { { **base_params, finished_before: 3.days.ago } }
+ let(:params) { { **base_params, finished_before: 3.days.ago, status: :success, order_by: :finished_at } }
it { is_expected.to match_array([deployment_2]) }
end
end
end
+
+ context 'at group scope' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+
+ let_it_be(:group_project_1) { create(:project, :public, :test_repo, group: group) }
+ let_it_be(:group_project_2) { create(:project, :public, :test_repo, group: group) }
+ let_it_be(:subgroup_project_1) { create(:project, :public, :test_repo, group: subgroup) }
+ let(:base_params) { { group: group } }
+
+ describe 'ordering' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:params) { { **base_params, order_by: order_by, sort: sort } }
+
+ let!(:group_project_1_deployment) { create(:deployment, :success, project: group_project_1, iid: 11, ref: 'master', created_at: 2.days.ago, updated_at: Time.now, finished_at: Time.now) }
+ let!(:group_project_2_deployment) { create(:deployment, :success, project: group_project_2, iid: 12, ref: 'feature', created_at: 1.day.ago, updated_at: 2.hours.ago, finished_at: 2.hours.ago) }
+ let!(:subgroup_project_1_deployment) { create(:deployment, :success, project: subgroup_project_1, iid: 8, ref: 'video', created_at: Time.now, updated_at: 1.hour.ago, finished_at: 1.hour.ago) }
+
+ where(:order_by, :sort) do
+ 'created_at' | 'asc'
+ 'created_at' | 'desc'
+ 'id' | 'asc'
+ 'id' | 'desc'
+ 'iid' | 'asc'
+ 'iid' | 'desc'
+ 'ref' | 'asc'
+ 'ref' | 'desc'
+ 'invalid' | 'asc'
+ 'iid' | 'err'
+ end
+
+ with_them do
+ it 'returns the deployments unordered' do
+ expect(subject.to_a).to contain_exactly(group_project_1_deployment,
+ group_project_2_deployment,
+ subgroup_project_1_deployment)
+ end
+ end
+ end
+
+ it 'avoids N+1 queries' do
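+      # Record a baseline query count for one group, then add a project with deployments and assert the count does not grow.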
+ execute_queries = -> { described_class.new({ group: group }).execute.first }
+      control_count = ActiveRecord::QueryRecorder.new { execute_queries.call }.count
+
+ new_project = create(:project, :repository, group: group)
+ new_env = create(:environment, project: new_project, name: "production")
+ create_list(:deployment, 2, status: :success, project: new_project, environment: new_env)
+ group.reload
+
+      expect { execute_queries.call }.not_to exceed_query_limit(control_count)
+ end
+ end
end
end
diff --git a/spec/finders/environment_names_finder_spec.rb b/spec/finders/environments/environment_names_finder_spec.rb
index fe00c800f0a..438f9e9ea7c 100644
--- a/spec/finders/environment_names_finder_spec.rb
+++ b/spec/finders/environments/environment_names_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe EnvironmentNamesFinder do
+RSpec.describe Environments::EnvironmentNamesFinder do
describe '#execute' do
let!(:group) { create(:group) }
let!(:public_project) { create(:project, :public, namespace: group) }
diff --git a/spec/finders/environments_by_deployments_finder_spec.rb b/spec/finders/environments/environments_by_deployments_finder_spec.rb
index f5fcc4ef72a..1b86aced67d 100644
--- a/spec/finders/environments_by_deployments_finder_spec.rb
+++ b/spec/finders/environments/environments_by_deployments_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe EnvironmentsByDeploymentsFinder do
+RSpec.describe Environments::EnvironmentsByDeploymentsFinder do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
let(:environment) { create(:environment, :available, project: project) }
diff --git a/spec/finders/environments_finder_spec.rb b/spec/finders/environments/environments_finder_spec.rb
index c2022331ad9..68c0c524478 100644
--- a/spec/finders/environments_finder_spec.rb
+++ b/spec/finders/environments/environments_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe EnvironmentsFinder do
+RSpec.describe Environments::EnvironmentsFinder do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
let(:environment) { create(:environment, :available, project: project) }
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index a2aac857bf5..27466ab563f 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -1178,6 +1178,7 @@ RSpec.describe IssuesFinder do
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
end
end
@@ -1186,6 +1187,7 @@ RSpec.describe IssuesFinder do
it 'returns true' do
expect(finder.use_cte_for_search?).to be_truthy
+ expect(finder.execute.to_sql).to match(/^WITH "issues" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
end
end
end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 597d22801ca..3b835d366db 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -866,5 +866,36 @@ RSpec.describe MergeRequestsFinder do
end
end
end
+
+ describe '#count_by_state' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:labels) { create_list(:label, 2, project: project) }
+ let_it_be(:merge_requests) { create_list(:merge_request, 4, :unique_branches, author: user, target_project: project, source_project: project, labels: labels) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when filtering by multiple labels' do
+ it 'returns the correct counts' do
+ counts = described_class.new(user, { label_name: labels.map(&:name) }).count_by_state
+
+ expect(counts[:all]).to eq(merge_requests.size)
+ end
+ end
+
+ context 'when filtering by approved_by_usernames' do
+ before do
+ merge_requests.each { |mr| mr.approved_by_users << user }
+ end
+
+ it 'returns the correct counts' do
+ counts = described_class.new(user, { approved_by_usernames: [user.username] }).count_by_state
+
+ expect(counts[:all]).to eq(merge_requests.size)
+ end
+ end
+ end
end
end
diff --git a/spec/finders/packages/composer/packages_finder_spec.rb b/spec/finders/packages/composer/packages_finder_spec.rb
new file mode 100644
index 00000000000..d4328827de3
--- /dev/null
+++ b/spec/finders/packages/composer/packages_finder_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe ::Packages::Composer::PackagesFinder do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ let(:params) { {} }
+
+ describe '#execute' do
+ let_it_be(:composer_package) { create(:composer_package, project: project) }
+ let_it_be(:composer_package2) { create(:composer_package, project: project) }
+ let_it_be(:error_package) { create(:composer_package, :error, project: project) }
+ let_it_be(:composer_package3) { create(:composer_package) }
+
+ subject { described_class.new(user, group, params).execute }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to match_array([composer_package, composer_package2]) }
+ end
+end
diff --git a/spec/finders/packages/conan/package_finder_spec.rb b/spec/finders/packages/conan/package_finder_spec.rb
index 936a0e5ff4b..b26f8900090 100644
--- a/spec/finders/packages/conan/package_finder_spec.rb
+++ b/spec/finders/packages/conan/package_finder_spec.rb
@@ -11,7 +11,8 @@ RSpec.describe ::Packages::Conan::PackageFinder do
subject { described_class.new(user, query: query).execute }
- context 'packages that are not visible to user' do
+ context 'packages that are not installable' do
+ let!(:conan_package3) { create(:conan_package, :error, project: project) }
let!(:non_visible_project) { create(:project, :private) }
let!(:non_visible_conan_package) { create(:conan_package, project: non_visible_project) }
let(:query) { "#{conan_package.name.split('/').first[0, 3]}%" }
diff --git a/spec/finders/packages/generic/package_finder_spec.rb b/spec/finders/packages/generic/package_finder_spec.rb
index ed34268e7a9..707f943b285 100644
--- a/spec/finders/packages/generic/package_finder_spec.rb
+++ b/spec/finders/packages/generic/package_finder_spec.rb
@@ -23,6 +23,13 @@ RSpec.describe ::Packages::Generic::PackageFinder do
expect(found_package).to eq(package)
end
+ it 'does not find uninstallable packages' do
+ error_package = create(:generic_package, :error, project: project)
+
+ expect { finder.execute!(error_package.name, error_package.version) }
+ .to raise_error(ActiveRecord::RecordNotFound)
+ end
+
it 'raises ActiveRecord::RecordNotFound if package is not found' do
expect { finder.execute!(package.name, '3.1.4') }
.to raise_error(ActiveRecord::RecordNotFound)
diff --git a/spec/finders/packages/go/package_finder_spec.rb b/spec/finders/packages/go/package_finder_spec.rb
index b6fad1e7061..dbcb8255d47 100644
--- a/spec/finders/packages/go/package_finder_spec.rb
+++ b/spec/finders/packages/go/package_finder_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Packages::Go::PackageFinder do
let_it_be(:mod) { create :go_module, project: project }
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.1' }
- let_it_be(:package) { create :golang_package, project: project, name: mod.name, version: 'v1.0.1' }
+ let_it_be_with_refind(:package) { create :golang_package, project: project, name: mod.name, version: 'v1.0.1' }
let(:finder) { described_class.new(project, mod_name, version_name) }
@@ -54,6 +54,17 @@ RSpec.describe Packages::Go::PackageFinder do
it { is_expected.to eq(package) }
end
+ context 'with an uninstallable package' do
+ let(:mod_name) { mod.name }
+ let(:version_name) { version.name }
+
+ before do
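+ # NOTE: status 1 is assumed to map to the package's hidden state, which is not installable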
+ package.update_column(:status, 1)
+ end
+
+ it { is_expected.to eq(nil) }
+ end
+
context 'with an invalid name' do
let(:mod_name) { 'foo/bar' }
let(:version_name) { 'baz' }
diff --git a/spec/finders/packages/group_or_project_package_finder_spec.rb b/spec/finders/packages/group_or_project_package_finder_spec.rb
new file mode 100644
index 00000000000..aaeec8e70d2
--- /dev/null
+++ b/spec/finders/packages/group_or_project_package_finder_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::GroupOrProjectPackageFinder do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let(:finder) { described_class.new(user, project) }
+
+ describe 'execute' do
+ subject(:run_finder) { finder.execute }
+
+ it { expect { run_finder }.to raise_error(NotImplementedError) }
+ end
+
+ describe 'execute!' do
+ subject(:run_finder) { finder.execute! }
+
+ it { expect { run_finder }.to raise_error(NotImplementedError) }
+ end
+end
diff --git a/spec/finders/packages/group_packages_finder_spec.rb b/spec/finders/packages/group_packages_finder_spec.rb
index d6daf73aba2..29b2f0fffd7 100644
--- a/spec/finders/packages/group_packages_finder_spec.rb
+++ b/spec/finders/packages/group_packages_finder_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe Packages::GroupPackagesFinder do
end
context 'when there are processing packages' do
- let_it_be(:package4) { create(:nuget_package, project: project, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) }
+ let_it_be(:package4) { create(:nuget_package, :processing, project: project) }
it { is_expected.to match_array([package1, package2]) }
end
diff --git a/spec/finders/packages/maven/package_finder_spec.rb b/spec/finders/packages/maven/package_finder_spec.rb
index ca144292501..13c603f1ec4 100644
--- a/spec/finders/packages/maven/package_finder_spec.rb
+++ b/spec/finders/packages/maven/package_finder_spec.rb
@@ -6,13 +6,12 @@ RSpec.describe ::Packages::Maven::PackageFinder do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
- let_it_be(:package) { create(:maven_package, project: project) }
+ let_it_be_with_refind(:package) { create(:maven_package, project: project) }
let(:param_path) { nil }
- let(:param_project) { nil }
- let(:param_group) { nil }
+ let(:project_or_group) { nil }
let(:param_order_by_package_file) { false }
- let(:finder) { described_class.new(param_path, user, project: param_project, group: param_group, order_by_package_file: param_order_by_package_file) }
+ let(:finder) { described_class.new(user, project_or_group, path: param_path, order_by_package_file: param_order_by_package_file) }
before do
group.add_developer(user)
@@ -36,34 +35,28 @@ RSpec.describe ::Packages::Maven::PackageFinder do
expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ context 'with an uninstallable package' do
+ let(:param_path) { package.maven_metadatum.path }
+
+ before do
+ package.update_column(:status, 1)
+ end
+
+ it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
end
context 'within the project' do
- let(:param_project) { project }
+ let(:project_or_group) { project }
it_behaves_like 'handling valid and invalid paths'
end
context 'within a group' do
- let(:param_group) { group }
-
- context 'with maven_packages_group_level_improvements enabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: true)
- expect(finder).to receive(:packages_visible_to_user).with(user, within_group: group).and_call_original
- end
-
- it_behaves_like 'handling valid and invalid paths'
- end
-
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- expect(finder).not_to receive(:packages_visible_to_user)
- end
+ let(:project_or_group) { group }
- it_behaves_like 'handling valid and invalid paths'
- end
+ it_behaves_like 'handling valid and invalid paths'
end
context 'across all projects' do
@@ -83,7 +76,7 @@ RSpec.describe ::Packages::Maven::PackageFinder do
let_it_be(:package2) { create(:maven_package, project: project2, name: package_name, version: nil) }
let_it_be(:package3) { create(:maven_package, project: project3, name: package_name, version: nil) }
- let(:param_group) { group }
+ let(:project_or_group) { group }
let(:param_path) { package_name }
before do
@@ -93,38 +86,14 @@ RSpec.describe ::Packages::Maven::PackageFinder do
create(:package_file, :xml, package: package2)
end
- context 'with maven_packages_group_level_improvements enabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: true)
- expect(finder).not_to receive(:versionless_package?)
- end
-
- context 'without order by package file' do
- it { is_expected.to eq(package3) }
- end
-
- context 'with order by package file' do
- let(:param_order_by_package_file) { true }
-
- it { is_expected.to eq(package2) }
- end
+ context 'without order by package file' do
+ it { is_expected.to eq(package3) }
end
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- expect(finder).to receive(:versionless_package?).and_call_original
- end
+ context 'with order by package file' do
+ let(:param_order_by_package_file) { true }
- context 'without order by package file' do
- it { is_expected.to eq(package2) }
- end
-
- context 'with order by package file' do
- let(:param_order_by_package_file) { true }
-
- it { is_expected.to eq(package2) }
- end
+ it { is_expected.to eq(package2) }
end
end
end
@@ -146,7 +115,7 @@ RSpec.describe ::Packages::Maven::PackageFinder do
it_behaves_like 'Packages::Maven::PackageFinder examples'
it 'uses CTE in the query' do
- sql = described_class.new('some_path', user, group: group).send(:packages_with_path).to_sql
+ sql = described_class.new(user, group, path: package.maven_metadatum.path).send(:packages).to_sql
expect(sql).to include('WITH "maven_metadata_by_path" AS')
end
diff --git a/spec/finders/packages/npm/package_finder_spec.rb b/spec/finders/packages/npm/package_finder_spec.rb
index f021d800f31..a995f3b96c4 100644
--- a/spec/finders/packages/npm/package_finder_spec.rb
+++ b/spec/finders/packages/npm/package_finder_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
RSpec.describe ::Packages::Npm::PackageFinder do
let_it_be_with_reload(:project) { create(:project)}
- let_it_be(:package) { create(:npm_package, project: project) }
+ let_it_be_with_refind(:package) { create(:npm_package, project: project) }
let(:project) { package.project }
let(:package_name) { package.name }
@@ -46,6 +46,14 @@ RSpec.describe ::Packages::Npm::PackageFinder do
it { is_expected.to be_empty }
end
+
+ context 'with an uninstallable package' do
+ before do
+ package.update_column(:status, 1)
+ end
+
+ it { is_expected.to be_empty }
+ end
end
subject { finder.execute }
diff --git a/spec/finders/packages/nuget/package_finder_spec.rb b/spec/finders/packages/nuget/package_finder_spec.rb
index 10b5f6c8ec2..59cca2d06dc 100644
--- a/spec/finders/packages/nuget/package_finder_spec.rb
+++ b/spec/finders/packages/nuget/package_finder_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Packages::Nuget::PackageFinder do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:project) { create(:project, namespace: subgroup) }
- let_it_be(:package1) { create(:nuget_package, project: project) }
+ let_it_be_with_refind(:package1) { create(:nuget_package, project: project) }
let_it_be(:package2) { create(:nuget_package, name: package1.name, version: '2.0.0', project: project) }
let_it_be(:package3) { create(:nuget_package, name: 'Another.Dummy.Package', project: project) }
let_it_be(:other_package_1) { create(:nuget_package, name: package1.name, version: package1.version) }
@@ -33,6 +33,14 @@ RSpec.describe Packages::Nuget::PackageFinder do
it { is_expected.to be_empty }
end
+ context 'with an uninstallable package' do
+ before do
+ package1.update_column(:status, 1)
+ end
+
+ it { is_expected.to contain_exactly(package2) }
+ end
+
context 'with valid version' do
let(:package_version) { '2.0.0' }
diff --git a/spec/finders/packages/package_finder_spec.rb b/spec/finders/packages/package_finder_spec.rb
index e8c7404a612..2bb4f05a41d 100644
--- a/spec/finders/packages/package_finder_spec.rb
+++ b/spec/finders/packages/package_finder_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe ::Packages::PackageFinder do
let_it_be(:project) { create(:project) }
- let_it_be(:maven_package) { create(:maven_package, project: project) }
+ let_it_be_with_refind(:maven_package) { create(:maven_package, project: project) }
describe '#execute' do
let(:package_id) { maven_package.id }
@@ -13,8 +13,18 @@ RSpec.describe ::Packages::PackageFinder do
it { is_expected.to eq(maven_package) }
+ context 'with non-displayable package' do
+ before do
+ maven_package.update_column(:status, 1)
+ end
+
+ it 'raises an exception' do
+ expect { subject }.to raise_exception(ActiveRecord::RecordNotFound)
+ end
+ end
+
context 'processing packages' do
- let_it_be(:nuget_package) { create(:nuget_package, project: project, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) }
+ let_it_be(:nuget_package) { create(:nuget_package, :processing, project: project) }
let(:package_id) { nuget_package.id }
it 'are not returned' do
diff --git a/spec/finders/packages/packages_finder_spec.rb b/spec/finders/packages/packages_finder_spec.rb
index 0add77a8478..b72f4aab3ec 100644
--- a/spec/finders/packages/packages_finder_spec.rb
+++ b/spec/finders/packages/packages_finder_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe ::Packages::PackagesFinder do
end
context 'with processing packages' do
- let_it_be(:nuget_package) { create(:nuget_package, project: project, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) }
+ let_it_be(:nuget_package) { create(:nuget_package, :processing, project: project) }
it { is_expected.to match_array([conan_package, maven_package]) }
end
diff --git a/spec/finders/packages/pypi/package_finder_spec.rb b/spec/finders/packages/pypi/package_finder_spec.rb
new file mode 100644
index 00000000000..7d9eb8a5cd1
--- /dev/null
+++ b/spec/finders/packages/pypi/package_finder_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Pypi::PackageFinder do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:project2) { create(:project, group: group) }
+ let_it_be(:package1) { create(:pypi_package, project: project) }
+ let_it_be(:package2) { create(:pypi_package, project: project) }
+ let_it_be(:package3) { create(:pypi_package, project: project2) }
+
+ let(:package_file) { package2.package_files.first }
+ let(:params) do
+ {
+ filename: package_file.file_name,
+ sha256: package_file.file_sha256
+ }
+ end
+
+ describe 'execute' do
+ subject { described_class.new(user, scope, params).execute }
+
+ context 'within a project' do
+ let(:scope) { project }
+
+ it { is_expected.to eq(package2) }
+ end
+
+ context 'within a group' do
+ let(:scope) { group }
+
+ it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+
+ context 'user with access' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to eq(package2) }
+ end
+ end
+ end
+end
diff --git a/spec/finders/packages/pypi/packages_finder_spec.rb b/spec/finders/packages/pypi/packages_finder_spec.rb
new file mode 100644
index 00000000000..a69c2317261
--- /dev/null
+++ b/spec/finders/packages/pypi/packages_finder_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Pypi::PackagesFinder do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:project2) { create(:project, group: group) }
+ let_it_be(:package1) { create(:pypi_package, project: project) }
+ let_it_be(:package2) { create(:pypi_package, project: project) }
+ let_it_be(:package3) { create(:pypi_package, name: package2.name, project: project) }
+ let_it_be(:package4) { create(:pypi_package, name: package2.name, project: project2) }
+
+ let(:package_name) { package2.name }
+
+ describe 'execute!' do
+ subject { described_class.new(user, scope, package_name: package_name).execute! }
+
+ shared_examples 'when no package is found' do
+ context 'non-existing package' do
+ let(:package_name) { 'none' }
+
+ it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+ end
+
+ shared_examples 'when package_name param is a non-normalized name' do
+ context 'non-existing package' do
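+ # NOTE: lookups are assumed to use the stored (normalized) name, so an upcased, dot-separated variant is not found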
+ let(:package_name) { package2.name.upcase.tr('-', '.') }
+
+ it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+ end
+ end
+
+ context 'within a project' do
+ let(:scope) { project }
+
+ it { is_expected.to contain_exactly(package2, package3) }
+
+ it_behaves_like 'when no package is found'
+ it_behaves_like 'when package_name param is a non-normalized name'
+ end
+
+ context 'within a group' do
+ let(:scope) { group }
+
+ it { expect { subject }.to raise_error(ActiveRecord::RecordNotFound) }
+
+ context 'user with access to only one project' do
+ before do
+ project2.add_developer(user)
+ end
+
+ it { is_expected.to contain_exactly(package4) }
+
+ it_behaves_like 'when no package is found'
+ it_behaves_like 'when package_name param is a non-normalized name'
+
+ context 'user with access to multiple projects' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to contain_exactly(package2, package3, package4) }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/projects/groups_finder_spec.rb b/spec/finders/projects/groups_finder_spec.rb
index 89d4edaec7c..7f01b73c7ca 100644
--- a/spec/finders/projects/groups_finder_spec.rb
+++ b/spec/finders/projects/groups_finder_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Projects::GroupsFinder do
let_it_be(:root_group) { create(:group, :public) }
let_it_be(:project_group) { create(:group, :public, parent: root_group) }
let_it_be(:shared_group_with_dev_access) { create(:group, :private, parent: root_group) }
- let_it_be(:shared_group_with_reporter_access) { create(:group, :private) }
+ let_it_be(:shared_group_with_reporter_access) { create(:group, :public) }
let_it_be(:public_project) { create(:project, :public, group: project_group) }
let_it_be(:private_project) { create(:project, :private, group: project_group) }
@@ -53,6 +53,24 @@ RSpec.describe Projects::GroupsFinder do
is_expected.to match_array([project_group, root_group, shared_group_with_dev_access])
end
end
+
+ context 'when shared_visible_only is on' do
+ let(:params) { super().merge(shared_visible_only: true) }
+
+ it 'returns ancestor and public shared groups' do
+ is_expected.to match_array([project_group, root_group, shared_group_with_reporter_access])
+ end
+
+ context 'when user has access to the private shared group' do
+ before do
+ shared_group_with_dev_access.add_guest(current_user)
+ end
+
+ it 'returns ancestor and shared groups user has access to' do
+ is_expected.to match_array([project_group, root_group, shared_group_with_reporter_access, shared_group_with_dev_access])
+ end
+ end
+ end
end
context 'when skip group option is on' do
@@ -74,6 +92,19 @@ RSpec.describe Projects::GroupsFinder do
it 'returns ancestor groups for this project' do
is_expected.to match_array([project_group, root_group])
end
+
+ context 'when visible shared groups are requested' do
+ let(:params) do
+ {
+ with_shared: true,
+ shared_visible_only: true
+ }
+ end
+
+ it 'returns ancestor groups and public shared groups for this project' do
+ is_expected.to match_array([project_group, root_group, shared_group_with_reporter_access])
+ end
+ end
end
end
end
diff --git a/spec/finders/projects/members/effective_access_level_finder_spec.rb b/spec/finders/projects/members/effective_access_level_finder_spec.rb
new file mode 100644
index 00000000000..1112dbd0d6e
--- /dev/null
+++ b/spec/finders/projects/members/effective_access_level_finder_spec.rb
@@ -0,0 +1,257 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Projects::Members::EffectiveAccessLevelFinder, '#execute' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ # The result set is converted to JSON just for ease of testing.
+ subject { described_class.new(project).execute.as_json }
+
+ context 'for a personal project' do
+ let_it_be(:project) { create(:project) }
+
+ shared_examples_for 'includes access level of the owner of the project as Maintainer' do
+ it 'includes access level of the owner of the project as Maintainer' do
+ expect(subject).to(
+ contain_exactly(
+ hash_including(
+ 'user_id' => project.namespace.owner.id,
+ 'access_level' => Gitlab::Access::MAINTAINER
+ )
+ )
+ )
+ end
+ end
+
+ context 'when the project owner is a member of the project' do
+ it_behaves_like 'includes access level of the owner of the project as Maintainer'
+ end
+
+ context 'when the project owner is not explicitly a member of the project' do
+ before do
+ project.members.find_by(user_id: project.namespace.owner.id).destroy!
+ end
+
+ it_behaves_like 'includes access level of the owner of the project as Maintainer'
+ end
+ end
+
+ context 'direct members of the project' do
+ it 'includes access levels of the direct members of the project' do
+ developer = create(:project_member, :developer, source: project)
+ maintainer = create(:project_member, :maintainer, source: project)
+
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => developer.user.id,
+ 'access_level' => Gitlab::Access::DEVELOPER
+ ),
+ hash_including(
+ 'user_id' => maintainer.user.id,
+ 'access_level' => Gitlab::Access::MAINTAINER
+ )
+ )
+ )
+ end
+
+ it 'does not include access levels of users who have requested access to the project' do
+ member_with_access_request = create(:project_member, :access_request, :developer, source: project)
+
+ expect(subject).not_to(
+ include(
+ hash_including(
+ 'user_id' => member_with_access_request.user.id
+ )
+ )
+ )
+ end
+
+ it 'includes access levels of users who are in non-active state' do
+ blocked_member = create(:project_member, :blocked, :developer, source: project)
+
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => blocked_member.user.id,
+ 'access_level' => Gitlab::Access::DEVELOPER
+ )
+ )
+ )
+ end
+ end
+
+ context 'for a project within a group' do
+ context 'project in a root group' do
+ it 'includes access levels of users who are direct members of the parent group' do
+ group_member = create(:group_member, :developer, source: group)
+
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => group_member.user.id,
+ 'access_level' => Gitlab::Access::DEVELOPER
+ )
+ )
+ )
+ end
+ end
+
+ context 'project in a subgroup' do
+ let_it_be(:project) { create(:project, group: create(:group, :nested)) }
+
+ it 'includes access levels of users who are members of the ancestors of the parent group' do
+ group_member = create(:group_member, :maintainer, source: project.group.parent)
+
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => group_member.user.id,
+ 'access_level' => Gitlab::Access::MAINTAINER
+ )
+ )
+ )
+ end
+ end
+
+ context 'user is both a member of the project and a member of the parent group' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ group.add_developer(user)
+ project.add_maintainer(user)
+ end
+
+ it 'includes the maximum access level among project and group membership' do
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => user.id,
+ 'access_level' => Gitlab::Access::MAINTAINER
+ )
+ )
+ )
+ end
+ end
+
+ context 'members from group share' do
+ let_it_be(:shared_with_group) { create(:group) }
+ let_it_be(:user_from_shared_with_group) { create(:user) }
+
+ before do
+ shared_with_group.add_guest(user_from_shared_with_group)
+ create(:group_group_link, :developer, shared_group: project.group, shared_with_group: shared_with_group)
+ end
+
+ it 'includes the user from the group share with the right access level' do
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => user_from_shared_with_group.id,
+ 'access_level' => Gitlab::Access::GUEST
+ )
+ )
+ )
+ end
+
+ context 'when the project also has the same user as a member, but with a different access level' do
+ before do
+ project.add_maintainer(user_from_shared_with_group)
+ end
+
+ it 'includes the maximum access level among project and group membership' do
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => user_from_shared_with_group.id,
+ 'access_level' => Gitlab::Access::MAINTAINER
+ )
+ )
+ )
+ end
+ end
+
+ context "when the project's ancestor also has the same user as a member, but with a different access level" do
+ before do
+ project.group.add_maintainer(user_from_shared_with_group)
+ end
+
+ it 'includes the maximum access level among project and group membership' do
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => user_from_shared_with_group.id,
+ 'access_level' => Gitlab::Access::MAINTAINER
+ )
+ )
+ )
+ end
+ end
+ end
+ end
+
+ context 'for a project that is shared with other group(s)' do
+ let_it_be(:shared_with_group) { create(:group) }
+ let_it_be(:user_from_shared_with_group) { create(:user) }
+
+ before do
+ create(:project_group_link, :developer, project: project, group: shared_with_group)
+ shared_with_group.add_maintainer(user_from_shared_with_group)
+ end
+
+ it 'includes the least among the specified access levels' do
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => user_from_shared_with_group.id,
+ 'access_level' => Gitlab::Access::DEVELOPER
+ )
+ )
+ )
+ end
+
+ context 'when the group containing the project has forbidden group shares for any of its projects' do
+ let_it_be(:project) { create(:project, group: create(:group)) }
+
+ before do
+ project.namespace.update!(share_with_group_lock: true)
+ end
+
+ it 'does not include the users from any group shares' do
+ expect(subject).not_to(
+ include(
+ hash_including(
+ 'user_id' => user_from_shared_with_group.id
+ )
+ )
+ )
+ end
+ end
+ end
+
+ context 'a combination of all possible avenues of membership' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:shared_with_group) { create(:group) }
+
+ before do
+ create(:project_group_link, :maintainer, project: project, group: shared_with_group)
+ create(:group_group_link, :reporter, shared_group: project.group, shared_with_group: shared_with_group)
+
+ shared_with_group.add_maintainer(user)
+ group.add_guest(user)
+ project.add_developer(user)
+ end
+
+ it 'includes the highest access level from all avenues of memberships' do
+ expect(subject).to(
+ include(
+ hash_including(
+ 'user_id' => user.id,
+ 'access_level' => Gitlab::Access::MAINTAINER # From project_group_link
+ )
+ )
+ )
+ end
+ end
+end
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index a178261e899..364e5de4ece 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -139,7 +139,7 @@ RSpec.describe ProjectsFinder do
describe 'filter by tags' do
before do
- public_project.tag_list.add('foo')
+ public_project.tag_list = 'foo'
public_project.save!
end
diff --git a/spec/finders/repositories/branch_names_finder_spec.rb b/spec/finders/repositories/branch_names_finder_spec.rb
index 4d8bfcc0f20..40f5d339832 100644
--- a/spec/finders/repositories/branch_names_finder_spec.rb
+++ b/spec/finders/repositories/branch_names_finder_spec.rb
@@ -5,21 +5,34 @@ require 'spec_helper'
RSpec.describe Repositories::BranchNamesFinder do
let(:project) { create(:project, :repository) }
- let(:branch_names_finder) { described_class.new(project.repository, search: 'conflict-*') }
-
describe '#execute' do
- subject(:execute) { branch_names_finder.execute }
-
- it 'filters branch names' do
- expect(execute).to contain_exactly(
- 'conflict-binary-file',
- 'conflict-resolvable',
- 'conflict-contains-conflict-markers',
- 'conflict-missing-side',
- 'conflict-start',
- 'conflict-non-utf8',
- 'conflict-too-large'
+ it 'returns all filtered branch names' do
+ expect(create_branch_names_finder(0, 100).execute).to contain_exactly(
+ 'snippet/edit-file',
+ 'snippet/multiple-files',
+ 'snippet/no-files',
+ 'snippet/rename-and-edit-file',
+ 'snippet/single-file'
)
end
+
+ it 'returns a limited number of offset filtered branch names' do
+ starting_names = create_branch_names_finder(0, 3).execute
+ offset_names = create_branch_names_finder(3, 2).execute
+
+ expect(starting_names.count).to eq(3)
+ expect(offset_names.count).to eq(2)
+
+ expect(offset_names).not_to include(*starting_names)
+
+ all_names = create_branch_names_finder(0, 100).execute
+ expect(all_names).to contain_exactly(*starting_names, *offset_names)
+ end
+
+ private
+
+ def create_branch_names_finder(offset, limit)
+ described_class.new(project.repository, search: 'snippet/*', offset: offset, limit: limit)
+ end
end
end
diff --git a/spec/finders/template_finder_spec.rb b/spec/finders/template_finder_spec.rb
index 164975fdfb6..b7339288c51 100644
--- a/spec/finders/template_finder_spec.rb
+++ b/spec/finders/template_finder_spec.rb
@@ -21,7 +21,6 @@ RSpec.describe TemplateFinder do
:gitignores | 'Actionscript'
:gitlab_ci_ymls | 'Android'
:metrics_dashboard_ymls | 'Default'
- :gitlab_ci_syntax_ymls | 'Artifacts example'
end
with_them do
@@ -110,7 +109,6 @@ RSpec.describe TemplateFinder do
:gitlab_ci_ymls | described_class
:licenses | ::LicenseTemplateFinder
:metrics_dashboard_ymls | described_class
- :gitlab_ci_syntax_ymls | described_class
:issues | described_class
:merge_requests | described_class
end
@@ -160,7 +158,6 @@ RSpec.describe TemplateFinder do
:gitignores | 'Actionscript'
:gitlab_ci_ymls | 'Android'
:metrics_dashboard_ymls | 'Default'
- :gitlab_ci_syntax_ymls | 'Artifacts example'
end
with_them do
diff --git a/spec/finders/users_with_pending_todos_finder_spec.rb b/spec/finders/users_with_pending_todos_finder_spec.rb
deleted file mode 100644
index 565b65fbefe..00000000000
--- a/spec/finders/users_with_pending_todos_finder_spec.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe UsersWithPendingTodosFinder do
- describe '#execute' do
- it 'returns the users for all pending todos of a target' do
- issue = create(:issue)
- note = create(:note)
- todo = create(:todo, :pending, target: issue)
-
- create(:todo, :pending, target: note)
-
- users = described_class.new(issue).execute
-
- expect(users).to eq([todo.user])
- end
- end
-end
diff --git a/spec/fixtures/api/schemas/entities/dag_job.json b/spec/fixtures/api/schemas/entities/dag_job.json
index 171ac23ca06..1b1e16f6655 100644
--- a/spec/fixtures/api/schemas/entities/dag_job.json
+++ b/spec/fixtures/api/schemas/entities/dag_job.json
@@ -3,7 +3,7 @@
"required": ["name", "scheduling_type"],
"properties": {
"name": { "type": "string" },
- "scheduling_type": { "type": ["string", null] },
+ "scheduling_type": { "type": ["string", "null"] },
"needs": { "type": "array" }
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/entities/discussion.json b/spec/fixtures/api/schemas/entities/discussion.json
index 1a5b8150ed2..efc31a4f833 100644
--- a/spec/fixtures/api/schemas/entities/discussion.json
+++ b/spec/fixtures/api/schemas/entities/discussion.json
@@ -29,7 +29,7 @@
"web_url": { "type": "uri" },
"status_tooltip_html": { "type": ["string", "null"] },
"path": { "type": "string" }
- },
+ },
"required": [
"id",
"state",
@@ -39,8 +39,8 @@
"username"
]
},
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"system": { "type": "boolean" },
"noteable_id": { "type": "integer" },
"noteable_iid": { "type": ["integer", "null"] },
@@ -48,7 +48,7 @@
"resolved": { "type": "boolean" },
"resolvable": { "type": "boolean" },
"resolved_by": { "type": ["string", "null"] },
- "resolved_at": { "type": ["date", "null"] },
+ "resolved_at": { "type": ["string", "null"], "format": "date-time" },
"note": { "type": "string" },
"note_html": { "type": "string" },
"current_user": { "type": "object" },
diff --git a/spec/fixtures/api/schemas/entities/downloadable_artifact.json b/spec/fixtures/api/schemas/entities/downloadable_artifact.json
new file mode 100644
index 00000000000..01f355f8b55
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/downloadable_artifact.json
@@ -0,0 +1,19 @@
+{
+ "type": "object",
+ "required": ["artifacts"],
+ "properties": {
+ "artifacts": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "expire_at": { "type": ["string", "null"], "format": "date-time" },
+ "expired": { "type": "boolean" },
+ "path": { "type": "string" }
+ }
+ }
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/entities/github/commit.json b/spec/fixtures/api/schemas/entities/github/commit.json
index 698d933be07..a8fa11e5317 100644
--- a/spec/fixtures/api/schemas/entities/github/commit.json
+++ b/spec/fixtures/api/schemas/entities/github/commit.json
@@ -37,7 +37,7 @@
"properties" : {
"name": { "type": "string" },
"email": { "type": "string" },
- "date": { "type": "date" },
+ "date": { "type": "string", "format": "date-time" },
"type": { "type": "string" }
},
"additionalProperties": false
@@ -48,7 +48,7 @@
"properties" : {
"name": { "type": "string" },
"email": { "type": "string" },
- "date": { "type": "date" },
+ "date": { "type": "string", "format": "date-time" },
"type": { "type": "string" }
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/entities/github/pull_request.json b/spec/fixtures/api/schemas/entities/github/pull_request.json
index 6c24879b800..beefc1e3fb7 100644
--- a/spec/fixtures/api/schemas/entities/github/pull_request.json
+++ b/spec/fixtures/api/schemas/entities/github/pull_request.json
@@ -28,21 +28,9 @@
"merged": {
"type": "boolean"
},
- "merged_at": {
- "type": [
- "date",
- "null"
- ]
- },
- "closed_at": {
- "type": [
- "date",
- "null"
- ]
- },
- "updated_at": {
- "type": "date"
- },
+ "merged_at": { "type": [ "string", "null" ], "format": "date-time" },
+ "closed_at": { "type": [ "string", "null" ], "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"assignee": {
"$ref": "user.json"
},
diff --git a/spec/fixtures/api/schemas/entities/issue.json b/spec/fixtures/api/schemas/entities/issue.json
index 9898819ef75..31743b58d98 100644
--- a/spec/fixtures/api/schemas/entities/issue.json
+++ b/spec/fixtures/api/schemas/entities/issue.json
@@ -24,7 +24,7 @@
"created_at": { "type": "date-time" },
"updated_at": { "type": "date-time" },
"branch_name": { "type": ["string", "null"] },
- "due_date": { "type": "date" },
+ "due_date": { "type": ["string", "null"], "format": "date-time" },
"confidential": { "type": "boolean" },
"discussion_locked": { "type": ["boolean", "null"] },
"updated_by_id": { "type": ["integer", "null"] },
diff --git a/spec/fixtures/api/schemas/entities/issue_board.json b/spec/fixtures/api/schemas/entities/issue_board.json
index d7e3c45b13b..56298cb124d 100644
--- a/spec/fixtures/api/schemas/entities/issue_board.json
+++ b/spec/fixtures/api/schemas/entities/issue_board.json
@@ -6,7 +6,7 @@
"title": { "type": "string" },
"confidential": { "type": "boolean" },
"closed": { "type": "boolean" },
- "due_date": { "type": "date" },
+ "due_date": { "type": ["string", "null"] },
"project_id": { "type": "integer" },
"relative_position": { "type": ["integer", "null"] },
"time_estimate": { "type": "integer" },
diff --git a/spec/fixtures/api/schemas/entities/issue_sidebar.json b/spec/fixtures/api/schemas/entities/issue_sidebar.json
index 717eb4992ea..1ab0a1144ab 100644
--- a/spec/fixtures/api/schemas/entities/issue_sidebar.json
+++ b/spec/fixtures/api/schemas/entities/issue_sidebar.json
@@ -6,7 +6,7 @@
"author_id": { "type": "integer" },
"project_id": { "type": "integer" },
"discussion_locked": { "type": ["boolean", "null"] },
- "due_date": { "type": "date" },
+ "due_date": { "type": ["string", "null"], "format": "date-time" },
"confidential": { "type": "boolean" },
"reference": { "type": "string" },
"current_user": {
diff --git a/spec/fixtures/api/schemas/entities/lint_job_entity.json b/spec/fixtures/api/schemas/entities/lint_job_entity.json
index b85f58d4291..77fed2258b5 100644
--- a/spec/fixtures/api/schemas/entities/lint_job_entity.json
+++ b/spec/fixtures/api/schemas/entities/lint_job_entity.json
@@ -39,18 +39,18 @@
"type": ["boolean"]
},
"environment": {
- "type": ["string", null]
+ "type": ["string", "null"]
},
"tag_list": {
"type": ["array"],
"items": { "type": "string" }
},
"only": {
- "type": ["array", "object", null],
+ "type": ["array", "object", "null"],
"items": { "type": ["string", "array"]}
},
"except": {
- "type": ["array", "object", null],
+ "type": ["array", "object", "null"],
"items": { "type": ["string", "array"]}
}
},
diff --git a/spec/fixtures/api/schemas/entities/lint_result_entity.json b/spec/fixtures/api/schemas/entities/lint_result_entity.json
index 502e1dac1ac..1564d4c0a17 100644
--- a/spec/fixtures/api/schemas/entities/lint_result_entity.json
+++ b/spec/fixtures/api/schemas/entities/lint_result_entity.json
@@ -14,7 +14,7 @@
"type": "boolean"
},
"jobs": {
- "type": ["array", null],
+ "type": ["array", "null"],
"items": {
"type": "object",
"$ref": "lint_job_entity.json"
diff --git a/spec/fixtures/api/schemas/evidences/issue.json b/spec/fixtures/api/schemas/evidences/issue.json
index 628c4c89312..71d11443154 100644
--- a/spec/fixtures/api/schemas/evidences/issue.json
+++ b/spec/fixtures/api/schemas/evidences/issue.json
@@ -17,8 +17,8 @@
"state": { "type": "string" },
"iid": { "type": "integer" },
"confidential": { "type": "boolean" },
- "created_at": { "type": "date" },
- "due_date": { "type": ["date", "null"] }
+ "created_at": { "type": "string", "format": "date-time" },
+ "due_date": { "type": ["string", "null"], "format": "date-time" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/evidences/milestone.json b/spec/fixtures/api/schemas/evidences/milestone.json
index ab27fdecde2..7fb54843d07 100644
--- a/spec/fixtures/api/schemas/evidences/milestone.json
+++ b/spec/fixtures/api/schemas/evidences/milestone.json
@@ -16,8 +16,8 @@
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
"iid": { "type": "integer" },
- "created_at": { "type": "date" },
- "due_date": { "type": ["date", "null"] },
+ "created_at": { "type": "string" },
+ "due_date": { "type": ["string", "null"] },
"issues": {
"type": "array",
"items": { "$ref": "issue.json" }
diff --git a/spec/fixtures/api/schemas/evidences/project.json b/spec/fixtures/api/schemas/evidences/project.json
index 3a094bd276f..3e9fff723c1 100644
--- a/spec/fixtures/api/schemas/evidences/project.json
+++ b/spec/fixtures/api/schemas/evidences/project.json
@@ -10,7 +10,7 @@
"id": { "type": "integer" },
"name": { "type": "string" },
"description": { "type": ["string", "null"] },
- "created_at": { "type": "date" }
+ "created_at": { "type": "string", "format": "date-time" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/evidences/release.json b/spec/fixtures/api/schemas/evidences/release.json
index 529abfe490e..eef408e34e1 100644
--- a/spec/fixtures/api/schemas/evidences/release.json
+++ b/spec/fixtures/api/schemas/evidences/release.json
@@ -14,7 +14,7 @@
"tag_name": { "type": "string" },
"name": { "type": ["string", "null"] },
"description": { "type": "string" },
- "created_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
"project": { "$ref": "project.json" },
"milestones": {
"type": "array",
diff --git a/spec/fixtures/api/schemas/feature_flag.json b/spec/fixtures/api/schemas/feature_flag.json
index 5f8cedc1132..45b704e4b84 100644
--- a/spec/fixtures/api/schemas/feature_flag.json
+++ b/spec/fixtures/api/schemas/feature_flag.json
@@ -8,8 +8,8 @@
"id": { "type": "integer" },
"iid": { "type": ["integer", "null"] },
"version": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"name": { "type": "string" },
"active": { "type": "boolean" },
"description": { "type": ["string", "null"] },
diff --git a/spec/fixtures/api/schemas/feature_flag_scope.json b/spec/fixtures/api/schemas/feature_flag_scope.json
index 07c5eed532a..8e8fb0b3d40 100644
--- a/spec/fixtures/api/schemas/feature_flag_scope.json
+++ b/spec/fixtures/api/schemas/feature_flag_scope.json
@@ -10,8 +10,8 @@
"environment_scope": { "type": "string" },
"active": { "type": "boolean" },
"percentage": { "type": ["integer", "null"] },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string" },
+ "updated_at": { "type": "string" },
"strategies": { "type": "array", "items": { "$ref": "feature_flag_strategy.json" } }
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json b/spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json
index 31bb861ced5..acfef595b08 100644
--- a/spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json
+++ b/spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json
@@ -8,29 +8,28 @@
"packageUsername",
"packageChannel",
"recipe",
- "recipePath",
- "packageName"
+ "recipePath"
],
"properties": {
"id": {
"type": "string"
},
- "created_at": {
+ "createdAt": {
"type": "string"
},
- "updated_at": {
+ "updatedAt": {
"type": "string"
},
- "package_username": {
+ "packageUsername": {
"type": "string"
},
- "package_channel": {
+ "packageChannel": {
"type": "string"
},
"recipe": {
"type": "string"
},
- "recipe_path": {
+ "recipePath": {
"type": "string"
}
}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_details.json b/spec/fixtures/api/schemas/graphql/packages/package_details.json
index 87b173eefc7..ca08e005e9d 100644
--- a/spec/fixtures/api/schemas/graphql/packages/package_details.json
+++ b/spec/fixtures/api/schemas/graphql/packages/package_details.json
@@ -12,7 +12,8 @@
"tags",
"pipelines",
"versions",
- "metadata"
+ "metadata",
+ "status"
],
"properties": {
"id": {
@@ -80,6 +81,8 @@
"anyOf": [
{ "$ref": "./package_composer_metadata.json" },
{ "$ref": "./package_conan_metadata.json" },
+ { "$ref": "./package_maven_metadata.json" },
+ { "$ref": "./package_nuget_metadata.json" },
{ "type": "null" }
]
},
@@ -91,6 +94,10 @@
"edges": { "type": "array" },
"nodes": { "type": "array" }
}
+ },
+ "status": {
+ "type": ["string"],
+ "enum": ["DEFAULT", "HIDDEN", "PROCESSING", "ERROR"]
}
}
}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_maven_metadata.json b/spec/fixtures/api/schemas/graphql/packages/package_maven_metadata.json
new file mode 100644
index 00000000000..64d482b2551
--- /dev/null
+++ b/spec/fixtures/api/schemas/graphql/packages/package_maven_metadata.json
@@ -0,0 +1,28 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["id", "createdAt", "updatedAt", "path", "appGroup", "appName"],
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "createdAt": {
+ "type": "string"
+ },
+ "updatedAt": {
+ "type": "string"
+ },
+ "path": {
+ "type": "string"
+ },
+ "appGroup": {
+ "type": "string"
+ },
+ "appVersion": {
+ "type": ["string", "null"]
+ },
+ "appName": {
+ "type": "string"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_nuget_metadata.json b/spec/fixtures/api/schemas/graphql/packages/package_nuget_metadata.json
new file mode 100644
index 00000000000..131b9e97287
--- /dev/null
+++ b/spec/fixtures/api/schemas/graphql/packages/package_nuget_metadata.json
@@ -0,0 +1,19 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["id", "licenseUrl", "projectUrl", "iconUrl"],
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "licenseUrl": {
+ "type": "string"
+ },
+ "projectUrl": {
+ "type": "string"
+ },
+ "iconUrl": {
+ "type": "string"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/issue.json b/spec/fixtures/api/schemas/issue.json
index 84c39ad65e9..aefba89d9e2 100644
--- a/spec/fixtures/api/schemas/issue.json
+++ b/spec/fixtures/api/schemas/issue.json
@@ -11,9 +11,10 @@
"project_id": { "type": ["integer", "null"] },
"title": { "type": "string" },
"confidential": { "type": "boolean" },
- "due_date": { "type": ["date", "null"] },
+ "due_date": { "type": ["string", "null"] },
"relative_position": { "type": ["integer", "null"] },
"time_estimate": { "type": "integer" },
+ "type": { "type": "string", "enum": ["ISSUE", "INCIDENT", "TEST_CASE", "REQUIREMENT"] },
"issue_sidebar_endpoint": { "type": "string" },
"toggle_subscription_endpoint": { "type": "string" },
"assignable_labels_endpoint": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/pipeline_schedule.json b/spec/fixtures/api/schemas/pipeline_schedule.json
index d01801a15fa..8a175ba081f 100644
--- a/spec/fixtures/api/schemas/pipeline_schedule.json
+++ b/spec/fixtures/api/schemas/pipeline_schedule.json
@@ -6,17 +6,21 @@
"ref": { "type": "string" },
"cron": { "type": "string" },
"cron_timezone": { "type": "string" },
- "next_run_at": { "type": "date" },
+ "next_run_at": { "type": "string" },
"active": { "type": "boolean" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "last_pipeline": {
+ "created_at": { "type": ["string", "null"], "format": "date-time" },
+ "updated_at": { "type": ["string", "null"], "format": "date-time" },
+ "last_pipeline": {
"type": ["object", "null"],
"properties": {
"id": { "type": "integer" },
+ "project_id": { "type": "integer" },
"sha": { "type": "string" },
"ref": { "type": "string" },
- "status": { "type": "string" }
+ "status": { "type": "string" },
+ "web_url": { "type": ["string", "null"] },
+ "created_at": { "type": ["string", "null"], "format": "date-time" },
+ "updated_at": { "type": ["string", "null"], "format": "date-time" }
},
"additionalProperties": false
},
@@ -40,7 +44,7 @@
}
},
"required": [
- "id", "description", "ref", "cron", "cron_timezone", "next_run_at",
+ "id", "description", "ref", "cron", "cron_timezone", "next_run_at",
"active", "created_at", "updated_at", "owner"
],
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/public_api/v4/board.json b/spec/fixtures/api/schemas/public_api/v4/board.json
index 89a21c29969..c3a140c1bd7 100644
--- a/spec/fixtures/api/schemas/public_api/v4/board.json
+++ b/spec/fixtures/api/schemas/public_api/v4/board.json
@@ -30,6 +30,7 @@
"properties": {
"id": { "type": "integer" },
"avatar_url": { "type": ["string", "null"] },
+ "readme_url": { "type": ["string", "null"] },
"description": { "type": ["string", "null"] },
"default_branch": { "type": ["string", "null"] },
"tag_list": { "type": "array" },
@@ -42,8 +43,9 @@
"path_with_namespace": { "type": "string" },
"star_count": { "type": "integer" },
"forks_count": { "type": "integer" },
- "created_at": { "type": "date" },
- "last_activity_at": { "type": "date" }
+ "created_at": { "type": "string", "format": "date-time" },
+ "namespace": {"type": "object" },
+ "last_activity_at": { "type": "string", "format": "date-time" }
},
"additionalProperties": false
},
@@ -70,7 +72,7 @@
"id": { "type": "integer" },
"color": {
"type": "string",
- "pattern": "^#[0-9A-Fa-f]{3}{1,2}+$"
+ "pattern": "#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})"
},
"description": { "type": ["string", "null"] },
"name": { "type": "string" }
diff --git a/spec/fixtures/api/schemas/public_api/v4/commit/basic.json b/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
index da99e99c692..227b5a20af3 100644
--- a/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/commit/basic.json
@@ -19,7 +19,7 @@
"id": { "type": ["string", "null"] },
"short_id": { "type": ["string", "null"] },
"title": { "type": "string" },
- "created_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
"parent_ids": {
"type": ["array", "null"],
"items": {
@@ -30,10 +30,10 @@
"message": { "type": "string" },
"author_name": { "type": "string" },
"author_email": { "type": "string" },
- "authored_date": { "type": "date" },
+ "authored_date": { "type": "string", "format": "date-time" },
"committer_name": { "type": "string" },
"committer_email": { "type": "string" },
- "committed_date": { "type": "date" },
+ "committed_date": { "type": "string", "format": "date-time" },
"web_url": { "type": "string" }
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/commit_note.json b/spec/fixtures/api/schemas/public_api/v4/commit_note.json
index 02081989271..305febea75c 100644
--- a/spec/fixtures/api/schemas/public_api/v4/commit_note.json
+++ b/spec/fixtures/api/schemas/public_api/v4/commit_note.json
@@ -14,6 +14,6 @@
"line": { "type": ["integer", "null"] },
"line_type": { "type": ["string", "null"] },
"author": { "$ref": "user/basic.json" },
- "created_at": { "type": "date" }
+ "created_at": { "type": "string", "format": "date-time" }
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/deploy_token.json b/spec/fixtures/api/schemas/public_api/v4/deploy_token.json
index 7cb9f136b0d..c4d3f944aea 100644
--- a/spec/fixtures/api/schemas/public_api/v4/deploy_token.json
+++ b/spec/fixtures/api/schemas/public_api/v4/deploy_token.json
@@ -17,9 +17,7 @@
"username": {
"type": "string"
},
- "expires_at": {
- "type": "date"
- },
+ "expires_at": { "type": "string" },
"scopes": {
"type": "array",
"items": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/environment.json b/spec/fixtures/api/schemas/public_api/v4/environment.json
index 57352017f03..b90bfe8de55 100644
--- a/spec/fixtures/api/schemas/public_api/v4/environment.json
+++ b/spec/fixtures/api/schemas/public_api/v4/environment.json
@@ -18,7 +18,10 @@
{ "$ref": "deployment.json" }
]
},
- "state": { "type": "string" }
+ "state": { "type": "string" },
+ "enable_advanced_logs_querying": { "type": "boolean" },
+ "logs_api_path": { "type": "string" },
+ "gitlab_managed_apps_logs_path": { "type": "string" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flag.json b/spec/fixtures/api/schemas/public_api/v4/feature_flag.json
index 0f304e9ee73..1de19419a30 100644
--- a/spec/fixtures/api/schemas/public_api/v4/feature_flag.json
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flag.json
@@ -6,8 +6,8 @@
"description": { "type": ["string", "null"] },
"active": {"type": "boolean" },
"version": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"scopes": { "type": "array", "items": { "$ref": "feature_flag_scope.json" } },
"strategies": { "type": "array", "items": { "$ref": "operations/strategy.json" } }
},
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json b/spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json
index a11ae5705cc..64237ee1a6d 100644
--- a/spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flag_detailed_scopes.json
@@ -13,8 +13,8 @@
"id": { "type": "integer" },
"environment_scope": { "type": "string" },
"active": { "type": "boolean" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"strategies": { "type": "array", "items": { "$ref": "feature_flag_strategy.json" } }
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json b/spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json
index 18402af482e..c8e77e2418c 100644
--- a/spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json
+++ b/spec/fixtures/api/schemas/public_api/v4/feature_flag_scope.json
@@ -9,8 +9,8 @@
"id": { "type": "integer" },
"environment_scope": { "type": "string" },
"active": { "type": "boolean" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"strategies": { "type": "array", "items": { "$ref": "feature_flag_strategy.json" } }
},
"additionalProperties": false
diff --git a/spec/fixtures/api/schemas/public_api/v4/issue.json b/spec/fixtures/api/schemas/public_api/v4/issue.json
index 69ecba8b6f3..3173a8ebfb5 100644
--- a/spec/fixtures/api/schemas/public_api/v4/issue.json
+++ b/spec/fixtures/api/schemas/public_api/v4/issue.json
@@ -8,9 +8,9 @@
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
"discussion_locked": { "type": ["boolean", "null"] },
- "closed_at": { "type": "date" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "closed_at": { "type": ["string", "null"] },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"labels": {
"type": "array",
"items": {
@@ -27,10 +27,10 @@
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "due_date": { "type": "date" },
- "start_date": { "type": "date" }
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
+ "due_date": { "type": "string" , "format": "date-time" },
+ "start_date": { "type": "string", "format": "date-time" }
},
"additionalProperties": false
},
@@ -83,7 +83,7 @@
"user_notes_count": { "type": "integer" },
"upvotes": { "type": "integer" },
"downvotes": { "type": "integer" },
- "due_date": { "type": ["date", "null"] },
+ "due_date": { "type": ["string", "null"] },
"confidential": { "type": "boolean" },
"web_url": { "type": "uri" },
"time_stats": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/issue_link.json b/spec/fixtures/api/schemas/public_api/v4/issue_link.json
index 588d63c2dcf..33184be07c3 100644
--- a/spec/fixtures/api/schemas/public_api/v4/issue_link.json
+++ b/spec/fixtures/api/schemas/public_api/v4/issue_link.json
@@ -15,8 +15,8 @@
"type": "string",
"enum": ["relates_to", "blocks", "is_blocked_by"]
},
- "link_created_at": { "type": "date" },
- "link_updated_at": { "type": "date" }
+ "link_created_at": { "type": "string" },
+ "link_updated_at": { "type": "string" }
},
"required" : [ "source_issue", "target_issue", "link_type" ]
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/job.json b/spec/fixtures/api/schemas/public_api/v4/job.json
index b50479841a9..afed4f23017 100644
--- a/spec/fixtures/api/schemas/public_api/v4/job.json
+++ b/spec/fixtures/api/schemas/public_api/v4/job.json
@@ -12,6 +12,7 @@
"started_at",
"finished_at",
"duration",
+ "queued_duration",
"user",
"commit",
"pipeline",
@@ -34,6 +35,7 @@
"started_at": { "type": ["null", "string"] },
"finished_at": { "type": ["null", "string"] },
"duration": { "type": ["null", "number"] },
+ "queued_duration": { "type": ["null", "number"] },
"user": { "$ref": "user/basic.json" },
"commit": {
"oneOf": [
diff --git a/spec/fixtures/api/schemas/public_api/v4/label_basic.json b/spec/fixtures/api/schemas/public_api/v4/label_basic.json
index a501bc2ec56..a4653c67ed2 100644
--- a/spec/fixtures/api/schemas/public_api/v4/label_basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/label_basic.json
@@ -6,7 +6,8 @@
"color",
"description",
"description_html",
- "text_color"
+ "text_color",
+ "remove_on_close"
],
"properties": {
"id": { "type": "integer" },
@@ -20,7 +21,8 @@
"text_color": {
"type": "string",
"pattern": "^#[0-9A-Fa-f]{3}{1,2}$"
- }
+ },
+ "remove_on_close": { "type": "boolean" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/members.json b/spec/fixtures/api/schemas/public_api/v4/members.json
index 695f00b0040..adfc3c519ca 100644
--- a/spec/fixtures/api/schemas/public_api/v4/members.json
+++ b/spec/fixtures/api/schemas/public_api/v4/members.json
@@ -10,7 +10,7 @@
"avatar_url": { "type": ["string", "null"] },
"web_url": { "type": ["string", "null"] },
"access_level": { "type": "integer" },
- "expires_at": { "type": ["date", "null"] },
+ "expires_at": { "type": ["string", "null"], "format": "date-time" },
"is_using_seat": { "type": "boolean" }
},
"required": [
diff --git a/spec/fixtures/api/schemas/public_api/v4/merge_request.json b/spec/fixtures/api/schemas/public_api/v4/merge_request.json
index 3bf1299a1d8..c31e91cfef8 100644
--- a/spec/fixtures/api/schemas/public_api/v4/merge_request.json
+++ b/spec/fixtures/api/schemas/public_api/v4/merge_request.json
@@ -20,7 +20,7 @@
},
"additionalProperties": false
},
- "merged_at": { "type": ["date", "null"] },
+ "merged_at": { "type": ["string", "null"] },
"closed_by": {
"type": ["object", "null"],
"properties": {
@@ -33,9 +33,9 @@
},
"additionalProperties": false
},
- "closed_at": { "type": ["date", "null"] },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "closed_at": { "type": ["string", "null"], "format": "date-time" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"target_branch": { "type": "string" },
"source_branch": { "type": "string" },
"upvotes": { "type": "integer" },
@@ -88,10 +88,10 @@
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "due_date": { "type": "date" },
- "start_date": { "type": "date" }
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
+ "due_date": { "type": "string", "format": "date-time" },
+ "start_date": { "type": "string", "format": "date-time" }
},
"additionalProperties": false
},
diff --git a/spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json b/spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json
index 45507e3e400..f176e5ee261 100644
--- a/spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json
+++ b/spec/fixtures/api/schemas/public_api/v4/merge_request_simple.json
@@ -8,8 +8,8 @@
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"web_url": { "type": "uri" }
},
"required": [
diff --git a/spec/fixtures/api/schemas/public_api/v4/milestone.json b/spec/fixtures/api/schemas/public_api/v4/milestone.json
index c8c6a7b6ae1..e7e0e57f02f 100644
--- a/spec/fixtures/api/schemas/public_api/v4/milestone.json
+++ b/spec/fixtures/api/schemas/public_api/v4/milestone.json
@@ -8,10 +8,10 @@
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "start_date": { "type": "date" },
- "due_date": { "type": "date" },
+ "created_at": { "type": "string" },
+ "updated_at": { "type": "string" },
+ "start_date": { "type": ["string", "null"], "format": "date-time" },
+ "due_date": { "type": ["string", "null"], "format": "date-time" },
"expired": { "type": ["boolean", "null"] },
"web_url": { "type": "string" }
},
diff --git a/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json b/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json
index f008ed7d55f..d09d1634eb9 100644
--- a/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json
+++ b/spec/fixtures/api/schemas/public_api/v4/milestone_with_stats.json
@@ -8,10 +8,10 @@
"title": { "type": "string" },
"description": { "type": ["string", "null"] },
"state": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
- "start_date": { "type": "date" },
- "due_date": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
+ "start_date": { "type": ["string", "null"], "format": "date-time" },
+ "due_date": { "type": ["string", "null"], "format": "date-time" },
"expired": { "type": ["boolean", "null"] },
"web_url": { "type": "string" },
"issue_stats": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/notes.json b/spec/fixtures/api/schemas/public_api/v4/notes.json
index c4510207882..25e8f9cbed6 100644
--- a/spec/fixtures/api/schemas/public_api/v4/notes.json
+++ b/spec/fixtures/api/schemas/public_api/v4/notes.json
@@ -22,8 +22,8 @@
]
},
"commands_changes": { "type": "object", "additionalProperties": true },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"system": { "type": "boolean" },
"noteable_id": { "type": "integer" },
"noteable_iid": { "type": "integer" },
@@ -31,7 +31,7 @@
"resolved": { "type": "boolean" },
"resolvable": { "type": "boolean" },
"resolved_by": { "type": ["string", "null"] },
- "resolved_at": { "type": ["date", "null"] },
+ "resolved_at": { "type": ["string", "null"] },
"confidential": { "type": ["boolean", "null"] }
},
"required": [
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
index 724df5a437d..0fa59bc3bec 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/nuget/packages_metadata.json
@@ -2,7 +2,6 @@
"type": "object",
"required": ["count", "items"],
"properties": {
- "count": { "const": 0 },
"items": {
"type": "array",
"items": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/package.json b/spec/fixtures/api/schemas/public_api/v4/packages/package.json
index 08909efd10c..607e0df1886 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/package.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/package.json
@@ -4,6 +4,7 @@
"name",
"version",
"package_type",
+ "status",
"_links",
"versions"
],
@@ -20,6 +21,9 @@
"package_type": {
"type": "string"
},
+ "status": {
+ "type": "string"
+ },
"_links": {
"type": "object",
"required": [
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/package_files.json b/spec/fixtures/api/schemas/public_api/v4/packages/package_files.json
index 93b6dcde080..332e42fdbe5 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/package_files.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/package_files.json
@@ -8,6 +8,8 @@
"package_id": { "type": "integer" },
"file_name": { "type": "string" },
"file_sha1": { "type": "string" },
+ "file_sha256": { "type": "string" },
+ "file_md5": { "type": "string" },
"pipelines": {
"items": { "$ref": "../pipeline.json" }
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/module.json b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/module.json
new file mode 100644
index 00000000000..f0e2c8e1765
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/module.json
@@ -0,0 +1,12 @@
+{
+ "type": "object",
+ "required" : ["versions"],
+ "optional" : ["source"],
+ "properties" : {
+ "source": { "type": "string" },
+ "versions": {
+ "minItems": 0,
+ "items": { "$ref": "./version.json" }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/modules.json b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/modules.json
new file mode 100644
index 00000000000..f31a41b17de
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/modules.json
@@ -0,0 +1,4 @@
+{
+ "type": "array",
+ "items": { "$ref": "./module.json" }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/version.json b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/version.json
new file mode 100644
index 00000000000..4fd5595c423
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/version.json
@@ -0,0 +1,38 @@
+{
+ "type": "object",
+ "required": ["version", "submodules", "root"],
+ "properties": {
+ "version": {
+ "type": "string"
+ },
+ "submodules": {
+ "type": "array",
+ "maxItems": 0
+ },
+ "root": {
+ "type": "object",
+ "required": ["dependencies", "providers"],
+ "properties": {
+ "dependencies": {
+ "type": "array",
+ "maxItems": 0
+ },
+ "providers": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": ["name", "version"],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "version": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/versions.json b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/versions.json
new file mode 100644
index 00000000000..647e9d11d8b
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/terraform/modules/v1/versions.json
@@ -0,0 +1,9 @@
+{
+ "type": "object",
+ "required" : ["modules"],
+ "properties" : {
+ "modules": {
+ "items": { "$ref": "./module.json" }
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json b/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json
index 721b8d4641f..66d4be529b1 100644
--- a/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json
+++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain/basic.json
@@ -6,7 +6,7 @@
"project_id": { "type": "integer" },
"verified": { "type": "boolean" },
"verification_code": { "type": ["string", "null"] },
- "enabled_until": { "type": ["date", "null"] },
+ "enabled_until": { "type": ["string", "null"], "format": "date-time" },
"auto_ssl_enabled": { "type": "boolean" },
"certificate_expiration": {
"type": "object",
diff --git a/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json b/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json
index 3dd80a6f11b..bbbc610eb27 100644
--- a/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json
+++ b/spec/fixtures/api/schemas/public_api/v4/pages_domain/detail.json
@@ -5,7 +5,7 @@
"url": { "type": "uri" },
"verified": { "type": "boolean" },
"verification_code": { "type": ["string", "null"] },
- "enabled_until": { "type": ["date", "null"] },
+ "enabled_until": { "type": ["string", "null"] },
"auto_ssl_enabled": { "type": "boolean" },
"certificate": {
"type": "object",
diff --git a/spec/fixtures/api/schemas/public_api/v4/pipeline/detail.json b/spec/fixtures/api/schemas/public_api/v4/pipeline/detail.json
index 63e130d4055..d3cabdde45c 100644
--- a/spec/fixtures/api/schemas/public_api/v4/pipeline/detail.json
+++ b/spec/fixtures/api/schemas/public_api/v4/pipeline/detail.json
@@ -13,11 +13,11 @@
{ "$ref": "../user/basic.json" }
]
},
- "created_at": { "type": ["date", "null"] },
- "updated_at": { "type": ["date", "null"] },
- "started_at": { "type": ["date", "null"] },
- "finished_at": { "type": ["date", "null"] },
- "committed_at": { "type": ["date", "null"] },
+ "created_at": { "type": ["string", "null"], "format": "date-time" },
+ "updated_at": { "type": ["string", "null"], "format": "date-time" },
+ "started_at": { "type": ["string", "null"], "format": "date-time" },
+ "finished_at": { "type": ["string", "null"], "format": "date-time" },
+ "committed_at": { "type": ["string", "null"], "format": "date-time" },
"duration": { "type": ["number", "null"] },
"coverage": { "type": ["string", "null"] },
"detailed_status": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/project.json b/spec/fixtures/api/schemas/public_api/v4/project.json
new file mode 100644
index 00000000000..4a3149f2bdc
--- /dev/null
+++ b/spec/fixtures/api/schemas/public_api/v4/project.json
@@ -0,0 +1,45 @@
+{
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "name": { "type": "string" },
+ "name_with_namespace": { "type": "string" },
+ "description": { "type": ["string", "null"] },
+ "path": { "type": "string" },
+ "path_with_namespace": { "type": "string" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "default_branch": { "type": ["string", "null"] },
+ "tag_list": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "ssh_url_to_repo": { "type": "string" },
+ "http_url_to_repo": { "type": "string" },
+ "web_url": { "type": "string" },
+ "readme_url": { "type": ["string", "null"] },
+ "avatar_url": { "type": ["string", "null"] },
+ "star_count": { "type": "integer" },
+ "forks_count": { "type": "integer" },
+ "last_activity_at": { "type": "string", "format": "date-time" },
+ "namespace": {
+ "type": "object",
+ "properties" : {
+ "id": { "type": "integer" },
+ "name": { "type": "string" },
+ "path": { "type": "string" },
+ "kind": { "type": "string" },
+ "full_path": { "type": "string" },
+ "parent_id": { "type": ["integer", "null"] }
+ }
+ }
+ },
+ "required": [
+ "id", "name", "name_with_namespace", "description", "path",
+ "path_with_namespace", "created_at", "default_branch", "tag_list",
+ "ssh_url_to_repo", "http_url_to_repo", "web_url", "readme_url", "avatar_url",
+ "star_count", "forks_count", "last_activity_at", "namespace"
+ ],
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/public_api/v4/project/identity.json b/spec/fixtures/api/schemas/public_api/v4/project/identity.json
index e35ab023d44..138c0142d02 100644
--- a/spec/fixtures/api/schemas/public_api/v4/project/identity.json
+++ b/spec/fixtures/api/schemas/public_api/v4/project/identity.json
@@ -16,6 +16,6 @@
"name_with_namespace": { "type": "string" },
"path": { "type": "string" },
"path_with_namespace": { "type": "string" },
- "created_at": { "type": "date" }
+ "created_at": { "type": "string", "format": "date-time" }
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json b/spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json
index 6f8a2ff58e5..cfaa1c28bb7 100644
--- a/spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json
+++ b/spec/fixtures/api/schemas/public_api/v4/project_repository_storage_move.json
@@ -10,7 +10,7 @@
],
"properties" : {
"id": { "type": "integer" },
- "created_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
"state": { "type": "string" },
"source_storage_name": { "type": "string" },
"destination_storage_name": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/projects.json b/spec/fixtures/api/schemas/public_api/v4/projects.json
index af5670ebd33..20f26a7f805 100644
--- a/spec/fixtures/api/schemas/public_api/v4/projects.json
+++ b/spec/fixtures/api/schemas/public_api/v4/projects.json
@@ -1,48 +1,4 @@
{
"type": "array",
- "items": {
- "type": "object",
- "properties" : {
- "id": { "type": "integer" },
- "name": { "type": "string" },
- "name_with_namespace": { "type": "string" },
- "description": { "type": ["string", "null"] },
- "path": { "type": "string" },
- "path_with_namespace": { "type": "string" },
- "created_at": { "type": "date" },
- "default_branch": { "type": ["string", "null"] },
- "tag_list": {
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "ssh_url_to_repo": { "type": "string" },
- "http_url_to_repo": { "type": "string" },
- "web_url": { "type": "string" },
- "readme_url": { "type": ["string", "null"] },
- "avatar_url": { "type": ["string", "null"] },
- "star_count": { "type": "integer" },
- "forks_count": { "type": "integer" },
- "last_activity_at": { "type": "date" },
- "namespace": {
- "type": "object",
- "properties" : {
- "id": { "type": "integer" },
- "name": { "type": "string" },
- "path": { "type": "string" },
- "kind": { "type": "string" },
- "full_path": { "type": "string" },
- "parent_id": { "type": ["integer", "null"] }
- }
- }
- },
- "required": [
- "id", "name", "name_with_namespace", "description", "path",
- "path_with_namespace", "created_at", "default_branch", "tag_list",
- "ssh_url_to_repo", "http_url_to_repo", "web_url", "avatar_url",
- "star_count", "last_activity_at", "namespace"
- ],
- "additionalProperties": false
- }
+ "items": { "$ref": "project.json" }
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/release.json b/spec/fixtures/api/schemas/public_api/v4/release.json
index 69ac383b7fd..463924147be 100644
--- a/spec/fixtures/api/schemas/public_api/v4/release.json
+++ b/spec/fixtures/api/schemas/public_api/v4/release.json
@@ -6,8 +6,8 @@
"tag_name": { "type": "string" },
"description": { "type": "string" },
"description_html": { "type": "string" },
- "created_at": { "type": "date" },
- "released_at": { "type": "date" },
+ "created_at": { "type": "string" , "format": "date-time"},
+ "released_at": { "type": "string", "format": "date-time" },
"upcoming_release": { "type": "boolean" },
"commit": {
"oneOf": [{ "type": "null" }, { "$ref": "commit/basic.json" }]
diff --git a/spec/fixtures/api/schemas/public_api/v4/release/evidence.json b/spec/fixtures/api/schemas/public_api/v4/release/evidence.json
index fbebac0acaa..2f3dfc40aec 100644
--- a/spec/fixtures/api/schemas/public_api/v4/release/evidence.json
+++ b/spec/fixtures/api/schemas/public_api/v4/release/evidence.json
@@ -8,7 +8,7 @@
"properties" : {
"sha": { "type": "string" },
"filepath": { "type": "string" },
- "collected_at": { "type": "date" }
+ "collected_at": { "type": "string", "format": "date-time" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json b/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
index 058b7b4b4ed..465e1193a64 100644
--- a/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
+++ b/spec/fixtures/api/schemas/public_api/v4/release/release_for_guest.json
@@ -5,8 +5,8 @@
"name": { "type": "string" },
"description": { "type": "string" },
"description_html": { "type": "string" },
- "created_at": { "type": "date" },
- "released_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "released_at": { "type": "string", "format": "date-time" },
"upcoming_release": { "type": "boolean" },
"milestones": {
"type": "array",
diff --git a/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json b/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json
index f51e7e8edc5..b0633a6ff2d 100644
--- a/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json
+++ b/spec/fixtures/api/schemas/public_api/v4/snippet_repository_storage_move.json
@@ -10,7 +10,7 @@
],
"properties" : {
"id": { "type": "integer" },
- "created_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
"state": { "type": "string" },
"source_storage_name": { "type": "string" },
"destination_storage_name": { "type": "string" },
diff --git a/spec/fixtures/api/schemas/public_api/v4/snippets.json b/spec/fixtures/api/schemas/public_api/v4/snippets.json
index de658e01657..65299901128 100644
--- a/spec/fixtures/api/schemas/public_api/v4/snippets.json
+++ b/spec/fixtures/api/schemas/public_api/v4/snippets.json
@@ -21,8 +21,8 @@
"visibility": { "type": "string" },
"web_url": { "type": "string" },
"raw_url": { "type": "string" },
- "created_at": { "type": "date" },
- "updated_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
+ "updated_at": { "type": "string", "format": "date-time" },
"author": {
"type": "object",
"properties": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/user/public.json b/spec/fixtures/api/schemas/public_api/v4/user/public.json
index ee848eda9ed..0955c70aef0 100644
--- a/spec/fixtures/api/schemas/public_api/v4/user/public.json
+++ b/spec/fixtures/api/schemas/public_api/v4/user/public.json
@@ -41,7 +41,7 @@
},
"avatar_url": { "type": "string" },
"web_url": { "type": "string" },
- "created_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
"bio": { "type": ["string", "null"] },
"location": { "type": ["string", "null"] },
"skype": { "type": "string" },
@@ -49,11 +49,11 @@
"twitter": { "type": "string "},
"website_url": { "type": "string" },
"organization": { "type": ["string", "null"] },
- "last_sign_in_at": { "type": "date" },
- "confirmed_at": { "type": ["date", "null"] },
+ "last_sign_in_at": { "type": ["string", "null"], "format": "date-time" },
+ "confirmed_at": { "type": ["string", "null"] },
"color_scheme_id": { "type": "integer" },
"projects_limit": { "type": "integer" },
- "current_sign_in_at": { "type": "date" },
+ "current_sign_in_at": { "type": ["string", "null"], "format": "date-time" },
"identities": {
"type": "array",
"items": {
diff --git a/spec/fixtures/api/schemas/registry/tag.json b/spec/fixtures/api/schemas/registry/tag.json
index 48f8402b65b..3667f42136d 100644
--- a/spec/fixtures/api/schemas/registry/tag.json
+++ b/spec/fixtures/api/schemas/registry/tag.json
@@ -29,9 +29,7 @@
"total_size": {
"type": "integer"
},
- "created_at": {
- "type": "date"
- },
+ "created_at": { "type": "string", "format": "date-time" },
"destroy_path": {
"type": "string"
}
diff --git a/spec/fixtures/api/schemas/release.json b/spec/fixtures/api/schemas/release.json
index b0296e5e62d..fe4f8cd2157 100644
--- a/spec/fixtures/api/schemas/release.json
+++ b/spec/fixtures/api/schemas/release.json
@@ -7,7 +7,7 @@
"ref": { "type": "string "},
"description": { "type": "string" },
"description_html": { "type": "string" },
- "created_at": { "type": "date" },
+ "created_at": { "type": "string", "format": "date-time" },
"commit": {
"oneOf": [{ "type": "null" }, { "$ref": "public_api/v4/commit/basic.json" }]
},
diff --git a/spec/fixtures/bulk_imports/labels.ndjson.gz b/spec/fixtures/bulk_imports/labels.ndjson.gz
new file mode 100644
index 00000000000..6bb10a53346
--- /dev/null
+++ b/spec/fixtures/bulk_imports/labels.ndjson.gz
Binary files differ
diff --git a/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml b/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml
new file mode 100644
index 00000000000..704e94a04d8
--- /dev/null
+++ b/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event.yml
@@ -0,0 +1,25 @@
+---
+description:
+category: Groups::EmailCampaignsController
+action: click
+label_description:
+property_description:
+value_description:
+extra_properties:
+identifiers:
+#- project
+#- user
+#- namespace
+product_section:
+product_stage:
+product_group:
+product_category:
+milestone: "13.11"
+introduced_by_url:
+distributions:
+- ce
+- ee
+tiers:
+- free
+- premium
+- ultimate
diff --git a/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml b/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml
new file mode 100644
index 00000000000..b20bb9702d2
--- /dev/null
+++ b/spec/fixtures/lib/generators/gitlab/snowplow_event_definition_generator/sample_event_ee.yml
@@ -0,0 +1,23 @@
+---
+description:
+category: Groups::EmailCampaignsController
+action: click
+label_description:
+property_description:
+value_description:
+extra_properties:
+identifiers:
+#- project
+#- user
+#- namespace
+product_section:
+product_stage:
+product_group:
+product_category:
+milestone: "13.11"
+introduced_by_url:
+distributions:
+- ee
+tiers:
+#- premium
+- ultimate
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
new file mode 100644
index 00000000000..c51b5bf6e01
--- /dev/null
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_ee.yml
@@ -0,0 +1,19 @@
+---
+key_path: counts_weekly.test_metric
+name: test metric name
+description:
+product_section:
+product_stage:
+product_group:
+product_category:
+value_type: number
+status: implemented
+milestone: "13.9"
+introduced_by_url:
+time_frame: 7d
+data_source:
+distribution:
+- ee
+tier:
+#- premium
+- ultimate
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
index 47fc1d7e376..c1ed9783308 100644
--- a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
@@ -15,8 +15,8 @@ time_frame: 7d
data_source:
distribution:
- ce
-# Add here corresponding tiers
-# tier:
-# - free
-# - premium
-# - ultimate
+- ee
+tier:
+- free
+- premium
+- ultimate
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json
index ed8fa58393f..7d98179789f 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [],
"properties": {
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_full_syntax.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_full_syntax.json
index e251e59de29..c40befcf8ce 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_full_syntax.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_full_syntax.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
"type", "options"
@@ -6,7 +7,7 @@
"properties": {
"type": { "enum": ["custom"] },
"label": { "type": "string" },
- "options": { "$ref": "custom_variable_options.json" }
+ "options": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_options.json" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_options.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_options.json
index f351d3ba340..de72b947eed 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_options.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_options.json
@@ -1,10 +1,11 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": ["values"],
"properties": {
"values": {
"type": "array",
- "items": { "$ref": "custom_variable_values.json" }
+ "items": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_values.json" }
}
},
"additionalProperties": false
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_values.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_values.json
index 430d66a9691..f3b801fa979 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_values.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_values.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": ["value"],
"properties": {
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/dashboard.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/dashboard.json
index 259c41bf091..40453c61a65 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/dashboard.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/dashboard.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
"dashboard",
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json
index 7d2b409a0f6..b47b81fc103 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_dashboard.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": ["panel_groups"],
"properties": {
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_panel_groups.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_panel_groups.json
index bf05c054e2f..063016c22fd 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_panel_groups.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/embedded_panel_groups.json
@@ -1,10 +1,11 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": ["panels"],
"properties": {
"panels": {
"type": "array",
- "items": { "$ref": "panels.json" }
+ "items": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json" }
}
},
"additionalProperties": false
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/links.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/links.json
index be180114052..ca67cfa4b0e 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/links.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/links.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "array",
"required": ["url"],
"properties": {
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_full_syntax.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_full_syntax.json
index 6eb2c0e51e2..a74b557dabe 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_full_syntax.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_full_syntax.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
"type", "options"
@@ -6,7 +7,7 @@
"properties": {
"type": { "enum": "metric_label_values" },
"label": { "type": "string" },
- "options": { "$ref": "metric_label_values_variable_options.json" }
+ "options": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_options.json" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_options.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_options.json
index 304372ed876..5662cc625a3 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_options.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_options.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
"series_selector", "label", "prometheus_endpoint_path"
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
index b23b0ea15d2..8ee207b7ebf 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
"label",
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
index 71c0981d9ec..392aa0e4480 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
"group",
@@ -8,7 +9,7 @@
"group": { "type": "string" },
"panels": {
"type": "array",
- "items": { "$ref": "panels.json" }
+ "items": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json" }
},
"has_custom_metrics": { "type": "boolean" }
},
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
index b4809a85101..3224e7cfe3f 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
"title",
@@ -10,11 +11,11 @@
"id": { "type": "string" },
"type": { "type": "string" },
"y_label": { "type": "string" },
- "y_axis": { "$ref": "axis.json" },
+ "y_axis": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/axis.json" },
"max_value": { "type": "number" },
"metrics": {
"type": "array",
- "items": { "$ref": "metrics.json" }
+ "items": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json" }
}
},
"additionalProperties": false
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/templating.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/templating.json
index c82d2fcb02c..439f7b6b044 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/templating.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/templating.json
@@ -1,8 +1,9 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": ["variables"],
"properties": {
- "variables": { "$ref": "variables.json" }
+ "variables": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/variables.json" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_full_syntax.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_full_syntax.json
index 1818b2775f0..c4382326854 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_full_syntax.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_full_syntax.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": [
"type", "options"
@@ -6,7 +7,7 @@
"properties": {
"type": { "enum": ["text"] },
"label": { "type": "string" },
- "options": { "$ref": "text_variable_options.json" }
+ "options": { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_options.json" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_options.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_options.json
index ccb2e168fd1..ee998e46a7e 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_options.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_options.json
@@ -1,4 +1,5 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"default_value": { "type": "string" }
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/variables.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/variables.json
index aec129111e0..1cf5ae2eaa4 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/variables.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/variables.json
@@ -1,16 +1,17 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9_]*$": {
"anyOf": [
- { "$ref": "text_variable_full_syntax.json" },
+ { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/text_variable_full_syntax.json" },
{ "type": "string" },
{
"type": "array",
"items": { "type": "string" }
},
- { "$ref": "custom_variable_full_syntax.json" },
- { "$ref": "metric_label_values_variable_full_syntax.json" }
+ { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/custom_variable_full_syntax.json" },
+ { "$ref": "spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metric_label_values_variable_full_syntax.json" }
]
}
},
diff --git a/spec/fixtures/packages/helm/rook-ceph-v1.5.8.tgz b/spec/fixtures/packages/helm/rook-ceph-v1.5.8.tgz
new file mode 100644
index 00000000000..85e1c2d1ca4
--- /dev/null
+++ b/spec/fixtures/packages/helm/rook-ceph-v1.5.8.tgz
Binary files differ
diff --git a/spec/fixtures/packages/terraform_module/module-system-v1.0.0.tgz b/spec/fixtures/packages/terraform_module/module-system-v1.0.0.tgz
new file mode 100644
index 00000000000..42f1e7d9924
--- /dev/null
+++ b/spec/fixtures/packages/terraform_module/module-system-v1.0.0.tgz
Binary files differ
diff --git a/spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json b/spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json
index c3ee2bc4cac..5489330fc1d 100644
--- a/spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json
+++ b/spec/fixtures/pipeline_artifacts/code_quality_mr_diff.json
@@ -1,23 +1,25 @@
{
- "files": {
- "file_a.rb": [
- {
- "line": 10,
- "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
- "severity": "major"
- },
- {
- "line": 10,
- "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
- "severity": "minor"
- }
- ],
- "file_b.rb": [
- {
- "line": 10,
- "description": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count.",
- "severity": "minor"
- }
- ]
+ "merge_request_123456789": {
+ "files": {
+ "file_a.rb": [
+ {
+ "line": 10,
+ "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
+ "severity": "major"
+ },
+ {
+ "line": 10,
+ "description": "Method `new_array` has 12 arguments (exceeds 4 allowed). Consider refactoring.",
+ "severity": "minor"
+ }
+ ],
+ "file_b.rb": [
+ {
+ "line": 10,
+ "description": "This cop checks for methods with too many parameters.\nThe maximum number of parameters is configurable.\nKeyword arguments can optionally be excluded from the total count.",
+ "severity": "minor"
+ }
+ ]
+ }
}
}
diff --git a/spec/fixtures/product_intelligence/survey_response_schema.json b/spec/fixtures/product_intelligence/survey_response_schema.json
new file mode 100644
index 00000000000..11454116d83
--- /dev/null
+++ b/spec/fixtures/product_intelligence/survey_response_schema.json
@@ -0,0 +1,52 @@
+{
+ "description": "Schema for a Gitlab survey_response event",
+ "self": {
+ "vendor": "com.gitlab",
+ "name": "survey_response",
+ "version": "1-0-0",
+ "format": "jsonschema"
+ },
+ "type": "object",
+ "additionalProperties": false,
+ "required": ["survey_id", "response"],
+ "properties": {
+ "survey_id": {
+ "description": "Survey ID",
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 2147483647
+ },
+ "response": {
+ "description": "Response",
+ "type": "string",
+ "maxLength": 10000
+ },
+ "instance_id": {
+ "description": "Instance ID",
+ "type": ["integer", "null"],
+ "minimum": 0,
+ "maximum": 2147483647
+ },
+ "user_id": {
+ "description": "User ID",
+ "type": ["integer", "null"],
+ "minimum": 0,
+ "maximum": 2147483647
+ },
+ "email": {
+ "description": "Email",
+ "type": ["string", "null"],
+ "maxLength": 255
+ },
+ "name": {
+ "description": "Name",
+ "type": ["string", "null"],
+ "maxLength": 255
+ },
+ "username": {
+ "description": "Username",
+ "type": ["string", "null"],
+ "maxLength": 255
+ }
+ }
+}
diff --git a/spec/fixtures/security_reports/master/gl-sast-report-minimal.json b/spec/fixtures/security_reports/master/gl-sast-report-minimal.json
new file mode 100644
index 00000000000..60a67453c9b
--- /dev/null
+++ b/spec/fixtures/security_reports/master/gl-sast-report-minimal.json
@@ -0,0 +1,68 @@
+{
+ "version": "14.0.0",
+ "vulnerabilities": [
+ {
+ "category": "sast",
+ "name": "Cipher with no integrity",
+ "message": "Cipher with no integrity",
+ "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:29:CIPHER_INTEGRITY",
+ "severity": "Medium",
+ "confidence": "High",
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
+ },
+ "location": {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 29,
+ "end_line": 29,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "insecureCypher"
+ },
+ "identifiers": [
+ {
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-CIPHER_INTEGRITY",
+ "value": "CIPHER_INTEGRITY",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY"
+ }
+ ],
+ "tracking": {
+ "type": "source",
+ "items": [
+ {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 29,
+ "end_line": 29,
+ "signatures": [
+ {
+ "algorithm": "hash",
+ "value": "HASHVALUE"
+ },
+ {
+ "algorithm": "scope_offset",
+ "value": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:App[0]:insecureCypher[0]:2"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ ],
+ "remediations": [],
+ "scan": {
+ "scanner": {
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs",
+ "url": "https://spotbugs.github.io",
+ "vendor": {
+ "name": "GitLab"
+ },
+ "version": "4.0.2"
+ },
+ "type": "sast",
+ "status": "success",
+ "start_time": "placeholder-value",
+ "end_time": "placeholder-value"
+ }
+}
diff --git a/spec/fixtures/security_reports/master/gl-sast-report.json b/spec/fixtures/security_reports/master/gl-sast-report.json
index 9da9fdc3832..3323c1fffe3 100644
--- a/spec/fixtures/security_reports/master/gl-sast-report.json
+++ b/spec/fixtures/security_reports/master/gl-sast-report.json
@@ -154,8 +154,8 @@
"items": [
{
"file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "start_line": 47,
- "end_line": 47,
+ "start_line": 29,
+ "end_line": 29,
"signatures": [
{
"algorithm": "hash",
diff --git a/spec/fixtures/whats_new/20201225_01_04.yml b/spec/fixtures/whats_new/20201225_01_04.yml
new file mode 100644
index 00000000000..0dfd0d780c7
--- /dev/null
+++ b/spec/fixtures/whats_new/20201225_01_04.yml
@@ -0,0 +1,19 @@
+---
+- title: View epics on a board
+ body: |
+ ## View epics on a board
+ self-managed: true
+ gitlab-com: false
+ packages: ["Free", "Premium", "Ultimate"]
+- title: View Jira issue details in GitLab
+ body: |
+ ## View Jira issue details in GitLab
+ self-managed: true
+ gitlab-com: false
+ packages: ["Premium", "Ultimate"]
+- title: Integrate any IT alerting tool with GitLab
+ body: |
+ ## Integrate any IT alerting tool with GitLab
+ self-managed: true
+ gitlab-com: false
+ packages: ["Ultimate"] \ No newline at end of file
diff --git a/spec/frontend/__helpers__/mock_apollo_helper.js b/spec/frontend/__helpers__/mock_apollo_helper.js
index bd97a06071a..520d6c72541 100644
--- a/spec/frontend/__helpers__/mock_apollo_helper.js
+++ b/spec/frontend/__helpers__/mock_apollo_helper.js
@@ -1,5 +1,5 @@
import { InMemoryCache } from 'apollo-cache-inmemory';
-import { createMockClient } from 'mock-apollo-client';
+import { createMockClient as createMockApolloClient } from 'mock-apollo-client';
import VueApollo from 'vue-apollo';
const defaultCacheOptions = {
@@ -7,13 +7,13 @@ const defaultCacheOptions = {
addTypename: false,
};
-export default (handlers = [], resolvers = {}, cacheOptions = {}) => {
+export function createMockClient(handlers = [], resolvers = {}, cacheOptions = {}) {
const cache = new InMemoryCache({
...defaultCacheOptions,
...cacheOptions,
});
- const mockClient = createMockClient({ cache, resolvers });
+ const mockClient = createMockApolloClient({ cache, resolvers });
if (Array.isArray(handlers)) {
handlers.forEach(([query, value]) => mockClient.setRequestHandler(query, value));
@@ -21,7 +21,12 @@ export default (handlers = [], resolvers = {}, cacheOptions = {}) => {
throw new Error('You should pass an array of handlers to mock Apollo client');
}
+ return mockClient;
+}
+
+export default function createMockApollo(handlers, resolvers, cacheOptions) {
+ const mockClient = createMockClient(handlers, resolvers, cacheOptions);
const apolloProvider = new VueApollo({ defaultClient: mockClient });
return apolloProvider;
-};
+}
diff --git a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
index 33c29cea6d8..0b86c10ea46 100644
--- a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
+++ b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
@@ -7,6 +7,7 @@ exports[`~/access_tokens/components/expires_at_field should render datepicker wi
container=""
displayfield="true"
firstday="0"
+ inputlabel="Enter date"
mindate="Mon Jul 06 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
placeholder="YYYY-MM-DD"
theme=""
diff --git a/spec/frontend/actioncable_link_spec.js b/spec/frontend/actioncable_link_spec.js
new file mode 100644
index 00000000000..c785151f8fd
--- /dev/null
+++ b/spec/frontend/actioncable_link_spec.js
@@ -0,0 +1,110 @@
+import { print } from 'graphql';
+import gql from 'graphql-tag';
+import cable from '~/actioncable_consumer';
+import ActionCableLink from '~/actioncable_link';
+
+// Mock uuids module for determinism
+jest.mock('~/lib/utils/uuids', () => ({
+ uuids: () => ['testuuid'],
+}));
+
+const TEST_OPERATION = {
+ query: gql`
+ query foo {
+ project {
+ id
+ }
+ }
+ `,
+ operationName: 'foo',
+ variables: [],
+};
+
+/**
+ * Create an observer that passes calls to the given spy.
+ *
+ * This helps us assert which calls were made in what order.
+ */
+const createSpyObserver = (spy) => ({
+ next: (...args) => spy('next', ...args),
+ error: (...args) => spy('error', ...args),
+ complete: (...args) => spy('complete', ...args),
+});
+
+const notify = (...notifications) => {
+ notifications.forEach((data) => cable.subscriptions.notifyAll('received', data));
+};
+
+const getSubscriptionCount = () => cable.subscriptions.subscriptions.length;
+
+describe('~/actioncable_link', () => {
+ let cableLink;
+
+ beforeEach(() => {
+ jest.spyOn(cable.subscriptions, 'create');
+
+ cableLink = new ActionCableLink();
+ });
+
+ describe('request', () => {
+ let subscription;
+ let spy;
+
+ beforeEach(() => {
+ spy = jest.fn();
+ subscription = cableLink.request(TEST_OPERATION).subscribe(createSpyObserver(spy));
+ });
+
+ afterEach(() => {
+ subscription.unsubscribe();
+ });
+
+ it('creates a subscription', () => {
+ expect(getSubscriptionCount()).toBe(1);
+ expect(cable.subscriptions.create).toHaveBeenCalledWith(
+ {
+ channel: 'GraphqlChannel',
+ nonce: 'testuuid',
+ ...TEST_OPERATION,
+ query: print(TEST_OPERATION.query),
+ },
+ { received: expect.any(Function) },
+ );
+ });
+
+ it('when "unsubscribe", unsubscribes underlying cable subscription', () => {
+ subscription.unsubscribe();
+
+ expect(getSubscriptionCount()).toBe(0);
+ });
+
+ it('when receives data, triggers observer until no ".more"', () => {
+ notify(
+ { result: 'test result', more: true },
+ { result: 'test result 2', more: true },
+ { result: 'test result 3' },
+ { result: 'test result 4' },
+ );
+
+ expect(spy.mock.calls).toEqual([
+ ['next', 'test result'],
+ ['next', 'test result 2'],
+ ['next', 'test result 3'],
+ ['complete'],
+ ]);
+ });
+
+ it('when receives errors, triggers observer', () => {
+ notify(
+ { result: 'test result', more: true },
+ { result: 'test result 2', errors: ['boom!'], more: true },
+ { result: 'test result 3' },
+ );
+
+ expect(spy.mock.calls).toEqual([
+ ['next', 'test result'],
+ ['error', ['boom!']],
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
index d32e582e498..2832de98769 100644
--- a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
+++ b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
@@ -40,7 +40,7 @@ describe('AddContextCommitsModal', () => {
store,
propsData: {
contextCommitsPath: '',
- targetBranch: 'master',
+ targetBranch: 'main',
mergeRequestIid: 1,
projectId: 1,
...props,
diff --git a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
new file mode 100644
index 00000000000..7c20bbe21c8
--- /dev/null
+++ b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
@@ -0,0 +1,134 @@
+import { GlTable, GlBadge, GlEmptyState, GlLink } from '@gitlab/ui';
+import { GlSingleStat } from '@gitlab/ui/dist/charts';
+import { mount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import DevopsScore from '~/analytics/devops_report/components/devops_score.vue';
+import {
+ devopsScoreMetricsData,
+ devopsReportDocsPath,
+ noDataImagePath,
+ devopsScoreTableHeaders,
+} from '../mock_data';
+
+describe('DevopsScore', () => {
+ let wrapper;
+
+ const createComponent = ({ devopsScoreMetrics = devopsScoreMetricsData } = {}) => {
+ wrapper = extendedWrapper(
+ mount(DevopsScore, {
+ provide: {
+ devopsScoreMetrics,
+ devopsReportDocsPath,
+ noDataImagePath,
+ },
+ }),
+ );
+ };
+
+ const findTable = () => wrapper.findComponent(GlTable);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findCol = (testId) => findTable().find(`[data-testid="${testId}"]`);
+ const findUsageCol = () => findCol('usageCol');
+ const findDevopsScoreApp = () => wrapper.findByTestId('devops-score-app');
+
+ describe('with no data', () => {
+ beforeEach(() => {
+ createComponent({ devopsScoreMetrics: {} });
+ });
+
+ describe('empty state', () => {
+ it('displays the empty state', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('displays the correct message', () => {
+ expect(findEmptyState().text()).toBe(
+ 'Data is still calculating... It may be several days before you see feature usage data. See example DevOps Score page in our documentation.',
+ );
+ });
+
+ it('contains a link to the feature documentation', () => {
+ expect(wrapper.findComponent(GlLink).exists()).toBe(true);
+ });
+ });
+
+ it('does not display the devops score app', () => {
+ expect(findDevopsScoreApp().exists()).toBe(false);
+ });
+ });
+
+ describe('with data', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not display the empty state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ });
+
+ it('displays the devops score app', () => {
+ expect(findDevopsScoreApp().exists()).toBe(true);
+ });
+
+ describe('devops score app', () => {
+ it('displays the title note', () => {
+ expect(wrapper.findByTestId('devops-score-note-text').text()).toBe(
+ 'DevOps score metrics are based on usage over the last 30 days. Last updated: 2020-06-29 08:16.',
+ );
+ });
+
+ it('displays the single stat section', () => {
+ const component = wrapper.findComponent(GlSingleStat);
+
+ expect(component.exists()).toBe(true);
+ expect(component.props('value')).toBe(devopsScoreMetricsData.averageScore.value);
+ });
+
+ describe('devops score table', () => {
+ it('displays the table', () => {
+ expect(findTable().exists()).toBe(true);
+ });
+
+ describe('table headings', () => {
+ let headers;
+
+ beforeEach(() => {
+ headers = findTable().findAll("[data-testid='header']");
+ });
+
+ it('displays the correct number of headings', () => {
+ expect(headers).toHaveLength(devopsScoreTableHeaders.length);
+ });
+
+ describe.each(devopsScoreTableHeaders)('header fields', ({ label, index }) => {
+ let headerWrapper;
+
+ beforeEach(() => {
+ headerWrapper = headers.at(index);
+ });
+
+ it(`displays the correct table heading text for "${label}"`, () => {
+ expect(headerWrapper.text()).toContain(label);
+ });
+ });
+ });
+
+ describe('table columns', () => {
+ describe('Your usage', () => {
+ it('displays the correct value', () => {
+ expect(findUsageCol().text()).toContain('3.2');
+ });
+
+ it('displays the correct badge', () => {
+ const badge = findUsageCol().find(GlBadge);
+
+ expect(badge.exists()).toBe(true);
+ expect(badge.props('variant')).toBe('muted');
+ expect(badge.text()).toBe('Low');
+ });
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/analytics/devops_score/mock_data.js b/spec/frontend/admin/analytics/devops_score/mock_data.js
new file mode 100644
index 00000000000..ae0c01a2661
--- /dev/null
+++ b/spec/frontend/admin/analytics/devops_score/mock_data.js
@@ -0,0 +1,46 @@
+export const devopsScoreTableHeaders = [
+ {
+ index: 0,
+ label: '',
+ },
+ {
+ index: 1,
+ label: 'Your usage',
+ },
+ {
+ index: 2,
+ label: 'Leader usage',
+ },
+ {
+ index: 3,
+ label: 'Score',
+ },
+];
+
+export const devopsScoreMetricsData = {
+ createdAt: '2020-06-29 08:16',
+ cards: [
+ {
+ title: 'Issues created per active user',
+ usage: '3.2',
+ leadInstance: '10.2',
+ score: '0',
+ scoreLevel: {
+ label: 'Low',
+ variant: 'muted',
+ },
+ },
+ ],
+ averageScore: {
+ value: '10',
+ scoreLevel: {
+ label: 'High',
+ icon: 'check-circle',
+ variant: 'success',
+ },
+ },
+};
+
+export const devopsReportDocsPath = 'docs-path';
+
+export const noDataImagePath = 'image-path';
diff --git a/spec/frontend/admin/users/components/actions/actions_spec.js b/spec/frontend/admin/users/components/actions/actions_spec.js
index 5e232f34311..5db5b8a90a9 100644
--- a/spec/frontend/admin/users/components/actions/actions_spec.js
+++ b/spec/frontend/admin/users/components/actions/actions_spec.js
@@ -71,6 +71,7 @@ describe('Action components', () => {
});
describe('DELETE_ACTION_COMPONENTS', () => {
+ const oncallSchedules = [{ name: 'schedule1' }, { name: 'schedule2' }];
it.each(DELETE_ACTIONS)('renders a dropdown item for "%s"', async (action) => {
initComponent({
component: Actions[capitalizeFirstCharacter(action)],
@@ -80,6 +81,7 @@ describe('Action components', () => {
delete: '/delete',
block: '/block',
},
+ oncallSchedules,
},
stubs: { SharedDeleteAction },
});
@@ -92,6 +94,9 @@ describe('Action components', () => {
expect(sharedAction.attributes('data-delete-user-url')).toBe('/delete');
expect(sharedAction.attributes('data-gl-modal-action')).toBe(kebabCase(action));
expect(sharedAction.attributes('data-username')).toBe('John Doe');
+ expect(sharedAction.attributes('data-oncall-schedules')).toBe(
+ JSON.stringify(oncallSchedules),
+ );
expect(findDropdownItem().exists()).toBe(true);
});
});
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
index 0745d961f25..debe964e7aa 100644
--- a/spec/frontend/admin/users/components/user_actions_spec.js
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -1,5 +1,5 @@
import { GlDropdownDivider } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Actions from '~/admin/users/components/actions';
import AdminUserActions from '~/admin/users/components/user_actions.vue';
import { I18N_USER_ACTIONS } from '~/admin/users/constants';
@@ -14,12 +14,14 @@ describe('AdminUserActions component', () => {
const user = users[0];
const userPaths = generateUserPaths(paths, user.username);
- const findEditButton = () => wrapper.find('[data-testid="edit"]');
- const findActionsDropdown = () => wrapper.find('[data-testid="actions"');
- const findDropdownDivider = () => wrapper.find(GlDropdownDivider);
+ const findUserActions = (id) => wrapper.findByTestId(`user-actions-${id}`);
+ const findEditButton = (id = user.id) => findUserActions(id).find('[data-testid="edit"]');
+ const findActionsDropdown = (id = user.id) =>
+ findUserActions(id).find('[data-testid="dropdown-toggle"]');
+ const findDropdownDivider = () => wrapper.findComponent(GlDropdownDivider);
const initComponent = ({ actions = [] } = {}) => {
- wrapper = shallowMount(AdminUserActions, {
+ wrapper = shallowMountExtended(AdminUserActions, {
propsData: {
user: {
...user,
diff --git a/spec/frontend/admin/users/components/users_table_spec.js b/spec/frontend/admin/users/components/users_table_spec.js
index 424b0deebd3..708c9e1979e 100644
--- a/spec/frontend/admin/users/components/users_table_spec.js
+++ b/spec/frontend/admin/users/components/users_table_spec.js
@@ -1,16 +1,36 @@
-import { GlTable } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { GlTable, GlSkeletonLoader } from '@gitlab/ui';
+import { createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import AdminUserActions from '~/admin/users/components/user_actions.vue';
import AdminUserAvatar from '~/admin/users/components/user_avatar.vue';
import AdminUsersTable from '~/admin/users/components/users_table.vue';
+import getUsersGroupCountsQuery from '~/admin/users/graphql/queries/get_users_group_counts.query.graphql';
+import createFlash from '~/flash';
import AdminUserDate from '~/vue_shared/components/user_date.vue';
-import { users, paths } from '../mock_data';
+import { users, paths, createGroupCountResponse } from '../mock_data';
+
+jest.mock('~/flash');
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
describe('AdminUsersTable component', () => {
let wrapper;
+ const user = users[0];
+ const createFetchGroupCount = (data) =>
+ jest.fn().mockResolvedValue(createGroupCountResponse(data));
+ const fetchGroupCountsLoading = jest.fn().mockResolvedValue(new Promise(() => {}));
+ const fetchGroupCountsError = jest.fn().mockRejectedValue(new Error('Network error'));
+ const fetchGroupCountsResponse = createFetchGroupCount([{ id: user.id, groupCount: 5 }]);
+
+ const findUserGroupCount = (id) => wrapper.findByTestId(`user-group-count-${id}`);
+ const findUserGroupCountLoader = (id) => findUserGroupCount(id).find(GlSkeletonLoader);
const getCellByLabel = (trIdx, label) => {
return wrapper
.find(GlTable)
@@ -20,8 +40,16 @@ describe('AdminUsersTable component', () => {
.find(`[data-label="${label}"][role="cell"]`);
};
- const initComponent = (props = {}) => {
- wrapper = mount(AdminUsersTable, {
+ function createMockApolloProvider(resolverMock) {
+ const requestHandlers = [[getUsersGroupCountsQuery, resolverMock]];
+
+ return createMockApollo(requestHandlers);
+ }
+
+ const initComponent = (props = {}, resolverMock = fetchGroupCountsResponse) => {
+ wrapper = mountExtended(AdminUsersTable, {
+ localVue,
+ apolloProvider: createMockApolloProvider(resolverMock),
propsData: {
users,
paths,
@@ -36,8 +64,6 @@ describe('AdminUsersTable component', () => {
});
describe('when there are users', () => {
- const user = users[0];
-
beforeEach(() => {
initComponent();
});
@@ -69,4 +95,51 @@ describe('AdminUsersTable component', () => {
expect(wrapper.text()).toContain('No users found');
});
});
+
+ describe('group counts', () => {
+ describe('when fetching the data', () => {
+ beforeEach(() => {
+ initComponent({}, fetchGroupCountsLoading);
+ });
+
+ it('renders a loader for each user', () => {
+ expect(findUserGroupCountLoader(user.id).exists()).toBe(true);
+ });
+ });
+
+ describe('when the data has been fetched', () => {
+ beforeEach(() => {
+ initComponent();
+ });
+
+ it("renders the user's group count", () => {
+ expect(findUserGroupCount(user.id).text()).toBe('5');
+ });
+
+ describe("and a user's group count is null", () => {
+ beforeEach(() => {
+ initComponent({}, createFetchGroupCount([{ id: user.id, groupCount: null }]));
+ });
+
+ it("renders the user's group count as 0", () => {
+ expect(findUserGroupCount(user.id).text()).toBe('0');
+ });
+ });
+ });
+
+ describe('when there is an error while fetching the data', () => {
+ beforeEach(() => {
+ initComponent({}, fetchGroupCountsError);
+ });
+
+ it('creates a flash message and captures the error', () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'Could not load user group counts. Please refresh the page to try again.',
+ captureError: true,
+ error: expect.any(Error),
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/admin/users/mock_data.js b/spec/frontend/admin/users/mock_data.js
index c3918ef5173..4689ab36773 100644
--- a/spec/frontend/admin/users/mock_data.js
+++ b/spec/frontend/admin/users/mock_data.js
@@ -10,7 +10,7 @@ export const users = [
'https://secure.gravatar.com/avatar/054f062d8b1a42b123f17e13a173cda8?s=80\\u0026d=identicon',
badges: [
{ text: 'Admin', variant: 'success' },
- { text: "It's you!", variant: null },
+ { text: "It's you!", variant: 'muted' },
],
projectsCount: 0,
actions: [],
@@ -31,3 +31,16 @@ export const paths = {
deleteWithContributions: '/admin/users/id',
adminUser: '/admin/users/id',
};
+
+export const createGroupCountResponse = (groupCounts) => ({
+ data: {
+ users: {
+ nodes: groupCounts.map(({ id, groupCount }) => ({
+ id: `gid://gitlab/User/${id}`,
+ groupCount,
+ __typename: 'UserCore',
+ })),
+ __typename: 'UserCoreConnection',
+ },
+ },
+});
diff --git a/spec/frontend/admin/users/tabs_spec.js b/spec/frontend/admin/users/tabs_spec.js
deleted file mode 100644
index 39ba8618486..00000000000
--- a/spec/frontend/admin/users/tabs_spec.js
+++ /dev/null
@@ -1,37 +0,0 @@
-import initTabs from '~/admin/users/tabs';
-import Api from '~/api';
-
-jest.mock('~/api.js');
-jest.mock('~/lib/utils/common_utils');
-
-describe('tabs', () => {
- beforeEach(() => {
- setFixtures(`
- <div>
- <div class="js-users-tab-item">
- <a href="#users" data-testid='users-tab'>Users</a>
- </div>
- <div class="js-users-tab-item">
- <a href="#cohorts" data-testid='cohorts-tab'>Cohorts</a>
- </div>
- </div`);
-
- initTabs();
- });
-
- afterEach(() => {});
-
- describe('tracking', () => {
- it('tracks event when cohorts tab is clicked', () => {
- document.querySelector('[data-testid="cohorts-tab"]').click();
-
- expect(Api.trackRedisHllUserEvent).toHaveBeenCalledWith('i_analytics_cohorts');
- });
-
- it('does not track an event when users tab is clicked', () => {
- document.querySelector('[data-testid="users-tab"]').click();
-
- expect(Api.trackRedisHllUserEvent).not.toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/frontend/alert_management/components/alert_management_table_spec.js b/spec/frontend/alert_management/components/alert_management_table_spec.js
index dece3dfbe5f..826fb820d9b 100644
--- a/spec/frontend/alert_management/components/alert_management_table_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_table_spec.js
@@ -7,6 +7,7 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import mockAlerts from 'jest/vue_shared/alert_details/mocks/alerts.json';
import AlertManagementTable from '~/alert_management/components/alert_management_table.vue';
import { visitUrl } from '~/lib/utils/url_utility';
+import AlertDeprecationWarning from '~/vue_shared/components/alerts_deprecation_warning.vue';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import defaultProvideValues from '../mocks/alerts_provide_config.json';
@@ -14,6 +15,7 @@ import defaultProvideValues from '../mocks/alerts_provide_config.json';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn().mockName('visitUrlMock'),
joinPaths: jest.requireActual('~/lib/utils/url_utility').joinPaths,
+ setUrlFragment: jest.requireActual('~/lib/utils/url_utility').setUrlFragment,
}));
describe('AlertManagementTable', () => {
@@ -39,6 +41,8 @@ describe('AlertManagementTable', () => {
resolved: 11,
all: 26,
};
+ const findDeprecationNotice = () =>
+ wrapper.findComponent(AlertDeprecationWarning).findComponent(GlAlert);
function mountComponent({ provide = {}, data = {}, loading = false, stubs = {} } = {}) {
wrapper = extendedWrapper(
@@ -47,6 +51,7 @@ describe('AlertManagementTable', () => {
...defaultProvideValues,
alertManagementEnabled: true,
userCanEnableAlertManagement: true,
+ hasManagedPrometheus: false,
...provide,
},
data() {
@@ -234,6 +239,20 @@ describe('AlertManagementTable', () => {
expect(visitUrl).toHaveBeenCalledWith('/1527542/details', true);
});
+ describe('deprecation notice', () => {
+ it('shows the deprecation notice when available', () => {
+ mountComponent({ provide: { hasManagedPrometheus: true } });
+
+ expect(findDeprecationNotice().exists()).toBe(true);
+ });
+
+ it('hides the deprecation notice when not available', () => {
+ mountComponent();
+
+ expect(findDeprecationNotice().exists()).toBe(false);
+ });
+ });
+
describe('alert issue links', () => {
beforeEach(() => {
mountComponent({
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
index 9912ac433a5..298596085ef 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
@@ -8,7 +8,6 @@ import AlertsSettingsForm from '~/alerts_settings/components/alerts_settings_for
import { typeSet } from '~/alerts_settings/constants';
import alertFields from '../mocks/alert_fields.json';
import parsedMapping from '../mocks/parsed_mapping.json';
-import { defaultAlertSettingsConfig } from './util';
const scrollIntoViewMock = jest.fn();
HTMLElement.prototype.scrollIntoView = scrollIntoViewMock;
@@ -29,7 +28,6 @@ describe('AlertsSettingsForm', () => {
...props,
},
provide: {
- ...defaultAlertSettingsConfig,
multiIntegrations,
},
mocks: {
@@ -50,7 +48,6 @@ describe('AlertsSettingsForm', () => {
const findFormToggle = () => wrapper.findComponent(GlToggle);
const findSamplePayloadSection = () => wrapper.findByTestId('sample-payload-section');
const findMappingBuilder = () => wrapper.findComponent(MappingBuilder);
-
const findSubmitButton = () => wrapper.findByTestId('integration-form-submit');
const findMultiSupportText = () => wrapper.findByTestId('multi-integrations-not-supported');
const findJsonTestSubmit = () => wrapper.findByTestId('send-test-alert');
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
index dd8ce838dfd..595c3f1a289 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
@@ -20,6 +20,7 @@ import resetPrometheusTokenMutation from '~/alerts_settings/graphql/mutations/re
import updateCurrentHttpIntegrationMutation from '~/alerts_settings/graphql/mutations/update_current_http_integration.mutation.graphql';
import updateCurrentPrometheusIntegrationMutation from '~/alerts_settings/graphql/mutations/update_current_prometheus_integration.mutation.graphql';
import updatePrometheusIntegrationMutation from '~/alerts_settings/graphql/mutations/update_prometheus_integration.mutation.graphql';
+import getHttpIntegrationQuery from '~/alerts_settings/graphql/queries/get_http_integration.query.graphql';
import getIntegrationsQuery from '~/alerts_settings/graphql/queries/get_integrations.query.graphql';
import alertsUpdateService from '~/alerts_settings/services';
import {
@@ -47,7 +48,6 @@ import {
destroyIntegrationResponseWithErrors,
} from './mocks/apollo_mock';
import mockIntegrations from './mocks/integrations.json';
-import { defaultAlertSettingsConfig } from './util';
jest.mock('~/flash');
@@ -58,27 +58,12 @@ describe('AlertsSettingsWrapper', () => {
let fakeApollo;
let destroyIntegrationHandler;
useMockIntersectionObserver();
+
const httpMappingData = {
payloadExample: '{"test: : "field"}',
payloadAttributeMappings: [],
payloadAlertFields: [],
};
- const httpIntegrations = {
- list: [
- {
- id: mockIntegrations[0].id,
- ...httpMappingData,
- },
- {
- id: mockIntegrations[1].id,
- ...httpMappingData,
- },
- {
- id: mockIntegrations[2].id,
- httpMappingData,
- },
- ],
- };
const findLoader = () => wrapper.findComponent(IntegrationsList).findComponent(GlLoadingIcon);
const findIntegrationsList = () => wrapper.findComponent(IntegrationsList);
@@ -109,13 +94,14 @@ describe('AlertsSettingsWrapper', () => {
return { ...data };
},
provide: {
- ...defaultAlertSettingsConfig,
...provide,
},
mocks: {
$apollo: {
mutate: jest.fn(),
- query: jest.fn(),
+ addSmartQuery: jest.fn((_, options) => {
+ options.result.call(wrapper.vm);
+ }),
queries: {
integrations: {
loading,
@@ -143,9 +129,6 @@ describe('AlertsSettingsWrapper', () => {
wrapper = mount(AlertsSettingsWrapper, {
localVue,
apolloProvider: fakeApollo,
- provide: {
- ...defaultAlertSettingsConfig,
- },
});
}
@@ -158,17 +141,29 @@ describe('AlertsSettingsWrapper', () => {
beforeEach(() => {
createComponent({
data: {
- integrations: { list: mockIntegrations },
- httpIntegrations: { list: [] },
+ integrations: mockIntegrations,
currentIntegration: mockIntegrations[0],
},
loading: false,
});
});
- it('renders alerts integrations list and add new integration button by default', () => {
+ it('renders alerts integrations list', () => {
expect(findLoader().exists()).toBe(false);
expect(findIntegrations()).toHaveLength(mockIntegrations.length);
+ });
+
+ it('renders `Add new integration` button when multiple integrations are supported', () => {
+ createComponent({
+ data: {
+ integrations: mockIntegrations,
+ currentIntegration: mockIntegrations[0],
+ },
+ provide: {
+ multiIntegrations: true,
+ },
+ loading: false,
+ });
expect(findAddIntegrationBtn().exists()).toBe(true);
});
@@ -177,6 +172,16 @@ describe('AlertsSettingsWrapper', () => {
});
it('hides `add new integration` button and displays setting form on btn click', async () => {
+ createComponent({
+ data: {
+ integrations: mockIntegrations,
+ currentIntegration: mockIntegrations[0],
+ },
+ provide: {
+ multiIntegrations: true,
+ },
+ loading: false,
+ });
const addNewIntegrationBtn = findAddIntegrationBtn();
expect(addNewIntegrationBtn.exists()).toBe(true);
await addNewIntegrationBtn.trigger('click');
@@ -186,7 +191,7 @@ describe('AlertsSettingsWrapper', () => {
it('shows loading indicator inside the IntegrationsList table', () => {
createComponent({
- data: { integrations: {} },
+ data: { integrations: [] },
loading: true,
});
expect(wrapper.find(IntegrationsList).exists()).toBe(true);
@@ -198,7 +203,7 @@ describe('AlertsSettingsWrapper', () => {
beforeEach(() => {
createComponent({
data: {
- integrations: { list: mockIntegrations },
+ integrations: mockIntegrations,
currentIntegration: mockIntegrations[0],
formVisible: true,
},
@@ -283,7 +288,7 @@ describe('AlertsSettingsWrapper', () => {
it('calls `$apollo.mutate` with `updatePrometheusIntegrationMutation`', () => {
createComponent({
data: {
- integrations: { list: mockIntegrations },
+ integrations: mockIntegrations,
currentIntegration: mockIntegrations[3],
formVisible: true,
},
@@ -374,39 +379,61 @@ describe('AlertsSettingsWrapper', () => {
});
});
- it('calls `$apollo.mutate` with `updateCurrentHttpIntegrationMutation` on HTTP integration edit', () => {
- createComponent({
- data: {
- integrations: { list: mockIntegrations },
- currentIntegration: mockIntegrations[0],
- httpIntegrations,
- },
- loading: false,
- });
+ describe('Edit integration', () => {
+ describe('HTTP', () => {
+ beforeEach(() => {
+ createComponent({
+ data: {
+ integrations: mockIntegrations,
+ currentIntegration: mockIntegrations[0],
+ currentHttpIntegration: { id: mockIntegrations[0].id, ...httpMappingData },
+ },
+ provide: {
+ multiIntegrations: true,
+ },
+ loading: false,
+ });
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValueOnce({});
+ findIntegrationsList().vm.$emit('edit-integration', updateHttpVariables);
+ });
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValueOnce({});
- findIntegrationsList().vm.$emit('edit-integration', updateHttpVariables);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: updateCurrentHttpIntegrationMutation,
- variables: { ...mockIntegrations[0], ...httpMappingData },
- });
- });
+ it('requests `currentHttpIntegration`', () => {
+ expect(wrapper.vm.$apollo.addSmartQuery).toHaveBeenCalledWith(
+ 'currentHttpIntegration',
+ expect.objectContaining({
+ query: getHttpIntegrationQuery,
+ result: expect.any(Function),
+ update: expect.any(Function),
+ variables: expect.any(Function),
+ }),
+ );
+ });
- it('calls `$apollo.mutate` with `updateCurrentPrometheusIntegrationMutation` on PROMETHEUS integration edit', () => {
- createComponent({
- data: {
- integrations: { list: mockIntegrations },
- currentIntegration: mockIntegrations[3],
- httpIntegrations,
- },
- loading: false,
+ it('calls `$apollo.mutate` with `updateCurrentHttpIntegrationMutation`', () => {
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: updateCurrentHttpIntegrationMutation,
+ variables: { ...mockIntegrations[0], ...httpMappingData },
+ });
+ });
});
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
- findIntegrationsList().vm.$emit('edit-integration', updatePrometheusVariables);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: updateCurrentPrometheusIntegrationMutation,
- variables: mockIntegrations[3],
+ describe('Prometheus', () => {
+ it('calls `$apollo.mutate` with `updateCurrentPrometheusIntegrationMutation`', () => {
+ createComponent({
+ data: {
+ integrations: mockIntegrations,
+ currentIntegration: mockIntegrations[3],
+ },
+ loading: false,
+ });
+
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
+ findIntegrationsList().vm.$emit('edit-integration', updatePrometheusVariables);
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: updateCurrentPrometheusIntegrationMutation,
+ variables: mockIntegrations[3],
+ });
+ });
});
});
diff --git a/spec/frontend/alerts_settings/components/util.js b/spec/frontend/alerts_settings/components/util.js
deleted file mode 100644
index 5c07f22f1c9..00000000000
--- a/spec/frontend/alerts_settings/components/util.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const PROMETHEUS_URL = '/prometheus/alerts/notify.json';
-const GENERIC_URL = '/alerts/notify.json';
-const KEY = 'abcedfg123';
-const INVALID_URL = 'http://invalid';
-const ACTIVE = false;
-
-export const defaultAlertSettingsConfig = {
- generic: {
- authorizationKey: KEY,
- formPath: INVALID_URL,
- url: GENERIC_URL,
- alertsSetupUrl: INVALID_URL,
- alertsUsageUrl: INVALID_URL,
- active: ACTIVE,
- },
- prometheus: {
- authorizationKey: KEY,
- prometheusFormPath: INVALID_URL,
- url: PROMETHEUS_URL,
- active: ACTIVE,
- },
- projectPath: '',
- multiIntegrations: true,
-};
diff --git a/spec/frontend/alerts_settings/utils/mapping_transformations_spec.js b/spec/frontend/alerts_settings/utils/mapping_transformations_spec.js
index 62b95c6078b..8a0800457c6 100644
--- a/spec/frontend/alerts_settings/utils/mapping_transformations_spec.js
+++ b/spec/frontend/alerts_settings/utils/mapping_transformations_spec.js
@@ -1,4 +1,8 @@
-import { getMappingData, transformForSave } from '~/alerts_settings/utils/mapping_transformations';
+import {
+ getMappingData,
+ setFieldsLabels,
+ transformForSave,
+} from '~/alerts_settings/utils/mapping_transformations';
import alertFields from '../mocks/alert_fields.json';
import parsedMapping from '../mocks/parsed_mapping.json';
@@ -64,4 +68,33 @@ describe('Mapping Transformation Utilities', () => {
expect(result).toEqual([]);
});
});
+
+ describe('setFieldsLabels', () => {
+ const nonNestedFields = [{ label: 'title' }];
+ const nonNestedFieldsResult = { displayLabel: 'Title', tooltip: undefined };
+
+ const nestedFields = [
+ {
+ label: 'field/subfield',
+ },
+ ];
+ const nestedFieldsResult = { displayLabel: '...Subfield', tooltip: 'field.subfield' };
+
+ const nestedArrayFields = [
+ {
+ label: 'fields[1]/subfield',
+ },
+ ];
+
+ const nestedArrayFieldsResult = { displayLabel: '...Subfield', tooltip: 'fields[1].subfield' };
+
+ it.each`
+ type | fields | result
+ ${'not nested field'} | ${nonNestedFields} | ${nonNestedFieldsResult}
+ ${'nested field'} | ${nestedFields} | ${nestedFieldsResult}
+ ${'nested inside array'} | ${nestedArrayFields} | ${nestedArrayFieldsResult}
+ `('adds correct displayLabel and tooltip for $type', ({ fields, result }) => {
+ expect(setFieldsLabels(fields)[0]).toMatchObject(result);
+ });
+ });
});
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index cb29dab86bf..139128e6d4a 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -930,7 +930,7 @@ describe('Api', () => {
describe('createBranch', () => {
it('creates new branch', (done) => {
- const ref = 'master';
+ const ref = 'main';
const branch = 'new-branch-name';
const dummyProjectPath = 'gitlab-org/gitlab-ce';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${encodeURIComponent(
@@ -1262,7 +1262,7 @@ describe('Api', () => {
)}/merge_requests`;
const options = {
source_branch: 'feature',
- target_branch: 'master',
+ target_branch: 'main',
title: 'Add feature',
};
diff --git a/spec/frontend/batch_comments/components/preview_dropdown_spec.js b/spec/frontend/batch_comments/components/preview_dropdown_spec.js
new file mode 100644
index 00000000000..41be04d0b7e
--- /dev/null
+++ b/spec/frontend/batch_comments/components/preview_dropdown_spec.js
@@ -0,0 +1,71 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PreviewDropdown from '~/batch_comments/components/preview_dropdown.vue';
+
+Vue.use(Vuex);
+
+let wrapper;
+
+const toggleActiveFileByHash = jest.fn();
+const scrollToDraft = jest.fn();
+
+function factory({ viewDiffsFileByFile = false, draftsCount = 1, sortedDrafts = [] } = {}) {
+ const store = new Vuex.Store({
+ modules: {
+ diffs: {
+ namespaced: true,
+ actions: {
+ toggleActiveFileByHash,
+ },
+ state: {
+ viewDiffsFileByFile,
+ },
+ },
+ batchComments: {
+ namespaced: true,
+ actions: { scrollToDraft },
+ getters: { draftsCount: () => draftsCount, sortedDrafts: () => sortedDrafts },
+ },
+ },
+ });
+
+ wrapper = shallowMountExtended(PreviewDropdown, {
+ store,
+ });
+}
+
+describe('Batch comments preview dropdown', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('clicking draft', () => {
+ it('toggles active file when viewDiffsFileByFile is true', async () => {
+ factory({
+ viewDiffsFileByFile: true,
+ sortedDrafts: [{ id: 1, file_hash: 'hash' }],
+ });
+
+ wrapper.findByTestId('preview-item').vm.$emit('click');
+
+ await Vue.nextTick();
+
+ expect(toggleActiveFileByHash).toHaveBeenCalledWith(expect.anything(), 'hash');
+ expect(scrollToDraft).toHaveBeenCalledWith(expect.anything(), { id: 1, file_hash: 'hash' });
+ });
+
+ it('calls scrollToDraft', async () => {
+ factory({
+ viewDiffsFileByFile: false,
+ sortedDrafts: [{ id: 1 }],
+ });
+
+ wrapper.findByTestId('preview-item').vm.$emit('click');
+
+ await Vue.nextTick();
+
+ expect(scrollToDraft).toHaveBeenCalledWith(expect.anything(), { id: 1 });
+ });
+ });
+});
diff --git a/spec/frontend/behaviors/date_picker_spec.js b/spec/frontend/behaviors/date_picker_spec.js
new file mode 100644
index 00000000000..9f7701a0366
--- /dev/null
+++ b/spec/frontend/behaviors/date_picker_spec.js
@@ -0,0 +1,30 @@
+import * as Pikaday from 'pikaday';
+import initDatePickers from '~/behaviors/date_picker';
+import * as utils from '~/lib/utils/datetime_utility';
+
+jest.mock('pikaday');
+jest.mock('~/lib/utils/datetime_utility');
+
+describe('date_picker behavior', () => {
+ let pikadayMock;
+ let parseMock;
+
+ beforeEach(() => {
+ pikadayMock = jest.spyOn(Pikaday, 'default');
+ parseMock = jest.spyOn(utils, 'parsePikadayDate');
+ setFixtures(`
+ <div>
+ <input class="datepicker" value="2020-10-01" />
+ </div>
+ <div>
+ <input class="datepicker" value="" />
+ </div>`);
+ });
+
+ it('instantiates Pikaday for every instance of a .datepicker class', () => {
+ initDatePickers();
+
+ expect(pikadayMock.mock.calls.length).toEqual(2);
+ expect(parseMock.mock.calls).toEqual([['2020-10-01'], ['']]);
+ });
+});
diff --git a/spec/frontend/behaviors/shortcuts/shortcut_spec.js b/spec/frontend/behaviors/shortcuts/shortcut_spec.js
new file mode 100644
index 00000000000..44bb74ce179
--- /dev/null
+++ b/spec/frontend/behaviors/shortcuts/shortcut_spec.js
@@ -0,0 +1,96 @@
+import { shallowMount } from '@vue/test-utils';
+import Shortcut from '~/behaviors/shortcuts/shortcut.vue';
+
+describe('Shortcut Vue Component', () => {
+ const render = (shortcuts) => shallowMount(Shortcut, { propsData: { shortcuts } }).html();
+
+ afterEach(() => {
+ delete window.gl.client;
+ });
+
+ describe.each([true, false])('With browser env isMac: %p', (isMac) => {
+ beforeEach(() => {
+ window.gl = { client: { isMac } };
+ });
+
+ it.each([
+ ['up', '<kbd>↑</kbd>'],
+ ['down', '<kbd>↓</kbd>'],
+ ['left', '<kbd>←</kbd>'],
+ ['right', '<kbd>→</kbd>'],
+ ['ctrl', '<kbd>Ctrl</kbd>'],
+ ['shift', '<kbd>Shift</kbd>'],
+ ['enter', '<kbd>Enter</kbd>'],
+ ['esc', '<kbd>Esc</kbd>'],
+ // Some normal ascii letter
+ ['a', '<kbd>a</kbd>'],
+ // A non-ASCII letter
+ ['ø', '<kbd>ø</kbd>'],
+ // A number
+ ['5', '<kbd>5</kbd>'],
+ ])('renders platform agnostic key %p as: %p', (key, rendered) => {
+ expect(render([key])).toEqual(`<div>${rendered}</div>`);
+ });
+
+ it('renders keys combined with plus ("+") correctly', () => {
+ expect(render(['shift+a+b+c'])).toEqual(
+ `<div><kbd>Shift</kbd> + <kbd>a</kbd> + <kbd>b</kbd> + <kbd>c</kbd></div>`,
+ );
+ });
+
+ it('renders keys combined with space (" ") correctly', () => {
+ expect(render(['shift a b c'])).toEqual(
+ `<div><kbd>Shift</kbd> then <kbd>a</kbd> then <kbd>b</kbd> then <kbd>c</kbd></div>`,
+ );
+ });
+
+ it('renders multiple shortcuts correctly', () => {
+ expect(render(['shift+[', 'shift+k'])).toEqual(
+ `<div><kbd>Shift</kbd> + <kbd>[</kbd> or <br><kbd>Shift</kbd> + <kbd>k</kbd></div>`,
+ );
+ expect(render(['[', 'k'])).toEqual(`<div><kbd>[</kbd> or <kbd>k</kbd></div>`);
+ });
+ });
+
+ describe('With browser env isMac: true', () => {
+ beforeEach(() => {
+ window.gl = { client: { isMac: true } };
+ });
+
+ it.each([
+ ['mod', '<kbd>⌘</kbd>'],
+ ['command', '<kbd>⌘</kbd>'],
+ ['meta', '<kbd>⌘</kbd>'],
+ ['option', '<kbd>⌥</kbd>'],
+ ['alt', '<kbd>⌥</kbd>'],
+ ])('renders platform specific key %p as: %p', (key, rendered) => {
+ expect(render([key])).toEqual(`<div>${rendered}</div>`);
+ });
+
+ it('does render Mac specific shortcuts', () => {
+ expect(render(['command+[', 'ctrl+k'])).toEqual(
+ `<div><kbd>⌘</kbd> + <kbd>[</kbd> or <br><kbd>Ctrl</kbd> + <kbd>k</kbd></div>`,
+ );
+ });
+ });
+
+ describe('With browser env isMac: false', () => {
+ beforeEach(() => {
+ window.gl = { client: { isMac: false } };
+ });
+
+ it.each([
+ ['mod', '<kbd>Ctrl</kbd>'],
+ ['command', ''],
+ ['meta', ''],
+ ['option', '<kbd>Alt</kbd>'],
+ ['alt', '<kbd>Alt</kbd>'],
+ ])('renders platform specific key %p as: %p', (key, rendered) => {
+ expect(render([key])).toEqual(`<div>${rendered}</div>`);
+ });
+
+ it('does not render Mac specific shortcuts', () => {
+ expect(render(['command+[', 'ctrl+k'])).toEqual(`<div><kbd>Ctrl</kbd> + <kbd>k</kbd></div>`);
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index 022f8c05e1e..ceafa6ead94 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -1,4 +1,5 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlLabel } from '@gitlab/ui';
+import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
import Vuex from 'vuex';
import BoardCard from '~/boards/components/board_card.vue';
@@ -14,10 +15,11 @@ describe('Board card', () => {
const localVue = createLocalVue();
localVue.use(Vuex);
- const createStore = ({ initialState = {}, isSwimlanesOn = false } = {}) => {
+ const createStore = ({ initialState = {} } = {}) => {
mockActions = {
toggleBoardItem: jest.fn(),
toggleBoardItemMultiSelection: jest.fn(),
+ performSearch: jest.fn(),
};
store = new Vuex.Store({
@@ -28,19 +30,21 @@ describe('Board card', () => {
},
actions: mockActions,
getters: {
- isSwimlanesOn: () => isSwimlanesOn,
isEpicBoard: () => false,
},
});
};
// this mountComponent helper must be called after the root beforeEach because it depends on the list being initialized
- const mountComponent = ({ propsData = {}, provide = {} } = {}) => {
- wrapper = shallowMount(BoardCard, {
+ const mountComponent = ({
+ propsData = {},
+ provide = {},
+ mountFn = shallowMount,
+ stubs = { BoardCardInner },
+ } = {}) => {
+ wrapper = mountFn(BoardCard, {
localVue,
- stubs: {
- BoardCardInner,
- },
+ stubs,
store,
propsData: {
list: mockLabelList,
@@ -74,72 +78,76 @@ describe('Board card', () => {
store = null;
});
- describe.each`
- isSwimlanesOn
- ${true} | ${false}
- `('when isSwimlanesOn is $isSwimlanesOn', ({ isSwimlanesOn }) => {
- it('should not highlight the card by default', async () => {
- createStore({ isSwimlanesOn });
- mountComponent();
+ describe('when GlLabel is clicked in BoardCardInner', () => {
+ it('does not call toggleBoardItem', () => {
+ createStore({ initialState: { isShowingLabels: true } });
+ mountComponent({ mountFn: mount, stubs: {} });
+
+ wrapper.find(GlLabel).trigger('mouseup');
- expect(wrapper.classes()).not.toContain('is-active');
- expect(wrapper.classes()).not.toContain('multi-select');
+ expect(mockActions.toggleBoardItem).toHaveBeenCalledTimes(0);
});
+ });
- it('should highlight the card with a correct style when selected', async () => {
- createStore({
- initialState: {
- activeId: mockIssue.id,
- },
- isSwimlanesOn,
- });
- mountComponent();
+ it('should not highlight the card by default', async () => {
+ createStore();
+ mountComponent();
- expect(wrapper.classes()).toContain('is-active');
- expect(wrapper.classes()).not.toContain('multi-select');
+ expect(wrapper.classes()).not.toContain('is-active');
+ expect(wrapper.classes()).not.toContain('multi-select');
+ });
+
+ it('should highlight the card with a correct style when selected', async () => {
+ createStore({
+ initialState: {
+ activeId: mockIssue.id,
+ },
});
+ mountComponent();
- it('should highlight the card with a correct style when multi-selected', async () => {
- createStore({
- initialState: {
- activeId: inactiveId,
- selectedBoardItems: [mockIssue],
- },
- isSwimlanesOn,
- });
- mountComponent();
+ expect(wrapper.classes()).toContain('is-active');
+ expect(wrapper.classes()).not.toContain('multi-select');
+ });
- expect(wrapper.classes()).toContain('multi-select');
- expect(wrapper.classes()).not.toContain('is-active');
+ it('should highlight the card with a correct style when multi-selected', async () => {
+ createStore({
+ initialState: {
+ activeId: inactiveId,
+ selectedBoardItems: [mockIssue],
+ },
});
+ mountComponent();
- describe('when mouseup event is called on the card', () => {
- beforeEach(() => {
- createStore({ isSwimlanesOn });
- mountComponent();
- });
+ expect(wrapper.classes()).toContain('multi-select');
+ expect(wrapper.classes()).not.toContain('is-active');
+ });
- describe('when not using multi-select', () => {
- it('should call vuex action "toggleBoardItem" with correct parameters', async () => {
- await selectCard();
+ describe('when mouseup event is called on the card', () => {
+ beforeEach(() => {
+ createStore();
+ mountComponent();
+ });
+
+ describe('when not using multi-select', () => {
+ it('should call vuex action "toggleBoardItem" with correct parameters', async () => {
+ await selectCard();
- expect(mockActions.toggleBoardItem).toHaveBeenCalledTimes(1);
- expect(mockActions.toggleBoardItem).toHaveBeenCalledWith(expect.any(Object), {
- boardItem: mockIssue,
- });
+ expect(mockActions.toggleBoardItem).toHaveBeenCalledTimes(1);
+ expect(mockActions.toggleBoardItem).toHaveBeenCalledWith(expect.any(Object), {
+ boardItem: mockIssue,
});
});
+ });
- describe('when using multi-select', () => {
- it('should call vuex action "multiSelectBoardItem" with correct parameters', async () => {
- await multiSelectCard();
+ describe('when using multi-select', () => {
+ it('should call vuex action "multiSelectBoardItem" with correct parameters', async () => {
+ await multiSelectCard();
- expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledTimes(1);
- expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledWith(
- expect.any(Object),
- mockIssue,
- );
- });
+ expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledTimes(1);
+ expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledWith(
+ expect.any(Object),
+ mockIssue,
+ );
});
});
});
diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js
index 7f949739891..01c99a02db2 100644
--- a/spec/frontend/boards/components/board_content_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_content_sidebar_spec.js
@@ -6,9 +6,9 @@ import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue';
import BoardSidebarDueDate from '~/boards/components/sidebar/board_sidebar_due_date.vue';
import BoardSidebarLabelsSelect from '~/boards/components/sidebar/board_sidebar_labels_select.vue';
import BoardSidebarMilestoneSelect from '~/boards/components/sidebar/board_sidebar_milestone_select.vue';
-import BoardSidebarSubscription from '~/boards/components/sidebar/board_sidebar_subscription.vue';
import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue';
import { ISSUABLE } from '~/boards/constants';
+import SidebarSubscriptionsWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue';
import { mockIssue, mockIssueGroupPath, mockIssueProjectPath } from '../mock_data';
describe('BoardContentSidebar', () => {
@@ -111,7 +111,7 @@ describe('BoardContentSidebar', () => {
});
it('renders BoardSidebarSubscription', () => {
- expect(wrapper.find(BoardSidebarSubscription).exists()).toBe(true);
+ expect(wrapper.find(SidebarSubscriptionsWidget).exists()).toBe(true);
});
it('renders BoardSidebarMilestoneSelect', () => {
diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js
new file mode 100644
index 00000000000..e27badca9de
--- /dev/null
+++ b/spec/frontend/boards/components/board_filtered_search_spec.js
@@ -0,0 +1,146 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import BoardFilteredSearch from '~/boards/components/board_filtered_search.vue';
+import { createStore } from '~/boards/stores';
+import * as urlUtility from '~/lib/utils/url_utility';
+import { __ } from '~/locale';
+import FilteredSearchBarRoot from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
+import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
+import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
+
+Vue.use(Vuex);
+
+describe('BoardFilteredSearch', () => {
+ let wrapper;
+ let store;
+ const tokens = [
+ {
+ icon: 'labels',
+ title: __('Label'),
+ type: 'label_name',
+ operators: [
+ { value: '=', description: 'is' },
+ { value: '!=', description: 'is not' },
+ ],
+ token: LabelToken,
+ unique: false,
+ symbol: '~',
+ fetchLabels: () => new Promise(() => {}),
+ },
+ {
+ icon: 'pencil',
+ title: __('Author'),
+ type: 'author_username',
+ operators: [
+ { value: '=', description: 'is' },
+ { value: '!=', description: 'is not' },
+ ],
+ symbol: '@',
+ token: AuthorToken,
+ unique: true,
+ fetchAuthors: () => new Promise(() => {}),
+ },
+ ];
+
+ const createComponent = ({ initialFilterParams = {} } = {}) => {
+ wrapper = shallowMount(BoardFilteredSearch, {
+ provide: { initialFilterParams, fullPath: '' },
+ store,
+ propsData: {
+ tokens,
+ },
+ });
+ };
+
+ const findFilteredSearch = () => wrapper.findComponent(FilteredSearchBarRoot);
+
+ beforeEach(() => {
+ // this is needed for the performSearch action call
+ window.gon = { features: {} };
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ store = createStore();
+
+ jest.spyOn(store, 'dispatch');
+
+ createComponent();
+ });
+
+ it('renders FilteredSearch', () => {
+ expect(findFilteredSearch().exists()).toBe(true);
+ });
+
+ it('passes the correct tokens to FilteredSearch', () => {
+ expect(findFilteredSearch().props('tokens')).toEqual(tokens);
+ });
+
+ describe('when onFilter is emitted', () => {
+ it('calls performSearch', () => {
+ findFilteredSearch().vm.$emit('onFilter', [{ value: { data: '' } }]);
+
+ expect(store.dispatch).toHaveBeenCalledWith('performSearch');
+ });
+
+ it('calls updateHistory', () => {
+ jest.spyOn(urlUtility, 'updateHistory');
+ findFilteredSearch().vm.$emit('onFilter', [{ value: { data: 'searchQuery' } }]);
+
+ expect(urlUtility.updateHistory).toHaveBeenCalledWith({
+ replace: true,
+ title: '',
+ url: 'http://test.host/',
+ });
+ });
+ });
+ });
+
+ describe('when searching', () => {
+ beforeEach(() => {
+ store = createStore();
+
+ jest.spyOn(store, 'dispatch');
+
+ createComponent();
+ });
+
+ it('sets the url params to the correct results', async () => {
+ const mockFilters = [
+ { type: 'author_username', value: { data: 'root', operator: '=' } },
+ { type: 'label_name', value: { data: 'label', operator: '=' } },
+ { type: 'label_name', value: { data: 'label2', operator: '=' } },
+ ];
+ jest.spyOn(urlUtility, 'updateHistory');
+ findFilteredSearch().vm.$emit('onFilter', mockFilters);
+
+ expect(urlUtility.updateHistory).toHaveBeenCalledWith({
+ title: '',
+ replace: true,
+ url: 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label2',
+ });
+ });
+ });
+
+ describe('when url params are already set', () => {
+ beforeEach(() => {
+ store = createStore();
+
+ jest.spyOn(store, 'dispatch');
+
+ createComponent({ initialFilterParams: { authorUsername: 'root', labelName: ['label'] } });
+ });
+
+ it('passes the correct props to FilterSearchBar', () => {
+ expect(findFilteredSearch().props('initialFilterValue')).toEqual([
+ { type: 'author_username', value: { data: 'root', operator: '=' } },
+ { type: 'label_name', value: { data: 'label', operator: '=' } },
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
index 153d0640b23..ad682774ee6 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
@@ -1,7 +1,11 @@
import { GlLabel } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
-import { labels as TEST_LABELS, mockIssue as TEST_ISSUE } from 'jest/boards/mock_data';
+import {
+ labels as TEST_LABELS,
+ mockIssue as TEST_ISSUE,
+ mockIssueFullPath as TEST_ISSUE_FULLPATH,
+} from 'jest/boards/mock_data';
import BoardEditableItem from '~/boards/components/sidebar/board_editable_item.vue';
import BoardSidebarLabelsSelect from '~/boards/components/sidebar/board_sidebar_labels_select.vue';
import { createStore } from '~/boards/stores';
@@ -23,7 +27,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
wrapper = null;
});
- const createWrapper = ({ labels = [] } = {}) => {
+ const createWrapper = ({ labels = [], providedValues = {} } = {}) => {
store = createStore();
store.state.boardItems = { [TEST_ISSUE.id]: { ...TEST_ISSUE, labels } };
store.state.activeId = TEST_ISSUE.id;
@@ -32,9 +36,9 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
store,
provide: {
canUpdate: true,
- labelsFetchPath: TEST_HOST,
labelsManagePath: TEST_HOST,
labelsFilterBasePath: TEST_HOST,
+ ...providedValues,
},
stubs: {
BoardEditableItem,
@@ -48,6 +52,22 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
wrapper.findAll(GlLabel).wrappers.map((item) => item.props('title'));
const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]');
+ describe('when labelsFetchPath is provided', () => {
+ it('uses injected labels fetch path', () => {
+ createWrapper({ providedValues: { labelsFetchPath: 'foobar' } });
+
+ expect(findLabelsSelect().props('labelsFetchPath')).toEqual('foobar');
+ });
+ });
+
+ it('uses the default project label endpoint', () => {
+ createWrapper();
+
+ expect(findLabelsSelect().props('labelsFetchPath')).toEqual(
+ `/${TEST_ISSUE_FULLPATH}/-/labels?include_ancestor_groups=true`,
+ );
+ });
+
it('renders "None" when no labels are selected', () => {
createWrapper();
@@ -78,7 +98,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
it('commits change to the server', () => {
expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({
addLabelIds: TEST_LABELS.map((label) => label.id),
- projectPath: 'gitlab-org/test-subgroup/gitlab-test',
+ projectPath: TEST_ISSUE_FULLPATH,
removeLabelIds: [],
});
});
@@ -103,7 +123,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({
addLabelIds: [5, 7],
removeLabelIds: [6],
- projectPath: 'gitlab-org/test-subgroup/gitlab-test',
+ projectPath: TEST_ISSUE_FULLPATH,
});
});
});
@@ -122,7 +142,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({
removeLabelIds: [getIdFromGraphQLId(testLabel.id)],
- projectPath: 'gitlab-org/test-subgroup/gitlab-test',
+ projectPath: TEST_ISSUE_FULLPATH,
});
});
});
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 1c5b7cf8248..bcaca9522e4 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -151,6 +151,8 @@ export const rawIssue = {
},
};
+export const mockIssueFullPath = 'gitlab-org/test-subgroup/gitlab-test';
+
export const mockIssue = {
id: 'gid://gitlab/Issue/436',
iid: '27',
@@ -159,8 +161,8 @@ export const mockIssue = {
timeEstimate: 0,
weight: null,
confidential: false,
- referencePath: 'gitlab-org/test-subgroup/gitlab-test#27',
- path: '/gitlab-org/test-subgroup/gitlab-test/-/issues/27',
+ referencePath: `${mockIssueFullPath}#27`,
+ path: `/${mockIssueFullPath}/-/issues/27`,
assignees,
labels: [
{
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index 460e77a3f03..09343b5704f 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -1,15 +1,21 @@
import * as Sentry from '@sentry/browser';
+import {
+ inactiveId,
+ ISSUABLE,
+ ListType,
+ issuableTypes,
+ BoardType,
+ listsQuery,
+} from 'ee_else_ce/boards/constants';
import issueMoveListMutation from 'ee_else_ce/boards/graphql/issue_move_list.mutation.graphql';
import testAction from 'helpers/vuex_action_helper';
import {
- fullBoardId,
formatListIssues,
formatBoardLists,
formatIssueInput,
formatIssue,
getMoveData,
} from '~/boards/boards_util';
-import { inactiveId, ISSUABLE, ListType } from '~/boards/constants';
import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql';
import issueCreateMutation from '~/boards/graphql/issue_create.mutation.graphql';
import actions, { gqlClient } from '~/boards/stores/actions';
@@ -34,12 +40,6 @@ import {
jest.mock('~/flash');
-const expectNotImplemented = (action) => {
- it('is not implemented', () => {
- expect(action).toThrow(new Error('Not implemented!'));
- });
-};
-
// We need this helper to make sure projectPath includes
// subgroups when the moveIssue action is called.
const getProjectPath = (path) => path.split('#')[0];
@@ -66,20 +66,32 @@ describe('setInitialBoardData', () => {
});
describe('setFilters', () => {
- it('should commit mutation SET_FILTERS', (done) => {
+ it.each([
+ [
+ 'with correct filters as payload',
+ {
+ filters: { labelName: 'label' },
+ updatedFilters: { labelName: 'label', not: {} },
+ },
+ ],
+ [
+ 'and updates assigneeWildcardId',
+ {
+ filters: { assigneeId: 'None' },
+ updatedFilters: { assigneeWildcardId: 'NONE', not: {} },
+ },
+ ],
+ ])('should commit mutation SET_FILTERS %s', (_, { filters, updatedFilters }) => {
const state = {
filters: {},
};
- const filters = { labelName: 'label' };
-
testAction(
actions.setFilters,
filters,
state,
- [{ type: types.SET_FILTERS, payload: { ...filters, not: {} } }],
+ [{ type: types.SET_FILTERS, payload: updatedFilters }],
[],
- done,
);
});
});
@@ -120,20 +132,12 @@ describe('setActiveId', () => {
});
describe('fetchLists', () => {
- it('should dispatch fetchIssueLists action', () => {
- testAction({
- action: actions.fetchLists,
- expectedActions: [{ type: 'fetchIssueLists' }],
- });
- });
-});
-
-describe('fetchIssueLists', () => {
- const state = {
+ let state = {
fullPath: 'gitlab-org',
- boardId: '1',
+ fullBoardId: 'gid://gitlab/Board/1',
filterParams: {},
boardType: 'group',
+ issuableType: 'issue',
};
let queryResponse = {
@@ -155,7 +159,7 @@ describe('fetchIssueLists', () => {
jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
testAction(
- actions.fetchIssueLists,
+ actions.fetchLists,
{},
state,
[
@@ -173,7 +177,7 @@ describe('fetchIssueLists', () => {
jest.spyOn(gqlClient, 'query').mockResolvedValue(Promise.reject());
testAction(
- actions.fetchIssueLists,
+ actions.fetchLists,
{},
state,
[
@@ -202,7 +206,7 @@ describe('fetchIssueLists', () => {
jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
testAction(
- actions.fetchIssueLists,
+ actions.fetchLists,
{},
state,
[
@@ -215,6 +219,43 @@ describe('fetchIssueLists', () => {
done,
);
});
+
+ it.each`
+ issuableType | boardType | fullBoardId | isGroup | isProject
+ ${issuableTypes.issue} | ${BoardType.group} | ${'gid://gitlab/Board/1'} | ${true} | ${false}
+ ${issuableTypes.issue} | ${BoardType.project} | ${'gid://gitlab/Board/1'} | ${false} | ${true}
+ `(
+ 'calls $issuableType query with correct variables',
+ async ({ issuableType, boardType, fullBoardId, isGroup, isProject }) => {
+ const commit = jest.fn();
+ const dispatch = jest.fn();
+
+ state = {
+ fullPath: 'gitlab-org',
+ fullBoardId,
+ filterParams: {},
+ boardType,
+ issuableType,
+ };
+
+ const variables = {
+ query: listsQuery[issuableType].query,
+ variables: {
+ fullPath: 'gitlab-org',
+ boardId: fullBoardId,
+ filters: {},
+ isGroup,
+ isProject,
+ },
+ };
+
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ await actions.fetchLists({ commit, state, dispatch });
+
+ expect(gqlClient.query).toHaveBeenCalledWith(variables);
+ },
+ );
});
describe('createList', () => {
@@ -236,7 +277,7 @@ describe('createIssueList', () => {
beforeEach(() => {
state = {
fullPath: 'gitlab-org',
- boardId: '1',
+ fullBoardId: 'gid://gitlab/Board/1',
boardType: 'group',
disabled: false,
boardLists: [{ type: 'closed' }],
@@ -366,7 +407,7 @@ describe('moveList', () => {
const state = {
fullPath: 'gitlab-org',
- boardId: '1',
+ fullBoardId: 'gid://gitlab/Board/1',
boardType: 'group',
disabled: false,
boardLists: initialBoardListsState,
@@ -409,7 +450,7 @@ describe('moveList', () => {
const state = {
fullPath: 'gitlab-org',
- boardId: '1',
+ fullBoardId: 'gid://gitlab/Board/1',
boardType: 'group',
disabled: false,
boardLists: initialBoardListsState,
@@ -443,10 +484,11 @@ describe('updateList', () => {
const state = {
fullPath: 'gitlab-org',
- boardId: '1',
+ fullBoardId: 'gid://gitlab/Board/1',
boardType: 'group',
disabled: false,
boardLists: [{ type: 'closed' }],
+ issuableType: issuableTypes.issue,
};
testAction(
@@ -490,6 +532,7 @@ describe('removeList', () => {
beforeEach(() => {
state = {
boardLists: mockListsById,
+ issuableType: issuableTypes.issue,
};
});
@@ -559,7 +602,7 @@ describe('fetchItemsForList', () => {
const state = {
fullPath: 'gitlab-org',
- boardId: '1',
+ fullBoardId: 'gid://gitlab/Board/1',
filterParams: {},
boardType: 'group',
};
@@ -946,7 +989,7 @@ describe('updateIssueOrder', () => {
const state = {
boardItems: issues,
- boardId: 'gid://gitlab/Board/1',
+ fullBoardId: 'gid://gitlab/Board/1',
};
const moveData = {
@@ -960,7 +1003,7 @@ describe('updateIssueOrder', () => {
mutation: issueMoveListMutation,
variables: {
projectPath: getProjectPath(mockIssue.referencePath),
- boardId: fullBoardId(state.boardId),
+ boardId: state.fullBoardId,
iid: mockIssue.iid,
fromListId: 1,
toListId: 2,
@@ -1362,7 +1405,7 @@ describe('setActiveItemSubscribed', () => {
[mockActiveIssue.id]: mockActiveIssue,
},
fullPath: 'gitlab-org',
- issuableType: 'issue',
+ issuableType: issuableTypes.issue,
};
const getters = { activeBoardItem: mockActiveIssue, isEpicBoard: false };
const subscribedState = true;
@@ -1470,7 +1513,7 @@ describe('setActiveIssueMilestone', () => {
describe('setActiveItemTitle', () => {
const state = {
boardItems: { [mockIssue.id]: mockIssue },
- issuableType: 'issue',
+ issuableType: issuableTypes.issue,
fullPath: 'path/f',
};
const getters = { activeBoardItem: mockIssue, isEpicBoard: false };
@@ -1522,6 +1565,33 @@ describe('setActiveItemTitle', () => {
});
});
+describe('setActiveItemConfidential', () => {
+ const state = { boardItems: { [mockIssue.id]: mockIssue } };
+ const getters = { activeBoardItem: mockIssue };
+
+ it('set confidential value on board item', (done) => {
+ const payload = {
+ itemId: getters.activeBoardItem.id,
+ prop: 'confidential',
+ value: true,
+ };
+
+ testAction(
+ actions.setActiveItemConfidential,
+ true,
+ { ...state, ...getters },
+ [
+ {
+ type: types.UPDATE_BOARD_ITEM_BY_ID,
+ payload,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+});
+
describe('fetchGroupProjects', () => {
const state = {
fullPath: 'gitlab-org',
@@ -1749,27 +1819,3 @@ describe('unsetError', () => {
});
});
});
-
-describe('fetchBacklog', () => {
- expectNotImplemented(actions.fetchBacklog);
-});
-
-describe('bulkUpdateIssues', () => {
- expectNotImplemented(actions.bulkUpdateIssues);
-});
-
-describe('fetchIssue', () => {
- expectNotImplemented(actions.fetchIssue);
-});
-
-describe('toggleIssueSubscription', () => {
- expectNotImplemented(actions.toggleIssueSubscription);
-});
-
-describe('showPage', () => {
- expectNotImplemented(actions.showPage);
-});
-
-describe('toggleEmptyState', () => {
- expectNotImplemented(actions.toggleEmptyState);
-});
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
index 6114ba0af5f..e7efb21bee5 100644
--- a/spec/frontend/boards/stores/getters_spec.js
+++ b/spec/frontend/boards/stores/getters_spec.js
@@ -110,6 +110,15 @@ describe('Boards - Getters', () => {
);
});
+ it('returns group path of last subgroup for the active issue', () => {
+ const mockActiveIssue = {
+ referencePath: 'gitlab-org/subgroup/subsubgroup/gitlab-test#1',
+ };
+ expect(getters.groupPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual(
+ 'gitlab-org/subgroup/subsubgroup',
+ );
+ });
+
it('returns empty string as group path when active issue is an empty object', () => {
const mockActiveIssue = {};
expect(getters.groupPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual('');
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index af6d439e294..d89abcc79ae 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -13,12 +13,6 @@ import {
mockList,
} from '../mock_data';
-const expectNotImplemented = (action) => {
- it('is not implemented', () => {
- expect(action).toThrow(new Error('Not implemented!'));
- });
-};
-
describe('Board Store Mutations', () => {
let state;
@@ -158,10 +152,6 @@ describe('Board Store Mutations', () => {
});
});
- describe('REQUEST_ADD_LIST', () => {
- expectNotImplemented(mutations.REQUEST_ADD_LIST);
- });
-
describe('RECEIVE_ADD_LIST_SUCCESS', () => {
it('adds list to boardLists state', () => {
mutations.RECEIVE_ADD_LIST_SUCCESS(state, mockLists[0]);
@@ -172,10 +162,6 @@ describe('Board Store Mutations', () => {
});
});
- describe('RECEIVE_ADD_LIST_ERROR', () => {
- expectNotImplemented(mutations.RECEIVE_ADD_LIST_ERROR);
- });
-
describe('MOVE_LIST', () => {
it('updates boardLists state with reordered lists', () => {
state = {
@@ -341,10 +327,6 @@ describe('Board Store Mutations', () => {
});
});
- describe('REQUEST_ADD_ISSUE', () => {
- expectNotImplemented(mutations.REQUEST_ADD_ISSUE);
- });
-
describe('UPDATE_BOARD_ITEM_BY_ID', () => {
const issueId = '1';
const prop = 'id';
@@ -386,14 +368,6 @@ describe('Board Store Mutations', () => {
});
});
- describe('RECEIVE_ADD_ISSUE_SUCCESS', () => {
- expectNotImplemented(mutations.RECEIVE_ADD_ISSUE_SUCCESS);
- });
-
- describe('RECEIVE_ADD_ISSUE_ERROR', () => {
- expectNotImplemented(mutations.RECEIVE_ADD_ISSUE_ERROR);
- });
-
describe('MUTATE_ISSUE_SUCCESS', () => {
it('updates issue in issues state', () => {
const issues = {
@@ -434,18 +408,6 @@ describe('Board Store Mutations', () => {
});
});
- describe('REQUEST_UPDATE_ISSUE', () => {
- expectNotImplemented(mutations.REQUEST_UPDATE_ISSUE);
- });
-
- describe('RECEIVE_UPDATE_ISSUE_SUCCESS', () => {
- expectNotImplemented(mutations.RECEIVE_UPDATE_ISSUE_SUCCESS);
- });
-
- describe('RECEIVE_UPDATE_ISSUE_ERROR', () => {
- expectNotImplemented(mutations.RECEIVE_UPDATE_ISSUE_ERROR);
- });
-
describe('ADD_BOARD_ITEM_TO_LIST', () => {
beforeEach(() => {
setBoardsListsState();
@@ -540,14 +502,6 @@ describe('Board Store Mutations', () => {
});
});
- describe('SET_CURRENT_PAGE', () => {
- expectNotImplemented(mutations.SET_CURRENT_PAGE);
- });
-
- describe('TOGGLE_EMPTY_STATE', () => {
- expectNotImplemented(mutations.TOGGLE_EMPTY_STATE);
- });
-
describe('REQUEST_GROUP_PROJECTS', () => {
it('Should set isLoading in groupProjectsFlags to true in state when fetchNext is false', () => {
mutations[types.REQUEST_GROUP_PROJECTS](state, false);
diff --git a/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap b/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
index 261c406171e..2afca66b0c1 100644
--- a/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
+++ b/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
@@ -3,7 +3,7 @@
exports[`Branch divergence graph component renders ahead and behind count 1`] = `
<div
class="divergence-graph px-2 d-none d-md-block"
- title="10 commits behind master, 10 commits ahead"
+ title="10 commits behind main, 10 commits ahead"
>
<graph-bar-stub
count="10"
@@ -26,7 +26,7 @@ exports[`Branch divergence graph component renders ahead and behind count 1`] =
exports[`Branch divergence graph component renders distance count 1`] = `
<div
class="divergence-graph px-2 d-none d-md-block"
- title="More than 900 commits different with master"
+ title="More than 900 commits different with main"
>
<graph-bar-stub
count="900"
diff --git a/spec/frontend/branches/components/divergence_graph_spec.js b/spec/frontend/branches/components/divergence_graph_spec.js
index b54b2ceb233..3b565539f87 100644
--- a/spec/frontend/branches/components/divergence_graph_spec.js
+++ b/spec/frontend/branches/components/divergence_graph_spec.js
@@ -15,7 +15,7 @@ describe('Branch divergence graph component', () => {
it('renders ahead and behind count', () => {
factory({
- defaultBranch: 'master',
+ defaultBranch: 'main',
aheadCount: 10,
behindCount: 10,
maxCommits: 100,
@@ -27,18 +27,18 @@ describe('Branch divergence graph component', () => {
it('sets title for ahead and behind count', () => {
factory({
- defaultBranch: 'master',
+ defaultBranch: 'main',
aheadCount: 10,
behindCount: 10,
maxCommits: 100,
});
- expect(vm.attributes('title')).toBe('10 commits behind master, 10 commits ahead');
+ expect(vm.attributes('title')).toBe('10 commits behind main, 10 commits ahead');
});
it('renders distance count', () => {
factory({
- defaultBranch: 'master',
+ defaultBranch: 'main',
aheadCount: 0,
behindCount: 0,
distance: 900,
@@ -55,13 +55,13 @@ describe('Branch divergence graph component', () => {
${1100} | ${'999+'}
`('sets title for $distance as $titleText', ({ distance, titleText }) => {
factory({
- defaultBranch: 'master',
+ defaultBranch: 'main',
aheadCount: 0,
behindCount: 0,
distance,
maxCommits: 100,
});
- expect(vm.attributes('title')).toBe(`More than ${titleText} commits different with master`);
+ expect(vm.attributes('title')).toBe(`More than ${titleText} commits different with main`);
});
});
diff --git a/spec/frontend/branches/divergence_graph_spec.js b/spec/frontend/branches/divergence_graph_spec.js
index be97a1724d3..7c367f83add 100644
--- a/spec/frontend/branches/divergence_graph_spec.js
+++ b/spec/frontend/branches/divergence_graph_spec.js
@@ -9,14 +9,14 @@ describe('Divergence graph', () => {
mock = new MockAdapter(axios);
mock.onGet('/-/diverging_counts').reply(200, {
- master: { ahead: 1, behind: 1 },
+ main: { ahead: 1, behind: 1 },
'test/hello-world': { ahead: 1, behind: 1 },
});
jest.spyOn(axios, 'get');
document.body.innerHTML = `
- <div class="js-branch-item" data-name="master"><div class="js-branch-divergence-graph"></div></div>
+ <div class="js-branch-item" data-name="main"><div class="js-branch-divergence-graph"></div></div>
<div class="js-branch-item" data-name="test/hello-world"><div class="js-branch-divergence-graph"></div></div>
`;
});
@@ -28,7 +28,7 @@ describe('Divergence graph', () => {
it('calls axios get with list of branch names', () =>
init('/-/diverging_counts').then(() => {
expect(axios.get).toHaveBeenCalledWith('/-/diverging_counts', {
- params: { names: ['master', 'test/hello-world'] },
+ params: { names: ['main', 'test/hello-world'] },
});
}));
@@ -46,7 +46,7 @@ describe('Divergence graph', () => {
it('creates Vue components', () =>
init('/-/diverging_counts').then(() => {
- expect(document.querySelector('[data-name="master"]').innerHTML).not.toEqual('');
+ expect(document.querySelector('[data-name="main"]').innerHTML).not.toEqual('');
expect(document.querySelector('[data-name="test/hello-world"]').innerHTML).not.toEqual('');
}));
});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index 752783a306a..eb18147fcef 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -226,7 +226,7 @@ describe('Ci variable modal', () => {
};
createComponent(mount);
store.state.variable = validMaskandKeyVariable;
- store.state.maskableRegex = /^[a-zA-Z0-9_+=/@:-]{8,}$/;
+ store.state.maskableRegex = /^[a-zA-Z0-9_+=/@:.~-]{8,}$/;
});
it('does not disable the submit button', () => {
diff --git a/spec/frontend/code_navigation/components/app_spec.js b/spec/frontend/code_navigation/components/app_spec.js
index ea389fa35c0..798f3bc0ee2 100644
--- a/spec/frontend/code_navigation/components/app_spec.js
+++ b/spec/frontend/code_navigation/components/app_spec.js
@@ -16,7 +16,7 @@ function factory(initialState = {}) {
state: {
...createState(),
...initialState,
- definitionPathPrefix: 'https://test.com/blob/master',
+ definitionPathPrefix: 'https://test.com/blob/main',
},
actions: {
fetchData,
diff --git a/spec/frontend/code_navigation/store/mutations_spec.js b/spec/frontend/code_navigation/store/mutations_spec.js
index d4a75da429e..cb10729f4b6 100644
--- a/spec/frontend/code_navigation/store/mutations_spec.js
+++ b/spec/frontend/code_navigation/store/mutations_spec.js
@@ -12,11 +12,11 @@ describe('Code navigation mutations', () => {
it('sets initial data', () => {
mutations.SET_INITIAL_DATA(state, {
blobs: ['test'],
- definitionPathPrefix: 'https://test.com/blob/master',
+ definitionPathPrefix: 'https://test.com/blob/main',
});
expect(state.blobs).toEqual(['test']);
- expect(state.definitionPathPrefix).toBe('https://test.com/blob/master');
+ expect(state.definitionPathPrefix).toBe('https://test.com/blob/main');
});
});
diff --git a/spec/frontend/code_quality_walkthrough/components/__snapshots__/step_spec.js.snap b/spec/frontend/code_quality_walkthrough/components/__snapshots__/step_spec.js.snap
new file mode 100644
index 00000000000..f17d99ad257
--- /dev/null
+++ b/spec/frontend/code_quality_walkthrough/components/__snapshots__/step_spec.js.snap
@@ -0,0 +1,174 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component commit_ci_file step renders a popover 1`] = `
+<div>
+ <gl-popover-stub
+ container="viewport"
+ cssclasses=""
+ offset="90"
+ placement="right"
+ show=""
+ target="#js-code-quality-walkthrough"
+ triggers="manual"
+ >
+
+ <gl-sprintf-stub
+ message="To begin with code quality, we first need to create a new CI file using our code editor. We added a code quality template in the code editor to help you get started %{emojiStart}wink%{emojiEnd} .%{lineBreak}Take some time to review the template, when you are ready, use the %{strongStart}commit changes%{strongEnd} button at the bottom of the page."
+ />
+
+ <div
+ class="gl-mt-2 gl-text-right"
+ >
+ <gl-button-stub
+ buttontextclasses=""
+ category="tertiary"
+ href=""
+ icon=""
+ size="medium"
+ variant="link"
+ >
+
+ Got it
+
+ </gl-button-stub>
+ </div>
+ </gl-popover-stub>
+
+ <!---->
+</div>
+`;
+
+exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component failed_pipeline step renders a popover 1`] = `
+<div>
+ <gl-popover-stub
+ container="viewport"
+ cssclasses=""
+ offset="98"
+ placement="bottom"
+ show=""
+ target="#js-code-quality-walkthrough"
+ triggers="manual"
+ >
+
+ <gl-sprintf-stub
+ message="Your job failed. No worries - this happens. Let's view the logs, and see how we can fix it."
+ />
+
+ <div
+ class="gl-mt-2 gl-text-right"
+ >
+ <gl-button-stub
+ buttontextclasses=""
+ category="tertiary"
+ href="/group/project/-/jobs/:id?code_quality_walkthrough=true"
+ icon=""
+ size="medium"
+ variant="link"
+ >
+
+ View the logs
+
+ </gl-button-stub>
+ </div>
+ </gl-popover-stub>
+
+ <!---->
+</div>
+`;
+
+exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component running_pipeline step renders a popover 1`] = `
+<div>
+ <gl-popover-stub
+ container="viewport"
+ cssclasses=""
+ offset="97"
+ placement="bottom"
+ show=""
+ target="#js-code-quality-walkthrough"
+ triggers="manual"
+ >
+
+ <gl-sprintf-stub
+ message="Your pipeline can take a few minutes to run. If you enabled email notifications, you'll receive an email with your pipeline status. In the meantime, why don't you get some coffee? You earned it!"
+ />
+
+ <div
+ class="gl-mt-2 gl-text-right"
+ >
+ <gl-button-stub
+ buttontextclasses=""
+ category="tertiary"
+ href=""
+ icon=""
+ size="medium"
+ variant="link"
+ >
+
+ Got it
+
+ </gl-button-stub>
+ </div>
+ </gl-popover-stub>
+
+ <!---->
+</div>
+`;
+
+exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component success_pipeline step renders a popover 1`] = `
+<div>
+ <gl-popover-stub
+ container="viewport"
+ cssclasses=""
+ offset="98"
+ placement="bottom"
+ show=""
+ target="#js-code-quality-walkthrough"
+ triggers="manual"
+ >
+
+ <gl-sprintf-stub
+ message="A code quality job will now run every time you or your team members commit changes to your project. You can view the results of the code quality job in the job logs."
+ />
+
+ <div
+ class="gl-mt-2 gl-text-right"
+ >
+ <gl-button-stub
+ buttontextclasses=""
+ category="tertiary"
+ href="/group/project/-/jobs/:id?code_quality_walkthrough=true"
+ icon=""
+ size="medium"
+ variant="link"
+ >
+
+ View the logs
+
+ </gl-button-stub>
+ </div>
+ </gl-popover-stub>
+
+ <!---->
+</div>
+`;
+
+exports[`When the code_quality_walkthrough URL parameter is present Code Quality Walkthrough Step component troubleshoot_job step renders an alert 1`] = `
+<div>
+ <!---->
+
+ <gl-alert-stub
+ class="gl-my-5"
+ dismissible="true"
+ dismisslabel="Dismiss"
+ primarybuttontext="Read the documentation"
+ secondarybuttonlink=""
+ secondarybuttontext=""
+ title="Troubleshoot your code quality job"
+ variant="tip"
+ >
+
+ Not sure how to fix your failed job? We have compiled some tips on how to troubleshoot code quality jobs in the documentation.
+
+ </gl-alert-stub>
+</div>
+`;
diff --git a/spec/frontend/code_quality_walkthrough/components/step_spec.js b/spec/frontend/code_quality_walkthrough/components/step_spec.js
new file mode 100644
index 00000000000..c397faf1f35
--- /dev/null
+++ b/spec/frontend/code_quality_walkthrough/components/step_spec.js
@@ -0,0 +1,156 @@
+import { GlButton, GlPopover } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Cookies from 'js-cookie';
+import Step from '~/code_quality_walkthrough/components/step.vue';
+import { EXPERIMENT_NAME, STEPS } from '~/code_quality_walkthrough/constants';
+import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
+import { getParameterByName } from '~/lib/utils/common_utils';
+import Tracking from '~/tracking';
+
+jest.mock('~/lib/utils/common_utils', () => ({
+ ...jest.requireActual('~/lib/utils/common_utils'),
+ getParameterByName: jest.fn(),
+}));
+
+let wrapper;
+
+function factory({ step, link }) {
+ wrapper = shallowMount(Step, {
+ propsData: { step, link },
+ });
+}
+
+afterEach(() => {
+ wrapper.destroy();
+});
+
+const dummyLink = '/group/project/-/jobs/:id?code_quality_walkthrough=true';
+const dummyContext = 'experiment_context';
+
+const findButton = () => wrapper.findComponent(GlButton);
+const findPopover = () => wrapper.findComponent(GlPopover);
+
+describe('When the code_quality_walkthrough URL parameter is missing', () => {
+ beforeEach(() => {
+ getParameterByName.mockReturnValue(false);
+ });
+
+ it('does not render the component', () => {
+ factory({
+ step: STEPS.commitCiFile,
+ });
+
+ expect(findPopover().exists()).toBe(false);
+ });
+});
+
+describe('When the code_quality_walkthrough URL parameter is present', () => {
+ beforeEach(() => {
+ getParameterByName.mockReturnValue(true);
+ Cookies.set(EXPERIMENT_NAME, { data: dummyContext });
+ });
+
+ afterEach(() => {
+ Cookies.remove(EXPERIMENT_NAME);
+ });
+
+ describe('When mounting the component', () => {
+ beforeEach(() => {
+ jest.spyOn(Tracking, 'event');
+
+ factory({
+ step: STEPS.commitCiFile,
+ });
+ });
+
+ it('tracks an event', () => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ EXPERIMENT_NAME,
+ `${STEPS.commitCiFile}_displayed`,
+ {
+ context: {
+ schema: TRACKING_CONTEXT_SCHEMA,
+ data: dummyContext,
+ },
+ },
+ );
+ });
+ });
+
+ describe('When updating the component', () => {
+ beforeEach(() => {
+ factory({
+ step: STEPS.runningPipeline,
+ });
+
+ jest.spyOn(Tracking, 'event');
+
+ wrapper.setProps({ step: STEPS.successPipeline });
+ });
+
+ it('tracks an event', () => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ EXPERIMENT_NAME,
+ `${STEPS.successPipeline}_displayed`,
+ {
+ context: {
+ schema: TRACKING_CONTEXT_SCHEMA,
+ data: dummyContext,
+ },
+ },
+ );
+ });
+ });
+
+ describe('When dismissing a popover', () => {
+ beforeEach(() => {
+ factory({
+ step: STEPS.commitCiFile,
+ });
+
+ jest.spyOn(Cookies, 'set');
+ jest.spyOn(Tracking, 'event');
+
+ findButton().vm.$emit('click');
+ });
+
+ it('sets a cookie', () => {
+ expect(Cookies.set).toHaveBeenCalledWith(
+ EXPERIMENT_NAME,
+ { commit_ci_file: true, data: dummyContext },
+ { expires: 365 },
+ );
+ });
+
+ it('removes the popover', () => {
+ expect(findPopover().exists()).toBe(false);
+ });
+
+ it('tracks an event', () => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ EXPERIMENT_NAME,
+ `${STEPS.commitCiFile}_dismissed`,
+ {
+ context: {
+ schema: TRACKING_CONTEXT_SCHEMA,
+ data: dummyContext,
+ },
+ },
+ );
+ });
+ });
+
+ describe('Code Quality Walkthrough Step component', () => {
+ describe.each(Object.values(STEPS))('%s step', (step) => {
+ it(`renders ${step === STEPS.troubleshootJob ? 'an alert' : 'a popover'}`, () => {
+ const options = { step };
+ if ([STEPS.successPipeline, STEPS.failedPipeline].includes(step)) {
+ options.link = dummyLink;
+ }
+ factory(options);
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/commit/pipelines/pipelines_spec.js b/spec/frontend/commit/pipelines/pipelines_spec.js
index bbe02daa24b..fe928a01acf 100644
--- a/spec/frontend/commit/pipelines/pipelines_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_spec.js
@@ -1,3 +1,4 @@
+import '~/commons';
import MockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
diff --git a/spec/frontend/commits_spec.js b/spec/frontend/commits_spec.js
index 954025091cf..8189ebe6e55 100644
--- a/spec/frontend/commits_spec.js
+++ b/spec/frontend/commits_spec.js
@@ -10,7 +10,7 @@ describe('Commits List', () => {
beforeEach(() => {
setFixtures(`
- <form class="commits-search-form" action="/h5bp/html5-boilerplate/commits/master">
+ <form class="commits-search-form" action="/h5bp/html5-boilerplate/commits/main">
<input id="commits-search">
</form>
<ol id="commits-list"></ol>
@@ -59,7 +59,7 @@ describe('Commits List', () => {
jest.spyOn(window.history, 'replaceState').mockImplementation(() => {});
mock = new MockAdapter(axios);
- mock.onGet('/h5bp/html5-boilerplate/commits/master').reply(200, {
+ mock.onGet('/h5bp/html5-boilerplate/commits/main').reply(200, {
html: '<li>Result</li>',
});
diff --git a/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap b/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
new file mode 100644
index 00000000000..35c02911e27
--- /dev/null
+++ b/spec/frontend/content_editor/components/__snapshots__/toolbar_button_spec.js.snap
@@ -0,0 +1,9 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`content_editor/components/toolbar_button displays tertiary, small button with a provided label and icon 1`] = `
+"<b-button-stub size=\\"sm\\" variant=\\"default\\" type=\\"button\\" tag=\\"button\\" aria-label=\\"Bold\\" title=\\"Bold\\" class=\\"gl-mx-2 gl-button btn-default-tertiary btn-icon\\">
+ <!---->
+ <gl-icon-stub name=\\"bold\\" size=\\"16\\" class=\\"gl-button-icon\\"></gl-icon-stub>
+ <!---->
+</b-button-stub>"
+`;
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
index f055a49135b..e3741032bf4 100644
--- a/spec/frontend/content_editor/components/content_editor_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -1,26 +1,59 @@
+import { EditorContent } from '@tiptap/vue-2';
import { shallowMount } from '@vue/test-utils';
-import { EditorContent } from 'tiptap';
import ContentEditor from '~/content_editor/components/content_editor.vue';
-import createEditor from '~/content_editor/services/create_editor';
-
-jest.mock('~/content_editor/services/create_editor');
+import TopToolbar from '~/content_editor/components/top_toolbar.vue';
+import { createContentEditor } from '~/content_editor/services/create_content_editor';
describe('ContentEditor', () => {
let wrapper;
+ let editor;
- const buildWrapper = () => {
- wrapper = shallowMount(ContentEditor);
+ const createWrapper = async (contentEditor) => {
+ wrapper = shallowMount(ContentEditor, {
+ propsData: {
+ contentEditor,
+ },
+ });
};
+ beforeEach(() => {
+ editor = createContentEditor({ renderMarkdown: () => true });
+ });
+
afterEach(() => {
wrapper.destroy();
});
it('renders editor content component and attaches editor instance', () => {
- const editor = {};
+ createWrapper(editor);
+
+ expect(wrapper.findComponent(EditorContent).props().editor).toBe(editor.tiptapEditor);
+ });
+
+ it('renders top toolbar component and attaches editor instance', () => {
+ createWrapper(editor);
+
+ expect(wrapper.findComponent(TopToolbar).props().contentEditor).toBe(editor);
+ });
+
+ it.each`
+ isFocused | classes
+ ${true} | ${['md', 'md-area', 'is-focused']}
+ ${false} | ${['md', 'md-area']}
+ `(
+ 'has $classes class selectors when tiptapEditor.isFocused = $isFocused',
+ ({ isFocused, classes }) => {
+ editor.tiptapEditor.isFocused = isFocused;
+ createWrapper(editor);
+
+ expect(wrapper.classes()).toStrictEqual(classes);
+ },
+ );
+
+ it('adds isFocused class when tiptapEditor is focused', () => {
+ editor.tiptapEditor.isFocused = true;
+ createWrapper(editor);
- createEditor.mockReturnValueOnce(editor);
- buildWrapper();
- expect(wrapper.findComponent(EditorContent).props().editor).toBe(editor);
+ expect(wrapper.classes()).toContain('is-focused');
});
});
diff --git a/spec/frontend/content_editor/components/toolbar_button_spec.js b/spec/frontend/content_editor/components/toolbar_button_spec.js
new file mode 100644
index 00000000000..a49efa34017
--- /dev/null
+++ b/spec/frontend/content_editor/components/toolbar_button_spec.js
@@ -0,0 +1,98 @@
+import { GlButton } from '@gitlab/ui';
+import { Extension } from '@tiptap/core';
+import { shallowMount } from '@vue/test-utils';
+import ToolbarButton from '~/content_editor/components/toolbar_button.vue';
+import { createContentEditor } from '~/content_editor/services/create_content_editor';
+
+describe('content_editor/components/toolbar_button', () => {
+ let wrapper;
+ let tiptapEditor;
+ let toggleFooSpy;
+ const CONTENT_TYPE = 'bold';
+ const ICON_NAME = 'bold';
+ const LABEL = 'Bold';
+
+ const buildEditor = () => {
+ toggleFooSpy = jest.fn();
+ tiptapEditor = createContentEditor({
+ extensions: [
+ {
+ tiptapExtension: Extension.create({
+ addCommands() {
+ return {
+ toggleFoo: () => toggleFooSpy,
+ };
+ },
+ }),
+ },
+ ],
+ renderMarkdown: () => true,
+ }).tiptapEditor;
+
+ jest.spyOn(tiptapEditor, 'isActive');
+ };
+
+ const buildWrapper = (propsData = {}) => {
+ wrapper = shallowMount(ToolbarButton, {
+ stubs: {
+ GlButton,
+ },
+ propsData: {
+ tiptapEditor,
+ contentType: CONTENT_TYPE,
+ iconName: ICON_NAME,
+ label: LABEL,
+ ...propsData,
+ },
+ });
+ };
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ beforeEach(() => {
+ buildEditor();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays tertiary, small button with a provided label and icon', () => {
+ buildWrapper();
+
+ expect(findButton().html()).toMatchSnapshot();
+ });
+
+ it.each`
+ editorState | outcomeDescription | outcome
+ ${{ isActive: true, isFocused: true }} | ${'button is active'} | ${true}
+ ${{ isActive: false, isFocused: true }} | ${'button is not active'} | ${false}
+ ${{ isActive: true, isFocused: false }} | ${'button is not active '} | ${false}
+ `('$outcomeDescription when editor state is $editorState', ({ editorState, outcome }) => {
+ tiptapEditor.isActive.mockReturnValueOnce(editorState.isActive);
+ tiptapEditor.isFocused = editorState.isFocused;
+ buildWrapper();
+
+ expect(findButton().classes().includes('active')).toBe(outcome);
+ expect(tiptapEditor.isActive).toHaveBeenCalledWith(CONTENT_TYPE);
+ });
+
+ describe('when button is clicked', () => {
+ it('executes the content type command when executeCommand = true', async () => {
+ buildWrapper({ editorCommand: 'toggleFoo' });
+
+ await findButton().trigger('click');
+
+ expect(toggleFooSpy).toHaveBeenCalled();
+ expect(wrapper.emitted().execute).toHaveLength(1);
+ });
+
+ it('does not execute the content type command when executeCommand = false', async () => {
+ buildWrapper();
+
+ await findButton().trigger('click');
+
+ expect(toggleFooSpy).not.toHaveBeenCalled();
+ expect(wrapper.emitted().execute).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/components/top_toolbar_spec.js b/spec/frontend/content_editor/components/top_toolbar_spec.js
new file mode 100644
index 00000000000..8f47be3f489
--- /dev/null
+++ b/spec/frontend/content_editor/components/top_toolbar_spec.js
@@ -0,0 +1,76 @@
+import { shallowMount } from '@vue/test-utils';
+import { mockTracking } from 'helpers/tracking_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import TopToolbar from '~/content_editor/components/top_toolbar.vue';
+import {
+ TOOLBAR_CONTROL_TRACKING_ACTION,
+ CONTENT_EDITOR_TRACKING_LABEL,
+} from '~/content_editor/constants';
+import { createContentEditor } from '~/content_editor/services/create_content_editor';
+
+describe('content_editor/components/top_toolbar', () => {
+ let wrapper;
+ let contentEditor;
+ let trackingSpy;
+ const buildEditor = () => {
+ contentEditor = createContentEditor({ renderMarkdown: () => true });
+ };
+
+ const buildWrapper = () => {
+ wrapper = extendedWrapper(
+ shallowMount(TopToolbar, {
+ propsData: {
+ contentEditor,
+ },
+ }),
+ );
+ };
+
+ beforeEach(() => {
+ trackingSpy = mockTracking(undefined, null, jest.spyOn);
+ });
+
+ beforeEach(() => {
+ buildEditor();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe.each`
+ testId | buttonProps
+ ${'bold'} | ${{ contentType: 'bold', iconName: 'bold', label: 'Bold text', editorCommand: 'toggleBold' }}
+ ${'italic'} | ${{ contentType: 'italic', iconName: 'italic', label: 'Italic text', editorCommand: 'toggleItalic' }}
+ ${'code'} | ${{ contentType: 'code', iconName: 'code', label: 'Code', editorCommand: 'toggleCode' }}
+ ${'blockquote'} | ${{ contentType: 'blockquote', iconName: 'quote', label: 'Insert a quote', editorCommand: 'toggleBlockquote' }}
+ ${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }}
+ ${'ordered-list'} | ${{ contentType: 'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }}
+ `('given a $testId toolbar control', ({ testId, buttonProps }) => {
+ beforeEach(() => {
+ buildWrapper();
+ });
+
+ it('renders the toolbar control with the provided properties', () => {
+ expect(wrapper.findByTestId(testId).props()).toEqual({
+ ...buttonProps,
+ tiptapEditor: contentEditor.tiptapEditor,
+ });
+ });
+
+ it.each`
+ control | eventData
+ ${'bold'} | ${{ contentType: 'bold' }}
+ ${'blockquote'} | ${{ contentType: 'blockquote', value: 1 }}
+ `('tracks the execution of toolbar controls', ({ control, eventData }) => {
+ const { contentType, value } = eventData;
+ wrapper.findByTestId(control).vm.$emit('execute', eventData);
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, TOOLBAR_CONTROL_TRACKING_ACTION, {
+ label: CONTENT_EDITOR_TRACKING_LABEL,
+ property: contentType,
+ value,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/markdown_processing_spec.js b/spec/frontend/content_editor/markdown_processing_spec.js
index e435af30e9f..cb34476d680 100644
--- a/spec/frontend/content_editor/markdown_processing_spec.js
+++ b/spec/frontend/content_editor/markdown_processing_spec.js
@@ -1,12 +1,13 @@
-import { createEditor } from '~/content_editor';
+import { createContentEditor } from '~/content_editor';
import { loadMarkdownApiExamples, loadMarkdownApiResult } from './markdown_processing_examples';
describe('markdown processing', () => {
// Ensure we generate same markdown that was provided to Markdown API.
it.each(loadMarkdownApiExamples())('correctly handles %s', async (testName, markdown) => {
const { html } = loadMarkdownApiResult(testName);
- const editor = await createEditor({ content: markdown, renderMarkdown: () => html });
+ const contentEditor = createContentEditor({ renderMarkdown: () => html });
+ await contentEditor.setSerializedContent(markdown);
- expect(editor.getSerializedContent()).toBe(markdown);
+ expect(contentEditor.getSerializedContent()).toBe(markdown);
});
});
diff --git a/spec/frontend/content_editor/services/build_serializer_config_spec.js b/spec/frontend/content_editor/services/build_serializer_config_spec.js
new file mode 100644
index 00000000000..532e0493830
--- /dev/null
+++ b/spec/frontend/content_editor/services/build_serializer_config_spec.js
@@ -0,0 +1,38 @@
+import * as Blockquote from '~/content_editor/extensions/blockquote';
+import * as Bold from '~/content_editor/extensions/bold';
+import * as Dropcursor from '~/content_editor/extensions/dropcursor';
+import * as Paragraph from '~/content_editor/extensions/paragraph';
+
+import buildSerializerConfig from '~/content_editor/services/build_serializer_config';
+
+describe('content_editor/services/build_serializer_config', () => {
+ describe('given one or more content editor extensions', () => {
+ it('creates a serializer config that collects all extension serializers by type', () => {
+ const extensions = [Bold, Blockquote, Paragraph];
+ const serializerConfig = buildSerializerConfig(extensions);
+
+ extensions.forEach(({ tiptapExtension, serializer }) => {
+ const { name, type } = tiptapExtension;
+ expect(serializerConfig[`${type}s`][name]).toBe(serializer);
+ });
+ });
+ });
+
+ describe('given an extension without serializer', () => {
+ it('does not include the extension in the serializer config', () => {
+ const serializerConfig = buildSerializerConfig([Dropcursor]);
+
+ expect(serializerConfig.marks[Dropcursor.tiptapExtension.name]).toBe(undefined);
+ expect(serializerConfig.nodes[Dropcursor.tiptapExtension.name]).toBe(undefined);
+ });
+ });
+
+ describe('given no extensions', () => {
+ it('creates an empty serializer config', () => {
+ expect(buildSerializerConfig()).toStrictEqual({
+ marks: {},
+ nodes: {},
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/services/create_content_editor_spec.js b/spec/frontend/content_editor/services/create_content_editor_spec.js
new file mode 100644
index 00000000000..59b2fab6d54
--- /dev/null
+++ b/spec/frontend/content_editor/services/create_content_editor_spec.js
@@ -0,0 +1,51 @@
+import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '~/content_editor/constants';
+import { createContentEditor } from '~/content_editor/services/create_content_editor';
+import { createTestContentEditorExtension } from '../test_utils';
+
+describe('content_editor/services/create_content_editor', () => {
+ let renderMarkdown;
+ let editor;
+
+ beforeEach(() => {
+ renderMarkdown = jest.fn();
+ editor = createContentEditor({ renderMarkdown });
+ });
+
+ it('sets the gl-outline-0! class selector on the tiptapEditor instance', () => {
+ expect(editor.tiptapEditor.options.editorProps).toMatchObject({
+ attributes: {
+ class: 'gl-outline-0!',
+ },
+ });
+ });
+
+ it('provides the renderMarkdown function to the markdown serializer', async () => {
+ const serializedContent = '**bold text**';
+
+ renderMarkdown.mockReturnValueOnce('<p><b>bold text</b></p>');
+
+ await editor.setSerializedContent(serializedContent);
+
+ expect(renderMarkdown).toHaveBeenCalledWith(serializedContent);
+ });
+
+ it('allows providing external content editor extensions', async () => {
+ const labelReference = 'this is a ~group::editor';
+
+ renderMarkdown.mockReturnValueOnce(
+ '<p>this is a <span data-reference="label" data-label-name="group::editor">group::editor</span></p>',
+ );
+ editor = createContentEditor({
+ renderMarkdown,
+ extensions: [createTestContentEditorExtension()],
+ });
+
+ await editor.setSerializedContent(labelReference);
+
+ expect(editor.getSerializedContent()).toBe(labelReference);
+ });
+
+ it('throws an error when a renderMarkdown fn is not provided', () => {
+ expect(() => createContentEditor()).toThrow(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
+ });
+});
diff --git a/spec/frontend/content_editor/services/create_editor_spec.js b/spec/frontend/content_editor/services/create_editor_spec.js
deleted file mode 100644
index 4cf63e608eb..00000000000
--- a/spec/frontend/content_editor/services/create_editor_spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '~/content_editor/constants';
-import createEditor from '~/content_editor/services/create_editor';
-import createMarkdownSerializer from '~/content_editor/services/markdown_serializer';
-
-jest.mock('~/content_editor/services/markdown_serializer');
-
-describe('content_editor/services/create_editor', () => {
- const buildMockSerializer = () => ({
- serialize: jest.fn(),
- deserialize: jest.fn(),
- });
-
- describe('creating an editor', () => {
- it('uses markdown serializer when a renderMarkdown function is provided', async () => {
- const renderMarkdown = () => true;
- const mockSerializer = buildMockSerializer();
- createMarkdownSerializer.mockReturnValueOnce(mockSerializer);
-
- await createEditor({ renderMarkdown });
-
- expect(createMarkdownSerializer).toHaveBeenCalledWith({ render: renderMarkdown });
- });
-
- it('uses custom serializer when it is provided', async () => {
- const mockSerializer = buildMockSerializer();
- const serializedContent = '**bold**';
-
- mockSerializer.serialize.mockReturnValueOnce(serializedContent);
-
- const editor = await createEditor({ serializer: mockSerializer });
-
- expect(editor.getSerializedContent()).toBe(serializedContent);
- });
-
- it('throws an error when neither a serializer or renderMarkdown fn are provided', async () => {
- await expect(createEditor()).rejects.toThrow(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
- });
- });
-});
diff --git a/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
new file mode 100644
index 00000000000..437714ba938
--- /dev/null
+++ b/spec/frontend/content_editor/services/track_input_rules_and_shortcuts_spec.js
@@ -0,0 +1,108 @@
+import { BulletList } from '@tiptap/extension-bullet-list';
+import { CodeBlockLowlight } from '@tiptap/extension-code-block-lowlight';
+import { Document } from '@tiptap/extension-document';
+import { Heading } from '@tiptap/extension-heading';
+import { ListItem } from '@tiptap/extension-list-item';
+import { Paragraph } from '@tiptap/extension-paragraph';
+import { Text } from '@tiptap/extension-text';
+import { Editor, EditorContent } from '@tiptap/vue-2';
+import { mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { mockTracking } from 'helpers/tracking_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import {
+ KEYBOARD_SHORTCUT_TRACKING_ACTION,
+ INPUT_RULE_TRACKING_ACTION,
+ CONTENT_EDITOR_TRACKING_LABEL,
+} from '~/content_editor/constants';
+import trackInputRulesAndShortcuts from '~/content_editor/services/track_input_rules_and_shortcuts';
+import { ENTER_KEY, BACKSPACE_KEY } from '~/lib/utils/keys';
+
+describe('content_editor/services/track_input_rules_and_shortcuts', () => {
+ let wrapper;
+ let trackingSpy;
+ let editor;
+ const HEADING_TEXT = 'Heading text';
+
+ const buildWrapper = () => {
+ wrapper = extendedWrapper(
+ mount(EditorContent, {
+ propsData: {
+ editor,
+ },
+ }),
+ );
+ };
+
+ beforeEach(() => {
+ trackingSpy = mockTracking(undefined, null, jest.spyOn);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('given the heading extension is instrumented', () => {
+ beforeEach(() => {
+ editor = new Editor({
+ extensions: [
+ Document,
+ Paragraph,
+ Text,
+ Heading,
+ CodeBlockLowlight,
+ BulletList,
+ ListItem,
+ ].map(trackInputRulesAndShortcuts),
+ });
+ });
+
+ beforeEach(async () => {
+ buildWrapper();
+ await nextTick();
+ });
+
+ describe('when creating a heading using a keyboard shortcut', () => {
+ it('sends a tracking event indicating that a heading was created using a keyboard shortcut', async () => {
+ const shortcuts = Heading.config.addKeyboardShortcuts.call(Heading);
+ const [firstShortcut] = Object.keys(shortcuts);
+ const nodeName = Heading.name;
+
+ editor.chain().keyboardShortcut(firstShortcut).insertContent(HEADING_TEXT).run();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, KEYBOARD_SHORTCUT_TRACKING_ACTION, {
+ label: CONTENT_EDITOR_TRACKING_LABEL,
+ property: `${nodeName}.${firstShortcut}`,
+ });
+ });
+ });
+
+ it.each`
+ extension | shortcut
+ ${ListItem.name} | ${ENTER_KEY}
+ ${CodeBlockLowlight.name} | ${BACKSPACE_KEY}
+ `('does not track $shortcut shortcut for $extension extension', ({ shortcut }) => {
+ editor.chain().keyboardShortcut(shortcut).run();
+
+ expect(trackingSpy).not.toHaveBeenCalled();
+ });
+
+ describe('when creating a heading using an input rule', () => {
+ it('sends a tracking event indicating that a heading was created using an input rule', async () => {
+ const nodeName = Heading.name;
+ const { view } = editor;
+ const { selection } = view.state;
+
+ // Triggers the event handler that input rules listen to
+ view.someProp('handleTextInput', (f) => f(view, selection.from, selection.to, '## '));
+
+ editor.chain().insertContent(HEADING_TEXT).run();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, INPUT_RULE_TRACKING_ACTION, {
+ label: CONTENT_EDITOR_TRACKING_LABEL,
+ property: `${nodeName}`,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/content_editor/test_utils.js b/spec/frontend/content_editor/test_utils.js
new file mode 100644
index 00000000000..a92ceb6d058
--- /dev/null
+++ b/spec/frontend/content_editor/test_utils.js
@@ -0,0 +1,34 @@
+import { Node } from '@tiptap/core';
+
+export const createTestContentEditorExtension = () => ({
+ tiptapExtension: Node.create({
+ name: 'label',
+ priority: 101,
+ inline: true,
+ group: 'inline',
+ addAttributes() {
+ return {
+ labelName: {
+ default: null,
+ parseHTML: (element) => {
+ return { labelName: element.dataset.labelName };
+ },
+ },
+ };
+ },
+ parseHTML() {
+ return [
+ {
+ tag: 'span[data-reference="label"]',
+ },
+ ];
+ },
+ renderHTML({ HTMLAttributes }) {
+ return ['span', HTMLAttributes, 0];
+ },
+ }),
+ serializer: (state, node) => {
+ state.write(`~${node.attrs.labelName}`);
+ state.closeBlock(node);
+ },
+});
diff --git a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
index 15b052fffbb..3f812d3cf4e 100644
--- a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
+++ b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
@@ -8,7 +8,7 @@ exports[`Contributors charts should render charts when loading completed and the
<h4
class="gl-mb-2 gl-mt-5"
>
- Commits to master
+ Commits to main
</h4>
<span>
diff --git a/spec/frontend/contributors/component/contributors_spec.js b/spec/frontend/contributors/component/contributors_spec.js
index de55be4aa72..cb7e13b9fed 100644
--- a/spec/frontend/contributors/component/contributors_spec.js
+++ b/spec/frontend/contributors/component/contributors_spec.js
@@ -10,7 +10,7 @@ let mock;
let store;
const Component = Vue.extend(ContributorsCharts);
const endpoint = 'contributors';
-const branch = 'master';
+const branch = 'main';
const chartData = [
{ author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-05-05' },
{ author_name: 'John', author_email: 'jawnnypoo@gmail.com', date: '2019-03-03' },
diff --git a/spec/frontend/create_merge_request_dropdown_spec.js b/spec/frontend/create_merge_request_dropdown_spec.js
index b4c13981dd5..8878891701f 100644
--- a/spec/frontend/create_merge_request_dropdown_spec.js
+++ b/spec/frontend/create_merge_request_dropdown_spec.js
@@ -15,7 +15,7 @@ describe('CreateMergeRequestDropdown', () => {
<div id="dummy-wrapper-element">
<div class="available"></div>
<div class="unavailable">
- <div class="spinner"></div>
+ <div class="gl-spinner"></div>
<div class="text"></div>
</div>
<div class="js-ref"></div>
@@ -56,7 +56,7 @@ describe('CreateMergeRequestDropdown', () => {
describe('updateCreatePaths', () => {
it('escapes branch names correctly', () => {
dropdown.createBranchPath = `${TEST_HOST}/branches?branch_name=some-branch&issue=42`;
- dropdown.createMrPath = `${TEST_HOST}/create_merge_request?branch_name=some-branch&ref=master`;
+ dropdown.createMrPath = `${TEST_HOST}/create_merge_request?branch_name=some-branch&ref=main`;
dropdown.updateCreatePaths('branch', 'contains#hash');
@@ -65,7 +65,7 @@ describe('CreateMergeRequestDropdown', () => {
);
expect(dropdown.createMrPath).toBe(
- `${TEST_HOST}/create_merge_request?branch_name=contains%23hash&ref=master`,
+ `${TEST_HOST}/create_merge_request?branch_name=contains%23hash&ref=main`,
);
});
});
diff --git a/spec/frontend/cycle_analytics/mock_data.js b/spec/frontend/cycle_analytics/mock_data.js
new file mode 100644
index 00000000000..091b574821d
--- /dev/null
+++ b/spec/frontend/cycle_analytics/mock_data.js
@@ -0,0 +1,186 @@
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+
+export const summary = [
+ { value: '20', title: 'New Issues' },
+ { value: null, title: 'Commits' },
+ { value: null, title: 'Deploys' },
+ { value: null, title: 'Deployment Frequency', unit: 'per day' },
+];
+
+const issueStage = {
+ title: 'Issue',
+ name: 'issue',
+ legend: '',
+ description: 'Time before an issue gets scheduled',
+ value: null,
+};
+
+const planStage = {
+ title: 'Plan',
+ name: 'plan',
+ legend: '',
+ description: 'Time before an issue starts implementation',
+ value: 'about 21 hours',
+};
+
+const codeStage = {
+ title: 'Code',
+ name: 'code',
+ legend: '',
+ description: 'Time until first merge request',
+ value: '2 days',
+};
+
+const testStage = {
+ title: 'Test',
+ name: 'test',
+ legend: '',
+ description: 'Total test time for all commits/merges',
+ value: 'about 5 hours',
+};
+
+const reviewStage = {
+ title: 'Review',
+ name: 'review',
+ legend: '',
+ description: 'Time between merge request creation and merge/close',
+ value: null,
+};
+
+const stagingStage = {
+ title: 'Staging',
+ name: 'staging',
+ legend: '',
+ description: 'From merge request merge until deploy to production',
+ value: '2 days',
+};
+
+export const selectedStage = {
+ ...issueStage,
+ value: null,
+ active: false,
+ isUserAllowed: true,
+ emptyStageText:
+ 'The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage.',
+ component: 'stage-issue-component',
+ slug: 'issue',
+};
+
+export const stats = [issueStage, planStage, codeStage, testStage, reviewStage, stagingStage];
+
+export const permissions = {
+ issue: true,
+ plan: true,
+ code: true,
+ test: true,
+ review: true,
+ staging: true,
+};
+
+export const rawData = {
+ summary,
+ stats,
+ permissions,
+};
+
+export const convertedData = {
+ stages: [
+ selectedStage,
+ {
+ ...planStage,
+ active: false,
+ isUserAllowed: true,
+ emptyStageText:
+ 'The planning stage shows the time from the previous step to pushing your first commit. This time will be added automatically once you push your first commit.',
+ component: 'stage-plan-component',
+ slug: 'plan',
+ },
+ {
+ ...codeStage,
+ active: false,
+ isUserAllowed: true,
+ emptyStageText:
+ 'The coding stage shows the time from the first commit to creating the merge request. The data will automatically be added here once you create your first merge request.',
+ component: 'stage-code-component',
+ slug: 'code',
+ },
+ {
+ ...testStage,
+ active: false,
+ isUserAllowed: true,
+ emptyStageText:
+ 'The testing stage shows the time GitLab CI takes to run every pipeline for the related merge request. The data will automatically be added after your first pipeline finishes running.',
+ component: 'stage-test-component',
+ slug: 'test',
+ },
+ {
+ ...reviewStage,
+ active: false,
+ isUserAllowed: true,
+ emptyStageText:
+ 'The review stage shows the time from creating the merge request to merging it. The data will automatically be added after you merge your first merge request.',
+ component: 'stage-review-component',
+ slug: 'review',
+ },
+ {
+ ...stagingStage,
+ active: false,
+ isUserAllowed: true,
+ emptyStageText:
+ 'The staging stage shows the time between merging the MR and deploying code to the production environment. The data will be automatically added once you deploy to production for the first time.',
+ component: 'stage-staging-component',
+ slug: 'staging',
+ },
+ ],
+ summary: [
+ { value: '20', title: 'New Issues' },
+ { value: '-', title: 'Commits' },
+ { value: '-', title: 'Deploys' },
+ { value: '-', title: 'Deployment Frequency', unit: 'per day' },
+ ],
+};
+
+export const rawEvents = [
+ {
+ title: 'Brockfunc-1617160796',
+ author: {
+ id: 275,
+ name: 'VSM User4',
+ username: 'vsm-user-4-1617160796',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/6a6f5480ae582ba68982a34169420747?s=80&d=identicon',
+ web_url: 'http://gdk.test:3001/vsm-user-4-1617160796',
+ show_status: false,
+ path: '/vsm-user-4-1617160796',
+ },
+ iid: '16',
+ total_time: { days: 1, hours: 9 },
+ created_at: 'about 1 month ago',
+ url: 'http://gdk.test:3001/vsa-life/ror-project-vsa/-/issues/16',
+ short_sha: 'some_sha',
+ commit_url: 'some_commit_url',
+ },
+ {
+ title: 'Subpod-1617160796',
+ author: {
+ id: 274,
+ name: 'VSM User3',
+ username: 'vsm-user-3-1617160796',
+ state: 'active',
+ avatar_url:
+ 'https://www.gravatar.com/avatar/fde853fc3ab7dc552e649dcb4fcf5f7f?s=80&d=identicon',
+ web_url: 'http://gdk.test:3001/vsm-user-3-1617160796',
+ show_status: false,
+ path: '/vsm-user-3-1617160796',
+ },
+ iid: '20',
+ total_time: { days: 2, hours: 18 },
+ created_at: 'about 1 month ago',
+ url: 'http://gdk.test:3001/vsa-life/ror-project-vsa/-/issues/20',
+ },
+];
+
+export const convertedEvents = rawEvents.map((ev) =>
+ convertObjectPropsToCamelCase(ev, { deep: true }),
+);
diff --git a/spec/frontend/cycle_analytics/store/actions_spec.js b/spec/frontend/cycle_analytics/store/actions_spec.js
new file mode 100644
index 00000000000..630c5100754
--- /dev/null
+++ b/spec/frontend/cycle_analytics/store/actions_spec.js
@@ -0,0 +1,130 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import * as actions from '~/cycle_analytics/store/actions';
+import httpStatusCodes from '~/lib/utils/http_status';
+import { selectedStage } from '../mock_data';
+
+const mockRequestPath = 'some/cool/path';
+const mockStartDate = 30;
+
+describe('Project Value Stream Analytics actions', () => {
+ let state;
+ let mock;
+
+ beforeEach(() => {
+ state = {};
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ state = {};
+ });
+
+ it.each`
+ action | type | payload | expectedActions
+ ${'initializeVsa'} | ${'INITIALIZE_VSA'} | ${{ requestPath: mockRequestPath }} | ${['fetchCycleAnalyticsData']}
+ ${'setDateRange'} | ${'SET_DATE_RANGE'} | ${{ startDate: 30 }} | ${[]}
+ ${'setSelectedStage'} | ${'SET_SELECTED_STAGE'} | ${{ selectedStage }} | ${[]}
+ `(
+ '$action should dispatch $expectedActions and commit $type',
+ ({ action, type, payload, expectedActions }) =>
+ testAction({
+ action: actions[action],
+ state,
+ payload,
+ expectedMutations: [
+ {
+ type,
+ payload,
+ },
+ ],
+ expectedActions: expectedActions.map((a) => ({ type: a })),
+ }),
+ );
+
+ describe('fetchCycleAnalyticsData', () => {
+ beforeEach(() => {
+ state = { requestPath: mockRequestPath };
+ mock = new MockAdapter(axios);
+ mock.onGet(mockRequestPath).reply(httpStatusCodes.OK);
+ });
+
+ it(`dispatches the 'setSelectedStage' and 'fetchStageData' actions`, () =>
+ testAction({
+ action: actions.fetchCycleAnalyticsData,
+ state,
+ payload: {},
+ expectedMutations: [
+ { type: 'REQUEST_CYCLE_ANALYTICS_DATA' },
+ { type: 'RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS' },
+ ],
+ expectedActions: [{ type: 'setSelectedStage' }, { type: 'fetchStageData' }],
+ }));
+
+ describe('with a failing request', () => {
+ beforeEach(() => {
+ state = { requestPath: mockRequestPath };
+ mock = new MockAdapter(axios);
+ mock.onGet(mockRequestPath).reply(httpStatusCodes.BAD_REQUEST);
+ });
+
+ it(`commits the 'RECEIVE_CYCLE_ANALYTICS_DATA_ERROR' mutation`, () =>
+ testAction({
+ action: actions.fetchCycleAnalyticsData,
+ state,
+ payload: {},
+ expectedMutations: [
+ { type: 'REQUEST_CYCLE_ANALYTICS_DATA' },
+ { type: 'RECEIVE_CYCLE_ANALYTICS_DATA_ERROR' },
+ ],
+ expectedActions: [],
+ }));
+ });
+ });
+
+ describe('fetchStageData', () => {
+ const mockStagePath = `${mockRequestPath}/events/${selectedStage.name}.json`;
+
+ beforeEach(() => {
+ state = {
+ requestPath: mockRequestPath,
+ startDate: mockStartDate,
+ selectedStage,
+ };
+ mock = new MockAdapter(axios);
+ mock.onGet(mockStagePath).reply(httpStatusCodes.OK);
+ });
+
+ it(`commits the 'RECEIVE_STAGE_DATA_SUCCESS' mutation`, () =>
+ testAction({
+ action: actions.fetchStageData,
+ state,
+ payload: {},
+ expectedMutations: [{ type: 'REQUEST_STAGE_DATA' }, { type: 'RECEIVE_STAGE_DATA_SUCCESS' }],
+ expectedActions: [],
+ }));
+
+ describe('with a failing request', () => {
+ beforeEach(() => {
+ state = {
+ requestPath: mockRequestPath,
+ startDate: mockStartDate,
+ selectedStage,
+ };
+ mock = new MockAdapter(axios);
+ mock.onGet(mockStagePath).reply(httpStatusCodes.BAD_REQUEST);
+ });
+
+ it(`commits the 'RECEIVE_STAGE_DATA_ERROR' mutation`, () =>
+ testAction({
+ action: actions.fetchStageData,
+ state,
+ payload: {},
+ expectedMutations: [{ type: 'REQUEST_STAGE_DATA' }, { type: 'RECEIVE_STAGE_DATA_ERROR' }],
+ expectedActions: [],
+ }));
+ });
+ });
+});
diff --git a/spec/frontend/cycle_analytics/store/mutations_spec.js b/spec/frontend/cycle_analytics/store/mutations_spec.js
new file mode 100644
index 00000000000..08c70af6ef6
--- /dev/null
+++ b/spec/frontend/cycle_analytics/store/mutations_spec.js
@@ -0,0 +1,83 @@
+import * as types from '~/cycle_analytics/store/mutation_types';
+import mutations from '~/cycle_analytics/store/mutations';
+import { selectedStage, rawEvents, convertedEvents, rawData, convertedData } from '../mock_data';
+
+let state;
+const mockRequestPath = 'fake/request/path';
+const mockStartData = '2021-04-20';
+
+describe('Project Value Stream Analytics mutations', () => {
+ beforeEach(() => {
+ state = {};
+ });
+
+ afterEach(() => {
+ state = null;
+ });
+
+ it.each`
+ mutation | stateKey | value
+ ${types.SET_SELECTED_STAGE} | ${'isLoadingStage'} | ${false}
+ ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${'isLoading'} | ${true}
+ ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${'stages'} | ${[]}
+ ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${'hasError'} | ${false}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${'hasError'} | ${false}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${'isLoading'} | ${false}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${'hasError'} | ${true}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${'stages'} | ${[]}
+ ${types.REQUEST_STAGE_DATA} | ${'isLoadingStage'} | ${true}
+ ${types.REQUEST_STAGE_DATA} | ${'isEmptyStage'} | ${false}
+ ${types.REQUEST_STAGE_DATA} | ${'hasError'} | ${false}
+ ${types.REQUEST_STAGE_DATA} | ${'selectedStageEvents'} | ${[]}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${'isLoadingStage'} | ${false}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${'selectedStageEvents'} | ${[]}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${'hasError'} | ${false}
+ ${types.RECEIVE_STAGE_DATA_ERROR} | ${'isLoadingStage'} | ${false}
+ ${types.RECEIVE_STAGE_DATA_ERROR} | ${'selectedStageEvents'} | ${[]}
+ ${types.RECEIVE_STAGE_DATA_ERROR} | ${'hasError'} | ${true}
+ ${types.RECEIVE_STAGE_DATA_ERROR} | ${'isEmptyStage'} | ${true}
+ `('$mutation will set $stateKey to $value', ({ mutation, stateKey, value }) => {
+ mutations[mutation](state, {});
+
+ expect(state).toMatchObject({ [stateKey]: value });
+ });
+
+ it.each`
+ mutation | payload | stateKey | value
+ ${types.INITIALIZE_VSA} | ${{ requestPath: mockRequestPath }} | ${'requestPath'} | ${mockRequestPath}
+ ${types.SET_SELECTED_STAGE} | ${selectedStage} | ${'selectedStage'} | ${selectedStage}
+ ${types.SET_DATE_RANGE} | ${{ startDate: mockStartData }} | ${'startDate'} | ${mockStartData}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'stages'} | ${convertedData.stages}
+ ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${rawData} | ${'summary'} | ${convertedData.summary}
+ `(
+ '$mutation with $payload will set $stateKey to $value',
+ ({ mutation, payload, stateKey, value }) => {
+ mutations[mutation](state, payload);
+
+ expect(state).toMatchObject({ [stateKey]: value });
+ },
+ );
+
+ describe('with a stage selected', () => {
+ beforeEach(() => {
+ state = {
+ selectedStage,
+ };
+ });
+
+ it.each`
+ mutation | payload | stateKey | value
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: [] }} | ${'isEmptyStage'} | ${true}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: rawEvents }} | ${'selectedStageEvents'} | ${convertedEvents}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${{ events: rawEvents }} | ${'isEmptyStage'} | ${false}
+ `(
+ '$mutation with $payload will set $stateKey to $value',
+ ({ mutation, payload, stateKey, value }) => {
+ mutations[mutation](state, payload);
+
+ expect(state).toMatchObject({ [stateKey]: value });
+ },
+ );
+ });
+});
diff --git a/spec/frontend/cycle_analytics/utils_spec.js b/spec/frontend/cycle_analytics/utils_spec.js
new file mode 100644
index 00000000000..73e26e1cdcc
--- /dev/null
+++ b/spec/frontend/cycle_analytics/utils_spec.js
@@ -0,0 +1,77 @@
+import { decorateEvents, decorateData } from '~/cycle_analytics/utils';
+import { selectedStage, rawData, convertedData, rawEvents } from './mock_data';
+
+describe('Value stream analytics utils', () => {
+ describe('decorateEvents', () => {
+ const [result] = decorateEvents(rawEvents, selectedStage);
+ const eventKeys = Object.keys(result);
+ const authorKeys = Object.keys(result.author);
+ it('will return the same number of events', () => {
+ expect(decorateEvents(rawEvents, selectedStage).length).toBe(rawEvents.length);
+ });
+
+ it('will set all the required event fields', () => {
+ ['totalTime', 'author', 'createdAt', 'shortSha', 'commitUrl'].forEach((key) => {
+ expect(eventKeys).toContain(key);
+ });
+ ['webUrl', 'avatarUrl'].forEach((key) => {
+ expect(authorKeys).toContain(key);
+ });
+ });
+
+ it('will remove unused fields', () => {
+ ['total_time', 'created_at', 'short_sha', 'commit_url'].forEach((key) => {
+ expect(eventKeys).not.toContain(key);
+ });
+
+ ['web_url', 'avatar_url'].forEach((key) => {
+ expect(authorKeys).not.toContain(key);
+ });
+ });
+ });
+
+ describe('decorateData', () => {
+ const result = decorateData(rawData);
+ it('returns the summary data', () => {
+ expect(result.summary).toEqual(convertedData.summary);
+ });
+
+ it('returns the stages data', () => {
+ expect(result.stages).toEqual(convertedData.stages);
+ });
+
+ it('returns each of the default value stream stages', () => {
+ const stages = result.stages.map(({ name }) => name);
+ ['issue', 'plan', 'code', 'test', 'review', 'staging'].forEach((stageName) => {
+ expect(stages).toContain(stageName);
+ });
+ });
+
+ it('returns `-` for summary data that has no value', () => {
+ const singleSummaryResult = decorateData({
+ stats: [],
+ permissions: { issue: true },
+ summary: [{ value: null, title: 'Commits' }],
+ });
+ expect(singleSummaryResult.summary).toEqual([{ value: '-', title: 'Commits' }]);
+ });
+
+ it('returns additional fields for each stage', () => {
+ const singleStageResult = decorateData({
+ stats: [{ name: 'issue', value: null }],
+ permissions: { issue: false },
+ });
+ const stage = singleStageResult.stages[0];
+ const txt =
+ 'The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage.';
+
+ expect(stage).toMatchObject({
+ active: false,
+ isUserAllowed: false,
+ emptyStageText: txt,
+ slug: 'issue',
+ component: 'stage-issue-component',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/deploy_freeze/store/mutations_spec.js b/spec/frontend/deploy_freeze/store/mutations_spec.js
index ce75e3b89c3..f8683489340 100644
--- a/spec/frontend/deploy_freeze/store/mutations_spec.js
+++ b/spec/frontend/deploy_freeze/store/mutations_spec.js
@@ -27,15 +27,19 @@ describe('Deploy freeze mutations', () => {
describe('RECEIVE_FREEZE_PERIODS_SUCCESS', () => {
it('should set freeze periods and format timezones from identifiers to names', () => {
- const timezoneNames = ['Berlin', 'UTC', 'Eastern Time (US & Canada)'];
+ const timezoneNames = {
+ 'Europe/Berlin': 'Berlin',
+ 'Etc/UTC': 'UTC',
+ 'America/New_York': 'Eastern Time (US & Canada)',
+ };
mutations[types.RECEIVE_FREEZE_PERIODS_SUCCESS](stateCopy, freezePeriodsFixture);
- const expectedFreezePeriods = freezePeriodsFixture.map((freezePeriod, index) => ({
+ const expectedFreezePeriods = freezePeriodsFixture.map((freezePeriod) => ({
...convertObjectPropsToCamelCase(freezePeriod),
cronTimezone: {
- formattedTimezone: timezoneNames[index],
- identifier: freezePeriod.cronTimezone,
+ formattedTimezone: timezoneNames[freezePeriod.cron_timezone],
+ identifier: freezePeriod.cron_timezone,
},
}));
diff --git a/spec/frontend/deploy_keys/components/action_btn_spec.js b/spec/frontend/deploy_keys/components/action_btn_spec.js
index 21281ff15b1..307a0b6d8b0 100644
--- a/spec/frontend/deploy_keys/components/action_btn_spec.js
+++ b/spec/frontend/deploy_keys/components/action_btn_spec.js
@@ -1,4 +1,4 @@
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import actionBtn from '~/deploy_keys/components/action_btn.vue';
import eventHub from '~/deploy_keys/eventhub';
@@ -8,13 +8,16 @@ describe('Deploy keys action btn', () => {
const deployKey = data.enabled_keys[0];
let wrapper;
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
wrapper = shallowMount(actionBtn, {
propsData: {
deployKey,
type: 'enable',
+ category: 'primary',
+ variant: 'confirm',
+ icon: 'edit',
},
slots: {
default: 'Enable',
@@ -26,10 +29,18 @@ describe('Deploy keys action btn', () => {
expect(wrapper.text()).toBe('Enable');
});
+ it('passes the button props on', () => {
+ expect(findButton().props()).toMatchObject({
+ category: 'primary',
+ variant: 'confirm',
+ icon: 'edit',
+ });
+ });
+
it('sends eventHub event with btn type', () => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- wrapper.trigger('click');
+ findButton().vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(eventHub.$emit).toHaveBeenCalledWith('enable.key', deployKey, expect.anything());
@@ -37,18 +48,10 @@ describe('Deploy keys action btn', () => {
});
it('shows loading spinner after click', () => {
- wrapper.trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findLoadingIcon().exists()).toBe(true);
- });
- });
-
- it('disables button after click', () => {
- wrapper.trigger('click');
+ findButton().vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.attributes('disabled')).toBe('disabled');
+ expect(findButton().props('loading')).toBe(true);
});
});
});
diff --git a/spec/frontend/deploy_keys/components/app_spec.js b/spec/frontend/deploy_keys/components/app_spec.js
index b48e0424580..a72b2b00776 100644
--- a/spec/frontend/deploy_keys/components/app_spec.js
+++ b/spec/frontend/deploy_keys/components/app_spec.js
@@ -3,6 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
import deployKeysApp from '~/deploy_keys/components/app.vue';
+import ConfirmModal from '~/deploy_keys/components/confirm_modal.vue';
import eventHub from '~/deploy_keys/eventhub';
import axios from '~/lib/utils/axios_utils';
@@ -36,6 +37,7 @@ describe('Deploy keys app component', () => {
const findLoadingIcon = () => wrapper.find('.gl-spinner');
const findKeyPanels = () => wrapper.findAll('.deploy-keys .gl-tabs-nav li');
+ const findModal = () => wrapper.findComponent(ConfirmModal);
it('renders loading icon while waiting for request', () => {
mock.onGet(TEST_ENDPOINT).reply(() => new Promise());
@@ -94,11 +96,16 @@ describe('Deploy keys app component', () => {
const key = data.public_keys[0];
return mountComponent()
.then(() => {
- jest.spyOn(window, 'confirm').mockReturnValue(true);
jest.spyOn(wrapper.vm.service, 'getKeys').mockImplementation(() => {});
jest.spyOn(wrapper.vm.service, 'disableKey').mockImplementation(() => Promise.resolve());
- eventHub.$emit('disable.key', key);
+ eventHub.$emit('disable.key', key, () => {});
+
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findModal().props('visible')).toBe(true);
+ findModal().vm.$emit('remove');
return wrapper.vm.$nextTick();
})
@@ -112,11 +119,16 @@ describe('Deploy keys app component', () => {
const key = data.public_keys[0];
return mountComponent()
.then(() => {
- jest.spyOn(window, 'confirm').mockReturnValue(true);
jest.spyOn(wrapper.vm.service, 'getKeys').mockImplementation(() => {});
jest.spyOn(wrapper.vm.service, 'disableKey').mockImplementation(() => Promise.resolve());
- eventHub.$emit('remove.key', key);
+ eventHub.$emit('remove.key', key, () => {});
+
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ expect(findModal().props('visible')).toBe(true);
+ findModal().vm.$emit('remove');
return wrapper.vm.$nextTick();
})
diff --git a/spec/frontend/deploy_keys/components/confirm_modal_spec.js b/spec/frontend/deploy_keys/components/confirm_modal_spec.js
new file mode 100644
index 00000000000..42cc2b377a7
--- /dev/null
+++ b/spec/frontend/deploy_keys/components/confirm_modal_spec.js
@@ -0,0 +1,28 @@
+import { GlModal } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import ConfirmModal from '~/deploy_keys/components/confirm_modal.vue';
+
+describe('~/deploy_keys/components/confirm_modal.vue', () => {
+ let wrapper;
+ let modal;
+
+ beforeEach(() => {
+ wrapper = mount(ConfirmModal, { propsData: { modalId: 'test', visible: true } });
+ modal = extendedWrapper(wrapper.findComponent(GlModal));
+ });
+
+ it('emits a remove event if the primary button is clicked', () => {
+ modal.findByText('Remove deploy key').trigger('click');
+ expect(wrapper.emitted('remove')).toEqual([[]]);
+ });
+
+ it('emits a cancel event if the secondary button is clicked', () => {
+ modal.findByText('Cancel').trigger('click');
+ expect(wrapper.emitted('cancel')).toEqual([[]]);
+ });
+
+ it('displays the warning about removing the deploy key', () => {
+ expect(modal.text()).toContain('Are you sure you want to remove this deploy key?');
+ });
+});
diff --git a/spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap b/spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
index f8c68ca4c83..d9f5ba0bade 100644
--- a/spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
+++ b/spec/frontend/design_management/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Design reply form component renders button text as "Comment" when creating a comment 1`] = `
-"<button data-track-event=\\"click_button\\" data-qa-selector=\\"save_comment_button\\" type=\\"submit\\" disabled=\\"disabled\\" class=\\"btn btn-success btn-md disabled gl-button\\">
+"<button data-track-event=\\"click_button\\" data-qa-selector=\\"save_comment_button\\" type=\\"submit\\" disabled=\\"disabled\\" class=\\"btn gl-mr-3 gl-w-auto! btn-confirm btn-md disabled gl-button\\">
<!---->
<!----> <span class=\\"gl-button-text\\">
Comment
@@ -9,7 +9,7 @@ exports[`Design reply form component renders button text as "Comment" when creat
`;
exports[`Design reply form component renders button text as "Save comment" when creating a comment 1`] = `
-"<button data-track-event=\\"click_button\\" data-qa-selector=\\"save_comment_button\\" type=\\"submit\\" disabled=\\"disabled\\" class=\\"btn btn-success btn-md disabled gl-button\\">
+"<button data-track-event=\\"click_button\\" data-qa-selector=\\"save_comment_button\\" type=\\"submit\\" disabled=\\"disabled\\" class=\\"btn gl-mr-3 gl-w-auto! btn-confirm btn-md disabled gl-button\\">
<!---->
<!----> <span class=\\"gl-button-text\\">
Save comment
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index a01ec1db35c..80a51ee137a 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -19,8 +19,11 @@ describe('CompareVersions', () => {
const targetBranchName = 'tmp-wine-dev';
const { commit } = getDiffWithCommit();
- const createWrapper = (props = {}, commitArgs = {}) => {
- store.state.diffs.commit = { ...store.state.diffs.commit, ...commitArgs };
+ const createWrapper = (props = {}, commitArgs = {}, createCommit = true) => {
+ if (createCommit) {
+ store.state.diffs.commit = { ...store.state.diffs.commit, ...commitArgs };
+ }
+
wrapper = mount(CompareVersionsComponent, {
localVue,
store,
@@ -59,7 +62,7 @@ describe('CompareVersions', () => {
describe('template', () => {
beforeEach(() => {
- createWrapper();
+ createWrapper({}, {}, false);
});
it('should render Tree List toggle button with correct attribute values', () => {
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 9c3c3e82ad5..1e8ad9344f2 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -1,5 +1,6 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import { nextTick } from 'vue';
import Vuex from 'vuex';
import DiffContentComponent from '~/diffs/components/diff_content.vue';
@@ -16,11 +17,14 @@ import createDiffsStore from '~/diffs/store/modules';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
import axios from '~/lib/utils/axios_utils';
+import { scrollToElement } from '~/lib/utils/common_utils';
import httpStatus from '~/lib/utils/http_status';
import createNotesStore from '~/notes/stores/modules';
import diffFileMockDataReadable from '../mock_data/diff_file';
import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
+jest.mock('~/lib/utils/common_utils');
+
function changeViewer(store, index, { automaticallyCollapsed, manuallyCollapsed, name }) {
const file = store.state.diffs.diffFiles[index];
const newViewer = {
@@ -355,6 +359,49 @@ describe('DiffFile', () => {
});
});
+ describe('scroll-to-top of file after collapse', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm.$store, 'dispatch').mockImplementation(() => {});
+ });
+
+ it("scrolls to the top when the file is open, the users initiates the collapse, and there's a content block to scroll to", async () => {
+ makeFileOpenByDefault(store);
+ await nextTick();
+
+ toggleFile(wrapper);
+
+ expect(scrollToElement).toHaveBeenCalled();
+ });
+
+ it('does not scroll when the content block is missing', async () => {
+ makeFileOpenByDefault(store);
+ await nextTick();
+ findDiffContentArea(wrapper).element.remove();
+
+ toggleFile(wrapper);
+
+ expect(scrollToElement).not.toHaveBeenCalled();
+ });
+
+ it("does not scroll if the user doesn't initiate the file collapse", async () => {
+ makeFileOpenByDefault(store);
+ await nextTick();
+
+ wrapper.vm.handleToggle();
+
+ expect(scrollToElement).not.toHaveBeenCalled();
+ });
+
+ it('does not scroll if the file is already collapsed', async () => {
+ makeFileManuallyCollapsed(store);
+ await nextTick();
+
+ toggleFile(wrapper);
+
+ expect(scrollToElement).not.toHaveBeenCalled();
+ });
+ });
+
describe('fetch collapsed diff', () => {
const prepFile = async (inlineLines, parallelLines, readableText) => {
forceHasDiff({
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index 0bc1bd40f06..137cc7e3f86 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -1,5 +1,6 @@
import { getByTestId, fireEvent } from '@testing-library/dom';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import Vuex from 'vuex';
import DiffRow from '~/diffs/components/diff_row.vue';
import { mapParallel } from '~/diffs/components/diff_row_utils';
@@ -28,12 +29,12 @@ describe('DiffRow', () => {
},
];
- const createWrapper = ({ props, state, isLoggedIn = true }) => {
- const localVue = createLocalVue();
- localVue.use(Vuex);
+ const createWrapper = ({ props, state, actions, isLoggedIn = true }) => {
+ Vue.use(Vuex);
const diffs = diffsModule();
diffs.state = { ...diffs.state, ...state };
+ diffs.actions = { ...diffs.actions, ...actions };
const getters = { isLoggedIn: () => isLoggedIn };
@@ -54,7 +55,7 @@ describe('DiffRow', () => {
glFeatures: { dragCommentSelection: true },
};
- return shallowMount(DiffRow, { propsData, localVue, store, provide });
+ return shallowMount(DiffRow, { propsData, store, provide });
};
it('isHighlighted returns true given line.left', () => {
@@ -95,6 +96,9 @@ describe('DiffRow', () => {
expect(wrapper.vm.isHighlighted).toBe(false);
});
+ const getCommentButton = (wrapper, side) =>
+ wrapper.find(`[data-testid="${side}-comment-button"]`);
+
describe.each`
side
${'left'}
@@ -102,18 +106,59 @@ describe('DiffRow', () => {
`('$side side', ({ side }) => {
it(`renders empty cells if ${side} is unavailable`, () => {
const wrapper = createWrapper({ props: { line: testLines[2], inline: false } });
- expect(wrapper.find(`[data-testid="${side}LineNumber"]`).exists()).toBe(false);
- expect(wrapper.find(`[data-testid="${side}EmptyCell"]`).exists()).toBe(true);
+ expect(wrapper.find(`[data-testid="${side}-line-number"]`).exists()).toBe(false);
+ expect(wrapper.find(`[data-testid="${side}-empty-cell"]`).exists()).toBe(true);
});
- it('renders comment button', () => {
- const wrapper = createWrapper({ props: { line: testLines[3], inline: false } });
- expect(wrapper.find(`[data-testid="${side}CommentButton"]`).exists()).toBe(true);
+ describe('comment button', () => {
+ const showCommentForm = jest.fn();
+ let line;
+
+ beforeEach(() => {
+ showCommentForm.mockReset();
+ // https://eslint.org/docs/rules/prefer-destructuring#when-not-to-use-it
+ // eslint-disable-next-line prefer-destructuring
+ line = testLines[3];
+ });
+
+ it('renders', () => {
+ const wrapper = createWrapper({ props: { line, inline: false } });
+ expect(getCommentButton(wrapper, side).exists()).toBe(true);
+ });
+
+ it('responds to click and keyboard events', async () => {
+ const wrapper = createWrapper({
+ props: { line, inline: false },
+ actions: { showCommentForm },
+ });
+ const commentButton = getCommentButton(wrapper, side);
+
+ await commentButton.trigger('click');
+ await commentButton.trigger('keydown.enter');
+ await commentButton.trigger('keydown.space');
+
+ expect(showCommentForm).toHaveBeenCalledTimes(3);
+ });
+
+ it('ignores click and keyboard events when comments are disabled', async () => {
+ line[side].commentsDisabled = true;
+ const wrapper = createWrapper({
+ props: { line, inline: false },
+ actions: { showCommentForm },
+ });
+ const commentButton = getCommentButton(wrapper, side);
+
+ await commentButton.trigger('click');
+ await commentButton.trigger('keydown.enter');
+ await commentButton.trigger('keydown.space');
+
+ expect(showCommentForm).not.toHaveBeenCalled();
+ });
});
it('renders avatars', () => {
const wrapper = createWrapper({ props: { line: testLines[0], inline: false } });
- expect(wrapper.find(`[data-testid="${side}Discussions"]`).exists()).toBe(true);
+ expect(wrapper.find(`[data-testid="${side}-discussions"]`).exists()).toBe(true);
});
});
diff --git a/spec/frontend/diffs/components/inline_diff_table_row_spec.js b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
index 66b63a7a1d0..9c3e00cd6cf 100644
--- a/spec/frontend/diffs/components/inline_diff_table_row_spec.js
+++ b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
@@ -216,14 +216,14 @@ describe('InlineDiffTableRow', () => {
const TEST_LINE_NUMBER = 1;
describe.each`
- lineProps | findLineNumber | expectedHref | expectedClickArg | expectedQaSelector
- ${{ line_code: TEST_LINE_CODE, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${`#${TEST_LINE_CODE}`} | ${TEST_LINE_CODE} | ${undefined}
- ${{ line_code: undefined, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${'#'} | ${undefined} | ${undefined}
- ${{ line_code: undefined, left: { line_code: TEST_LINE_CODE }, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${'#'} | ${TEST_LINE_CODE} | ${undefined}
- ${{ line_code: undefined, right: { line_code: TEST_LINE_CODE }, new_line: TEST_LINE_NUMBER }} | ${findLineNumberNew} | ${'#'} | ${TEST_LINE_CODE} | ${'new_diff_line_link'}
+ lineProps | findLineNumber | expectedHref | expectedClickArg
+ ${{ line_code: TEST_LINE_CODE, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${`#${TEST_LINE_CODE}`} | ${TEST_LINE_CODE}
+ ${{ line_code: undefined, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${'#'} | ${undefined}
+ ${{ line_code: undefined, left: { line_code: TEST_LINE_CODE }, old_line: TEST_LINE_NUMBER }} | ${findLineNumberOld} | ${'#'} | ${TEST_LINE_CODE}
+ ${{ line_code: undefined, right: { line_code: TEST_LINE_CODE }, new_line: TEST_LINE_NUMBER }} | ${findLineNumberNew} | ${'#'} | ${TEST_LINE_CODE}
`(
'with line ($lineProps)',
- ({ lineProps, findLineNumber, expectedHref, expectedClickArg, expectedQaSelector }) => {
+ ({ lineProps, findLineNumber, expectedHref, expectedClickArg }) => {
beforeEach(() => {
jest.spyOn(store, 'dispatch').mockImplementation();
createComponent({
@@ -236,7 +236,6 @@ describe('InlineDiffTableRow', () => {
expect(findLineNumber().attributes()).toEqual({
href: expectedHref,
'data-linenumber': TEST_LINE_NUMBER.toString(),
- 'data-qa-selector': expectedQaSelector,
});
});
diff --git a/spec/frontend/diffs/mock_data/diff_metadata.js b/spec/frontend/diffs/mock_data/diff_metadata.js
index cfa0038c06f..ce79843b8b1 100644
--- a/spec/frontend/diffs/mock_data/diff_metadata.js
+++ b/spec/frontend/diffs/mock_data/diff_metadata.js
@@ -3,7 +3,7 @@ export const diffMetadata = {
size: 1,
branch_name: 'update-changelog',
source_branch_exists: true,
- target_branch_name: 'master',
+ target_branch_name: 'main',
commit: null,
context_commits: null,
merge_request_diff: {
diff --git a/spec/frontend/editor/editor_lite_extension_base_spec.js b/spec/frontend/editor/editor_lite_extension_base_spec.js
index 1ae8c70c741..59e1b8968eb 100644
--- a/spec/frontend/editor/editor_lite_extension_base_spec.js
+++ b/spec/frontend/editor/editor_lite_extension_base_spec.js
@@ -7,6 +7,21 @@ import {
} from '~/editor/constants';
import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
+jest.mock('~/helpers/startup_css_helper', () => {
+ return {
+ waitForCSSLoaded: jest.fn().mockImplementation((cb) => {
+ // We have to artificially put the callback's execution
+ // to the end of the current call stack to be able to
+ // test that the callback is called after waitForCSSLoaded.
+ // setTimeout with 0 delay does exactly that.
+ // Otherwise we might end up with false positive results
+ setTimeout(() => {
+ cb.apply();
+ }, 0);
+ }),
+ };
+});
+
describe('The basis for an Editor Lite extension', () => {
const defaultLine = 3;
let ext;
@@ -44,6 +59,19 @@ describe('The basis for an Editor Lite extension', () => {
});
describe('constructor', () => {
+ it('resets the layout in waitForCSSLoaded callback', async () => {
+ const instance = {
+ layout: jest.fn(),
+ };
+ ext = new EditorLiteExtension({ instance });
+ expect(instance.layout).not.toHaveBeenCalled();
+
+ // We're waiting for the waitForCSSLoaded mock to kick in
+ await jest.runOnlyPendingTimers();
+
+ expect(instance.layout).toHaveBeenCalled();
+ });
+
it.each`
description | instance | options
${'accepts configuration options and instance'} | ${{}} | ${defaultOptions}
@@ -51,6 +79,7 @@ describe('The basis for an Editor Lite extension', () => {
${'does not fail if both instance and the options are omitted'} | ${undefined} | ${undefined}
${'throws if only options are passed'} | ${undefined} | ${defaultOptions}
`('$description', ({ instance, options } = {}) => {
+ EditorLiteExtension.deferRerender = jest.fn();
const originalInstance = { ...instance };
if (instance) {
@@ -82,12 +111,14 @@ describe('The basis for an Editor Lite extension', () => {
});
it('initializes the line highlighting', () => {
+ EditorLiteExtension.deferRerender = jest.fn();
const spy = jest.spyOn(EditorLiteExtension, 'highlightLines');
ext = new EditorLiteExtension({ instance: {} });
expect(spy).toHaveBeenCalled();
});
it('sets up the line linking for code instance', () => {
+ EditorLiteExtension.deferRerender = jest.fn();
const spy = jest.spyOn(EditorLiteExtension, 'setupLineLinking');
const instance = {
getEditorType: jest.fn().mockReturnValue(EDITOR_TYPE_CODE),
@@ -99,6 +130,7 @@ describe('The basis for an Editor Lite extension', () => {
});
it('does not set up the line linking for diff instance', () => {
+ EditorLiteExtension.deferRerender = jest.fn();
const spy = jest.spyOn(EditorLiteExtension, 'setupLineLinking');
const instance = {
getEditorType: jest.fn().mockReturnValue(EDITOR_TYPE_DIFF),
diff --git a/spec/frontend/environments/environment_table_spec.js b/spec/frontend/environments/environment_table_spec.js
index 863c4526bb9..71426ee5170 100644
--- a/spec/frontend/environments/environment_table_spec.js
+++ b/spec/frontend/environments/environment_table_spec.js
@@ -89,6 +89,42 @@ describe('Environment table', () => {
expect(wrapper.find('.deploy-board-icon').exists()).toBe(true);
});
+ it('should render deploy board container when data is provided for children', async () => {
+ const mockItem = {
+ name: 'review',
+ size: 1,
+ environment_path: 'url',
+ logs_path: 'url',
+ id: 1,
+ isFolder: true,
+ isOpen: true,
+ children: [
+ {
+ name: 'review/test',
+ hasDeployBoard: true,
+ deployBoardData: deployBoardMockData,
+ isDeployBoardVisible: true,
+ isLoadingDeployBoard: false,
+ isEmptyDeployBoard: false,
+ },
+ ],
+ };
+
+ await factory({
+ propsData: {
+ environments: [mockItem],
+ canCreateDeployment: false,
+ canReadEnvironment: true,
+ userCalloutsPath: '/callouts',
+ lockPromotionSvgPath: '/assets/illustrations/lock-promotion.svg',
+ helpCanaryDeploymentsPath: 'help/canary-deployments',
+ },
+ });
+
+ expect(wrapper.find('.js-deploy-board-row').exists()).toBe(true);
+ expect(wrapper.find('.deploy-board-icon').exists()).toBe(true);
+ });
+
it('should toggle deploy board visibility when arrow is clicked', (done) => {
const mockItem = {
name: 'review',
@@ -125,7 +161,7 @@ describe('Environment table', () => {
wrapper.find('.deploy-board-icon').trigger('click');
});
- it('should set the enviornment to change and weight when a change canary weight event is recevied', async () => {
+ it('should set the environment to change and weight when a change canary weight event is received', async () => {
const mockItem = {
name: 'review',
size: 1,
@@ -359,7 +395,7 @@ describe('Environment table', () => {
},
},
{
- name: 'review/master',
+ name: 'review/main',
last_deployment: {
created_at: '2019-02-17T16:26:15.125Z',
},
@@ -374,7 +410,7 @@ describe('Environment table', () => {
},
];
const [production, review, staging] = mockItems;
- const [addcibuildstatus, master] = mockItems[1].children;
+ const [addcibuildstatus, main] = mockItems[1].children;
factory({
propsData: {
@@ -390,7 +426,7 @@ describe('Environment table', () => {
production.name,
]);
- expect(wrapper.vm.sortedEnvironments[0].children).toEqual([master, addcibuildstatus]);
+ expect(wrapper.vm.sortedEnvironments[0].children).toEqual([main, addcibuildstatus]);
});
});
});
diff --git a/spec/frontend/environments/environments_store_spec.js b/spec/frontend/environments/environments_store_spec.js
index 4a07281353f..cb2394b224d 100644
--- a/spec/frontend/environments/environments_store_spec.js
+++ b/spec/frontend/environments/environments_store_spec.js
@@ -123,6 +123,29 @@ describe('Store', () => {
expect(store.state.environments[1].children.length).toEqual(serverData.length);
});
+
+ it('should parse deploy board data for children', () => {
+ store.storeEnvironments(serverData);
+
+ store.setfolderContent(store.state.environments[1], [
+ {
+ name: 'foo',
+ size: 1,
+ latest: {
+ id: 1,
+ rollout_status: deployBoardMockData,
+ },
+ },
+ ]);
+ const result = store.state.environments[1].children[0];
+ expect(result).toMatchObject({
+ deployBoardData: deployBoardMockData,
+ hasDeployBoard: true,
+ isDeployBoardVisible: true,
+ isLoadingDeployBoard: false,
+ isEmptyDeployBoard: false,
+ });
+ });
});
describe('store pagination', () => {
diff --git a/spec/frontend/environments/mock_data.js b/spec/frontend/environments/mock_data.js
index 4ad005f55c3..9ba71b78c2f 100644
--- a/spec/frontend/environments/mock_data.js
+++ b/spec/frontend/environments/mock_data.js
@@ -76,8 +76,8 @@ const environment = {
iid: 6,
sha: '500aabcb17c97bdcf2d0c410b70cb8556f0362dd',
ref: {
- name: 'master',
- ref_url: 'root/ci-folders/tree/master',
+ name: 'main',
+ ref_url: 'root/ci-folders/tree/main',
},
tag: true,
'last?': true,
@@ -130,8 +130,8 @@ const environment = {
iid: 27,
sha: '1132df044b73943943c949e7ac2c2f120a89bf59',
ref: {
- name: 'master',
- ref_path: '/root/environment-test/-/tree/master',
+ name: 'main',
+ ref_path: '/root/environment-test/-/tree/main',
},
status: 'running',
created_at: '2020-12-04T19:57:49.514Z',
diff --git a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
index f02a261f323..2e8a42dbfe6 100644
--- a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
+++ b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
@@ -33,11 +33,12 @@ describe('error tracking settings form', () => {
describe('an empty form', () => {
it('is rendered', () => {
- expect(wrapper.findAll(GlFormInput).length).toBe(2);
- expect(wrapper.find(GlFormInput).attributes('id')).toBe('error-tracking-api-host');
- expect(wrapper.findAll(GlFormInput).at(1).attributes('id')).toBe('error-tracking-token');
-
- expect(wrapper.findAll(GlButton).exists()).toBe(true);
+ expect(wrapper.findAllComponents(GlFormInput).length).toBe(2);
+ expect(wrapper.findComponent(GlFormInput).attributes('id')).toBe('error-tracking-api-host');
+ expect(wrapper.findAllComponents(GlFormInput).at(1).attributes('id')).toBe(
+ 'error-tracking-token',
+ );
+ expect(wrapper.findAllComponents(GlButton).exists()).toBe(true);
});
it('is rendered with labels and placeholders', () => {
@@ -51,7 +52,7 @@ describe('error tracking settings form', () => {
);
expect(pageText).not.toContain('Connection failed. Check Auth Token and try again.');
- expect(wrapper.findAll(GlFormInput).at(0).attributes('placeholder')).toContain(
+ expect(wrapper.findAllComponents(GlFormInput).at(0).attributes('placeholder')).toContain(
'https://mysentryserver.com',
);
});
@@ -63,7 +64,7 @@ describe('error tracking settings form', () => {
});
it('shows loading spinner', () => {
- const buttonEl = wrapper.find(GlButton);
+ const buttonEl = wrapper.findComponent(GlButton);
expect(buttonEl.props('loading')).toBe(true);
expect(buttonEl.text()).toBe('Connecting');
diff --git a/spec/frontend/experimentation/components/experiment_spec.js b/spec/frontend/experimentation/components/gitlab_experiment_spec.js
index dbc7da5c535..f52ebf0f3c4 100644
--- a/spec/frontend/experimentation/components/experiment_spec.js
+++ b/spec/frontend/experimentation/components/gitlab_experiment_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import ExperimentComponent from '~/experimentation/components/experiment.vue';
+import ExperimentComponent from '~/experimentation/components/gitlab_experiment.vue';
const defaultProps = { name: 'experiment_name' };
const defaultSlots = {
diff --git a/spec/frontend/experimentation/utils_spec.js b/spec/frontend/experimentation/utils_spec.js
index ec09bbab349..2ba8c65a252 100644
--- a/spec/frontend/experimentation/utils_spec.js
+++ b/spec/frontend/experimentation/utils_spec.js
@@ -1,5 +1,9 @@
import { assignGitlabExperiment } from 'helpers/experimentation_helper';
-import { DEFAULT_VARIANT, CANDIDATE_VARIANT } from '~/experimentation/constants';
+import {
+ DEFAULT_VARIANT,
+ CANDIDATE_VARIANT,
+ TRACKING_CONTEXT_SCHEMA,
+} from '~/experimentation/constants';
import * as experimentUtils from '~/experimentation/utils';
describe('experiment Utilities', () => {
@@ -19,6 +23,20 @@ describe('experiment Utilities', () => {
});
});
+ describe('getExperimentContexts', () => {
+ describe.each`
+ gon | input | output
+ ${[TEST_KEY, '_data_']} | ${[TEST_KEY]} | ${[{ schema: TRACKING_CONTEXT_SCHEMA, data: { variant: '_data_' } }]}
+ ${[]} | ${[TEST_KEY]} | ${[]}
+ `('with input=$input and gon=$gon', ({ gon, input, output }) => {
+ assignGitlabExperiment(...gon);
+
+ it(`returns ${output}`, () => {
+ expect(experimentUtils.getExperimentContexts(...input)).toEqual(output);
+ });
+ });
+ });
+
describe('isExperimentVariant', () => {
describe.each`
gon | input | output
diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
index 2fd8e524e7a..0948b08f942 100644
--- a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
@@ -90,7 +90,7 @@ describe('Edit feature flag form', () => {
expect(wrapper.find(GlToggle).props('value')).toBe(true);
});
- it('should alert users the flag is read only', () => {
+ it('should alert users the flag is read-only', () => {
expect(findAlert().text()).toContain('GitLab is moving to a new way of managing feature flags');
});
diff --git a/spec/frontend/feature_flags/components/form_spec.js b/spec/frontend/feature_flags/components/form_spec.js
index 00d557c11cf..6c3fce68618 100644
--- a/spec/frontend/feature_flags/components/form_spec.js
+++ b/spec/frontend/feature_flags/components/form_spec.js
@@ -281,7 +281,7 @@ describe('feature flag form', () => {
});
});
- it('renders read only name', () => {
+ it('renders read-only name', () => {
expect(wrapper.find('.js-scope-all').exists()).toEqual(true);
});
});
diff --git a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml
index a83d5374e2c..a1ea2806879 100644
--- a/spec/frontend/fixtures/api_markdown.yml
+++ b/spec/frontend/fixtures/api_markdown.yml
@@ -48,3 +48,7 @@
3. list item 3
- name: image
markdown: '![alt text](https://gitlab.com/logo.png)'
+- name: hard_break
+ markdown: |-
+ This is a line after a\
+ hard break
diff --git a/spec/frontend/fixtures/merge_requests.rb b/spec/frontend/fixtures/merge_requests.rb
index 418912638f9..f10f96f2516 100644
--- a/spec/frontend/fixtures/merge_requests.rb
+++ b/spec/frontend/fixtures/merge_requests.rb
@@ -62,8 +62,14 @@ RSpec.describe Projects::MergeRequestsController, '(JavaScript fixtures)', type:
remove_repository(project)
end
+ it 'merge_requests/merge_request_with_single_assignee_feature.html' do
+ stub_licensed_features(multiple_merge_request_assignees: false)
+
+ render_merge_request(merge_request)
+ end
+
it 'merge_requests/merge_request_of_current_user.html' do
- merge_request.update(author: user)
+ merge_request.update!(author: user)
render_merge_request(merge_request)
end
diff --git a/spec/frontend/fixtures/raw.rb b/spec/frontend/fixtures/raw.rb
index cf51f2389bc..44927bd29d8 100644
--- a/spec/frontend/fixtures/raw.rb
+++ b/spec/frontend/fixtures/raw.rb
@@ -25,6 +25,10 @@ RSpec.describe 'Raw files', '(JavaScript fixtures)' do
@blob = project.repository.blob_at('6d85bb69', 'files/ipython/basic.ipynb')
end
+ it 'blob/notebook/markdown-table.json' do
+ @blob = project.repository.blob_at('f6b7a707', 'files/ipython/markdown-table.ipynb')
+ end
+
it 'blob/notebook/worksheets.json' do
@blob = project.repository.blob_at('6d85bb69', 'files/ipython/worksheets.ipynb')
end
diff --git a/spec/frontend/fixtures/releases.rb b/spec/frontend/fixtures/releases.rb
index dc282b49be5..7ec155fcb10 100644
--- a/spec/frontend/fixtures/releases.rb
+++ b/spec/frontend/fixtures/releases.rb
@@ -119,16 +119,18 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
describe GraphQL::Query, type: :request do
include GraphqlHelpers
- all_releases_query_path = 'releases/queries/all_releases.query.graphql'
- one_release_query_path = 'releases/queries/one_release.query.graphql'
- fragment_paths = ['releases/queries/release.fragment.graphql']
+ all_releases_query_path = 'releases/graphql/queries/all_releases.query.graphql'
+ one_release_query_path = 'releases/graphql/queries/one_release.query.graphql'
+ one_release_for_editing_query_path = 'releases/graphql/queries/one_release_for_editing.query.graphql'
+ release_fragment_path = 'releases/graphql/fragments/release.fragment.graphql'
+ release_for_editing_fragment_path = 'releases/graphql/fragments/release_for_editing.fragment.graphql'
before(:all) do
clean_frontend_fixtures('graphql/releases/')
end
it "graphql/#{all_releases_query_path}.json" do
- query = get_graphql_query_as_string(all_releases_query_path, fragment_paths)
+ query = get_graphql_query_as_string(all_releases_query_path, [release_fragment_path])
post_graphql(query, current_user: admin, variables: { fullPath: project.full_path })
@@ -136,7 +138,15 @@ RSpec.describe 'Releases (JavaScript fixtures)' do
end
it "graphql/#{one_release_query_path}.json" do
- query = get_graphql_query_as_string(one_release_query_path, fragment_paths)
+ query = get_graphql_query_as_string(one_release_query_path, [release_fragment_path])
+
+ post_graphql(query, current_user: admin, variables: { fullPath: project.full_path, tagName: release.tag })
+
+ expect_graphql_errors_to_be_empty
+ end
+
+ it "graphql/#{one_release_for_editing_query_path}.json" do
+ query = get_graphql_query_as_string(one_release_for_editing_query_path, [release_for_editing_fragment_path])
post_graphql(query, current_user: admin, variables: { fullPath: project.full_path, tagName: release.tag })
diff --git a/spec/frontend/flash_spec.js b/spec/frontend/flash_spec.js
index 6d482e5814d..6a5ac76a4d0 100644
--- a/spec/frontend/flash_spec.js
+++ b/spec/frontend/flash_spec.js
@@ -339,6 +339,20 @@ describe('Flash', () => {
expect(actionConfig.clickHandler).toHaveBeenCalled();
});
});
+
+ describe('additional behavior', () => {
+ describe('close', () => {
+ it('clicks the close icon', () => {
+ const flash = createFlash({ ...defaultParams });
+ const close = document.querySelector('.flash-alert .js-close-icon');
+
+ jest.spyOn(close, 'click');
+ flash.close();
+
+ expect(close.click.mock.calls.length).toBe(1);
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/frequent_items/components/app_spec.js b/spec/frontend/frequent_items/components/app_spec.js
index 80059c4c87f..7a1026e8bfc 100644
--- a/spec/frontend/frequent_items/components/app_spec.js
+++ b/spec/frontend/frequent_items/components/app_spec.js
@@ -1,10 +1,11 @@
import MockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
-import { useRealDate } from 'helpers/fake_date';
+import Vuex from 'vuex';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import appComponent from '~/frequent_items/components/app.vue';
+import App from '~/frequent_items/components/app.vue';
+import FrequentItemsList from '~/frequent_items/components/frequent_items_list.vue';
import { FREQUENT_ITEMS, HOUR_IN_MS } from '~/frequent_items/constants';
import eventHub from '~/frequent_items/event_hub';
import { createStore } from '~/frequent_items/store';
@@ -12,246 +13,230 @@ import { getTopFrequentItems } from '~/frequent_items/utils';
import axios from '~/lib/utils/axios_utils';
import { currentSession, mockFrequentProjects, mockSearchedProjects } from '../mock_data';
+Vue.use(Vuex);
+
useLocalStorageSpy();
-let session;
-const createComponentWithStore = (namespace = 'projects') => {
- session = currentSession[namespace];
- gon.api_version = session.apiVersion;
- const Component = Vue.extend(appComponent);
- const store = createStore();
-
- return mountComponentWithStore(Component, {
- store,
- props: {
- namespace,
- currentUserName: session.username,
- currentItem: session.project || session.group,
- },
- });
-};
+const TEST_NAMESPACE = 'projects';
+const TEST_VUEX_MODULE = 'frequentProjects';
+const TEST_PROJECT = currentSession[TEST_NAMESPACE].project;
+const TEST_STORAGE_KEY = currentSession[TEST_NAMESPACE].storageKey;
describe('Frequent Items App Component', () => {
- let vm;
+ let wrapper;
let mock;
+ let store;
+
+ const createComponent = ({ currentItem = null } = {}) => {
+ const session = currentSession[TEST_NAMESPACE];
+ gon.api_version = session.apiVersion;
+
+ wrapper = mountExtended(App, {
+ store,
+ propsData: {
+ namespace: TEST_NAMESPACE,
+ currentUserName: session.username,
+ currentItem: currentItem || session.project,
+ },
+ provide: {
+ vuexModule: TEST_VUEX_MODULE,
+ },
+ });
+ };
+
+ const triggerDropdownOpen = () => eventHub.$emit(`${TEST_NAMESPACE}-dropdownOpen`);
+ const getStoredProjects = () => JSON.parse(localStorage.getItem(TEST_STORAGE_KEY));
+ const findSearchInput = () => wrapper.findByTestId('frequent-items-search-input');
+ const findLoading = () => wrapper.findByTestId('loading');
+ const findSectionHeader = () => wrapper.findByTestId('header');
+ const findFrequentItemsList = () => wrapper.findComponent(FrequentItemsList);
+ const findFrequentItems = () => findFrequentItemsList().findAll('li');
+ const setSearch = (search) => {
+ const searchInput = wrapper.find('input');
+
+ searchInput.setValue(search);
+ };
beforeEach(() => {
mock = new MockAdapter(axios);
- vm = createComponentWithStore();
+ store = createStore();
});
afterEach(() => {
mock.restore();
- vm.$destroy();
+ wrapper.destroy();
});
- describe('methods', () => {
- describe('dropdownOpenHandler', () => {
- it('should fetch frequent items when no search has been previously made on desktop', () => {
- jest.spyOn(vm, 'fetchFrequentItems').mockImplementation(() => {});
-
- vm.dropdownOpenHandler();
+ describe('default', () => {
+ beforeEach(() => {
+ jest.spyOn(store, 'dispatch');
- expect(vm.fetchFrequentItems).toHaveBeenCalledWith();
- });
+ createComponent();
});
- describe('logItemAccess', () => {
- let storage;
-
- beforeEach(() => {
- storage = {};
-
- localStorage.setItem.mockImplementation((storageKey, value) => {
- storage[storageKey] = value;
- });
-
- localStorage.getItem.mockImplementation((storageKey) => {
- if (storage[storageKey]) {
- return storage[storageKey];
- }
-
- return null;
- });
- });
+ it('should fetch frequent items', () => {
+ triggerDropdownOpen();
- it('should create a project store if it does not exist and adds a project', () => {
- vm.logItemAccess(session.storageKey, session.project);
-
- const projects = JSON.parse(storage[session.storageKey]);
-
- expect(projects.length).toBe(1);
- expect(projects[0].frequency).toBe(1);
- expect(projects[0].lastAccessedOn).toBeDefined();
- });
-
- it('should prevent inserting same report multiple times into store', () => {
- vm.logItemAccess(session.storageKey, session.project);
- vm.logItemAccess(session.storageKey, session.project);
-
- const projects = JSON.parse(storage[session.storageKey]);
-
- expect(projects.length).toBe(1);
- });
-
- describe('with real date', () => {
- useRealDate();
-
- it('should increase frequency of report if it was logged multiple times over the course of an hour', () => {
- let projects;
- const newTimestamp = Date.now() + HOUR_IN_MS + 1;
+ expect(store.dispatch).toHaveBeenCalledWith(`${TEST_VUEX_MODULE}/fetchFrequentItems`);
+ });
- vm.logItemAccess(session.storageKey, session.project);
- projects = JSON.parse(storage[session.storageKey]);
+ it('should not fetch frequent items if destroyed', () => {
+ wrapper.destroy();
+ triggerDropdownOpen();
- expect(projects[0].frequency).toBe(1);
+ expect(store.dispatch).not.toHaveBeenCalledWith(`${TEST_VUEX_MODULE}/fetchFrequentItems`);
+ });
- vm.logItemAccess(session.storageKey, {
- ...session.project,
- lastAccessedOn: newTimestamp,
- });
- projects = JSON.parse(storage[session.storageKey]);
+ it('should render search input', () => {
+ expect(findSearchInput().exists()).toBe(true);
+ });
- expect(projects[0].frequency).toBe(2);
- expect(projects[0].lastAccessedOn).not.toBe(session.project.lastAccessedOn);
- });
- });
+ it('should render loading animation', async () => {
+ triggerDropdownOpen();
+ store.state[TEST_VUEX_MODULE].isLoadingItems = true;
- it('should always update project metadata', () => {
- let projects;
- const oldProject = {
- ...session.project,
- };
+ await wrapper.vm.$nextTick();
- const newProject = {
- ...session.project,
- name: 'New Name',
- avatarUrl: 'new/avatar.png',
- namespace: 'New / Namespace',
- webUrl: 'http://localhost/new/web/url',
- };
+ const loading = findLoading();
- vm.logItemAccess(session.storageKey, oldProject);
- projects = JSON.parse(storage[session.storageKey]);
+ expect(loading.exists()).toBe(true);
+ expect(loading.find('[aria-label="Loading projects"]').exists()).toBe(true);
+ });
- expect(projects[0].name).toBe(oldProject.name);
- expect(projects[0].avatarUrl).toBe(oldProject.avatarUrl);
- expect(projects[0].namespace).toBe(oldProject.namespace);
- expect(projects[0].webUrl).toBe(oldProject.webUrl);
+ it('should render frequent projects list header', () => {
+ const sectionHeader = findSectionHeader();
- vm.logItemAccess(session.storageKey, newProject);
- projects = JSON.parse(storage[session.storageKey]);
+ expect(sectionHeader.exists()).toBe(true);
+ expect(sectionHeader.text()).toBe('Frequently visited');
+ });
- expect(projects[0].name).toBe(newProject.name);
- expect(projects[0].avatarUrl).toBe(newProject.avatarUrl);
- expect(projects[0].namespace).toBe(newProject.namespace);
- expect(projects[0].webUrl).toBe(newProject.webUrl);
- });
+ it('should render frequent projects list', async () => {
+ const expectedResult = getTopFrequentItems(mockFrequentProjects);
+ localStorage.setItem(TEST_STORAGE_KEY, JSON.stringify(mockFrequentProjects));
- it('should not add more than 20 projects in store', () => {
- for (let id = 0; id < FREQUENT_ITEMS.MAX_COUNT; id += 1) {
- const project = {
- ...session.project,
- id,
- };
- vm.logItemAccess(session.storageKey, project);
- }
+ expect(findFrequentItems().length).toBe(1);
- const projects = JSON.parse(storage[session.storageKey]);
+ triggerDropdownOpen();
+ await wrapper.vm.$nextTick();
- expect(projects.length).toBe(FREQUENT_ITEMS.MAX_COUNT);
+ expect(findFrequentItems().length).toBe(expectedResult.length);
+ expect(findFrequentItemsList().props()).toEqual({
+ items: expectedResult,
+ namespace: TEST_NAMESPACE,
+ hasSearchQuery: false,
+ isFetchFailed: false,
+ matcher: '',
});
});
- });
-
- describe('created', () => {
- it('should bind event listeners on eventHub', (done) => {
- jest.spyOn(eventHub, '$on').mockImplementation(() => {});
- createComponentWithStore().$mount();
-
- Vue.nextTick(() => {
- expect(eventHub.$on).toHaveBeenCalledWith('projects-dropdownOpen', expect.any(Function));
- done();
- });
+ it('should render searched projects list', async () => {
+ mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects.data);
+
+ setSearch('gitlab');
+ await wrapper.vm.$nextTick();
+
+ expect(findLoading().exists()).toBe(true);
+
+ await waitForPromises();
+
+ expect(findFrequentItems().length).toBe(mockSearchedProjects.data.length);
+ expect(findFrequentItemsList().props()).toEqual(
+ expect.objectContaining({
+ items: mockSearchedProjects.data.map(
+ ({ avatar_url, web_url, name_with_namespace, ...item }) => ({
+ ...item,
+ avatarUrl: avatar_url,
+ webUrl: web_url,
+ namespace: name_with_namespace,
+ }),
+ ),
+ namespace: TEST_NAMESPACE,
+ hasSearchQuery: true,
+ isFetchFailed: false,
+ matcher: 'gitlab',
+ }),
+ );
});
});
- describe('beforeDestroy', () => {
- it('should unbind event listeners on eventHub', (done) => {
- jest.spyOn(eventHub, '$off').mockImplementation(() => {});
+ describe('logging', () => {
+ it('when created, it should create a project storage entry and add a project', () => {
+ createComponent();
- vm.$mount();
- vm.$destroy();
+ expect(getStoredProjects()).toEqual([
+ expect.objectContaining({
+ frequency: 1,
+ lastAccessedOn: Date.now(),
+ }),
+ ]);
+ });
- Vue.nextTick(() => {
- expect(eventHub.$off).toHaveBeenCalledWith('projects-dropdownOpen', expect.any(Function));
- done();
+ describe('when created multiple times', () => {
+ beforeEach(() => {
+ createComponent();
+ wrapper.destroy();
+ createComponent();
+ wrapper.destroy();
});
- });
- });
- describe('template', () => {
- it('should render search input', () => {
- expect(vm.$el.querySelector('.search-input-container')).toBeDefined();
- });
+ it('should only log once', () => {
+ expect(getStoredProjects()).toEqual([
+ expect.objectContaining({
+ lastAccessedOn: Date.now(),
+ frequency: 1,
+ }),
+ ]);
+ });
- it('should render loading animation', (done) => {
- vm.$store.dispatch('fetchSearchedItems');
+ it('should increase frequency when created an hour later', () => {
+ const hourLater = Date.now() + HOUR_IN_MS + 1;
- Vue.nextTick(() => {
- const loadingEl = vm.$el.querySelector('.loading-animation');
+ jest.spyOn(Date, 'now').mockReturnValue(hourLater);
+ createComponent({ currentItem: { ...TEST_PROJECT, lastAccessedOn: hourLater } });
- expect(loadingEl).toBeDefined();
- expect(loadingEl.classList.contains('prepend-top-20')).toBe(true);
- expect(loadingEl.querySelector('span').getAttribute('aria-label')).toBe('Loading projects');
- done();
+ expect(getStoredProjects()).toEqual([
+ expect.objectContaining({
+ lastAccessedOn: hourLater,
+ frequency: 2,
+ }),
+ ]);
});
});
- it('should render frequent projects list header', (done) => {
- Vue.nextTick(() => {
- const sectionHeaderEl = vm.$el.querySelector('.section-header');
+ it('should always update project metadata', () => {
+ const oldProject = {
+ ...TEST_PROJECT,
+ };
- expect(sectionHeaderEl).toBeDefined();
- expect(sectionHeaderEl.innerText.trim()).toBe('Frequently visited');
- done();
- });
- });
+ const newProject = {
+ ...oldProject,
+ name: 'New Name',
+ avatarUrl: 'new/avatar.png',
+ namespace: 'New / Namespace',
+ webUrl: 'http://localhost/new/web/url',
+ };
- it('should render frequent projects list', (done) => {
- const expectedResult = getTopFrequentItems(mockFrequentProjects);
- localStorage.getItem.mockImplementation(() => JSON.stringify(mockFrequentProjects));
+ createComponent({ currentItem: oldProject });
+ wrapper.destroy();
+ expect(getStoredProjects()).toEqual([expect.objectContaining(oldProject)]);
- expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(1);
+ createComponent({ currentItem: newProject });
+ wrapper.destroy();
- vm.fetchFrequentItems();
- Vue.nextTick(() => {
- expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(
- expectedResult.length,
- );
- done();
- });
+ expect(getStoredProjects()).toEqual([expect.objectContaining(newProject)]);
});
- it('should render searched projects list', (done) => {
- mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects);
-
- expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(1);
-
- vm.$store.dispatch('setSearchQuery', 'gitlab');
- vm.$nextTick()
- .then(() => {
- expect(vm.$el.querySelector('.loading-animation')).toBeDefined();
- })
- .then(waitForPromises)
- .then(() => {
- expect(vm.$el.querySelectorAll('.frequent-items-list-container li').length).toBe(
- mockSearchedProjects.data.length,
- );
- })
- .then(done)
- .catch(done.fail);
+ it('should not add more than 20 projects in store', () => {
+ for (let id = 0; id < FREQUENT_ITEMS.MAX_COUNT + 10; id += 1) {
+ const project = {
+ ...TEST_PROJECT,
+ id,
+ };
+ createComponent({ currentItem: project });
+ wrapper.destroy();
+ }
+
+ expect(getStoredProjects().length).toBe(FREQUENT_ITEMS.MAX_COUNT);
});
});
});
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
index 66fb346cb38..9a68115e4f6 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
@@ -1,14 +1,18 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
import { trimText } from 'helpers/text_helper';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
import { createStore } from '~/frequent_items/store';
import { mockProject } from '../mock_data';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('FrequentItemsListItemComponent', () => {
let wrapper;
let trackingSpy;
- let store = createStore();
+ let store;
const findTitle = () => wrapper.find({ ref: 'frequentItemsItemTitle' });
const findAvatar = () => wrapper.find({ ref: 'frequentItemsItemAvatar' });
@@ -31,11 +35,15 @@ describe('FrequentItemsListItemComponent', () => {
avatarUrl: mockProject.avatarUrl,
...props,
},
+ provide: {
+ vuexModule: 'frequentProjects',
+ },
+ localVue,
});
};
beforeEach(() => {
- store = createStore({ dropdownType: 'project' });
+ store = createStore();
trackingSpy = mockTracking('_category_', document, jest.spyOn);
trackingSpy.mockImplementation(() => {});
});
@@ -119,7 +127,7 @@ describe('FrequentItemsListItemComponent', () => {
});
link.trigger('click');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_link', {
- label: 'project_dropdown_frequent_items_list_item',
+ label: 'projects_dropdown_frequent_items_list_item',
});
});
});
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
index bd0711005b3..c015914c991 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
@@ -1,9 +1,13 @@
-import { mount } from '@vue/test-utils';
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
import frequentItemsListComponent from '~/frequent_items/components/frequent_items_list.vue';
import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
import { createStore } from '~/frequent_items/store';
import { mockFrequentProjects } from '../mock_data';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('FrequentItemsListComponent', () => {
let wrapper;
@@ -18,6 +22,10 @@ describe('FrequentItemsListComponent', () => {
matcher: 'lab',
...props,
},
+ localVue,
+ provide: {
+ vuexModule: 'frequentProjects',
+ },
});
};
diff --git a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
index 0280fdb0ca2..c9b7e0f3d13 100644
--- a/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_search_input_spec.js
@@ -1,9 +1,13 @@
import { GlSearchBoxByType } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import searchComponent from '~/frequent_items/components/frequent_items_search_input.vue';
import { createStore } from '~/frequent_items/store';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('FrequentItemsSearchInputComponent', () => {
let wrapper;
let trackingSpy;
@@ -14,12 +18,16 @@ describe('FrequentItemsSearchInputComponent', () => {
shallowMount(searchComponent, {
store,
propsData: { namespace },
+ localVue,
+ provide: {
+ vuexModule: 'frequentProjects',
+ },
});
const findSearchBoxByType = () => wrapper.find(GlSearchBoxByType);
beforeEach(() => {
- store = createStore({ dropdownType: 'project' });
+ store = createStore();
jest.spyOn(store, 'dispatch').mockImplementation(() => {});
trackingSpy = mockTracking('_category_', document, jest.spyOn);
@@ -57,9 +65,9 @@ describe('FrequentItemsSearchInputComponent', () => {
await wrapper.vm.$nextTick();
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'type_search_query', {
- label: 'project_dropdown_frequent_items_search_input',
+ label: 'projects_dropdown_frequent_items_search_input',
});
- expect(store.dispatch).toHaveBeenCalledWith('setSearchQuery', value);
+ expect(store.dispatch).toHaveBeenCalledWith('frequentProjects/setSearchQuery', value);
});
});
});
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 5453c93eac3..211ed064762 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -691,12 +691,9 @@ describe('GfmAutoComplete', () => {
{ search: 'ErlindaMayert nicolle' },
{ search: 'PhoebeSchaden salina' },
{ search: 'KinaCummings robena' },
- // Remaining members are grouped last
- { search: 'Administrator root' },
- { search: 'AntoineLedner ammie' },
];
- it('sorts by match with start of name/username, then match with any part of name/username, and maintains sort order', () => {
+ it('filters out non-matches, then puts matches with start of name/username first', () => {
expect(GfmAutoComplete.Members.sort(query, items)).toMatchObject(expected);
});
});
diff --git a/spec/frontend/groups/components/invite_members_banner_spec.js b/spec/frontend/groups/components/invite_members_banner_spec.js
index 9a2068a27a1..0da2f84f2a1 100644
--- a/spec/frontend/groups/components/invite_members_banner_spec.js
+++ b/spec/frontend/groups/components/invite_members_banner_spec.js
@@ -2,6 +2,7 @@ import { GlBanner, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import InviteMembersBanner from '~/groups/components/invite_members_banner.vue';
+import eventHub from '~/invite_members/event_hub';
import { setCookie, parseBoolean } from '~/lib/utils/common_utils';
jest.mock('~/lib/utils/common_utils');
@@ -58,12 +59,23 @@ describe('InviteMembersBanner', () => {
});
});
- it('sets the button attributes for the buttonClickEvent', () => {
- const button = wrapper.find(`[href='${wrapper.vm.inviteMembersPath}']`);
+ describe('when the button is clicked', () => {
+ beforeEach(() => {
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+ wrapper.find(GlBanner).vm.$emit('primary');
+ });
+
+ it('calls openModal through the eventHub', () => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('openModal', {
+ inviteeType: 'members',
+ source: 'invite_members_banner',
+ });
+ });
- expect(button.attributes()).toMatchObject({
- 'data-track-event': buttonClickEvent,
- 'data-track-label': trackLabel,
+ it('sends the buttonClickEvent with correct trackCategory and trackLabel', () => {
+ expect(trackingSpy).toHaveBeenCalledWith(trackCategory, buttonClickEvent, {
+ label: trackLabel,
+ });
});
});
@@ -100,10 +112,6 @@ describe('InviteMembersBanner', () => {
it('uses the button_text text from options for buttontext', () => {
expect(findBanner().attributes('buttontext')).toBe(buttonText);
});
-
- it('uses the href from inviteMembersPath for buttonlink', () => {
- expect(findBanner().attributes('buttonlink')).toBe(inviteMembersPath);
- });
});
describe('dismissing', () => {
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index a3b327343e5..646e51160d8 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -510,6 +510,7 @@ describe('RepoEditor', () => {
},
});
await vm.$nextTick();
+ await vm.$nextTick();
expect(vm.initEditor).toHaveBeenCalled();
});
diff --git a/spec/frontend/ide/lib/alerts/environment_spec.js b/spec/frontend/ide/lib/alerts/environment_spec.js
new file mode 100644
index 00000000000..d645209345c
--- /dev/null
+++ b/spec/frontend/ide/lib/alerts/environment_spec.js
@@ -0,0 +1,21 @@
+import { GlLink } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import Environments from '~/ide/lib/alerts/environments.vue';
+
+describe('~/ide/lib/alerts/environment.vue', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = mount(Environments);
+ });
+
+ it('shows a message regarding environments', () => {
+ expect(wrapper.text()).toBe(
+ "No deployments detected. Use environments to control your software's continuous deployment. Learn more about deployment jobs.",
+ );
+ });
+
+ it('links to the help page on environments', () => {
+ expect(wrapper.findComponent(GlLink).attributes('href')).toBe('/help/ci/environments/index.md');
+ });
+});
diff --git a/spec/frontend/ide/services/index_spec.js b/spec/frontend/ide/services/index_spec.js
index 3503834e24b..4a726cff3b6 100644
--- a/spec/frontend/ide/services/index_spec.js
+++ b/spec/frontend/ide/services/index_spec.js
@@ -2,9 +2,11 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import getIdeProject from 'ee_else_ce/ide/queries/get_ide_project.query.graphql';
import Api from '~/api';
+import dismissUserCallout from '~/graphql_shared/mutations/dismiss_user_callout.mutation.graphql';
import services from '~/ide/services';
-import { query } from '~/ide/services/gql';
+import { query, mutate } from '~/ide/services/gql';
import { escapeFileUrl } from '~/lib/utils/url_utility';
+import ciConfig from '~/pipeline_editor/graphql/queries/ci_config.graphql';
import { projectData } from '../mock_data';
jest.mock('~/api');
@@ -299,4 +301,33 @@ describe('IDE services', () => {
});
});
});
+ describe('getCiConfig', () => {
+ const TEST_PROJECT_PATH = 'foo/bar';
+ const TEST_CI_CONFIG = 'test config';
+
+ it('queries with the given CI config and project', () => {
+ const result = { data: { ciConfig: { test: 'data' } } };
+ query.mockResolvedValue(result);
+ return services.getCiConfig(TEST_PROJECT_PATH, TEST_CI_CONFIG).then((data) => {
+ expect(data).toEqual(result.data.ciConfig);
+ expect(query).toHaveBeenCalledWith({
+ query: ciConfig,
+ variables: { projectPath: TEST_PROJECT_PATH, content: TEST_CI_CONFIG },
+ });
+ });
+ });
+ });
+ describe('dismissUserCallout', () => {
+ it('mutates the callout to dismiss', () => {
+ const result = { data: { callouts: { test: 'data' } } };
+ mutate.mockResolvedValue(result);
+ return services.dismissUserCallout('test').then((data) => {
+ expect(data).toEqual(result.data);
+ expect(mutate).toHaveBeenCalledWith({
+ mutation: dismissUserCallout,
+ variables: { input: { featureName: 'test' } },
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/ide/stores/actions/alert_spec.js b/spec/frontend/ide/stores/actions/alert_spec.js
new file mode 100644
index 00000000000..1321c402ebb
--- /dev/null
+++ b/spec/frontend/ide/stores/actions/alert_spec.js
@@ -0,0 +1,46 @@
+import testAction from 'helpers/vuex_action_helper';
+import service from '~/ide/services';
+import {
+ detectEnvironmentsGuidance,
+ dismissEnvironmentsGuidance,
+} from '~/ide/stores/actions/alert';
+import * as types from '~/ide/stores/mutation_types';
+
+jest.mock('~/ide/services');
+
+describe('~/ide/stores/actions/alert', () => {
+ describe('detectEnvironmentsGuidance', () => {
+ it('should try to fetch CI info', () => {
+ const stages = ['a', 'b', 'c'];
+ service.getCiConfig.mockResolvedValue({ stages });
+
+ return testAction(
+ detectEnvironmentsGuidance,
+ 'the content',
+ { currentProjectId: 'gitlab/test' },
+ [{ type: types.DETECT_ENVIRONMENTS_GUIDANCE_ALERT, payload: stages }],
+ [],
+ () => expect(service.getCiConfig).toHaveBeenCalledWith('gitlab/test', 'the content'),
+ );
+ });
+ });
+ describe('dismissCallout', () => {
+ it('should try to dismiss the given callout', () => {
+ const callout = { featureName: 'test', dismissedAt: 'now' };
+
+ service.dismissUserCallout.mockResolvedValue({ userCalloutCreate: { userCallout: callout } });
+
+ return testAction(
+ dismissEnvironmentsGuidance,
+ undefined,
+ {},
+ [{ type: types.DISMISS_ENVIRONMENTS_GUIDANCE_ALERT }],
+ [],
+ () =>
+ expect(service.dismissUserCallout).toHaveBeenCalledWith(
+ 'web_ide_ci_environments_guidance',
+ ),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/ide/stores/actions_spec.js b/spec/frontend/ide/stores/actions_spec.js
index d47dd88dd47..ad55313da93 100644
--- a/spec/frontend/ide/stores/actions_spec.js
+++ b/spec/frontend/ide/stores/actions_spec.js
@@ -4,6 +4,7 @@ import eventHub from '~/ide/eventhub';
import { createRouter } from '~/ide/ide_router';
import { createStore } from '~/ide/stores';
import {
+ init,
stageAllChanges,
unstageAllChanges,
toggleFileFinder,
@@ -54,15 +55,15 @@ describe('Multi-file store actions', () => {
});
});
- describe('setInitialData', () => {
- it('commits initial data', (done) => {
- store
- .dispatch('setInitialData', { canCommit: true })
- .then(() => {
- expect(store.state.canCommit).toBeTruthy();
- done();
- })
- .catch(done.fail);
+ describe('init', () => {
+ it('commits initial data and requests user callouts', () => {
+ return testAction(
+ init,
+ { canCommit: true },
+ store.state,
+ [{ type: 'SET_INITIAL_DATA', payload: { canCommit: true } }],
+ [],
+ );
});
});
diff --git a/spec/frontend/ide/stores/getters/alert_spec.js b/spec/frontend/ide/stores/getters/alert_spec.js
new file mode 100644
index 00000000000..7068b8e637f
--- /dev/null
+++ b/spec/frontend/ide/stores/getters/alert_spec.js
@@ -0,0 +1,46 @@
+import { getAlert } from '~/ide/lib/alerts';
+import EnvironmentsMessage from '~/ide/lib/alerts/environments.vue';
+import { createStore } from '~/ide/stores';
+import * as getters from '~/ide/stores/getters/alert';
+import { file } from '../../helpers';
+
+describe('IDE store alert getters', () => {
+ let localState;
+ let localStore;
+
+ beforeEach(() => {
+ localStore = createStore();
+ localState = localStore.state;
+ });
+
+ describe('alerts', () => {
+ describe('shows an alert about environments', () => {
+ let alert;
+
+ beforeEach(() => {
+ const f = file('.gitlab-ci.yml');
+ localState.openFiles.push(f);
+ localState.currentActivityView = 'repo-commit-section';
+ localState.environmentsGuidanceAlertDetected = true;
+ localState.environmentsGuidanceAlertDismissed = false;
+
+ const alertKey = getters.getAlert(localState)(f);
+ alert = getAlert(alertKey);
+ });
+
+ it('has a message suggesting to use environments', () => {
+ expect(alert.message).toEqual(EnvironmentsMessage);
+ });
+
+ it('dispatches to dismiss the callout on dismiss', () => {
+ jest.spyOn(localStore, 'dispatch').mockImplementation();
+ alert.dismiss(localStore);
+ expect(localStore.dispatch).toHaveBeenCalledWith('dismissEnvironmentsGuidance');
+ });
+
+ it('should be a tip alert', () => {
+ expect(alert.props).toEqual({ variant: 'tip' });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ide/stores/mutations/alert_spec.js b/spec/frontend/ide/stores/mutations/alert_spec.js
new file mode 100644
index 00000000000..2840ec4ebb7
--- /dev/null
+++ b/spec/frontend/ide/stores/mutations/alert_spec.js
@@ -0,0 +1,26 @@
+import * as types from '~/ide/stores/mutation_types';
+import mutations from '~/ide/stores/mutations/alert';
+
+describe('~/ide/stores/mutations/alert', () => {
+ const state = {};
+
+ describe(types.DETECT_ENVIRONMENTS_GUIDANCE_ALERT, () => {
+ it('checks the stages for any that configure environments', () => {
+ mutations[types.DETECT_ENVIRONMENTS_GUIDANCE_ALERT](state, {
+ nodes: [{ groups: { nodes: [{ jobs: { nodes: [{}] } }] } }],
+ });
+ expect(state.environmentsGuidanceAlertDetected).toBe(true);
+ mutations[types.DETECT_ENVIRONMENTS_GUIDANCE_ALERT](state, {
+ nodes: [{ groups: { nodes: [{ jobs: { nodes: [{ environment: {} }] } }] } }],
+ });
+ expect(state.environmentsGuidanceAlertDetected).toBe(false);
+ });
+ });
+
+ describe(types.DISMISS_ENVIRONMENTS_GUIDANCE_ALERT, () => {
+    it('dismisses environments guidance', () => {
+ mutations[types.DISMISS_ENVIRONMENTS_GUIDANCE_ALERT](state);
+ expect(state.environmentsGuidanceAlertDismissed).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
index 7a83136e785..0c69cfb3bc5 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_row_spec.js
@@ -19,7 +19,8 @@ const getFakeGroup = (status) => ({
new_name: 'group1',
},
id: 1,
- status,
+ validation_errors: [],
+ progress: { status },
});
const EXISTING_GROUP_TARGET_NAMESPACE = 'existing-group';
@@ -187,21 +188,25 @@ describe('import table row', () => {
expect(wrapper.text()).toContain('Please choose a group URL with no special characters.');
});
- it('Reports invalid group name if group already exists', async () => {
+    it('Reports invalid group name if a relevant validation error exists', async () => {
+ const FAKE_ERROR_MESSAGE = 'fake error';
+
createComponent({
group: {
...getFakeGroup(STATUSES.NONE),
- import_target: {
- target_namespace: EXISTING_GROUP_TARGET_NAMESPACE,
- new_name: EXISTING_GROUP_PATH,
- },
+ validation_errors: [
+ {
+ field: 'new_name',
+ message: FAKE_ERROR_MESSAGE,
+ },
+ ],
},
});
jest.runOnlyPendingTimers();
await nextTick();
- expect(wrapper.text()).toContain('Name already exists.');
+ expect(wrapper.text()).toContain(FAKE_ERROR_MESSAGE);
});
});
});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index 496c5cda7c7..99ef6d9a7fb 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -1,4 +1,5 @@
import {
+ GlButton,
GlEmptyState,
GlLoadingIcon,
GlSearchBoxByClick,
@@ -14,7 +15,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { STATUSES } from '~/import_entities/constants';
import ImportTable from '~/import_entities/import_groups/components/import_table.vue';
import ImportTableRow from '~/import_entities/import_groups/components/import_table_row.vue';
-import importGroupMutation from '~/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql';
+import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
import setNewNameMutation from '~/import_entities/import_groups/graphql/mutations/set_new_name.mutation.graphql';
import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
@@ -40,6 +41,7 @@ describe('import table', () => {
];
const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
+ const findImportAllButton = () => wrapper.find('h1').find(GlButton);
const findPaginationDropdown = () => wrapper.findComponent(GlDropdown);
const findPaginationDropdownText = () => findPaginationDropdown().find({ ref: 'text' }).text();
@@ -72,7 +74,6 @@ describe('import table', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
it('renders loading icon while performing request', async () => {
@@ -141,7 +142,7 @@ describe('import table', () => {
event | payload | mutation | variables
${'update-target-namespace'} | ${'new-namespace'} | ${setTargetNamespaceMutation} | ${{ sourceGroupId: FAKE_GROUP.id, targetNamespace: 'new-namespace' }}
${'update-new-name'} | ${'new-name'} | ${setNewNameMutation} | ${{ sourceGroupId: FAKE_GROUP.id, newName: 'new-name' }}
- ${'import-group'} | ${undefined} | ${importGroupMutation} | ${{ sourceGroupId: FAKE_GROUP.id }}
+ ${'import-group'} | ${undefined} | ${importGroupsMutation} | ${{ sourceGroupIds: [FAKE_GROUP.id] }}
`('correctly maps $event to mutation', async ({ event, payload, mutation, variables }) => {
jest.spyOn(apolloProvider.defaultClient, 'mutate');
wrapper.find(ImportTableRow).vm.$emit(event, payload);
@@ -277,4 +278,66 @@ describe('import table', () => {
);
});
});
+
+ describe('import all button', () => {
+    it('does not exist when no groups are available', () => {
+ createComponent({
+ bulkImportSourceGroups: () => new Promise(() => {}),
+ });
+
+ expect(findImportAllButton().exists()).toBe(false);
+ });
+
+ it('exists when groups are available for import', async () => {
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: FAKE_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+
+ expect(findImportAllButton().exists()).toBe(true);
+ });
+
+ it('counts only not-imported groups', async () => {
+ const NEW_GROUPS = [
+ generateFakeEntry({ id: 1, status: STATUSES.NONE }),
+ generateFakeEntry({ id: 2, status: STATUSES.NONE }),
+ generateFakeEntry({ id: 3, status: STATUSES.FINISHED }),
+ ];
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: NEW_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+
+ expect(findImportAllButton().text()).toMatchInterpolatedText('Import 2 groups');
+ });
+
+ it('disables button when any group has validation errors', async () => {
+ const NEW_GROUPS = [
+ generateFakeEntry({ id: 1, status: STATUSES.NONE }),
+ generateFakeEntry({
+ id: 2,
+ status: STATUSES.NONE,
+ validation_errors: [{ field: 'new_name', message: 'test validation error' }],
+ }),
+ generateFakeEntry({ id: 3, status: STATUSES.FINISHED }),
+ ];
+
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: NEW_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ }),
+ });
+ await waitForPromises();
+
+ expect(findImportAllButton().props().disabled).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
index 1feff861c1e..ef83c9ebbc4 100644
--- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
@@ -8,10 +8,15 @@ import {
clientTypenames,
createResolvers,
} from '~/import_entities/import_groups/graphql/client_factory';
-import importGroupMutation from '~/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql';
+import addValidationErrorMutation from '~/import_entities/import_groups/graphql/mutations/add_validation_error.mutation.graphql';
+import importGroupsMutation from '~/import_entities/import_groups/graphql/mutations/import_groups.mutation.graphql';
+import removeValidationErrorMutation from '~/import_entities/import_groups/graphql/mutations/remove_validation_error.mutation.graphql';
+import setImportProgressMutation from '~/import_entities/import_groups/graphql/mutations/set_import_progress.mutation.graphql';
import setNewNameMutation from '~/import_entities/import_groups/graphql/mutations/set_new_name.mutation.graphql';
import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
+import updateImportStatusMutation from '~/import_entities/import_groups/graphql/mutations/update_import_status.mutation.graphql';
import availableNamespacesQuery from '~/import_entities/import_groups/graphql/queries/available_namespaces.query.graphql';
+import bulkImportSourceGroupQuery from '~/import_entities/import_groups/graphql/queries/bulk_import_source_group.query.graphql';
import bulkImportSourceGroupsQuery from '~/import_entities/import_groups/graphql/queries/bulk_import_source_groups.query.graphql';
import { StatusPoller } from '~/import_entities/import_groups/graphql/services/status_poller';
@@ -78,6 +83,31 @@ describe('Bulk import resolvers', () => {
});
});
+ describe('bulkImportSourceGroup', () => {
+ beforeEach(async () => {
+ axiosMockAdapter.onGet(FAKE_ENDPOINTS.status).reply(httpStatus.OK, statusEndpointFixture);
+ axiosMockAdapter
+ .onGet(FAKE_ENDPOINTS.availableNamespaces)
+ .reply(httpStatus.OK, availableNamespacesFixture);
+
+ return client.query({
+ query: bulkImportSourceGroupsQuery,
+ });
+ });
+
+ it('returns group', async () => {
+ const { id } = statusEndpointFixture.importable_data[0];
+ const {
+ data: { bulkImportSourceGroup: group },
+ } = await client.query({
+ query: bulkImportSourceGroupQuery,
+ variables: { id: id.toString() },
+ });
+
+ expect(group).toMatchObject(statusEndpointFixture.importable_data[0]);
+ });
+ });
+
describe('bulkImportSourceGroups', () => {
let results;
@@ -89,8 +119,12 @@ describe('Bulk import resolvers', () => {
});
it('respects cached import state when provided by group manager', async () => {
+ const FAKE_JOB_ID = '1';
const FAKE_STATUS = 'DEMO_STATUS';
- const FAKE_IMPORT_TARGET = {};
+ const FAKE_IMPORT_TARGET = {
+ new_name: 'test-name',
+ target_namespace: 'test-namespace',
+ };
const TARGET_INDEX = 0;
const clientWithMockedManager = createClient({
@@ -98,8 +132,11 @@ describe('Bulk import resolvers', () => {
getImportStateFromStorageByGroupId(groupId) {
if (groupId === statusEndpointFixture.importable_data[TARGET_INDEX].id) {
return {
- status: FAKE_STATUS,
- importTarget: FAKE_IMPORT_TARGET,
+ jobId: FAKE_JOB_ID,
+ importState: {
+ status: FAKE_STATUS,
+ importTarget: FAKE_IMPORT_TARGET,
+ },
};
}
@@ -113,8 +150,8 @@ describe('Bulk import resolvers', () => {
});
const clientResults = clientResponse.data.bulkImportSourceGroups.nodes;
- expect(clientResults[TARGET_INDEX].import_target).toBe(FAKE_IMPORT_TARGET);
- expect(clientResults[TARGET_INDEX].status).toBe(FAKE_STATUS);
+ expect(clientResults[TARGET_INDEX].import_target).toStrictEqual(FAKE_IMPORT_TARGET);
+ expect(clientResults[TARGET_INDEX].progress.status).toBe(FAKE_STATUS);
});
it('populates each result instance with empty import_target when there are no available namespaces', async () => {
@@ -143,8 +180,8 @@ describe('Bulk import resolvers', () => {
).toBe(true);
});
- it('populates each result instance with status field default to none', () => {
- expect(results.every((r) => r.status === STATUSES.NONE)).toBe(true);
+  it('populates each result instance with status defaulted to none', () => {
+ expect(results.every((r) => r.progress.status === STATUSES.NONE)).toBe(true);
});
it('populates each result instance with import_target defaulted to first available namespace', () => {
@@ -183,7 +220,6 @@ describe('Bulk import resolvers', () => {
});
describe('mutations', () => {
- let results;
const GROUP_ID = 1;
beforeEach(() => {
@@ -195,7 +231,10 @@ describe('Bulk import resolvers', () => {
{
__typename: clientTypenames.BulkImportSourceGroup,
id: GROUP_ID,
- status: STATUSES.NONE,
+ progress: {
+ id: `test-${GROUP_ID}`,
+ status: STATUSES.NONE,
+ },
web_url: 'https://fake.host/1',
full_path: 'fake_group_1',
full_name: 'fake_name_1',
@@ -203,6 +242,7 @@ describe('Bulk import resolvers', () => {
target_namespace: 'root',
new_name: 'group1',
},
+ validation_errors: [],
},
],
pageInfo: {
@@ -214,35 +254,42 @@ describe('Bulk import resolvers', () => {
},
},
});
-
- client
- .watchQuery({
- query: bulkImportSourceGroupsQuery,
- fetchPolicy: 'cache-only',
- })
- .subscribe(({ data }) => {
- results = data.bulkImportSourceGroups.nodes;
- });
});
it('setTargetNamespaces updates group target namespace', async () => {
const NEW_TARGET_NAMESPACE = 'target';
- await client.mutate({
+ const {
+ data: {
+ setTargetNamespace: {
+ id: idInResponse,
+ import_target: { target_namespace: namespaceInResponse },
+ },
+ },
+ } = await client.mutate({
mutation: setTargetNamespaceMutation,
variables: { sourceGroupId: GROUP_ID, targetNamespace: NEW_TARGET_NAMESPACE },
});
- expect(results[0].import_target.target_namespace).toBe(NEW_TARGET_NAMESPACE);
+ expect(idInResponse).toBe(GROUP_ID);
+ expect(namespaceInResponse).toBe(NEW_TARGET_NAMESPACE);
});
it('setNewName updates group target name', async () => {
const NEW_NAME = 'new';
- await client.mutate({
+ const {
+ data: {
+ setNewName: {
+ id: idInResponse,
+ import_target: { new_name: nameInResponse },
+ },
+ },
+ } = await client.mutate({
mutation: setNewNameMutation,
variables: { sourceGroupId: GROUP_ID, newName: NEW_NAME },
});
- expect(results[0].import_target.new_name).toBe(NEW_NAME);
+ expect(idInResponse).toBe(GROUP_ID);
+ expect(nameInResponse).toBe(NEW_NAME);
});
describe('importGroup', () => {
@@ -250,8 +297,8 @@ describe('Bulk import resolvers', () => {
axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(() => new Promise(() => {}));
client.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
});
await waitForPromises();
@@ -261,33 +308,49 @@ describe('Bulk import resolvers', () => {
query: bulkImportSourceGroupsQuery,
});
- expect(intermediateResults[0].status).toBe(STATUSES.SCHEDULING);
+ expect(intermediateResults[0].progress.status).toBe(STATUSES.SCHEDULING);
});
- it('sets import status to CREATED when request completes', async () => {
- axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 });
- await client.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+ describe('when request completes', () => {
+ let results;
+
+ beforeEach(() => {
+ client
+ .watchQuery({
+ query: bulkImportSourceGroupsQuery,
+ fetchPolicy: 'cache-only',
+ })
+ .subscribe(({ data }) => {
+ results = data.bulkImportSourceGroups.nodes;
+ });
});
- expect(results[0].status).toBe(STATUSES.CREATED);
- });
+ it('sets import status to CREATED when request completes', async () => {
+ axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 });
+ await client.mutate({
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
+ });
+ await waitForPromises();
- it('resets status to NONE if request fails', async () => {
- axiosMockAdapter
- .onPost(FAKE_ENDPOINTS.createBulkImport)
- .reply(httpStatus.INTERNAL_SERVER_ERROR);
+ expect(results[0].progress.status).toBe(STATUSES.CREATED);
+ });
- client
- .mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
- })
- .catch(() => {});
- await waitForPromises();
+ it('resets status to NONE if request fails', async () => {
+ axiosMockAdapter
+ .onPost(FAKE_ENDPOINTS.createBulkImport)
+ .reply(httpStatus.INTERNAL_SERVER_ERROR);
+
+ client
+ .mutate({
+          mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
+ })
+ .catch(() => {});
+ await waitForPromises();
- expect(results[0].status).toBe(STATUSES.NONE);
+ expect(results[0].progress.status).toBe(STATUSES.NONE);
+ });
});
it('shows default error message when server error is not provided', async () => {
@@ -297,8 +360,8 @@ describe('Bulk import resolvers', () => {
client
.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
})
.catch(() => {});
await waitForPromises();
@@ -315,8 +378,8 @@ describe('Bulk import resolvers', () => {
client
.mutate({
- mutation: importGroupMutation,
- variables: { sourceGroupId: GROUP_ID },
+ mutation: importGroupsMutation,
+ variables: { sourceGroupIds: [GROUP_ID] },
})
.catch(() => {});
await waitForPromises();
@@ -324,5 +387,75 @@ describe('Bulk import resolvers', () => {
expect(createFlash).toHaveBeenCalledWith({ message: CUSTOM_MESSAGE });
});
});
+
+ it('setImportProgress updates group progress', async () => {
+ const NEW_STATUS = 'dummy';
+ const FAKE_JOB_ID = 5;
+ const {
+ data: {
+ setImportProgress: { progress },
+ },
+ } = await client.mutate({
+ mutation: setImportProgressMutation,
+ variables: { sourceGroupId: GROUP_ID, status: NEW_STATUS, jobId: FAKE_JOB_ID },
+ });
+
+ expect(progress).toMatchObject({
+ id: FAKE_JOB_ID,
+ status: NEW_STATUS,
+ });
+ });
+
+ it('updateImportStatus returns new status', async () => {
+ const NEW_STATUS = 'dummy';
+ const FAKE_JOB_ID = 5;
+ const {
+ data: { updateImportStatus: statusInResponse },
+ } = await client.mutate({
+ mutation: updateImportStatusMutation,
+ variables: { id: FAKE_JOB_ID, status: NEW_STATUS },
+ });
+
+ expect(statusInResponse).toMatchObject({
+ id: FAKE_JOB_ID,
+ status: NEW_STATUS,
+ });
+ });
+
+ it('addValidationError adds error to group', async () => {
+ const FAKE_FIELD = 'some-field';
+ const FAKE_MESSAGE = 'some-message';
+ const {
+ data: {
+ addValidationError: { validation_errors: validationErrors },
+ },
+ } = await client.mutate({
+ mutation: addValidationErrorMutation,
+ variables: { sourceGroupId: GROUP_ID, field: FAKE_FIELD, message: FAKE_MESSAGE },
+ });
+
+ expect(validationErrors).toMatchObject([{ field: FAKE_FIELD, message: FAKE_MESSAGE }]);
+ });
+
+ it('removeValidationError removes error from group', async () => {
+ const FAKE_FIELD = 'some-field';
+ const FAKE_MESSAGE = 'some-message';
+
+ await client.mutate({
+ mutation: addValidationErrorMutation,
+ variables: { sourceGroupId: GROUP_ID, field: FAKE_FIELD, message: FAKE_MESSAGE },
+ });
+
+ const {
+ data: {
+ removeValidationError: { validation_errors: validationErrors },
+ },
+ } = await client.mutate({
+ mutation: removeValidationErrorMutation,
+ variables: { sourceGroupId: GROUP_ID, field: FAKE_FIELD },
+ });
+
+ expect(validationErrors).toMatchObject([]);
+ });
});
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/fixtures.js b/spec/frontend/import_entities/import_groups/graphql/fixtures.js
index 62e9581bd2d..6f66066b312 100644
--- a/spec/frontend/import_entities/import_groups/graphql/fixtures.js
+++ b/spec/frontend/import_entities/import_groups/graphql/fixtures.js
@@ -10,7 +10,11 @@ export const generateFakeEntry = ({ id, status, ...rest }) => ({
new_name: `group${id}`,
},
id,
- status,
+ progress: {
+ id: `test-${id}`,
+ status,
+ },
+ validation_errors: [],
...rest,
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js b/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
index 5baa201906a..bae715edac0 100644
--- a/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/services/source_groups_manager_spec.js
@@ -1,6 +1,3 @@
-import { defaultDataIdFromObject } from 'apollo-cache-inmemory';
-import { clientTypenames } from '~/import_entities/import_groups/graphql/client_factory';
-import ImportSourceGroupFragment from '~/import_entities/import_groups/graphql/fragments/bulk_import_source_group_item.fragment.graphql';
import {
KEY,
SourceGroupsManager,
@@ -10,25 +7,15 @@ const FAKE_SOURCE_URL = 'http://demo.host';
describe('SourceGroupsManager', () => {
let manager;
- let client;
let storage;
- const getFakeGroup = () => ({
- __typename: clientTypenames.BulkImportSourceGroup,
- id: 5,
- });
-
beforeEach(() => {
- client = {
- readFragment: jest.fn(),
- writeFragment: jest.fn(),
- };
storage = {
getItem: jest.fn(),
setItem: jest.fn(),
};
- manager = new SourceGroupsManager({ client, storage, sourceUrl: FAKE_SOURCE_URL });
+ manager = new SourceGroupsManager({ storage, sourceUrl: FAKE_SOURCE_URL });
});
describe('storage management', () => {
@@ -41,93 +28,37 @@ describe('SourceGroupsManager', () => {
expect(storage.getItem).toHaveBeenCalledWith(KEY);
});
- it('saves to storage when import is starting', () => {
- manager.startImport({
- importId: IMPORT_ID,
- group: FAKE_GROUP,
- });
+ it('saves to storage when createImportState is called', () => {
+      const FAKE_STATUS = 'fake';
+ manager.createImportState(IMPORT_ID, { status: FAKE_STATUS, groups: [FAKE_GROUP] });
const storedObject = JSON.parse(storage.setItem.mock.calls[0][1]);
expect(Object.values(storedObject)[0]).toStrictEqual({
- id: FAKE_GROUP.id,
- importTarget: IMPORT_TARGET,
- status: STATUS,
+ status: FAKE_STATUS,
+ groups: [
+ {
+ id: FAKE_GROUP.id,
+ importTarget: IMPORT_TARGET,
+ },
+ ],
});
});
- it('saves to storage when import status is updated', () => {
+ it('updates storage when previous state is available', () => {
const CHANGED_STATUS = 'changed';
- manager.startImport({
- importId: IMPORT_ID,
- group: FAKE_GROUP,
- });
+ manager.createImportState(IMPORT_ID, { status: STATUS, groups: [FAKE_GROUP] });
- manager.setImportStatusByImportId(IMPORT_ID, CHANGED_STATUS);
+ manager.updateImportProgress(IMPORT_ID, CHANGED_STATUS);
const storedObject = JSON.parse(storage.setItem.mock.calls[1][1]);
expect(Object.values(storedObject)[0]).toStrictEqual({
- id: FAKE_GROUP.id,
- importTarget: IMPORT_TARGET,
status: CHANGED_STATUS,
+ groups: [
+ {
+ id: FAKE_GROUP.id,
+ importTarget: IMPORT_TARGET,
+ },
+ ],
});
});
});
-
- it('finds item by group id', () => {
- const ID = 5;
-
- const FAKE_GROUP = getFakeGroup();
- client.readFragment.mockReturnValue(FAKE_GROUP);
- const group = manager.findById(ID);
- expect(group).toBe(FAKE_GROUP);
- expect(client.readFragment).toHaveBeenCalledWith({
- fragment: ImportSourceGroupFragment,
- id: defaultDataIdFromObject(getFakeGroup()),
- });
- });
-
- it('updates group with provided function', () => {
- const UPDATED_GROUP = {};
- const fn = jest.fn().mockReturnValue(UPDATED_GROUP);
- manager.update(getFakeGroup(), fn);
-
- expect(client.writeFragment).toHaveBeenCalledWith({
- fragment: ImportSourceGroupFragment,
- id: defaultDataIdFromObject(getFakeGroup()),
- data: UPDATED_GROUP,
- });
- });
-
- it('updates group by id with provided function', () => {
- const UPDATED_GROUP = {};
- const fn = jest.fn().mockReturnValue(UPDATED_GROUP);
- client.readFragment.mockReturnValue(getFakeGroup());
- manager.updateById(getFakeGroup().id, fn);
-
- expect(client.readFragment).toHaveBeenCalledWith({
- fragment: ImportSourceGroupFragment,
- id: defaultDataIdFromObject(getFakeGroup()),
- });
-
- expect(client.writeFragment).toHaveBeenCalledWith({
- fragment: ImportSourceGroupFragment,
- id: defaultDataIdFromObject(getFakeGroup()),
- data: UPDATED_GROUP,
- });
- });
-
- it('sets import status when group is provided', () => {
- client.readFragment.mockReturnValue(getFakeGroup());
-
- const NEW_STATUS = 'NEW_STATUS';
- manager.setImportStatus(getFakeGroup(), NEW_STATUS);
-
- expect(client.writeFragment).toHaveBeenCalledWith({
- fragment: ImportSourceGroupFragment,
- id: defaultDataIdFromObject(getFakeGroup()),
- data: {
- ...getFakeGroup(),
- status: NEW_STATUS,
- },
- });
- });
});
diff --git a/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js b/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
index 0d4809971ae..9c47647c430 100644
--- a/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/services/status_poller_spec.js
@@ -21,17 +21,15 @@ const FAKE_POLL_PATH = '/fake/poll/path';
describe('Bulk import status poller', () => {
let poller;
let mockAdapter;
- let groupManager;
+ let updateImportStatus;
const getPollHistory = () => mockAdapter.history.get.filter((x) => x.url === FAKE_POLL_PATH);
beforeEach(() => {
mockAdapter = new MockAdapter(axios);
mockAdapter.onGet(FAKE_POLL_PATH).reply(200, {});
- groupManager = {
- setImportStatusByImportId: jest.fn(),
- };
- poller = new StatusPoller({ groupManager, pollPath: FAKE_POLL_PATH });
+ updateImportStatus = jest.fn();
+ poller = new StatusPoller({ updateImportStatus, pollPath: FAKE_POLL_PATH });
});
it('creates poller with proper config', () => {
@@ -96,9 +94,9 @@ describe('Bulk import status poller', () => {
it('when success response arrives updates relevant group status', () => {
const FAKE_ID = 5;
const [[pollConfig]] = Poll.mock.calls;
+ const FAKE_RESPONSE = { id: FAKE_ID, status_name: STATUSES.FINISHED };
+ pollConfig.successCallback({ data: [FAKE_RESPONSE] });
- pollConfig.successCallback({ data: [{ id: FAKE_ID, status_name: STATUSES.FINISHED }] });
-
- expect(groupManager.setImportStatusByImportId).toHaveBeenCalledWith(FAKE_ID, STATUSES.FINISHED);
+ expect(updateImportStatus).toHaveBeenCalledWith(FAKE_RESPONSE);
});
});
diff --git a/spec/frontend/incidents/components/incidents_list_spec.js b/spec/frontend/incidents/components/incidents_list_spec.js
index df681658081..c7286d70b94 100644
--- a/spec/frontend/incidents/components/incidents_list_spec.js
+++ b/spec/frontend/incidents/components/incidents_list_spec.js
@@ -270,22 +270,25 @@ describe('Incidents List', () => {
const noneSort = 'none';
it.each`
- selector | initialSort | firstSort | nextSort
- ${TH_CREATED_AT_TEST_ID} | ${descSort} | ${ascSort} | ${descSort}
- ${TH_SEVERITY_TEST_ID} | ${noneSort} | ${descSort} | ${ascSort}
- ${TH_PUBLISHED_TEST_ID} | ${noneSort} | ${descSort} | ${ascSort}
- ${TH_INCIDENT_SLA_TEST_ID} | ${noneSort} | ${ascSort} | ${descSort}
- `('updates sort with new direction', async ({ selector, initialSort, firstSort, nextSort }) => {
- const [[attr, value]] = Object.entries(selector);
- const columnHeader = () => wrapper.find(`[${attr}="${value}"]`);
- expect(columnHeader().attributes('aria-sort')).toBe(initialSort);
- columnHeader().trigger('click');
- await wrapper.vm.$nextTick();
- expect(columnHeader().attributes('aria-sort')).toBe(firstSort);
- columnHeader().trigger('click');
- await wrapper.vm.$nextTick();
- expect(columnHeader().attributes('aria-sort')).toBe(nextSort);
- });
+ description | selector | initialSort | firstSort | nextSort
+ ${'creation date'} | ${TH_CREATED_AT_TEST_ID} | ${descSort} | ${ascSort} | ${descSort}
+ ${'severity'} | ${TH_SEVERITY_TEST_ID} | ${noneSort} | ${descSort} | ${ascSort}
+ ${'publish date'} | ${TH_PUBLISHED_TEST_ID} | ${noneSort} | ${descSort} | ${ascSort}
+ ${'due date'} | ${TH_INCIDENT_SLA_TEST_ID} | ${noneSort} | ${ascSort} | ${descSort}
+ `(
+ 'updates sort with new direction when sorting by $description',
+ async ({ selector, initialSort, firstSort, nextSort }) => {
+ const [[attr, value]] = Object.entries(selector);
+ const columnHeader = () => wrapper.find(`[${attr}="${value}"]`);
+ expect(columnHeader().attributes('aria-sort')).toBe(initialSort);
+ columnHeader().trigger('click');
+ await wrapper.vm.$nextTick();
+ expect(columnHeader().attributes('aria-sort')).toBe(firstSort);
+ columnHeader().trigger('click');
+ await wrapper.vm.$nextTick();
+ expect(columnHeader().attributes('aria-sort')).toBe(nextSort);
+ },
+ );
});
describe('Snowplow tracking', () => {
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
index 5796b3fa44e..85d21f231b1 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -90,7 +90,7 @@ exports[`Alert integration settings form default state should match the default
checked="true"
>
<span>
- Automatically close incidents when the associated Prometheus alert resolves.
+ Automatically close associated incident when a recovery alert notification resolves an alert
</span>
</gl-form-checkbox-stub>
</gl-form-group-stub>
diff --git a/spec/frontend/integrations/edit/components/active_checkbox_spec.js b/spec/frontend/integrations/edit/components/active_checkbox_spec.js
index 76fd6dd3a48..0e56fb6454e 100644
--- a/spec/frontend/integrations/edit/components/active_checkbox_spec.js
+++ b/spec/frontend/integrations/edit/components/active_checkbox_spec.js
@@ -6,31 +6,27 @@ import { createStore } from '~/integrations/edit/store';
describe('ActiveCheckbox', () => {
let wrapper;
- const createComponent = (customStateProps = {}, isInheriting = false) => {
+ const createComponent = (customStateProps = {}, { isInheriting = false } = {}) => {
wrapper = mount(ActiveCheckbox, {
store: createStore({
customState: { ...customStateProps },
+ override: !isInheriting,
+ defaultState: isInheriting ? {} : undefined,
}),
- computed: {
- isInheriting: () => isInheriting,
- },
});
};
afterEach(() => {
- if (wrapper) {
- wrapper.destroy();
- wrapper = null;
- }
+ wrapper.destroy();
});
- const findGlFormCheckbox = () => wrapper.find(GlFormCheckbox);
+ const findGlFormCheckbox = () => wrapper.findComponent(GlFormCheckbox);
const findInputInCheckbox = () => findGlFormCheckbox().find('input');
describe('template', () => {
describe('is inheriting adminSettings', () => {
it('renders GlFormCheckbox as disabled', () => {
- createComponent({}, true);
+ createComponent({}, { isInheriting: true });
expect(findGlFormCheckbox().exists()).toBe(true);
expect(findInputInCheckbox().attributes('disabled')).toBe('disabled');
diff --git a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
index d08a1904e06..f121a148f27 100644
--- a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
@@ -136,7 +136,7 @@ describe('JiraIssuesFields', () => {
describe('Vulnerabilities creation', () => {
beforeEach(() => {
- createComponent({ provide: { glFeatures: { jiraForVulnerabilities: true } } });
+ createComponent();
});
it.each([true, false])(
@@ -178,18 +178,6 @@ describe('JiraIssuesFields', () => {
expect(eventHubEmitSpy).toHaveBeenCalledWith('getJiraIssueTypes');
});
-
- describe('with "jiraForVulnerabilities" feature flag disabled', () => {
- beforeEach(async () => {
- createComponent({
- provide: { glFeatures: { jiraForVulnerabilities: false } },
- });
- });
-
- it('does not show section', () => {
- expect(findJiraForVulnerabilities().exists()).toBe(false);
- });
- });
});
});
});
diff --git a/spec/frontend/invite_member/components/invite_member_modal_spec.js b/spec/frontend/invite_member/components/invite_member_modal_spec.js
deleted file mode 100644
index 03e3da2d5ef..00000000000
--- a/spec/frontend/invite_member/components/invite_member_modal_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-import { GlLink, GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { stubComponent } from 'helpers/stub_component';
-import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
-import InviteMemberModal from '~/invite_member/components/invite_member_modal.vue';
-
-const memberPath = 'member_path';
-
-const GlEmoji = { template: '<img />' };
-const createComponent = () => {
- return shallowMount(InviteMemberModal, {
- propsData: {
- membersPath: memberPath,
- },
- stubs: {
- GlEmoji,
- GlModal: stubComponent(GlModal, {
- template: '<div><slot name="modal-title"></slot><slot></slot></div>',
- }),
- },
- });
-};
-
-describe('InviteMemberModal', () => {
- let wrapper;
-
- beforeEach(() => {
- wrapper = createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findLink = () => wrapper.find(GlLink);
-
- describe('rendering the modal', () => {
- it('renders the modal with the correct title', () => {
- expect(wrapper.text()).toContain("Oops, this feature isn't ready yet");
- });
-
- describe('rendering the see who link', () => {
- it('renders the correct link', () => {
- expect(findLink().attributes('href')).toBe(memberPath);
- });
- });
- });
-
- describe('tracking', () => {
- let trackingSpy;
-
- afterEach(() => {
- unmockTracking();
- });
-
- it('send an event when go to pipelines is clicked', () => {
- trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
-
- triggerEvent(findLink().element);
-
- expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_who_can_invite_link', {
- label: 'invite_members_message',
- });
- });
- });
-});
diff --git a/spec/frontend/invite_member/components/invite_member_trigger_mock_data.js b/spec/frontend/invite_member/components/invite_member_trigger_mock_data.js
deleted file mode 100644
index 9b34a8027e9..00000000000
--- a/spec/frontend/invite_member/components/invite_member_trigger_mock_data.js
+++ /dev/null
@@ -1,7 +0,0 @@
-const triggerProvides = {
- displayText: 'Invite member',
- event: 'click_invite_members_version_b',
- label: 'edit_assignee',
-};
-
-export default triggerProvides;
diff --git a/spec/frontend/invite_member/components/invite_member_trigger_spec.js b/spec/frontend/invite_member/components/invite_member_trigger_spec.js
deleted file mode 100644
index 630e2dbfc16..00000000000
--- a/spec/frontend/invite_member/components/invite_member_trigger_spec.js
+++ /dev/null
@@ -1,48 +0,0 @@
-import { GlLink } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
-import InviteMemberTrigger from '~/invite_member/components/invite_member_trigger.vue';
-import triggerProvides from './invite_member_trigger_mock_data';
-
-const createComponent = () => {
- return shallowMount(InviteMemberTrigger, { propsData: triggerProvides });
-};
-
-describe('InviteMemberTrigger', () => {
- let wrapper;
-
- beforeEach(() => {
- wrapper = createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findLink = () => wrapper.find(GlLink);
-
- describe('displayText', () => {
- it('includes the correct displayText for the link', () => {
- expect(findLink().text()).toBe(triggerProvides.displayText);
- });
- });
-
- describe('tracking', () => {
- let trackingSpy;
-
- afterEach(() => {
- unmockTracking();
- });
-
- it('send an event when go to pipelines is clicked', () => {
- trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
-
- triggerEvent(findLink().element);
-
- expect(trackingSpy).toHaveBeenCalledWith('_category_', triggerProvides.event, {
- label: triggerProvides.label,
- });
- });
- });
-});
diff --git a/spec/frontend/issuable/components/csv_export_modal_spec.js b/spec/frontend/issuable/components/csv_export_modal_spec.js
index a327da2d63a..7eb85a946ae 100644
--- a/spec/frontend/issuable/components/csv_export_modal_spec.js
+++ b/spec/frontend/issuable/components/csv_export_modal_spec.js
@@ -13,6 +13,8 @@ describe('CsvExportModal', () => {
mount(CsvExportModal, {
propsData: {
modalId: 'csv-export-modal',
+ exportCsvPath: 'export/csv/path',
+ issuableCount: 1,
...props,
},
provide: {
diff --git a/spec/frontend/merge_request/components/status_box_spec.js b/spec/frontend/issuable/components/status_box_spec.js
index de0f3574ab2..990fac67f7e 100644
--- a/spec/frontend/merge_request/components/status_box_spec.js
+++ b/spec/frontend/issuable/components/status_box_spec.js
@@ -1,8 +1,6 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import StatusBox from '~/merge_request/components/status_box.vue';
-import mrEventHub from '~/merge_request/eventhub';
+import StatusBox from '~/issuable/components/status_box.vue';
let wrapper;
@@ -70,18 +68,4 @@ describe('Merge request status box component', () => {
});
});
});
-
- it('updates with eventhub event', async () => {
- factory({
- initialState: 'opened',
- });
-
- expect(wrapper.text()).toContain('Open');
-
- mrEventHub.$emit('mr.state.updated', { state: 'closed' });
-
- await nextTick();
-
- expect(wrapper.text()).toContain('Closed');
- });
});
diff --git a/spec/frontend/issuable_form_spec.js b/spec/frontend/issuable_form_spec.js
index 009ca28ff78..bc7a87eb65c 100644
--- a/spec/frontend/issuable_form_spec.js
+++ b/spec/frontend/issuable_form_spec.js
@@ -20,11 +20,6 @@ describe('IssuableForm', () => {
describe('removeWip', () => {
it.each`
prefix
- ${'wip '}
- ${' wIP: '}
- ${'[WIp] '}
- ${'wIP:'}
- ${' [WIp]'}
${'drAft '}
${'draFT: '}
${' [DRaft] '}
@@ -34,7 +29,7 @@ describe('IssuableForm', () => {
${'dRaFt - '}
${'(draft) '}
${' (DrafT)'}
- ${'wip wip: [wip] draft draft - draft: [draft] (draft)'}
+ ${'draft draft - draft: [draft] (draft)'}
`('removes "$prefix" from the beginning of the title', ({ prefix }) => {
instance.titleField.val(`${prefix}The Issuable's Title Value`);
diff --git a/spec/frontend/issuable_list/components/issuable_item_spec.js b/spec/frontend/issuable_list/components/issuable_item_spec.js
index 7281d2fde1d..e324f071966 100644
--- a/spec/frontend/issuable_list/components/issuable_item_spec.js
+++ b/spec/frontend/issuable_list/components/issuable_item_spec.js
@@ -453,5 +453,31 @@ describe('IssuableItem', () => {
expect(updatedAtEl.find('span').attributes('title')).toBe('Sep 10, 2020 11:41am GMT+0000');
expect(updatedAtEl.text()).toBe(wrapper.vm.updatedAt);
});
+
+ describe('when issuable is closed', () => {
+ it('renders issuable card with a closed style', () => {
+ wrapper = createComponent({ issuable: { ...mockIssuable, closedAt: '2020-12-10' } });
+
+ expect(wrapper.classes()).toContain('closed');
+ });
+ });
+
+ describe('when issuable was created within the past 24 hours', () => {
+ it('renders issuable card with a recently-created style', () => {
+ wrapper = createComponent({
+ issuable: { ...mockIssuable, createdAt: '2020-12-10T12:34:56' },
+ });
+
+ expect(wrapper.classes()).toContain('today');
+ });
+ });
+
+    describe('when issuable was created more than 24 hours ago', () => {
+ it('renders issuable card without a recently-created style', () => {
+ wrapper = createComponent({ issuable: { ...mockIssuable, createdAt: '2020-12-09' } });
+
+ expect(wrapper.classes()).not.toContain('today');
+ });
+ });
});
});
diff --git a/spec/frontend/issue_show/components/form_spec.js b/spec/frontend/issue_show/components/form_spec.js
index fc2e224ad92..6d4807c4261 100644
--- a/spec/frontend/issue_show/components/form_spec.js
+++ b/spec/frontend/issue_show/components/form_spec.js
@@ -1,13 +1,15 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { GlAlert } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import Autosave from '~/autosave';
+import DescriptionTemplate from '~/issue_show/components/fields/description_template.vue';
import formComponent from '~/issue_show/components/form.vue';
+import LockedWarning from '~/issue_show/components/locked_warning.vue';
import eventHub from '~/issue_show/event_hub';
jest.mock('~/autosave');
describe('Inline edit form component', () => {
- let vm;
+ let wrapper;
const defaultProps = {
canDestroy: true,
formState: {
@@ -24,22 +26,26 @@ describe('Inline edit form component', () => {
};
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
const createComponent = (props) => {
- const Component = Vue.extend(formComponent);
-
- vm = mountComponent(Component, {
- ...defaultProps,
- ...props,
+ wrapper = shallowMount(formComponent, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
});
};
+ const findDescriptionTemplate = () => wrapper.findComponent(DescriptionTemplate);
+ const findLockedWarning = () => wrapper.findComponent(LockedWarning);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
it('does not render template selector if no templates exist', () => {
createComponent();
- expect(vm.$el.querySelector('.js-issuable-selector-wrap')).toBeNull();
+ expect(findDescriptionTemplate().exists()).toBe(false);
});
it('renders template selector when templates as array exists', () => {
@@ -49,7 +55,7 @@ describe('Inline edit form component', () => {
],
});
- expect(vm.$el.querySelector('.js-issuable-selector-wrap')).not.toBeNull();
+ expect(findDescriptionTemplate().exists()).toBe(true);
});
it('renders template selector when templates as hash exists', () => {
@@ -59,19 +65,19 @@ describe('Inline edit form component', () => {
},
});
- expect(vm.$el.querySelector('.js-issuable-selector-wrap')).not.toBeNull();
+ expect(findDescriptionTemplate().exists()).toBe(true);
});
it('hides locked warning by default', () => {
createComponent();
- expect(vm.$el.querySelector('.alert')).toBeNull();
+ expect(findLockedWarning().exists()).toBe(false);
});
it('shows locked warning if formState is different', () => {
createComponent({ formState: { ...defaultProps.formState, lockedWarningVisible: true } });
- expect(vm.$el.querySelector('.alert')).not.toBeNull();
+ expect(findLockedWarning().exists()).toBe(true);
});
it('hides locked warning when currently saving', () => {
@@ -79,7 +85,7 @@ describe('Inline edit form component', () => {
formState: { ...defaultProps.formState, updateLoading: true, lockedWarningVisible: true },
});
- expect(vm.$el.querySelector('.alert')).toBeNull();
+ expect(findLockedWarning().exists()).toBe(false);
});
describe('autosave', () => {
@@ -110,5 +116,23 @@ describe('Inline edit form component', () => {
expect(spy).toHaveBeenCalledTimes(6);
});
+
+ describe('outdated description', () => {
+ it('does not show warning if lock version from server is the same as the local lock version', () => {
+ createComponent();
+ expect(findAlert().exists()).toBe(false);
+ });
+
+      it('shows warning if lock version from server differs from the local lock version', async () => {
+ Autosave.prototype.getSavedLockVersion.mockResolvedValue('lock version from local storage');
+
+ createComponent({
+ formState: { ...defaultProps.formState, lock_version: 'lock version from server' },
+ });
+
+ await wrapper.vm.$nextTick();
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
});
});
diff --git a/spec/frontend/issues_list/components/issues_list_app_spec.js b/spec/frontend/issues_list/components/issues_list_app_spec.js
index 476804bda12..5d83bf0142f 100644
--- a/spec/frontend/issues_list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issues_list_app_spec.js
@@ -3,45 +3,51 @@ import { mount, shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
+import { apiParams, filteredTokens, locationSearch, urlParams } from 'jest/issues_list/mock_data';
import createFlash from '~/flash';
import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue';
+import IssuableByEmail from '~/issuable/components/issuable_by_email.vue';
import IssuableList from '~/issuable_list/components/issuable_list_root.vue';
import { IssuableListTabs, IssuableStates } from '~/issuable_list/constants';
import IssuesListApp from '~/issues_list/components/issues_list_app.vue';
-
import {
+ apiSortParams,
CREATED_DESC,
+ DUE_DATE_OVERDUE,
PAGE_SIZE,
PAGE_SIZE_MANUAL,
- RELATIVE_POSITION_ASC,
- sortOptions,
- sortParams,
+ PARAM_DUE_DATE,
+ RELATIVE_POSITION_DESC,
+ urlSortParams,
} from '~/issues_list/constants';
import eventHub from '~/issues_list/eventhub';
+import { getSortOptions } from '~/issues_list/utils';
import axios from '~/lib/utils/axios_utils';
import { setUrlParams } from '~/lib/utils/url_utility';
jest.mock('~/flash');
describe('IssuesListApp component', () => {
- const originalWindowLocation = window.location;
let axiosMock;
let wrapper;
const defaultProvide = {
+ autocompleteUsersPath: 'autocomplete/users/path',
calendarPath: 'calendar/path',
canBulkUpdate: false,
emptyStateSvgPath: 'empty-state.svg',
endpoint: 'api/endpoint',
exportCsvPath: 'export/csv/path',
- fullPath: 'path/to/project',
+ hasBlockedIssuesFeature: true,
hasIssues: true,
+ hasIssueWeightsFeature: true,
isSignedIn: false,
issuesPath: 'path/to/issues',
jiraIntegrationPath: 'jira/integration/path',
newIssuePath: 'new/issue/path',
+ projectLabelsPath: 'project/labels/path',
+ projectPath: 'path/to/project',
rssPath: 'rss/path',
- showImportButton: true,
showNewIssueLink: true,
signInPath: 'sign/in/path',
};
@@ -63,6 +69,7 @@ describe('IssuesListApp component', () => {
};
const findCsvImportExportButtons = () => wrapper.findComponent(CsvImportExportButtons);
+ const findIssuableByEmail = () => wrapper.findComponent(IssuableByEmail);
const findGlButton = () => wrapper.findComponent(GlButton);
const findGlButtons = () => wrapper.findAllComponents(GlButton);
const findGlButtonAt = (index) => findGlButtons().at(index);
@@ -86,7 +93,7 @@ describe('IssuesListApp component', () => {
});
afterEach(() => {
- window.location = originalWindowLocation;
+ global.jsdom.reconfigure({ url: TEST_HOST });
axiosMock.reset();
wrapper.destroy();
});
@@ -99,10 +106,10 @@ describe('IssuesListApp component', () => {
it('renders', () => {
expect(findIssuableList().props()).toMatchObject({
- namespace: defaultProvide.fullPath,
+ namespace: defaultProvide.projectPath,
recentSearchesStorageKey: 'issues',
searchInputPlaceholder: 'Search or filter results…',
- sortOptions,
+ sortOptions: getSortOptions(true, true),
initialSortBy: CREATED_DESC,
tabs: IssuableListTabs,
currentTab: IssuableStates.Opened,
@@ -120,46 +127,58 @@ describe('IssuesListApp component', () => {
describe('header action buttons', () => {
it('renders rss button', () => {
- wrapper = mountComponent();
+ wrapper = mountComponent({ mountFn: mount });
+ expect(findGlButtonAt(0).props('icon')).toBe('rss');
expect(findGlButtonAt(0).attributes()).toMatchObject({
href: defaultProvide.rssPath,
- icon: 'rss',
'aria-label': IssuesListApp.i18n.rssLabel,
});
});
it('renders calendar button', () => {
- wrapper = mountComponent();
+ wrapper = mountComponent({ mountFn: mount });
+ expect(findGlButtonAt(1).props('icon')).toBe('calendar');
expect(findGlButtonAt(1).attributes()).toMatchObject({
href: defaultProvide.calendarPath,
- icon: 'calendar',
'aria-label': IssuesListApp.i18n.calendarLabel,
});
});
- it('renders csv import/export component', async () => {
- const search = '?page=1&search=refactor';
+ describe('csv import/export component', () => {
+ describe('when user is signed in', () => {
+ it('renders', async () => {
+ const search = '?page=1&search=refactor&state=opened&sort=created_date';
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { search },
- });
+ global.jsdom.reconfigure({ url: `${TEST_HOST}${search}` });
- wrapper = mountComponent();
+ wrapper = mountComponent({
+ provide: { ...defaultProvide, isSignedIn: true },
+ mountFn: mount,
+ });
- await waitForPromises();
+ await waitForPromises();
+
+ expect(findCsvImportExportButtons().props()).toMatchObject({
+ exportCsvPath: `${defaultProvide.exportCsvPath}${search}`,
+ issuableCount: xTotal,
+ });
+ });
+ });
- expect(findCsvImportExportButtons().props()).toMatchObject({
- exportCsvPath: `${defaultProvide.exportCsvPath}${search}`,
- issuableCount: xTotal,
+ describe('when user is not signed in', () => {
+ it('does not render', () => {
+ wrapper = mountComponent({ provide: { ...defaultProvide, isSignedIn: false } });
+
+ expect(findCsvImportExportButtons().exists()).toBe(false);
+ });
});
});
describe('bulk edit button', () => {
it('renders when user has permissions', () => {
- wrapper = mountComponent({ provide: { canBulkUpdate: true } });
+ wrapper = mountComponent({ provide: { canBulkUpdate: true }, mountFn: mount });
expect(findGlButtonAt(2).text()).toBe('Edit issues');
});
@@ -170,20 +189,22 @@ describe('IssuesListApp component', () => {
expect(findGlButtons().filter((button) => button.text() === 'Edit issues')).toHaveLength(0);
});
- it('emits "issuables:enableBulkEdit" event to legacy bulk edit class', () => {
- wrapper = mountComponent({ provide: { canBulkUpdate: true } });
+ it('emits "issuables:enableBulkEdit" event to legacy bulk edit class', async () => {
+ wrapper = mountComponent({ provide: { canBulkUpdate: true }, mountFn: mount });
jest.spyOn(eventHub, '$emit');
findGlButtonAt(2).vm.$emit('click');
+ await waitForPromises();
+
expect(eventHub.$emit).toHaveBeenCalledWith('issuables:enableBulkEdit');
});
});
describe('new issue button', () => {
it('renders when user has permissions', () => {
- wrapper = mountComponent({ provide: { showNewIssueLink: true } });
+ wrapper = mountComponent({ provide: { showNewIssueLink: true }, mountFn: mount });
expect(findGlButtonAt(2).text()).toBe('New issue');
expect(findGlButtonAt(2).attributes('href')).toBe(defaultProvide.newIssuePath);
@@ -198,14 +219,21 @@ describe('IssuesListApp component', () => {
});
describe('initial url params', () => {
+ describe('due_date', () => {
+ it('is set from the url params', () => {
+ global.jsdom.reconfigure({ url: `${TEST_HOST}?${PARAM_DUE_DATE}=${DUE_DATE_OVERDUE}` });
+
+ wrapper = mountComponent();
+
+ expect(findIssuableList().props('urlParams')).toMatchObject({ due_date: DUE_DATE_OVERDUE });
+ });
+ });
+
describe('page', () => {
it('is set from the url params', () => {
const page = 5;
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { href: setUrlParams({ page }, TEST_HOST) },
- });
+ global.jsdom.reconfigure({ url: setUrlParams({ page }, TEST_HOST) });
wrapper = mountComponent();
@@ -213,18 +241,25 @@ describe('IssuesListApp component', () => {
});
});
+ describe('search', () => {
+ it('is set from the url params', () => {
+ global.jsdom.reconfigure({ url: `${TEST_HOST}${locationSearch}` });
+
+ wrapper = mountComponent();
+
+ expect(findIssuableList().props('urlParams')).toMatchObject({ search: 'find issues' });
+ });
+ });
+
describe('sort', () => {
- it.each(Object.keys(sortParams))('is set as %s from the url params', (sortKey) => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { href: setUrlParams(sortParams[sortKey], TEST_HOST) },
- });
+ it.each(Object.keys(urlSortParams))('is set as %s from the url params', (sortKey) => {
+ global.jsdom.reconfigure({ url: setUrlParams(urlSortParams[sortKey], TEST_HOST) });
wrapper = mountComponent();
expect(findIssuableList().props()).toMatchObject({
initialSortBy: sortKey,
- urlParams: sortParams[sortKey],
+ urlParams: urlSortParams[sortKey],
});
});
});
@@ -233,16 +268,23 @@ describe('IssuesListApp component', () => {
it('is set from the url params', () => {
const initialState = IssuableStates.All;
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { href: setUrlParams({ state: initialState }, TEST_HOST) },
- });
+ global.jsdom.reconfigure({ url: setUrlParams({ state: initialState }, TEST_HOST) });
wrapper = mountComponent();
expect(findIssuableList().props('currentTab')).toBe(initialState);
});
});
+
+ describe('filter tokens', () => {
+ it('is set from the url params', () => {
+ global.jsdom.reconfigure({ url: `${TEST_HOST}${locationSearch}` });
+
+ wrapper = mountComponent();
+
+ expect(findIssuableList().props('initialFilterValue')).toEqual(filteredTokens);
+ });
+ });
});
describe('bulk edit', () => {
@@ -262,16 +304,23 @@ describe('IssuesListApp component', () => {
);
});
+ describe('IssuableByEmail component', () => {
+ describe.each([true, false])(`when issue creation by email is enabled=%s`, (enabled) => {
+ it(`${enabled ? 'renders' : 'does not render'}`, () => {
+ wrapper = mountComponent({ provide: { initialEmail: enabled } });
+
+ expect(findIssuableByEmail().exists()).toBe(enabled);
+ });
+ });
+ });
+
describe('empty states', () => {
describe('when there are issues', () => {
describe('when search returns no results', () => {
beforeEach(async () => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { href: setUrlParams({ search: 'no results' }, TEST_HOST) },
- });
+ global.jsdom.reconfigure({ url: `${TEST_HOST}?search=no+results` });
- wrapper = mountComponent({ provide: { hasIssues: true } });
+ wrapper = mountComponent({ provide: { hasIssues: true }, mountFn: mount });
await waitForPromises();
});
@@ -286,8 +335,10 @@ describe('IssuesListApp component', () => {
});
describe('when "Open" tab has no issues', () => {
- beforeEach(() => {
- wrapper = mountComponent({ provide: { hasIssues: true } });
+ beforeEach(async () => {
+ wrapper = mountComponent({ provide: { hasIssues: true }, mountFn: mount });
+
+ await waitForPromises();
});
it('shows empty state', () => {
@@ -301,12 +352,13 @@ describe('IssuesListApp component', () => {
describe('when "Closed" tab has no issues', () => {
beforeEach(async () => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value: { href: setUrlParams({ state: IssuableStates.Closed }, TEST_HOST) },
+ global.jsdom.reconfigure({
+ url: setUrlParams({ state: IssuableStates.Closed }, TEST_HOST),
});
- wrapper = mountComponent({ provide: { hasIssues: true } });
+ wrapper = mountComponent({ provide: { hasIssues: true }, mountFn: mount });
+
+ await waitForPromises();
});
it('shows empty state', () => {
@@ -346,11 +398,11 @@ describe('IssuesListApp component', () => {
it('shows Jira integration information', () => {
const paragraphs = wrapper.findAll('p');
- expect(paragraphs.at(2).text()).toContain(IssuesListApp.i18n.jiraIntegrationTitle);
- expect(paragraphs.at(3).text()).toContain(
+ expect(paragraphs.at(1).text()).toContain(IssuesListApp.i18n.jiraIntegrationTitle);
+ expect(paragraphs.at(2).text()).toContain(
'Enable the Jira integration to view your Jira issues in GitLab.',
);
- expect(paragraphs.at(4).text()).toContain(
+ expect(paragraphs.at(3).text()).toContain(
IssuesListApp.i18n.jiraIntegrationSecondaryMessage,
);
expect(findGlLink().text()).toBe('Enable the Jira integration');
@@ -418,7 +470,7 @@ describe('IssuesListApp component', () => {
});
it('fetches issues with expected params', () => {
- expect(axiosMock.history.get[1].params).toEqual({
+ expect(axiosMock.history.get[1].params).toMatchObject({
page,
per_page: PAGE_SIZE,
state,
@@ -489,7 +541,7 @@ describe('IssuesListApp component', () => {
});
describe('when "sort" event is emitted by IssuableList', () => {
- it.each(Object.keys(sortParams))(
+ it.each(Object.keys(apiSortParams))(
'fetches issues with correct params with payload `%s`',
async (sortKey) => {
wrapper = mountComponent();
@@ -500,10 +552,10 @@ describe('IssuesListApp component', () => {
expect(axiosMock.history.get[1].params).toEqual({
page: xPage,
- per_page: sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE,
+ per_page: sortKey === RELATIVE_POSITION_DESC ? PAGE_SIZE_MANUAL : PAGE_SIZE,
state,
with_labels_details: true,
- ...sortParams[sortKey],
+ ...apiSortParams[sortKey],
});
},
);
@@ -525,21 +577,18 @@ describe('IssuesListApp component', () => {
});
describe('when "filter" event is emitted by IssuableList', () => {
- beforeEach(async () => {
+ beforeEach(() => {
wrapper = mountComponent();
- const payload = [
- { type: 'filtered-search-term', value: { data: 'no' } },
- { type: 'filtered-search-term', value: { data: 'issues' } },
- ];
-
- findIssuableList().vm.$emit('filter', payload);
-
- await waitForPromises();
+ findIssuableList().vm.$emit('filter', filteredTokens);
});
it('makes an API call to search for issues with the search term', () => {
- expect(axiosMock.history.get[1].params).toMatchObject({ search: 'no issues' });
+ expect(axiosMock.history.get[1].params).toMatchObject(apiParams);
+ });
+
+ it('updates IssuableList with url params', () => {
+ expect(findIssuableList().props('urlParams')).toMatchObject(urlParams);
});
});
});
diff --git a/spec/frontend/issues_list/mock_data.js b/spec/frontend/issues_list/mock_data.js
new file mode 100644
index 00000000000..ce2880d177a
--- /dev/null
+++ b/spec/frontend/issues_list/mock_data.js
@@ -0,0 +1,127 @@
+import {
+ OPERATOR_IS,
+ OPERATOR_IS_NOT,
+} from '~/vue_shared/components/filtered_search_bar/constants';
+
+export const locationSearch = [
+ '?search=find+issues',
+ 'author_username=homer',
+ 'not[author_username]=marge',
+ 'assignee_username[]=bart',
+ 'assignee_username[]=lisa',
+ 'not[assignee_username][]=patty',
+ 'not[assignee_username][]=selma',
+ 'milestone_title=season+4',
+ 'not[milestone_title]=season+20',
+ 'label_name[]=cartoon',
+ 'label_name[]=tv',
+ 'not[label_name][]=live action',
+ 'not[label_name][]=drama',
+ 'my_reaction_emoji=thumbsup',
+ 'confidential=no',
+ 'iteration_title=season:+%234',
+ 'not[iteration_title]=season:+%2320',
+ 'epic_id=12',
+ 'not[epic_id]=34',
+ 'weight=1',
+ 'not[weight]=3',
+].join('&');
+
+export const locationSearchWithSpecialValues = [
+ 'assignee_id=123',
+ 'assignee_username=bart',
+ 'my_reaction_emoji=None',
+ 'iteration_id=Current',
+ 'epic_id=None',
+ 'weight=None',
+].join('&');
+
+export const filteredTokens = [
+ { type: 'author_username', value: { data: 'homer', operator: OPERATOR_IS } },
+ { type: 'author_username', value: { data: 'marge', operator: OPERATOR_IS_NOT } },
+ { type: 'assignee_username', value: { data: 'bart', operator: OPERATOR_IS } },
+ { type: 'assignee_username', value: { data: 'lisa', operator: OPERATOR_IS } },
+ { type: 'assignee_username', value: { data: 'patty', operator: OPERATOR_IS_NOT } },
+ { type: 'assignee_username', value: { data: 'selma', operator: OPERATOR_IS_NOT } },
+ { type: 'milestone', value: { data: 'season 4', operator: OPERATOR_IS } },
+ { type: 'milestone', value: { data: 'season 20', operator: OPERATOR_IS_NOT } },
+ { type: 'labels', value: { data: 'cartoon', operator: OPERATOR_IS } },
+ { type: 'labels', value: { data: 'tv', operator: OPERATOR_IS } },
+ { type: 'labels', value: { data: 'live action', operator: OPERATOR_IS_NOT } },
+ { type: 'labels', value: { data: 'drama', operator: OPERATOR_IS_NOT } },
+ { type: 'my_reaction_emoji', value: { data: 'thumbsup', operator: OPERATOR_IS } },
+ { type: 'confidential', value: { data: 'no', operator: OPERATOR_IS } },
+ { type: 'iteration', value: { data: 'season: #4', operator: OPERATOR_IS } },
+ { type: 'iteration', value: { data: 'season: #20', operator: OPERATOR_IS_NOT } },
+ { type: 'epic_id', value: { data: '12', operator: OPERATOR_IS } },
+ { type: 'epic_id', value: { data: '34', operator: OPERATOR_IS_NOT } },
+ { type: 'weight', value: { data: '1', operator: OPERATOR_IS } },
+ { type: 'weight', value: { data: '3', operator: OPERATOR_IS_NOT } },
+ { type: 'filtered-search-term', value: { data: 'find' } },
+ { type: 'filtered-search-term', value: { data: 'issues' } },
+];
+
+export const filteredTokensWithSpecialValues = [
+ { type: 'assignee_username', value: { data: '123', operator: OPERATOR_IS } },
+ { type: 'assignee_username', value: { data: 'bart', operator: OPERATOR_IS } },
+ { type: 'my_reaction_emoji', value: { data: 'None', operator: OPERATOR_IS } },
+ { type: 'iteration', value: { data: 'Current', operator: OPERATOR_IS } },
+ { type: 'epic_id', value: { data: 'None', operator: OPERATOR_IS } },
+ { type: 'weight', value: { data: 'None', operator: OPERATOR_IS } },
+];
+
+export const apiParams = {
+ author_username: 'homer',
+ 'not[author_username]': 'marge',
+ assignee_username: ['bart', 'lisa'],
+ 'not[assignee_username]': ['patty', 'selma'],
+ milestone: 'season 4',
+ 'not[milestone]': 'season 20',
+ labels: ['cartoon', 'tv'],
+ 'not[labels]': ['live action', 'drama'],
+ my_reaction_emoji: 'thumbsup',
+ confidential: 'no',
+ iteration_title: 'season: #4',
+ 'not[iteration_title]': 'season: #20',
+ epic_id: '12',
+ 'not[epic_id]': '34',
+ weight: '1',
+ 'not[weight]': '3',
+};
+
+export const apiParamsWithSpecialValues = {
+ assignee_id: '123',
+ assignee_username: 'bart',
+ my_reaction_emoji: 'None',
+ iteration_id: 'Current',
+ epic_id: 'None',
+ weight: 'None',
+};
+
+export const urlParams = {
+ author_username: 'homer',
+ 'not[author_username]': 'marge',
+ 'assignee_username[]': ['bart', 'lisa'],
+ 'not[assignee_username][]': ['patty', 'selma'],
+ milestone_title: 'season 4',
+ 'not[milestone_title]': 'season 20',
+ 'label_name[]': ['cartoon', 'tv'],
+ 'not[label_name][]': ['live action', 'drama'],
+ my_reaction_emoji: 'thumbsup',
+ confidential: 'no',
+ iteration_title: 'season: #4',
+ 'not[iteration_title]': 'season: #20',
+ epic_id: '12',
+ 'not[epic_id]': '34',
+ weight: '1',
+ 'not[weight]': '3',
+};
+
+export const urlParamsWithSpecialValues = {
+ assignee_id: '123',
+ 'assignee_username[]': 'bart',
+ my_reaction_emoji: 'None',
+ iteration_id: 'Current',
+ epic_id: 'None',
+ weight: 'None',
+};
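
These fixtures pair a raw query string with the filter tokens and the API/URL parameter objects the issues list utilities are expected to derive from it. As a rough illustration of that mapping, and not the actual `~/issues_list/utils` implementation, the plain `author_username` filter and its negated form could be parsed along these lines:

    // Illustrative sketch: handles only the author_username pair from the fixture above.
    import {
      OPERATOR_IS,
      OPERATOR_IS_NOT,
    } from '~/vue_shared/components/filtered_search_bar/constants';

    const parseAuthorTokens = (search) => {
      const params = new URLSearchParams(search);
      const tokens = [];

      if (params.has('author_username')) {
        tokens.push({
          type: 'author_username',
          value: { data: params.get('author_username'), operator: OPERATOR_IS },
        });
      }
      if (params.has('not[author_username]')) {
        tokens.push({
          type: 'author_username',
          value: { data: params.get('not[author_username]'), operator: OPERATOR_IS_NOT },
        });
      }

      return tokens;
    };

    // parseAuthorTokens(locationSearch) yields the first two entries of filteredTokens.
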
diff --git a/spec/frontend/issues_list/utils_spec.js b/spec/frontend/issues_list/utils_spec.js
new file mode 100644
index 00000000000..17127753972
--- /dev/null
+++ b/spec/frontend/issues_list/utils_spec.js
@@ -0,0 +1,109 @@
+import {
+ apiParams,
+ apiParamsWithSpecialValues,
+ filteredTokens,
+ filteredTokensWithSpecialValues,
+ locationSearch,
+ locationSearchWithSpecialValues,
+ urlParams,
+ urlParamsWithSpecialValues,
+} from 'jest/issues_list/mock_data';
+import { API_PARAM, DUE_DATE_VALUES, URL_PARAM, urlSortParams } from '~/issues_list/constants';
+import {
+ convertToParams,
+ convertToSearchQuery,
+ getDueDateValue,
+ getFilterTokens,
+ getSortKey,
+ getSortOptions,
+} from '~/issues_list/utils';
+
+describe('getSortKey', () => {
+ it.each(Object.keys(urlSortParams))('returns %s given the correct inputs', (sortKey) => {
+ const { sort } = urlSortParams[sortKey];
+ expect(getSortKey(sort)).toBe(sortKey);
+ });
+});
+
+describe('getDueDateValue', () => {
+ it.each(DUE_DATE_VALUES)('returns the argument when it is `%s`', (value) => {
+ expect(getDueDateValue(value)).toBe(value);
+ });
+
+ it('returns undefined when the argument is invalid', () => {
+ expect(getDueDateValue('invalid value')).toBeUndefined();
+ });
+});
+
+describe('getSortOptions', () => {
+ describe.each`
+ hasIssueWeightsFeature | hasBlockedIssuesFeature | length | containsWeight | containsBlocking
+ ${false} | ${false} | ${8} | ${false} | ${false}
+ ${true} | ${false} | ${9} | ${true} | ${false}
+ ${false} | ${true} | ${9} | ${false} | ${true}
+ ${true} | ${true} | ${10} | ${true} | ${true}
+ `(
+ 'when hasIssueWeightsFeature=$hasIssueWeightsFeature and hasBlockedIssuesFeature=$hasBlockedIssuesFeature',
+ ({
+ hasIssueWeightsFeature,
+ hasBlockedIssuesFeature,
+ length,
+ containsWeight,
+ containsBlocking,
+ }) => {
+ const sortOptions = getSortOptions(hasIssueWeightsFeature, hasBlockedIssuesFeature);
+
+ it('returns the correct length of sort options', () => {
+ expect(sortOptions).toHaveLength(length);
+ });
+
+ it(`${containsWeight ? 'contains' : 'does not contain'} weight option`, () => {
+ expect(sortOptions.some((option) => option.title === 'Weight')).toBe(containsWeight);
+ });
+
+ it(`${containsBlocking ? 'contains' : 'does not contain'} blocking option`, () => {
+ expect(sortOptions.some((option) => option.title === 'Blocking')).toBe(containsBlocking);
+ });
+ },
+ );
+});
+
+describe('getFilterTokens', () => {
+ it('returns filtered tokens given "window.location.search"', () => {
+ expect(getFilterTokens(locationSearch)).toEqual(filteredTokens);
+ });
+
+ it('returns filtered tokens given "window.location.search" with special values', () => {
+ expect(getFilterTokens(locationSearchWithSpecialValues)).toEqual(
+ filteredTokensWithSpecialValues,
+ );
+ });
+});
+
+describe('convertToParams', () => {
+ it('returns api params given filtered tokens', () => {
+ expect(convertToParams(filteredTokens, API_PARAM)).toEqual(apiParams);
+ });
+
+ it('returns api params given filtered tokens with special values', () => {
+ expect(convertToParams(filteredTokensWithSpecialValues, API_PARAM)).toEqual(
+ apiParamsWithSpecialValues,
+ );
+ });
+
+ it('returns url params given filtered tokens', () => {
+ expect(convertToParams(filteredTokens, URL_PARAM)).toEqual(urlParams);
+ });
+
+ it('returns url params given filtered tokens with special values', () => {
+ expect(convertToParams(filteredTokensWithSpecialValues, URL_PARAM)).toEqual(
+ urlParamsWithSpecialValues,
+ );
+ });
+});
+
+describe('convertToSearchQuery', () => {
+ it('returns search string given filtered tokens', () => {
+ expect(convertToSearchQuery(filteredTokens)).toBe('find issues');
+ });
+});
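
The spec above pins down three conversion helpers from `~/issues_list/utils`. The simplest of them, `convertToSearchQuery`, only has to collapse the plain search-term tokens into one string; a minimal sketch that satisfies the expectation (not necessarily the real implementation):

    // Sketch: keep only plain search terms and join their data with spaces.
    const convertToSearchQuery = (filteredTokens) =>
      filteredTokens
        .filter((token) => token.type === 'filtered-search-term' && token.value.data)
        .map((token) => token.value.data)
        .join(' ');

    // convertToSearchQuery(filteredTokens) === 'find issues'
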
diff --git a/spec/frontend/jira_connect/components/groups_list_spec.js b/spec/frontend/jira_connect/components/groups_list_spec.js
index f354cfe6a9b..4b875928a90 100644
--- a/spec/frontend/jira_connect/components/groups_list_spec.js
+++ b/spec/frontend/jira_connect/components/groups_list_spec.js
@@ -1,12 +1,24 @@
-import { GlAlert, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
+import { GlAlert, GlLoadingIcon, GlSearchBoxByType, GlPagination } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { fetchGroups } from '~/jira_connect/api';
import GroupsList from '~/jira_connect/components/groups_list.vue';
import GroupsListItem from '~/jira_connect/components/groups_list_item.vue';
+import { DEFAULT_GROUPS_PER_PAGE } from '~/jira_connect/constants';
import { mockGroup1, mockGroup2 } from '../mock_data';
+const createMockGroup = (groupId) => {
+ return {
+ ...mockGroup1,
+ id: groupId,
+ };
+};
+
+const createMockGroups = (count) => {
+ return [...new Array(count)].map((_, idx) => createMockGroup(idx));
+};
+
jest.mock('~/jira_connect/api', () => {
return {
fetchGroups: jest.fn(),
@@ -42,6 +54,7 @@ describe('GroupsList', () => {
const findSecondItem = () => findAllItems().at(1);
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
const findGroupsList = () => wrapper.findByTestId('groups-list');
+ const findPagination = () => wrapper.findComponent(GlPagination);
describe('when groups are loading', () => {
it('renders loading icon', async () => {
@@ -130,14 +143,14 @@ describe('GroupsList', () => {
});
it('calls `fetchGroups` with search term', () => {
- expect(fetchGroups).toHaveBeenCalledWith(mockGroupsPath, {
+ expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
page: 1,
- perPage: 10,
+ perPage: DEFAULT_GROUPS_PER_PAGE,
search: mockSearchTeam,
});
});
- it('disables GroupListItems', async () => {
+ it('disables GroupListItems', () => {
findAllItems().wrappers.forEach((groupListItem) => {
expect(groupListItem.props('disabled')).toBe(true);
});
@@ -165,6 +178,122 @@ describe('GroupsList', () => {
expect(findFirstItem().props('group')).toBe(mockGroup1);
});
});
+
+ it.each`
+ userSearchTerm | finalSearchTerm
+ ${'gitl'} | ${'gitl'}
+ ${'git'} | ${'git'}
+ ${'gi'} | ${''}
+ ${'g'} | ${''}
+ ${''} | ${''}
+ ${undefined} | ${undefined}
+ `(
+ 'searches for "$finalSearchTerm" when user enters "$userSearchTerm"',
+ async ({ userSearchTerm, finalSearchTerm }) => {
+ fetchGroups.mockResolvedValue({
+ data: [mockGroup1],
+ headers: { 'X-PAGE': 1, 'X-TOTAL': 1 },
+ });
+
+ createComponent();
+ await waitForPromises();
+
+ const searchBox = findSearchBox();
+ searchBox.vm.$emit('input', userSearchTerm);
+
+ expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
+ page: 1,
+ perPage: DEFAULT_GROUPS_PER_PAGE,
+ search: finalSearchTerm,
+ });
+ },
+ );
+ });
+
+ describe('when page=2', () => {
+ beforeEach(async () => {
+ const totalItems = DEFAULT_GROUPS_PER_PAGE + 1;
+ const mockGroups = createMockGroups(totalItems);
+ fetchGroups.mockResolvedValue({
+ headers: { 'X-TOTAL': totalItems, 'X-PAGE': 1 },
+ data: mockGroups,
+ });
+ createComponent();
+ await waitForPromises();
+
+ const paginationEl = findPagination();
+ paginationEl.vm.$emit('input', 2);
+ });
+
+ it('should load results for page 2', () => {
+ expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
+ page: 2,
+ perPage: DEFAULT_GROUPS_PER_PAGE,
+ search: '',
+ });
+ });
+
+ it('resets page to 1 on search `input` event', () => {
+ const mockSearchTerm = 'gitlab';
+ const searchBox = findSearchBox();
+
+ searchBox.vm.$emit('input', mockSearchTerm);
+
+ expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
+ page: 1,
+ perPage: DEFAULT_GROUPS_PER_PAGE,
+ search: mockSearchTerm,
+ });
+ });
+ });
+ });
+
+ describe('pagination', () => {
+ it.each`
+ scenario | totalItems | shouldShowPagination
+ ${'renders pagination'} | ${DEFAULT_GROUPS_PER_PAGE + 1} | ${true}
+ ${'does not render pagination'} | ${DEFAULT_GROUPS_PER_PAGE} | ${false}
+ ${'does not render pagination'} | ${2} | ${false}
+ ${'does not render pagination'} | ${0} | ${false}
+ `('$scenario with $totalItems groups', async ({ totalItems, shouldShowPagination }) => {
+ const mockGroups = createMockGroups(totalItems);
+ fetchGroups.mockResolvedValue({
+ headers: { 'X-TOTAL': totalItems, 'X-PAGE': 1 },
+ data: mockGroups,
+ });
+ createComponent();
+ await waitForPromises();
+
+ const paginationEl = findPagination();
+
+ expect(paginationEl.exists()).toBe(shouldShowPagination);
+ if (shouldShowPagination) {
+ expect(paginationEl.props('totalItems')).toBe(totalItems);
+ }
+ });
+
+ describe('when `input` event triggered', () => {
+ beforeEach(async () => {
+ const MOCK_TOTAL_ITEMS = DEFAULT_GROUPS_PER_PAGE + 1;
+ fetchGroups.mockResolvedValue({
+ headers: { 'X-TOTAL': MOCK_TOTAL_ITEMS, 'X-PAGE': 1 },
+ data: createMockGroups(MOCK_TOTAL_ITEMS),
+ });
+
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('executes `fetchGroups` with correct arguments', () => {
+ const paginationEl = findPagination();
+ paginationEl.vm.$emit('input', 2);
+
+ expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
+ page: 2,
+ perPage: DEFAULT_GROUPS_PER_PAGE,
+ search: '',
+ });
+ });
});
});
});
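
The search table in the spec above implies a minimum search length: one- and two-character terms are treated as no search, while `undefined` passes through untouched. A hedged sketch of that guard (the constant and helper names below are assumptions, not taken from the component):

    // Assumed helper mirroring the table: 'g' and 'gi' become '', 'git' is kept.
    const MINIMUM_SEARCH_TERM_LENGTH = 3; // assumed name

    const effectiveSearchTerm = (userSearchTerm) => {
      if (userSearchTerm === undefined) {
        return undefined;
      }
      return userSearchTerm.length >= MINIMUM_SEARCH_TERM_LENGTH ? userSearchTerm : '';
    };
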
diff --git a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
index bea27c8877d..9f49cb4007a 100644
--- a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
+++ b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
@@ -24,7 +24,9 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
role="columnheader"
scope="col"
>
- Jira display name
+ <div>
+ Jira display name
+ </div>
</th>
<th
aria-colindex="2"
@@ -32,14 +34,18 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
class=""
role="columnheader"
scope="col"
- />
+ >
+ <div />
+ </th>
<th
aria-colindex="3"
class=""
role="columnheader"
scope="col"
>
- GitLab username
+ <div>
+ GitLab username
+ </div>
</th>
</tr>
</thead>
diff --git a/spec/frontend/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index 2974e91e46d..3fcefde1aba 100644
--- a/spec/frontend/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -35,6 +35,7 @@ describe('Job App', () => {
const props = {
artifactHelpUrl: 'help/artifact',
deploymentHelpUrl: 'help/deployment',
+ codeQualityHelpPath: '/help/code_quality',
runnerSettingsUrl: 'settings/ci-cd/runners',
variablesSettingsUrl: 'settings/ci-cd/variables',
terminalPath: 'jobs/123/terminal',
diff --git a/spec/frontend/jobs/components/table/cells.vue/duration_cell_spec.js b/spec/frontend/jobs/components/table/cells.vue/duration_cell_spec.js
new file mode 100644
index 00000000000..763a4b0eaa2
--- /dev/null
+++ b/spec/frontend/jobs/components/table/cells.vue/duration_cell_spec.js
@@ -0,0 +1,81 @@
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import DurationCell from '~/jobs/components/table/cells/duration_cell.vue';
+
+describe('Duration Cell', () => {
+ let wrapper;
+
+ const findJobDuration = () => wrapper.findByTestId('job-duration');
+ const findJobFinishedTime = () => wrapper.findByTestId('job-finished-time');
+ const findDurationIcon = () => wrapper.findByTestId('duration-icon');
+ const findFinishedTimeIcon = () => wrapper.findByTestId('finished-time-icon');
+
+ const createComponent = (props) => {
+ wrapper = extendedWrapper(
+ shallowMount(DurationCell, {
+ propsData: {
+ job: {
+ ...props,
+ },
+ },
+ }),
+ );
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('does not display duration or finished time when no properties are present', () => {
+ createComponent();
+
+ expect(findJobDuration().exists()).toBe(false);
+ expect(findJobFinishedTime().exists()).toBe(false);
+ });
+
+ it('displays duration and finished time when both properties are present', () => {
+ const props = {
+ duration: 7,
+ finishedAt: '2021-04-26T13:37:52Z',
+ };
+
+ createComponent(props);
+
+ expect(findJobDuration().exists()).toBe(true);
+ expect(findJobFinishedTime().exists()).toBe(true);
+ });
+
+ it('displays only the duration of the job when the duration property is present', () => {
+ const props = {
+ duration: 7,
+ };
+
+ createComponent(props);
+
+ expect(findJobDuration().exists()).toBe(true);
+ expect(findJobFinishedTime().exists()).toBe(false);
+ });
+
+  it('displays only the finished time of the job when the finishedAt property is present', () => {
+ const props = {
+ finishedAt: '2021-04-26T13:37:52Z',
+ };
+
+ createComponent(props);
+
+ expect(findJobFinishedTime().exists()).toBe(true);
+ expect(findJobDuration().exists()).toBe(false);
+ });
+
+ it('displays icons for finished time and duration', () => {
+ const props = {
+ duration: 7,
+ finishedAt: '2021-04-26T13:37:52Z',
+ };
+
+ createComponent(props);
+
+ expect(findFinishedTimeIcon().props('name')).toBe('calendar');
+ expect(findDurationIcon().props('name')).toBe('timer');
+ });
+});
diff --git a/spec/frontend/jobs/components/table/cells.vue/job_cell_spec.js b/spec/frontend/jobs/components/table/cells.vue/job_cell_spec.js
new file mode 100644
index 00000000000..fc4e5586349
--- /dev/null
+++ b/spec/frontend/jobs/components/table/cells.vue/job_cell_spec.js
@@ -0,0 +1,140 @@
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import JobCell from '~/jobs/components/table/cells/job_cell.vue';
+import { mockJobsInTable } from '../../../mock_data';
+
+const mockJob = mockJobsInTable[0];
+const mockJobCreatedByTag = mockJobsInTable[1];
+const mockJobLimitedAccess = mockJobsInTable[2];
+const mockStuckJob = mockJobsInTable[3];
+
+describe('Job Cell', () => {
+ let wrapper;
+
+ const findJobIdLink = () => wrapper.findByTestId('job-id-link');
+ const findJobIdNoLink = () => wrapper.findByTestId('job-id-limited-access');
+ const findJobRef = () => wrapper.findByTestId('job-ref');
+ const findJobSha = () => wrapper.findByTestId('job-sha');
+ const findLabelIcon = () => wrapper.findByTestId('label-icon');
+ const findForkIcon = () => wrapper.findByTestId('fork-icon');
+ const findStuckIcon = () => wrapper.findByTestId('stuck-icon');
+ const findAllTagBadges = () => wrapper.findAllByTestId('job-tag-badge');
+
+ const findBadgeById = (id) => wrapper.findByTestId(id);
+
+ const createComponent = (jobData = mockJob) => {
+ wrapper = extendedWrapper(
+ shallowMount(JobCell, {
+ propsData: {
+ job: jobData,
+ },
+ }),
+ );
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Job Id', () => {
+ it('displays the job id and links to the job', () => {
+ createComponent();
+
+ const expectedJobId = `#${getIdFromGraphQLId(mockJob.id)}`;
+
+ expect(findJobIdLink().text()).toBe(expectedJobId);
+ expect(findJobIdLink().attributes('href')).toBe(mockJob.detailedStatus.detailsPath);
+ expect(findJobIdNoLink().exists()).toBe(false);
+ });
+
+    it('displays the job id with no link', () => {
+ createComponent(mockJobLimitedAccess);
+
+ const expectedJobId = `#${getIdFromGraphQLId(mockJobLimitedAccess.id)}`;
+
+ expect(findJobIdNoLink().text()).toBe(expectedJobId);
+ expect(findJobIdNoLink().exists()).toBe(true);
+ expect(findJobIdLink().exists()).toBe(false);
+ });
+ });
+
+ describe('Ref of the job', () => {
+ it('displays the ref name and links to the ref', () => {
+ createComponent();
+
+ expect(findJobRef().text()).toBe(mockJob.refName);
+ expect(findJobRef().attributes('href')).toBe(mockJob.refPath);
+ });
+
+ it('displays fork icon when job is not created by tag', () => {
+ createComponent();
+
+ expect(findForkIcon().exists()).toBe(true);
+ expect(findLabelIcon().exists()).toBe(false);
+ });
+
+ it('displays label icon when job is created by a tag', () => {
+ createComponent(mockJobCreatedByTag);
+
+ expect(findLabelIcon().exists()).toBe(true);
+ expect(findForkIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('Commit of the job', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays the sha and links to the commit', () => {
+ expect(findJobSha().text()).toBe(mockJob.shortSha);
+ expect(findJobSha().attributes('href')).toBe(mockJob.commitPath);
+ });
+ });
+
+ describe('Job badges', () => {
+ it('displays tags of the job', () => {
+ const mockJobWithTags = {
+ tags: ['tag-1', 'tag-2', 'tag-3'],
+ };
+
+ createComponent(mockJobWithTags);
+
+ expect(findAllTagBadges()).toHaveLength(mockJobWithTags.tags.length);
+ });
+
+ it.each`
+ testId | text
+ ${'manual-job-badge'} | ${'manual'}
+ ${'triggered-job-badge'} | ${'triggered'}
+ ${'fail-job-badge'} | ${'allowed to fail'}
+ ${'delayed-job-badge'} | ${'delayed'}
+ `('displays the static $text badge', ({ testId, text }) => {
+ createComponent({
+ manualJob: true,
+ triggered: true,
+ allowFailure: true,
+ scheduledAt: '2021-03-09T14:58:50+00:00',
+ });
+
+ expect(findBadgeById(testId).exists()).toBe(true);
+ expect(findBadgeById(testId).text()).toBe(text);
+ });
+ });
+
+ describe('Job icons', () => {
+ it('stuck icon is not shown if job is not stuck', () => {
+ createComponent();
+
+ expect(findStuckIcon().exists()).toBe(false);
+ });
+
+ it('stuck icon is shown if job is stuck', () => {
+ createComponent(mockStuckJob);
+
+ expect(findStuckIcon().exists()).toBe(true);
+ expect(findStuckIcon().attributes('name')).toBe('warning');
+ });
+ });
+});
diff --git a/spec/frontend/jobs/components/table/cells.vue/pipeline_cell_spec.js b/spec/frontend/jobs/components/table/cells.vue/pipeline_cell_spec.js
new file mode 100644
index 00000000000..1f5e0a7aa21
--- /dev/null
+++ b/spec/frontend/jobs/components/table/cells.vue/pipeline_cell_spec.js
@@ -0,0 +1,82 @@
+import { GlAvatar } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+import PipelineCell from '~/jobs/components/table/cells/pipeline_cell.vue';
+
+const mockJobWithoutUser = {
+ id: 'gid://gitlab/Ci::Build/2264',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/460',
+ path: '/root/ci-project/-/pipelines/460',
+ },
+};
+
+const mockJobWithUser = {
+ id: 'gid://gitlab/Ci::Build/2264',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/460',
+ path: '/root/ci-project/-/pipelines/460',
+ user: {
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ webPath: '/root',
+ },
+ },
+};
+
+describe('Pipeline Cell', () => {
+ let wrapper;
+
+ const findPipelineId = () => wrapper.findByTestId('pipeline-id');
+ const findPipelineUserLink = () => wrapper.findByTestId('pipeline-user-link');
+ const findUserAvatar = () => wrapper.findComponent(GlAvatar);
+
+ const createComponent = (props = mockJobWithUser) => {
+ wrapper = extendedWrapper(
+ shallowMount(PipelineCell, {
+ propsData: {
+ job: props,
+ },
+ }),
+ );
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Pipeline Id', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays the pipeline id and links to the pipeline', () => {
+ const expectedPipelineId = `#${getIdFromGraphQLId(mockJobWithUser.pipeline.id)}`;
+
+ expect(findPipelineId().text()).toBe(expectedPipelineId);
+ expect(findPipelineId().attributes('href')).toBe(mockJobWithUser.pipeline.path);
+ });
+ });
+
+ describe('Pipeline created by', () => {
+ const apiWrapperText = 'API';
+
+ it('shows and links to the pipeline user', () => {
+ createComponent();
+
+ expect(findPipelineUserLink().exists()).toBe(true);
+ expect(findPipelineUserLink().attributes('href')).toBe(mockJobWithUser.pipeline.user.webPath);
+ expect(findUserAvatar().attributes('src')).toBe(mockJobWithUser.pipeline.user.avatarUrl);
+ expect(wrapper.text()).not.toContain(apiWrapperText);
+ });
+
+ it('shows pipeline was created by the API', () => {
+ createComponent(mockJobWithoutUser);
+
+ expect(findPipelineUserLink().exists()).toBe(false);
+ expect(findUserAvatar().exists()).toBe(false);
+ expect(wrapper.text()).toContain(apiWrapperText);
+ });
+ });
+});
diff --git a/spec/frontend/jobs/components/table/job_table_app_spec.js b/spec/frontend/jobs/components/table/job_table_app_spec.js
new file mode 100644
index 00000000000..9d1135e26c8
--- /dev/null
+++ b/spec/frontend/jobs/components/table/job_table_app_spec.js
@@ -0,0 +1,110 @@
+import { GlSkeletonLoader, GlAlert, GlEmptyState } from '@gitlab/ui';
+import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import getJobsQuery from '~/jobs/components/table/graphql/queries/get_jobs.query.graphql';
+import JobsTable from '~/jobs/components/table/jobs_table.vue';
+import JobsTableApp from '~/jobs/components/table/jobs_table_app.vue';
+import JobsTableTabs from '~/jobs/components/table/jobs_table_tabs.vue';
+import { mockJobsQueryResponse, mockJobsQueryEmptyResponse } from '../../mock_data';
+
+const projectPath = 'gitlab-org/gitlab';
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('Job table app', () => {
+ let wrapper;
+
+ const successHandler = jest.fn().mockResolvedValue(mockJobsQueryResponse);
+ const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+ const emptyHandler = jest.fn().mockResolvedValue(mockJobsQueryEmptyResponse);
+
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findTable = () => wrapper.findComponent(JobsTable);
+ const findTabs = () => wrapper.findComponent(JobsTableTabs);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ const createMockApolloProvider = (handler) => {
+ const requestHandlers = [[getJobsQuery, handler]];
+
+ return createMockApollo(requestHandlers);
+ };
+
+ const createComponent = (handler = successHandler, mountFn = shallowMount) => {
+ wrapper = mountFn(JobsTableApp, {
+ provide: {
+ projectPath,
+ },
+ localVue,
+ apolloProvider: createMockApolloProvider(handler),
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('loading state', () => {
+ it('should display skeleton loader when loading', () => {
+ createComponent();
+
+ expect(findSkeletonLoader().exists()).toBe(true);
+ expect(findTable().exists()).toBe(false);
+ });
+ });
+
+ describe('loaded state', () => {
+ beforeEach(async () => {
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('should display the jobs table with data', () => {
+ expect(findTable().exists()).toBe(true);
+ expect(findSkeletonLoader().exists()).toBe(false);
+ });
+
+    it('should refetch jobs query on fetchJobsByStatus event', async () => {
+ jest.spyOn(wrapper.vm.$apollo.queries.jobs, 'refetch').mockImplementation(jest.fn());
+
+ expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(0);
+
+ await findTabs().vm.$emit('fetchJobsByStatus');
+
+ expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('error state', () => {
+ it('should show an alert if there is an error fetching the data', async () => {
+ createComponent(failedHandler);
+
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('empty state', () => {
+ it('should display empty state if there are no jobs and tab scope is null', async () => {
+ createComponent(emptyHandler, mount);
+
+ await waitForPromises();
+
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findTable().exists()).toBe(false);
+ });
+
+ it('should not display empty state if there are jobs and tab scope is not null', async () => {
+ createComponent(successHandler, mount);
+
+ await waitForPromises();
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findTable().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/jobs/components/table/jobs_table_empty_state_spec.js b/spec/frontend/jobs/components/table/jobs_table_empty_state_spec.js
new file mode 100644
index 00000000000..05b066a9edc
--- /dev/null
+++ b/spec/frontend/jobs/components/table/jobs_table_empty_state_spec.js
@@ -0,0 +1,37 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import JobsTableEmptyState from '~/jobs/components/table/jobs_table_empty_state.vue';
+
+describe('Jobs table empty state', () => {
+ let wrapper;
+
+ const pipelineEditorPath = '/root/project/-/ci/editor';
+ const emptyStateSvgPath = 'assets/jobs-empty-state.svg';
+
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ const createComponent = () => {
+ wrapper = shallowMount(JobsTableEmptyState, {
+ provide: {
+ pipelineEditorPath,
+ emptyStateSvgPath,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays empty state', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('links to the pipeline editor', () => {
+ expect(findEmptyState().props('primaryButtonLink')).toBe(pipelineEditorPath);
+ });
+
+ it('shows an empty state image', () => {
+ expect(findEmptyState().props('svgPath')).toBe(emptyStateSvgPath);
+ });
+});
diff --git a/spec/frontend/jobs/components/table/jobs_table_spec.js b/spec/frontend/jobs/components/table/jobs_table_spec.js
index db057efbfb4..ac8bef675f8 100644
--- a/spec/frontend/jobs/components/table/jobs_table_spec.js
+++ b/spec/frontend/jobs/components/table/jobs_table_spec.js
@@ -1,20 +1,29 @@
import { GlTable } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import JobsTable from '~/jobs/components/table/jobs_table.vue';
+import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
import { mockJobsInTable } from '../../mock_data';
describe('Jobs Table', () => {
let wrapper;
const findTable = () => wrapper.findComponent(GlTable);
+ const findStatusBadge = () => wrapper.findComponent(CiBadge);
+ const findTableRows = () => wrapper.findAllByTestId('jobs-table-row');
+ const findJobStage = () => wrapper.findByTestId('job-stage-name');
+ const findJobName = () => wrapper.findByTestId('job-name');
+ const findAllCoverageJobs = () => wrapper.findAllByTestId('job-coverage');
const createComponent = (props = {}) => {
- wrapper = shallowMount(JobsTable, {
- propsData: {
- jobs: mockJobsInTable,
- ...props,
- },
- });
+ wrapper = extendedWrapper(
+ mount(JobsTable, {
+ propsData: {
+ jobs: mockJobsInTable,
+ ...props,
+ },
+ }),
+ );
};
beforeEach(() => {
@@ -25,7 +34,31 @@ describe('Jobs Table', () => {
wrapper.destroy();
});
- it('displays a table', () => {
+ it('displays the jobs table', () => {
expect(findTable().exists()).toBe(true);
});
+
+ it('displays correct number of job rows', () => {
+ expect(findTableRows()).toHaveLength(mockJobsInTable.length);
+ });
+
+ it('displays job status', () => {
+ expect(findStatusBadge().exists()).toBe(true);
+ });
+
+ it('displays the job stage and name', () => {
+ const firstJob = mockJobsInTable[0];
+
+ expect(findJobStage().text()).toBe(firstJob.stage.name);
+ expect(findJobName().text()).toBe(firstJob.name);
+ });
+
+  it('displays the coverage only for jobs that have coverage', () => {
+ const jobsThatHaveCoverage = mockJobsInTable.filter((job) => job.coverage !== null);
+
+ jobsThatHaveCoverage.forEach((job, index) => {
+ expect(findAllCoverageJobs().at(index).text()).toBe(`${job.coverage}%`);
+ });
+ expect(findAllCoverageJobs()).toHaveLength(jobsThatHaveCoverage.length);
+ });
});
diff --git a/spec/frontend/jobs/mock_data.js b/spec/frontend/jobs/mock_data.js
index 1432c6d7e9b..57f0b852ff8 100644
--- a/spec/frontend/jobs/mock_data.js
+++ b/spec/frontend/jobs/mock_data.js
@@ -920,6 +920,7 @@ export default {
cancel_path: '/root/ci-mock/-/jobs/4757/cancel',
new_issue_path: '/root/ci-mock/issues/new',
playable: false,
+ complete: true,
created_at: threeWeeksAgo.toISOString(),
updated_at: threeWeeksAgo.toISOString(),
finished_at: threeWeeksAgo.toISOString(),
@@ -1237,8 +1238,8 @@ export const mockPipelineWithAttachedMR = {
title: 'Update README.md',
source_branch: 'feature-1234',
source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1234',
- target_branch: 'master',
- target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
+ target_branch: 'main',
+ target_branch_path: '/root/detached-merge-request-pipelines/branches/main',
},
ref: {
name: 'test-branch',
@@ -1269,8 +1270,8 @@ export const mockPipelineDetached = {
title: 'Update README.md',
source_branch: 'feature-1234',
source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1234',
- target_branch: 'master',
- target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
+ target_branch: 'main',
+ target_branch_path: '/root/detached-merge-request-pipelines/branches/main',
},
ref: {
name: 'test-branch',
@@ -1292,11 +1293,12 @@ export const mockJobsInTable = [
title: 'Play',
__typename: 'StatusAction',
},
+ detailsPath: '/root/ci-project/-/jobs/2004',
__typename: 'DetailedStatus',
},
id: 'gid://gitlab/Ci::Build/2004',
- refName: 'master',
- refPath: '/root/ci-project/-/commits/master',
+ refName: 'main',
+ refPath: '/root/ci-project/-/commits/main',
tags: [],
shortSha: '2d5d8323',
commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
@@ -1316,10 +1318,13 @@ export const mockJobsInTable = [
duration: null,
finishedAt: null,
coverage: null,
+ createdByTag: false,
retryable: false,
playable: true,
cancelable: false,
active: false,
+ stuck: false,
+ userPermissions: { readBuild: true, __typename: 'JobPermissions' },
__typename: 'CiJob',
},
{
@@ -1332,8 +1337,8 @@ export const mockJobsInTable = [
__typename: 'DetailedStatus',
},
id: 'gid://gitlab/Ci::Build/2021',
- refName: 'master',
- refPath: '/root/ci-project/-/commits/master',
+ refName: 'main',
+ refPath: '/root/ci-project/-/commits/main',
tags: [],
shortSha: '2d5d8323',
commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
@@ -1353,10 +1358,13 @@ export const mockJobsInTable = [
duration: null,
finishedAt: null,
coverage: null,
+ createdByTag: true,
retryable: false,
playable: false,
cancelable: false,
active: false,
+ stuck: false,
+ userPermissions: { readBuild: true, __typename: 'JobPermissions' },
__typename: 'CiJob',
},
{
@@ -1376,8 +1384,8 @@ export const mockJobsInTable = [
__typename: 'DetailedStatus',
},
id: 'gid://gitlab/Ci::Build/2015',
- refName: 'master',
- refPath: '/root/ci-project/-/commits/master',
+ refName: 'main',
+ refPath: '/root/ci-project/-/commits/main',
tags: [],
shortSha: '2d5d8323',
commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
@@ -1396,11 +1404,172 @@ export const mockJobsInTable = [
name: 'artifact_job',
duration: 2,
finishedAt: '2021-04-01T17:36:18Z',
- coverage: null,
+ coverage: 82.71,
+ createdByTag: false,
retryable: true,
playable: false,
cancelable: false,
active: false,
+ stuck: false,
+ userPermissions: { readBuild: false, __typename: 'JobPermissions' },
+ __typename: 'CiJob',
+ },
+ {
+ artifacts: { nodes: [], __typename: 'CiJobArtifactConnection' },
+ allowFailure: false,
+ status: 'PENDING',
+ scheduledAt: null,
+ manualJob: false,
+ triggered: null,
+ createdByTag: false,
+ detailedStatus: {
+ detailsPath: '/root/ci-project/-/jobs/2391',
+ group: 'pending',
+ icon: 'status_pending',
+ label: 'pending',
+ text: 'pending',
+ tooltip: 'pending',
+ action: {
+ buttonTitle: 'Cancel this job',
+ icon: 'cancel',
+ method: 'post',
+ path: '/root/ci-project/-/jobs/2391/cancel',
+ title: 'Cancel',
+ __typename: 'StatusAction',
+ },
+ __typename: 'DetailedStatus',
+ },
+ id: 'gid://gitlab/Ci::Build/2391',
+ refName: 'master',
+ refPath: '/root/ci-project/-/commits/master',
+ tags: [],
+ shortSha: '916330b4',
+ commitPath: '/root/ci-project/-/commit/916330b4fda5dae226524ceb51c756c0ed26679d',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/482',
+ path: '/root/ci-project/-/pipelines/482',
+ user: {
+ webPath: '/root',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ __typename: 'UserCore',
+ },
+ __typename: 'Pipeline',
+ },
+ stage: { name: 'build', __typename: 'CiStage' },
+ name: 'build_job',
+ duration: null,
+ finishedAt: null,
+ coverage: null,
+ retryable: false,
+ playable: false,
+ cancelable: true,
+ active: true,
+ stuck: true,
+ userPermissions: { readBuild: true, __typename: 'JobPermissions' },
__typename: 'CiJob',
},
];
+
+export const mockJobsQueryResponse = {
+ data: {
+ project: {
+ jobs: {
+ pageInfo: {
+ endCursor: 'eyJpZCI6IjIzMTcifQ',
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: 'eyJpZCI6IjIzMzYifQ',
+ __typename: 'PageInfo',
+ },
+ nodes: [
+ {
+ artifacts: {
+ nodes: [
+ {
+ downloadPath: '/root/ci-project/-/jobs/2336/artifacts/download?file_type=trace',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/root/ci-project/-/jobs/2336/artifacts/download?file_type=metadata',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath: '/root/ci-project/-/jobs/2336/artifacts/download?file_type=archive',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ allowFailure: false,
+ status: 'SUCCESS',
+ scheduledAt: null,
+ manualJob: false,
+ triggered: null,
+ createdByTag: false,
+ detailedStatus: {
+ detailsPath: '/root/ci-project/-/jobs/2336',
+ group: 'success',
+ icon: 'status_success',
+ label: 'passed',
+ text: 'passed',
+ tooltip: 'passed',
+ action: {
+ buttonTitle: 'Retry this job',
+ icon: 'retry',
+ method: 'post',
+ path: '/root/ci-project/-/jobs/2336/retry',
+ title: 'Retry',
+ __typename: 'StatusAction',
+ },
+ __typename: 'DetailedStatus',
+ },
+ id: 'gid://gitlab/Ci::Build/2336',
+ refName: 'main',
+ refPath: '/root/ci-project/-/commits/main',
+ tags: [],
+ shortSha: '4408fa2a',
+ commitPath: '/root/ci-project/-/commit/4408fa2a27aaadfdf42d8dda3d6a9c01ce6cad78',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/473',
+ path: '/root/ci-project/-/pipelines/473',
+ user: {
+ webPath: '/root',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ __typename: 'UserCore',
+ },
+ __typename: 'Pipeline',
+ },
+ stage: {
+ name: 'deploy',
+ __typename: 'CiStage',
+ },
+ name: 'artifact_job',
+ duration: 3,
+ finishedAt: '2021-04-29T14:19:50Z',
+ coverage: null,
+ retryable: true,
+ playable: false,
+ cancelable: false,
+ active: false,
+ stuck: false,
+ userPermissions: { readBuild: true, __typename: 'JobPermissions' },
+ __typename: 'CiJob',
+ },
+ ],
+ __typename: 'CiJobConnection',
+ },
+ __typename: 'Project',
+ },
+ },
+};
+
+export const mockJobsQueryEmptyResponse = {
+ data: {
+ project: {
+ jobs: [],
+ },
+ },
+};
diff --git a/spec/frontend/learn_gitlab/track_learn_gitlab_spec.js b/spec/frontend/learn_gitlab/track_learn_gitlab_spec.js
new file mode 100644
index 00000000000..3fb38a74c70
--- /dev/null
+++ b/spec/frontend/learn_gitlab/track_learn_gitlab_spec.js
@@ -0,0 +1,21 @@
+import { mockTracking } from 'helpers/tracking_helper';
+import trackLearnGitlab from '~/learn_gitlab/track_learn_gitlab';
+
+describe('trackLearnGitlab', () => {
+ let spy;
+
+  describe('when the Learn GitLab page loads', () => {
+ beforeEach(() => {
+ spy = mockTracking('projects:learn_gitlab_index', document.body, jest.spyOn);
+ });
+
+    it('sends a page_init tracking event', () => {
+ trackLearnGitlab();
+
+ expect(spy).toHaveBeenCalledWith('projects:learn_gitlab:index', 'page_init', {
+ label: 'learn_gitlab',
+ property: 'Growth::Activation::Experiment::LearnGitLabB',
+ });
+ });
+ });
+});
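
The expectation above fixes the category, action, and data of the tracking call. One implementation consistent with it, assuming the standard `~/tracking` module is what `mockTracking` intercepts (a sketch, not necessarily the shipped code):

    import Tracking from '~/tracking';

    // Sketch: fire a single page_init event for the Learn GitLab experiment page.
    export default function trackLearnGitlab() {
      Tracking.event('projects:learn_gitlab:index', 'page_init', {
        label: 'learn_gitlab',
        property: 'Growth::Activation::Experiment::LearnGitLabB',
      });
    }
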
diff --git a/spec/frontend/lib/utils/number_utility_spec.js b/spec/frontend/lib/utils/number_utility_spec.js
index 4dcd9211697..f4483f5098b 100644
--- a/spec/frontend/lib/utils/number_utility_spec.js
+++ b/spec/frontend/lib/utils/number_utility_spec.js
@@ -10,6 +10,7 @@ import {
changeInPercent,
formattedChangeInPercent,
isNumeric,
+ isPositiveInteger,
} from '~/lib/utils/number_utils';
describe('Number Utils', () => {
@@ -184,4 +185,29 @@ describe('Number Utils', () => {
expect(isNumeric(value)).toBe(outcome);
});
});
+
+ describe.each`
+ value | outcome
+ ${0} | ${true}
+ ${'0'} | ${true}
+ ${12345} | ${true}
+ ${'12345'} | ${true}
+ ${-1} | ${false}
+ ${'-1'} | ${false}
+ ${1.01} | ${false}
+ ${'1.01'} | ${false}
+ ${'abcd'} | ${false}
+ ${'100abcd'} | ${false}
+ ${'abcd100'} | ${false}
+ ${''} | ${false}
+ ${false} | ${false}
+ ${true} | ${false}
+ ${undefined} | ${false}
+ ${null} | ${false}
+ ${Infinity} | ${false}
+ `('isPositiveInteger', ({ value, outcome }) => {
+ it(`when called with ${typeof value} ${value} it returns ${outcome}`, () => {
+ expect(isPositiveInteger(value)).toBe(outcome);
+ });
+ });
});
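
The truth table above fully determines `isPositiveInteger` for the tested inputs: non-negative integers, whether numbers or digit-only strings, are accepted, and everything else (negatives, decimals, booleans, empty strings, `null`, `undefined`, `Infinity`) is rejected. One implementation that satisfies it, offered as a sketch rather than the actual `~/lib/utils/number_utils` code:

    // Sketch: stringify the value and accept it only if it is made entirely of digits.
    // Note that 0 and '0' pass, matching the table above.
    const isPositiveInteger = (value) => /^\d+$/.test(String(value));
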
diff --git a/spec/frontend/lib/utils/recurrence_spec.js b/spec/frontend/lib/utils/recurrence_spec.js
new file mode 100644
index 00000000000..fc22529dffc
--- /dev/null
+++ b/spec/frontend/lib/utils/recurrence_spec.js
@@ -0,0 +1,333 @@
+import { create, free, recall } from '~/lib/utils/recurrence';
+
+const HEX = /[a-f0-9]/i;
+const HEX_RE = HEX.source;
+const UUIDV4 = new RegExp(
+ `${HEX_RE}{8}-${HEX_RE}{4}-4${HEX_RE}{3}-[89ab]${HEX_RE}{3}-${HEX_RE}{12}`,
+ 'i',
+);
+
+describe('recurrence', () => {
+ let recurInstance;
+ let id;
+
+ beforeEach(() => {
+ recurInstance = create();
+ id = recurInstance.id;
+ });
+
+ afterEach(() => {
+ id = null;
+ recurInstance.free();
+ });
+
+ describe('create', () => {
+ it('returns an object with the correct external api', () => {
+ expect(recurInstance).toMatchObject(
+ expect.objectContaining({
+ id: expect.stringMatching(UUIDV4),
+ count: 0,
+ handlers: {},
+ free: expect.any(Function),
+ handle: expect.any(Function),
+ eject: expect.any(Function),
+ occur: expect.any(Function),
+ reset: expect.any(Function),
+ }),
+ );
+ });
+ });
+
+ describe('recall', () => {
+ it('returns a previously created RecurInstance', () => {
+ expect(recall(id).id).toBe(id);
+ });
+
+ it("returns undefined if the provided UUID doesn't refer to a stored RecurInstance", () => {
+ expect(recall('1234')).toBeUndefined();
+ });
+ });
+
+ describe('free', () => {
+ it('returns true when the RecurInstance exists', () => {
+ expect(free(id)).toBe(true);
+ });
+
+ it("returns false when the ID doesn't refer to a known RecurInstance", () => {
+ expect(free('1234')).toBe(false);
+ });
+
+ it('removes the correct RecurInstance from the list of references', () => {
+ const anotherInstance = create();
+
+ expect(recall(id)).toEqual(recurInstance);
+ expect(recall(anotherInstance.id)).toEqual(anotherInstance);
+
+ free(id);
+
+ expect(recall(id)).toBeUndefined();
+ expect(recall(anotherInstance.id)).toEqual(anotherInstance);
+
+ anotherInstance.free();
+ });
+ });
+
+ describe('RecurInstance (`create()` return value)', () => {
+ it.each`
+ property | value | alias
+ ${'id'} | ${expect.stringMatching(UUIDV4)} | ${'[a string matching the UUIDv4 specification]'}
+ ${'count'} | ${0} | ${0}
+ ${'handlers'} | ${{}} | ${{}}
+ `(
+      'initially has the correct primitive value $alias for the member `$property`',
+ ({ property, value }) => {
+ expect(recurInstance[property]).toEqual(value);
+ },
+ );
+
+ describe('id', () => {
+ it('cannot be changed manually', () => {
+ expect(() => {
+ recurInstance.id = 'new-id';
+ }).toThrow(TypeError);
+
+ expect(recurInstance.id).toBe(id);
+ });
+
+ it.each`
+ method
+ ${'free'}
+ ${'handle'}
+ ${'eject'}
+ ${'occur'}
+ ${'reset'}
+ `('does not change across any method call - like after `$method`', ({ method }) => {
+ recurInstance[method]();
+
+ expect(recurInstance.id).toBe(id);
+ });
+ });
+
+ describe('count', () => {
+ it('cannot be changed manually', () => {
+ expect(() => {
+ recurInstance.count = 9999;
+ }).toThrow(TypeError);
+
+ expect(recurInstance.count).toBe(0);
+ });
+
+ it.each`
+ method
+ ${'free'}
+ ${'handle'}
+ ${'eject'}
+ ${'reset'}
+ `("doesn't change in unexpected scenarios - like after a call to `$method`", ({ method }) => {
+ recurInstance[method]();
+
+ expect(recurInstance.count).toBe(0);
+ });
+
+ it('increments by one each time `.occur()` is called', () => {
+ expect(recurInstance.count).toBe(0);
+ recurInstance.occur();
+ expect(recurInstance.count).toBe(1);
+ recurInstance.occur();
+ expect(recurInstance.count).toBe(2);
+ });
+ });
+
+ describe('handlers', () => {
+ it('cannot be changed manually', () => {
+ const fn = jest.fn();
+
+ recurInstance.handle(1, fn);
+ expect(() => {
+ recurInstance.handlers = {};
+ }).toThrow(TypeError);
+
+ expect(recurInstance.handlers).toStrictEqual({
+ 1: fn,
+ });
+ });
+
+ it.each`
+ method
+ ${'free'}
+ ${'occur'}
+ ${'eject'}
+ ${'reset'}
+ `("doesn't change in unexpected scenarios - like after a call to `$method`", ({ method }) => {
+ recurInstance[method]();
+
+ expect(recurInstance.handlers).toEqual({});
+ });
+
+ it('adds handlers to the correct slots', () => {
+ const fn1 = jest.fn();
+ const fn2 = jest.fn();
+
+ recurInstance.handle(100, fn1);
+ recurInstance.handle(1000, fn2);
+
+ expect(recurInstance.handlers).toMatchObject({
+ 100: fn1,
+ 1000: fn2,
+ });
+ });
+ });
+
+ describe('free', () => {
+ it('removes itself from recallable memory', () => {
+ expect(recall(id)).toEqual(recurInstance);
+
+ recurInstance.free();
+
+ expect(recall(id)).toBeUndefined();
+ });
+ });
+
+ describe('handle', () => {
+ it('adds a handler for the provided count', () => {
+ const fn = jest.fn();
+
+ recurInstance.handle(5, fn);
+
+ expect(recurInstance.handlers[5]).toEqual(fn);
+ });
+
+ it("doesn't add any handlers if either the count or behavior aren't provided", () => {
+ const fn = jest.fn();
+
+ recurInstance.handle(null, fn);
+ // Note that it's not possible to react to something not happening (without timers)
+ recurInstance.handle(0, fn);
+ recurInstance.handle(5, null);
+
+ expect(recurInstance.handlers).toEqual({});
+ });
+ });
+
+ describe('eject', () => {
+ it('removes the handler assigned to the particular count slot', () => {
+ recurInstance.handle(1, jest.fn());
+
+ expect(recurInstance.handlers[1]).toBeTruthy();
+
+ recurInstance.eject(1);
+
+ expect(recurInstance.handlers).toEqual({});
+ });
+
+ it("succeeds (or fails gracefully) when the count provided doesn't have a handler assigned", () => {
+ recurInstance.eject('abc');
+ recurInstance.eject(1);
+
+ expect(recurInstance.handlers).toEqual({});
+ });
+
+ it('makes no changes if no count is provided', () => {
+ const fn = jest.fn();
+
+ recurInstance.handle(1, fn);
+
+ recurInstance.eject();
+
+ expect(recurInstance.handlers[1]).toStrictEqual(fn);
+ });
+ });
+
+ describe('occur', () => {
+ it('increments the .count property by 1', () => {
+ expect(recurInstance.count).toBe(0);
+
+ recurInstance.occur();
+
+ expect(recurInstance.count).toBe(1);
+ });
+
+ it('calls the appropriate handlers', () => {
+ const fn1 = jest.fn();
+ const fn5 = jest.fn();
+ const fn10 = jest.fn();
+
+ recurInstance.handle(1, fn1);
+ recurInstance.handle(5, fn5);
+ recurInstance.handle(10, fn10);
+
+ expect(fn1).not.toHaveBeenCalled();
+ expect(fn5).not.toHaveBeenCalled();
+ expect(fn10).not.toHaveBeenCalled();
+
+ recurInstance.occur();
+
+ expect(fn1).toHaveBeenCalledTimes(1);
+ expect(fn5).not.toHaveBeenCalled();
+ expect(fn10).not.toHaveBeenCalled();
+
+ recurInstance.occur();
+ recurInstance.occur();
+ recurInstance.occur();
+ recurInstance.occur();
+
+ expect(fn1).toHaveBeenCalledTimes(1);
+ expect(fn5).toHaveBeenCalledTimes(1);
+ expect(fn10).not.toHaveBeenCalled();
+
+ recurInstance.occur();
+ recurInstance.occur();
+ recurInstance.occur();
+ recurInstance.occur();
+ recurInstance.occur();
+
+ expect(fn1).toHaveBeenCalledTimes(1);
+ expect(fn5).toHaveBeenCalledTimes(1);
+ expect(fn10).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('reset', () => {
+ it('resets the count only, by default', () => {
+ const fn = jest.fn();
+
+ recurInstance.handle(3, fn);
+ recurInstance.occur();
+ recurInstance.occur();
+
+ expect(recurInstance.count).toBe(2);
+
+ recurInstance.reset();
+
+ expect(recurInstance.count).toBe(0);
+ expect(recurInstance.handlers).toEqual({ 3: fn });
+ });
+
+ it('also resets the handlers, by specific request', () => {
+ const fn = jest.fn();
+
+ recurInstance.handle(3, fn);
+ recurInstance.occur();
+ recurInstance.occur();
+
+ expect(recurInstance.count).toBe(2);
+
+ recurInstance.reset({ handlersList: true });
+
+ expect(recurInstance.count).toBe(0);
+ expect(recurInstance.handlers).toEqual({});
+ });
+
+ it('leaves the count in place, by request', () => {
+ recurInstance.occur();
+ recurInstance.occur();
+
+ expect(recurInstance.count).toBe(2);
+
+ recurInstance.reset({ currentCount: false });
+
+ expect(recurInstance.count).toBe(2);
+ });
+ });
+ });
+});
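
The spec above exercises a small recurrence-counting API: `create()` returns an instance with read-only `id`, `count`, and `handlers` members plus `handle`/`eject`/`occur`/`reset`/`free` methods, and the module-level `recall`/`free` look instances up by UUID. A minimal sketch consistent with that behaviour (not the actual `~/lib/utils/recurrence` module; `uuids()` is assumed to come from `~/lib/utils/uuids`, as elsewhere in this diff):

    import { uuids } from '~/lib/utils/uuids';

    const instances = {};

    export function create() {
      const id = uuids()[0];
      let count = 0;
      let handlers = {};

      const instance = {
        // Getter-only properties: assigning to them throws a TypeError in strict mode.
        get id() {
          return id;
        },
        get count() {
          return count;
        },
        get handlers() {
          return handlers;
        },
        handle(onCount, behavior) {
          if (onCount && behavior) {
            handlers[onCount] = behavior;
          }
        },
        eject(onCount) {
          if (onCount) {
            delete handlers[onCount];
          }
        },
        occur() {
          count += 1;
          if (handlers[count]) {
            handlers[count](count);
          }
        },
        reset({ currentCount = true, handlersList = false } = {}) {
          if (currentCount) {
            count = 0;
          }
          if (handlersList) {
            handlers = {};
          }
        },
        free() {
          delete instances[id];
        },
      };

      instances[id] = instance;
      return instance;
    }

    export const recall = (id) => instances[id];
    export const free = (id) => Boolean(recall(id)) && delete instances[id];
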
diff --git a/spec/frontend/lib/utils/text_markdown_spec.js b/spec/frontend/lib/utils/text_markdown_spec.js
index b538257fac0..cad500039c0 100644
--- a/spec/frontend/lib/utils/text_markdown_spec.js
+++ b/spec/frontend/lib/utils/text_markdown_spec.js
@@ -51,6 +51,25 @@ describe('init markdown', () => {
expect(textArea.value).toEqual(`${initialValue}- `);
});
+ it('inserts dollar signs correctly', () => {
+ const initialValue = '';
+
+ textArea.value = initialValue;
+ textArea.selectionStart = 0;
+ textArea.selectionEnd = 0;
+
+ insertMarkdownText({
+ textArea,
+ text: textArea.value,
+ tag: '```suggestion:-0+0\n{text}\n```',
+ blockTag: true,
+ selected: '# Does not parse the `$` currently.',
+ wrap: false,
+ });
+
+ expect(textArea.value).toContain('# Does not parse the `$` currently.');
+ });
+
it('inserts the tag on a new line if the current one is not empty', () => {
const initialValue = 'some text';
diff --git a/spec/frontend/diffs/utils/uuids_spec.js b/spec/frontend/lib/utils/uuids_spec.js
index 8d0a01e8cbd..a7770d37566 100644
--- a/spec/frontend/diffs/utils/uuids_spec.js
+++ b/spec/frontend/lib/utils/uuids_spec.js
@@ -1,4 +1,4 @@
-import { uuids } from '~/diffs/utils/uuids';
+import { uuids } from '~/lib/utils/uuids';
const HEX = /[a-f0-9]/i;
const HEX_RE = HEX.source;
diff --git a/spec/frontend/lib/utils/vuex_module_mappers_spec.js b/spec/frontend/lib/utils/vuex_module_mappers_spec.js
new file mode 100644
index 00000000000..d7e51e4daca
--- /dev/null
+++ b/spec/frontend/lib/utils/vuex_module_mappers_spec.js
@@ -0,0 +1,138 @@
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import {
+ mapVuexModuleActions,
+ mapVuexModuleGetters,
+ mapVuexModuleState,
+ REQUIRE_STRING_ERROR_MESSAGE,
+} from '~/lib/utils/vuex_module_mappers';
+
+const TEST_MODULE_NAME = 'testModuleName';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+// setup test component and store ----------------------------------------------
+//
+// These are used to indirectly test `vuex_module_mappers`.
+const TestComponent = Vue.extend({
+ props: {
+ vuexModule: {
+ type: String,
+ required: true,
+ },
+ },
+ computed: {
+ ...mapVuexModuleState((vm) => vm.vuexModule, { name: 'name', value: 'count' }),
+ ...mapVuexModuleGetters((vm) => vm.vuexModule, ['hasValue', 'hasName']),
+ stateJson() {
+ return JSON.stringify({
+ name: this.name,
+ value: this.value,
+ });
+ },
+ gettersJson() {
+ return JSON.stringify({
+ hasValue: this.hasValue,
+ hasName: this.hasName,
+ });
+ },
+ },
+ methods: {
+ ...mapVuexModuleActions((vm) => vm.vuexModule, ['increment']),
+ },
+ template: `
+<div>
+ <pre data-testid="state">{{ stateJson }}</pre>
+ <pre data-testid="getters">{{ gettersJson }}</pre>
+</div>`,
+});
+
+const createTestStore = () => {
+ return new Vuex.Store({
+ modules: {
+ [TEST_MODULE_NAME]: {
+ namespaced: true,
+ state: {
+ name: 'Lorem',
+ count: 0,
+ },
+ mutations: {
+ INCREMENT: (state, amount) => {
+ state.count += amount;
+ },
+ },
+ actions: {
+ increment({ commit }, amount) {
+ commit('INCREMENT', amount);
+ },
+ },
+ getters: {
+ hasValue: (state) => state.count > 0,
+ hasName: (state) => Boolean(state.name.length),
+ },
+ },
+ },
+ });
+};
+
+describe('~/lib/utils/vuex_module_mappers', () => {
+ let store;
+ let wrapper;
+
+ const getJsonInTemplate = (testId) =>
+ JSON.parse(wrapper.find(`[data-testid="${testId}"]`).text());
+ const getMappedState = () => getJsonInTemplate('state');
+ const getMappedGetters = () => getJsonInTemplate('getters');
+
+ beforeEach(() => {
+ store = createTestStore();
+
+ wrapper = mount(TestComponent, {
+ propsData: {
+ vuexModule: TEST_MODULE_NAME,
+ },
+ store,
+ localVue,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('from module defined by prop', () => {
+ it('maps state', () => {
+ expect(getMappedState()).toEqual({
+ name: store.state[TEST_MODULE_NAME].name,
+ value: store.state[TEST_MODULE_NAME].count,
+ });
+ });
+
+ it('maps getters', () => {
+ expect(getMappedGetters()).toEqual({
+ hasName: true,
+ hasValue: false,
+ });
+ });
+
+ it('maps action', () => {
+ jest.spyOn(store, 'dispatch');
+
+ expect(store.dispatch).not.toHaveBeenCalled();
+
+ wrapper.vm.increment(10);
+
+ expect(store.dispatch).toHaveBeenCalledWith(`${TEST_MODULE_NAME}/increment`, 10);
+ });
+ });
+
+ describe('with non-string object value', () => {
+ it('throws helpful error', () => {
+ expect(() => mapVuexModuleActions((vm) => vm.bogus, { foo: () => {} })).toThrowError(
+ REQUIRE_STRING_ERROR_MESSAGE,
+ );
+ });
+ });
+});
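
The mappers under test resolve the Vuex module name at runtime from the component instance rather than taking a fixed string. A hedged sketch of how the state mapper could be built on that idea (not the actual `~/lib/utils/vuex_module_mappers` code; the string-only validation behind `REQUIRE_STRING_ERROR_MESSAGE` is omitted):

    // Sketch: build computed properties that resolve the module name per call,
    // then read the mapped state key from that namespaced module.
    const mapVuexModuleState = (getModuleName, map) =>
      Object.fromEntries(
        Object.entries(map).map(([localKey, stateKey]) => [
          localKey,
          function mappedState() {
            return this.$store.state[getModuleName(this)][stateKey];
          },
        ]),
      );

    // As used in the test component:
    // computed: { ...mapVuexModuleState((vm) => vm.vuexModule, { name: 'name', value: 'count' }) }
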
diff --git a/spec/frontend/logs/components/log_advanced_filters_spec.js b/spec/frontend/logs/components/log_advanced_filters_spec.js
index 111542ff33e..4e4052eb4d8 100644
--- a/spec/frontend/logs/components/log_advanced_filters_spec.js
+++ b/spec/frontend/logs/components/log_advanced_filters_spec.js
@@ -4,6 +4,7 @@ import { convertToFixedRange } from '~/lib/utils/datetime_range';
import LogAdvancedFilters from '~/logs/components/log_advanced_filters.vue';
import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
import { createStore } from '~/logs/stores';
+import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { defaultTimeRange } from '~/vue_shared/constants';
import { mockPods, mockSearch } from '../mock_data';
@@ -77,7 +78,7 @@ describe('LogAdvancedFilters', () => {
expect(getSearchToken(TOKEN_TYPE_POD_NAME)).toMatchObject({
title: 'Pod name',
unique: true,
- operators: [expect.objectContaining({ value: '=' })],
+ operators: OPERATOR_IS_ONLY,
});
});
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
index 92c2f82af27..d5118bbde8c 100644
--- a/spec/frontend/logs/stores/actions_spec.js
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -191,7 +191,7 @@ describe('Logs Store actions', () => {
});
it('should commit RECEIVE_ENVIRONMENTS_DATA_SUCCESS mutation on correct data', () => {
- mock.onGet(mockEnvironmentsEndpoint).replyOnce(200, { environments: mockEnvironments });
+ mock.onGet(mockEnvironmentsEndpoint).replyOnce(200, mockEnvironments);
return testAction(
fetchEnvironments,
mockEnvironmentsEndpoint,
diff --git a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
index af5434f7068..5e04e20801a 100644
--- a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
+++ b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
@@ -3,6 +3,7 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import MembersFilteredSearchBar from '~/members/components/filter_sort/members_filtered_search_bar.vue';
import { MEMBER_TYPES } from '~/members/constants';
+import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
const localVue = createLocalVue();
@@ -65,7 +66,7 @@ describe('MembersFilteredSearchBar', () => {
title: '2FA',
token: GlFilteredSearchToken,
unique: true,
- operators: [{ value: '=', description: 'is' }],
+ operators: OPERATOR_IS_ONLY,
options: [
{ value: 'enabled', title: 'Enabled' },
{ value: 'disabled', title: 'Disabled' },
@@ -99,7 +100,7 @@ describe('MembersFilteredSearchBar', () => {
title: 'Membership',
token: GlFilteredSearchToken,
unique: true,
- operators: [{ value: '=', description: 'is' }],
+ operators: OPERATOR_IS_ONLY,
options: [
{ value: 'exclude', title: 'Direct' },
{ value: 'only', title: 'Inherited' },
@@ -146,6 +147,21 @@ describe('MembersFilteredSearchBar', () => {
},
]);
});
+
+ it('parses and passes search param with multiple words to `FilteredSearchBar` component as `initialFilterValue` prop', () => {
+ window.location.search = '?search=foo+bar+baz';
+
+ createComponent();
+
+ expect(findFilteredSearchBar().props('initialFilterValue')).toEqual([
+ {
+ type: 'filtered-search-term',
+ value: {
+ data: 'foo bar baz',
+ },
+ },
+ ]);
+ });
});
describe('when filter bar is submitted', () => {
@@ -175,6 +191,17 @@ describe('MembersFilteredSearchBar', () => {
expect(window.location.href).toBe('https://localhost/?two_factor=enabled&search=foobar');
});
+ it('adds search query param with multiple words', () => {
+ createComponent();
+
+ findFilteredSearchBar().vm.$emit('onFilter', [
+ { type: 'two_factor', value: { data: 'enabled', operator: '=' } },
+ { type: 'filtered-search-term', value: { data: 'foo bar baz' } },
+ ]);
+
+ expect(window.location.href).toBe('https://localhost/?two_factor=enabled&search=foo+bar+baz');
+ });
+
it('adds sort query param', () => {
window.location.search = '?sort=name_asc';
diff --git a/spec/frontend/members/components/members_tabs_spec.js b/spec/frontend/members/components/members_tabs_spec.js
new file mode 100644
index 00000000000..28614b52706
--- /dev/null
+++ b/spec/frontend/members/components/members_tabs_spec.js
@@ -0,0 +1,194 @@
+import Vue, { nextTick } from 'vue';
+import Vuex from 'vuex';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import MembersApp from '~/members/components/app.vue';
+import MembersTabs from '~/members/components/members_tabs.vue';
+import { MEMBER_TYPES } from '~/members/constants';
+import { pagination } from '../mock_data';
+
+describe('MembersApp', () => {
+ Vue.use(Vuex);
+
+ let wrapper;
+
+ const createComponent = ({ totalItems = 10, options = {} } = {}) => {
+ const store = new Vuex.Store({
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ pagination: {
+ ...pagination,
+ totalItems,
+ },
+ filteredSearchBar: {
+ searchParam: 'search',
+ },
+ },
+ },
+ [MEMBER_TYPES.group]: {
+ namespaced: true,
+ state: {
+ pagination: {
+ ...pagination,
+ totalItems,
+ paramName: 'groups_page',
+ },
+ filteredSearchBar: {
+ searchParam: 'search_groups',
+ },
+ },
+ },
+ [MEMBER_TYPES.invite]: {
+ namespaced: true,
+ state: {
+ pagination: {
+ ...pagination,
+ totalItems,
+ paramName: 'invited_page',
+ },
+ filteredSearchBar: {
+ searchParam: 'search_invited',
+ },
+ },
+ },
+ [MEMBER_TYPES.accessRequest]: {
+ namespaced: true,
+ state: {
+ pagination: {
+ ...pagination,
+ totalItems,
+ paramName: 'access_requests_page',
+ },
+ filteredSearchBar: {
+ searchParam: 'search_access_requests',
+ },
+ },
+ },
+ },
+ });
+
+ wrapper = mountExtended(MembersTabs, {
+ store,
+ stubs: ['members-app'],
+ provide: {
+ canManageMembers: true,
+ },
+ ...options,
+ });
+
+ return nextTick();
+ };
+
+ const findTabs = () => wrapper.findAllByRole('tab').wrappers;
+ const findTabByText = (text) => findTabs().find((tab) => tab.text().includes(text));
+ const findActiveTab = () => wrapper.findByRole('tab', { selected: true });
+
+ beforeEach(() => {
+ delete window.location;
+ window.location = new URL('https://localhost');
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when tabs have a count', () => {
+ it('renders tabs with count', async () => {
+ await createComponent();
+
+ const tabs = findTabs();
+
+ expect(tabs[0].text()).toBe('Members 10');
+ expect(tabs[1].text()).toBe('Groups 10');
+ expect(tabs[2].text()).toBe('Invited 10');
+ expect(tabs[3].text()).toBe('Access requests 10');
+ expect(findActiveTab().text()).toContain('Members');
+ });
+
+ it('renders `MembersApp` and passes `namespace` prop', async () => {
+ await createComponent();
+
+ const membersApps = wrapper.findAllComponents(MembersApp).wrappers;
+
+ expect(membersApps[0].attributes('namespace')).toBe(MEMBER_TYPES.user);
+ expect(membersApps[1].attributes('namespace')).toBe(MEMBER_TYPES.group);
+ expect(membersApps[2].attributes('namespace')).toBe(MEMBER_TYPES.invite);
+ expect(membersApps[3].attributes('namespace')).toBe(MEMBER_TYPES.accessRequest);
+ });
+ });
+
+ describe('when tabs do not have a count', () => {
+ it('only renders `Members` tab', async () => {
+ await createComponent({ totalItems: 0 });
+
+ expect(findTabByText('Members')).not.toBeUndefined();
+ expect(findTabByText('Groups')).toBeUndefined();
+ expect(findTabByText('Invited')).toBeUndefined();
+ expect(findTabByText('Access requests')).toBeUndefined();
+ });
+ });
+
+ describe('when url param matches `filteredSearchBar.searchParam`', () => {
+ beforeEach(() => {
+ window.location.search = '?search_groups=foo+bar';
+ });
+
+ const expectGroupsTabActive = () => {
+ expect(findActiveTab().text()).toContain('Groups');
+ };
+
+ describe('when tab has a count', () => {
+ it('sets tab that corresponds to search param as active tab', async () => {
+ await createComponent();
+
+ expectGroupsTabActive();
+ });
+ });
+
+ describe('when tab does not have a count', () => {
+ it('sets tab that corresponds to search param as active tab', async () => {
+ await createComponent({ totalItems: 0 });
+
+ expectGroupsTabActive();
+ });
+ });
+ });
+
+ describe('when url param matches `pagination.paramName`', () => {
+ beforeEach(() => {
+ window.location.search = '?invited_page=2';
+ });
+
+ const expectInvitedTabActive = () => {
+ expect(findActiveTab().text()).toContain('Invited');
+ };
+
+ describe('when tab has a count', () => {
+ it('sets tab that corresponds to pagination param as active tab', async () => {
+ await createComponent();
+
+ expectInvitedTabActive();
+ });
+ });
+
+ describe('when tab does not have a count', () => {
+ it('sets tab that corresponds to pagination param as active tab', async () => {
+ await createComponent({ totalItems: 0 });
+
+ expectInvitedTabActive();
+ });
+ });
+ });
+
+ describe('when `canManageMembers` is `false`', () => {
+ it('shows all tabs except `Invited` and `Access requests`', async () => {
+ await createComponent({ options: { provide: { canManageMembers: false } } });
+
+ expect(findTabByText('Members')).not.toBeUndefined();
+ expect(findTabByText('Groups')).not.toBeUndefined();
+ expect(findTabByText('Invited')).toBeUndefined();
+ expect(findTabByText('Access requests')).toBeUndefined();
+ });
+ });
+});
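The new spec controls the page URL by deleting jsdom's `window.location` and substituting a `URL` instance, then writing to `search` per test. A standalone sketch of that pattern (the query value is illustrative):

```javascript
// jsdom's location is not writable, so the spec deletes it and substitutes a
// URL instance, which exposes compatible `href`/`search` properties.
describe('query-param driven behaviour', () => {
  const originalLocation = window.location;

  beforeEach(() => {
    delete window.location;
    window.location = new URL('https://localhost');
  });

  afterEach(() => {
    window.location = originalLocation;
  });

  it('reads the search param the test sets up', () => {
    window.location.search = '?search_groups=foo+bar'; // illustrative value

    expect(new URLSearchParams(window.location.search).get('search_groups')).toBe('foo bar');
  });
});
```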
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 5cf1f40a8f4..5308d7651a3 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -1,4 +1,4 @@
-import { GlBadge, GlTable } from '@gitlab/ui';
+import { GlBadge, GlPagination, GlTable } from '@gitlab/ui';
import {
getByText as getByTextHelper,
getByTestId as getByTestIdHelper,
@@ -6,6 +6,7 @@ import {
} from '@testing-library/dom';
import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
import Vuex from 'vuex';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import CreatedAt from '~/members/components/table/created_at.vue';
import ExpirationDatepicker from '~/members/components/table/expiration_datepicker.vue';
import ExpiresAt from '~/members/components/table/expires_at.vue';
@@ -16,7 +17,13 @@ import MembersTable from '~/members/components/table/members_table.vue';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
import { MEMBER_TYPES } from '~/members/constants';
import * as initUserPopovers from '~/user_popovers';
-import { member as memberMock, directMember, invite, accessRequest } from '../../mock_data';
+import {
+ member as memberMock,
+ directMember,
+ invite,
+ accessRequest,
+ pagination,
+} from '../../mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -36,6 +43,7 @@ describe('MembersTable', () => {
table: { 'data-qa-selector': 'members_list' },
tr: { 'data-qa-selector': 'member_row' },
},
+ pagination,
...state,
},
},
@@ -66,6 +74,8 @@ describe('MembersTable', () => {
});
};
+ const url = 'https://localhost/foo-bar/-/project_members';
+
const getByText = (text, options) =>
createWrapper(getByTextHelper(wrapper.element, text, options));
@@ -78,6 +88,14 @@ describe('MembersTable', () => {
`[data-label="${tableCellLabel}"][role="cell"]`,
);
+ const findPagination = () => extendedWrapper(wrapper.find(GlPagination));
+
+ const expectCorrectLinkToPage2 = () => {
+ expect(findPagination().findByText('2', { selector: 'a' }).attributes('href')).toBe(
+ `${url}?page=2`,
+ );
+ };
+
afterEach(() => {
wrapper.destroy();
wrapper = null;
@@ -219,4 +237,80 @@ describe('MembersTable', () => {
expect(findTable().find('tbody tr').attributes('data-qa-selector')).toBe('member_row');
});
+
+ describe('when required pagination data is provided', () => {
+ beforeEach(() => {
+ delete window.location;
+ });
+
+ it('renders `gl-pagination` component with correct props', () => {
+ window.location = new URL(url);
+
+ createComponent();
+
+ const glPagination = findPagination();
+
+ expect(glPagination.exists()).toBe(true);
+ expect(glPagination.props()).toMatchObject({
+ value: pagination.currentPage,
+ perPage: pagination.perPage,
+ totalItems: pagination.totalItems,
+ prevText: 'Prev',
+ nextText: 'Next',
+ labelNextPage: 'Go to next page',
+ labelPrevPage: 'Go to previous page',
+ align: 'center',
+ });
+ });
+
+ it('uses `pagination.paramName` to generate the pagination links', () => {
+ window.location = new URL(url);
+
+ createComponent({
+ pagination: {
+ currentPage: 1,
+ perPage: 5,
+ totalItems: 10,
+ paramName: 'page',
+ },
+ });
+
+ expectCorrectLinkToPage2();
+ });
+
+ it('removes any url params defined as `null` in the `params` attribute', () => {
+ window.location = new URL(`${url}?search_groups=foo`);
+
+ createComponent({
+ pagination: {
+ currentPage: 1,
+ perPage: 5,
+ totalItems: 10,
+ paramName: 'page',
+ params: { search_groups: null },
+ },
+ });
+
+ expectCorrectLinkToPage2();
+ });
+ });
+
+ describe.each`
+ attribute | value
+ ${'paramName'} | ${null}
+ ${'currentPage'} | ${null}
+ ${'perPage'} | ${null}
+ ${'totalItems'} | ${0}
+ `('when pagination.$attribute is $value', ({ attribute, value }) => {
+ it('does not render `gl-pagination`', () => {
+ createComponent({
+ pagination: {
+ ...pagination,
+ [attribute]: value,
+ },
+ });
+
+ expect(findPagination().exists()).toBe(false);
+ });
+ });
});
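The pagination cases above use Jest's tagged-template form of `describe.each`: the first row names the columns, every following row generates one `describe` block, and `$column` placeholders are interpolated into the title. A tiny self-contained example of the same mechanism:

```javascript
// Each data row below produces one generated describe block.
describe.each`
  input     | expected
  ${null}   | ${false}
  ${0}      | ${false}
  ${'page'} | ${true}
`('when the value is $input', ({ input, expected }) => {
  it(`treats it as ${expected ? 'usable' : 'missing'}`, () => {
    expect(Boolean(input)).toBe(expected);
  });
});
```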
diff --git a/spec/frontend/members/index_spec.js b/spec/frontend/members/index_spec.js
index 8b645d9b059..b07534ae4ed 100644
--- a/spec/frontend/members/index_spec.js
+++ b/spec/frontend/members/index_spec.js
@@ -2,7 +2,7 @@ import { createWrapper } from '@vue/test-utils';
import MembersApp from '~/members/components/app.vue';
import { MEMBER_TYPES } from '~/members/constants';
import { initMembersApp } from '~/members/index';
-import { membersJsonString, members } from './mock_data';
+import { members, pagination, dataAttribute } from './mock_data';
describe('initMembersApp', () => {
let el;
@@ -23,10 +23,7 @@ describe('initMembersApp', () => {
beforeEach(() => {
el = document.createElement('div');
- el.setAttribute('data-members', membersJsonString);
- el.setAttribute('data-source-id', '234');
- el.setAttribute('data-can-manage-members', 'true');
- el.setAttribute('data-member-path', '/groups/foo-bar/-/group_members/:id');
+ el.setAttribute('data-members-data', dataAttribute);
window.gon = { current_user_id: 123 };
});
@@ -50,6 +47,12 @@ describe('initMembersApp', () => {
expect(vm.$store.state[MEMBER_TYPES.user].members).toEqual(members);
});
+ it('parses and sets `pagination` in Vuex store', () => {
+ setup();
+
+ expect(vm.$store.state[MEMBER_TYPES.user].pagination).toEqual(pagination);
+ });
+
it('sets `tableFields` in Vuex store', () => {
setup();
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index a47b7ab2118..d0a7c36349b 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -79,3 +79,28 @@ export const directMember = { ...member, isDirectMember: true };
export const inheritedMember = { ...member, isDirectMember: false };
export const member2faEnabled = { ...member, user: { ...member.user, twoFactorEnabled: true } };
+
+export const paginationData = {
+ current_page: 1,
+ per_page: 5,
+ total_items: 10,
+ param_name: 'page',
+ params: { search_groups: null },
+};
+
+export const pagination = {
+ currentPage: 1,
+ perPage: 5,
+ totalItems: 10,
+ paramName: 'page',
+ params: { search_groups: null },
+};
+
+export const dataAttribute = JSON.stringify({
+ members,
+ pagination: paginationData,
+ source_id: 234,
+ can_manage_members: true,
+ member_path: '/groups/foo-bar/-/group_members/:id',
+ ldap_override_path: '/groups/ldap-group/-/group_members/:id/override',
+});
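For context, `dataAttribute` above is the JSON string the specs mount into a single `data-members-data` attribute. A minimal sketch of the round trip (the wiring shown is an assumption for illustration, not the app's `parseDataAttributes`):

```javascript
// The fixture is attached as one data attribute and read back through
// `dataset`, which exposes the hyphenated `data-members-data` as `membersData`.
const el = document.createElement('div');
el.setAttribute('data-members-data', dataAttribute);

const parsed = JSON.parse(el.dataset.membersData);
// parsed.pagination.current_page === 1
// parsed.source_id === 234
// parsed.member_path === '/groups/foo-bar/-/group_members/:id'
```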
diff --git a/spec/frontend/members/utils_spec.js b/spec/frontend/members/utils_spec.js
index bfb5a4bc7d3..72696979722 100644
--- a/spec/frontend/members/utils_spec.js
+++ b/spec/frontend/members/utils_spec.js
@@ -20,8 +20,9 @@ import {
member2faEnabled,
group,
invite,
- membersJsonString,
members,
+ pagination,
+ dataAttribute,
} from './mock_data';
const IS_CURRENT_USER_ID = 123;
@@ -258,20 +259,20 @@ describe('Members Utils', () => {
beforeEach(() => {
el = document.createElement('div');
- el.setAttribute('data-members', membersJsonString);
- el.setAttribute('data-source-id', '234');
- el.setAttribute('data-can-manage-members', 'true');
+ el.setAttribute('data-members-data', dataAttribute);
});
afterEach(() => {
el = null;
});
- it('correctly parses the data attributes', () => {
- expect(parseDataAttributes(el)).toEqual({
+ it('correctly parses the data attribute', () => {
+ expect(parseDataAttributes(el)).toMatchObject({
members,
+ pagination,
sourceId: 234,
canManageMembers: true,
+ memberPath: '/groups/foo-bar/-/group_members/:id',
});
});
});
diff --git a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
index eaa3b1c5d53..a09edb50f20 100644
--- a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
+++ b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
@@ -57,7 +57,7 @@ describe('Merge Conflict Resolver App', () => {
const title = findConflictsCount();
expect(title.exists()).toBe(true);
- expect(title.text().trim()).toBe('Showing 3 conflicts between test-conflicts and master');
+ expect(title.text().trim()).toBe('Showing 3 conflicts between test-conflicts and main');
});
describe('files', () => {
@@ -82,20 +82,20 @@ describe('Merge Conflict Resolver App', () => {
const interactiveButton = findFileInteractiveButton(findFiles().at(0));
const inlineButton = findFileInlineButton(findFiles().at(0));
- expect(interactiveButton.classes('active')).toBe(true);
- expect(inlineButton.classes('active')).toBe(false);
+ expect(interactiveButton.props('selected')).toBe(true);
+ expect(inlineButton.props('selected')).toBe(false);
});
it('clicking inline set inline as default', async () => {
mountComponent();
const inlineButton = findFileInlineButton(findFiles().at(0));
- expect(inlineButton.classes('active')).toBe(false);
+ expect(inlineButton.props('selected')).toBe(false);
- inlineButton.trigger('click');
+ inlineButton.vm.$emit('click');
await wrapper.vm.$nextTick();
- expect(inlineButton.classes('active')).toBe(true);
+ expect(inlineButton.props('selected')).toBe(true);
});
it('inline mode shows a inline-conflict-lines', () => {
@@ -110,7 +110,7 @@ describe('Merge Conflict Resolver App', () => {
it('parallel mode shows a parallel-conflict-lines', async () => {
mountComponent();
- findSideBySideButton().trigger('click');
+ findSideBySideButton().vm.$emit('click');
await wrapper.vm.$nextTick();
const parallelConflictLinesComponent = findParallelConflictLines(findFiles().at(0));
diff --git a/spec/frontend/merge_conflicts/mock_data.js b/spec/frontend/merge_conflicts/mock_data.js
index 8948f2a3c1e..69ba46dbe60 100644
--- a/spec/frontend/merge_conflicts/mock_data.js
+++ b/spec/frontend/merge_conflicts/mock_data.js
@@ -1,9 +1,9 @@
export const conflictsMock = {
- target_branch: 'master',
+ target_branch: 'main',
source_branch: 'test-conflicts',
commit_sha: '6dbf385a3c7bf01e09b5d2d9e5d72f8fb8c590a3',
commit_message:
- "Merge branch 'master' into 'test-conflicts'\n\n# Conflicts:\n# .gitlab-ci.yml\n# README.md",
+ "Merge branch 'main' into 'test-conflicts'\n\n# Conflicts:\n# .gitlab-ci.yml\n# README.md",
files: [
{
old_path: '.gitlab-ci.yml',
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index e873edaad3b..98503636d33 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -9,6 +9,8 @@ exports[`Dashboard template matches the default snapshot 1`] = `
metricsendpoint="/monitoring/monitor-project/-/environments/1/additional_metrics.json"
prometheusstatus=""
>
+ <alerts-deprecation-warning-stub />
+
<div
class="prometheus-graphs-header d-sm-flex flex-sm-wrap pt-2 pr-1 pb-0 pl-2 border-bottom bg-gray-light"
>
diff --git a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
index 400ac2e8f85..8af6075a416 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
@@ -28,6 +28,7 @@ describe('dashboard invalid url parameters', () => {
},
},
options,
+ provide: { hasManagedPrometheus: false },
});
};
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 5c7042d4cb5..0c2f85c7298 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -1,3 +1,4 @@
+import { GlAlert } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import VueDraggable from 'vuedraggable';
@@ -7,7 +8,6 @@ import axios from '~/lib/utils/axios_utils';
import { ESC_KEY } from '~/lib/utils/keys';
import { objectToQuery } from '~/lib/utils/url_utility';
import Dashboard from '~/monitoring/components/dashboard.vue';
-
import DashboardHeader from '~/monitoring/components/dashboard_header.vue';
import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
import EmptyState from '~/monitoring/components/empty_state.vue';
@@ -17,6 +17,7 @@ import LinksSection from '~/monitoring/components/links_section.vue';
import { dashboardEmptyStates, metricStates } from '~/monitoring/constants';
import { createStore } from '~/monitoring/stores';
import * as types from '~/monitoring/stores/mutation_types';
+import AlertDeprecationWarning from '~/vue_shared/components/alerts_deprecation_warning.vue';
import {
metricsDashboardViewModel,
metricsDashboardPanelCount,
@@ -46,6 +47,7 @@ describe('Dashboard', () => {
stubs: {
DashboardHeader,
},
+ provide: { hasManagedPrometheus: false },
...options,
});
};
@@ -59,6 +61,9 @@ describe('Dashboard', () => {
'dashboard-panel': true,
'dashboard-header': DashboardHeader,
},
+ provide: {
+ hasManagedPrometheus: false,
+ },
...options,
});
};
@@ -812,4 +817,25 @@ describe('Dashboard', () => {
expect(dashboardPanel.exists()).toBe(true);
});
});
+
+ describe('deprecation notice', () => {
+ beforeEach(() => {
+ setupStoreWithData(store);
+ });
+
+ const findDeprecationNotice = () =>
+ wrapper.find(AlertDeprecationWarning).findComponent(GlAlert);
+
+ it('shows the deprecation notice when available', () => {
+ createMountedWrapper({}, { provide: { hasManagedPrometheus: true } });
+
+ expect(findDeprecationNotice().exists()).toBe(true);
+ });
+
+ it('hides the deprecation notice when not available', () => {
+ createMountedWrapper();
+
+ expect(findDeprecationNotice().exists()).toBe(false);
+ });
+ });
});
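The monitoring specs above all thread `hasManagedPrometheus` through the `provide` mounting option; the component under test reads it with `inject`. A minimal sketch of that pairing (the component and flag usage below are illustrative, not the dashboard code itself):

```javascript
import { shallowMount } from '@vue/test-utils';

// Illustrative component: injects the flag the specs above provide at mount time.
const DeprecationAwareComponent = {
  inject: ['hasManagedPrometheus'],
  render(h) {
    return h('div', this.hasManagedPrometheus ? 'deprecated setup' : 'ok');
  },
};

it('reads the injected flag', () => {
  const wrapper = shallowMount(DeprecationAwareComponent, {
    provide: { hasManagedPrometheus: true },
  });

  expect(wrapper.text()).toBe('deprecated setup');
});
```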
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index 9830b6d047f..090613b0f1e 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -31,6 +31,7 @@ describe('dashboard invalid url parameters', () => {
store,
stubs: { 'graph-group': true, 'dashboard-panel': true, 'dashboard-header': DashboardHeader },
...options,
+ provide: { hasManagedPrometheus: false },
});
};
diff --git a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
index c9241834789..589354e7849 100644
--- a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
+++ b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
@@ -6,7 +6,7 @@ import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue'
import { dashboardGitResponse } from '../mock_data';
-const defaultBranch = 'master';
+const defaultBranch = 'main';
const starredDashboards = dashboardGitResponse.filter(({ starred }) => starred);
const notStarredDashboards = dashboardGitResponse.filter(({ starred }) => !starred);
diff --git a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
index 51b4106d4b1..0dd3afd7c83 100644
--- a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
+++ b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
@@ -16,7 +16,7 @@ const createMountedWrapper = (props = {}) => {
};
describe('DuplicateDashboardForm', () => {
- const defaultBranch = 'master';
+ const defaultBranch = 'main';
const findByRef = (ref) => wrapper.find({ ref });
const setValue = (ref, val) => {
diff --git a/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js b/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
index 1bc89e509b5..7e7a7a66d77 100644
--- a/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
+++ b/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
@@ -37,7 +37,7 @@ describe('duplicate dashboard modal', () => {
return shallowMount(DuplicateDashboardModal, {
propsData: {
- defaultBranch: 'master',
+ defaultBranch: 'main',
modalId: 'id',
},
store,
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 29a7c86491d..00be5868ba3 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -15,7 +15,7 @@ const customDashboardsData = new Array(30).fill(null).map((_, idx) => ({
can_edit: true,
system_dashboard: false,
out_of_the_box_dashboard: false,
- project_blob_path: `${mockProjectDir}/blob/master/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
+ project_blob_path: `${mockProjectDir}/blob/main/dashboards/.gitlab/dashboards/dashboard_${idx}.yml`,
path: `.gitlab/dashboards/dashboard_${idx}.yml`,
starred: false,
}));
@@ -32,7 +32,7 @@ export const anomalyDeploymentData = [
iid: 3,
sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
ref: {
- name: 'master',
+ name: 'main',
},
created_at: '2019-08-19T22:00:00.000Z',
deployed_at: '2019-08-19T22:01:00.000Z',
@@ -44,7 +44,7 @@ export const anomalyDeploymentData = [
iid: 2,
sha: 'f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
ref: {
- name: 'master',
+ name: 'main',
},
created_at: '2019-08-19T23:00:00.000Z',
deployed_at: '2019-08-19T23:00:00.000Z',
@@ -61,7 +61,7 @@ export const deploymentData = [
commitUrl:
'http://test.host/frontend-fixtures/environments-project/-/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
ref: {
- name: 'master',
+ name: 'main',
},
created_at: '2019-07-16T10:14:25.589Z',
tag: false,
@@ -75,7 +75,7 @@ export const deploymentData = [
commitUrl:
'http://test.host/frontend-fixtures/environments-project/-/commit/f5bcd1d9dac6fa4137e2510b9ccd134ef2e84187',
ref: {
- name: 'master',
+ name: 'main',
},
created_at: '2019-07-16T11:14:25.589Z',
tag: false,
@@ -187,7 +187,7 @@ export const dashboardGitResponse = [
can_edit: true,
system_dashboard: false,
out_of_the_box_dashboard: false,
- project_blob_path: `${mockProjectDir}/-/blob/master/.gitlab/dashboards/dashboard.yml`,
+ project_blob_path: `${mockProjectDir}/-/blob/main/.gitlab/dashboards/dashboard.yml`,
path: '.gitlab/dashboards/dashboard.yml',
starred: true,
user_starred_path: `${mockProjectDir}/metrics_user_starred_dashboards?dashboard_path=.gitlab/dashboards/dashboard.yml`,
@@ -224,7 +224,7 @@ export const selfMonitoringDashboardGitResponse = [
can_edit: true,
system_dashboard: false,
out_of_the_box_dashboard: false,
- project_blob_path: `${mockProjectDir}/-/blob/master/.gitlab/dashboards/dashboard.yml`,
+ project_blob_path: `${mockProjectDir}/-/blob/main/.gitlab/dashboards/dashboard.yml`,
path: '.gitlab/dashboards/dashboard.yml',
starred: true,
user_starred_path: `${mockProjectDir}/metrics_user_starred_dashboards?dashboard_path=.gitlab/dashboards/dashboard.yml`,
@@ -572,7 +572,7 @@ export const storeVariables = [
];
export const dashboardHeaderProps = {
- defaultBranch: 'master',
+ defaultBranch: 'main',
isRearrangingPanels: false,
selectedTimeRange: {
start: '2020-01-01T00:00:00.000Z',
@@ -581,7 +581,7 @@ export const dashboardHeaderProps = {
};
export const dashboardActionsMenuProps = {
- defaultBranch: 'master',
+ defaultBranch: 'main',
addingMetricsAvailable: true,
customMetricsPath: 'https://path/to/customMetrics',
validateQueryPath: 'https://path/to/validateQuery',
diff --git a/spec/frontend/monitoring/router_spec.js b/spec/frontend/monitoring/router_spec.js
index b027d60f61e..2a712d4361f 100644
--- a/spec/frontend/monitoring/router_spec.js
+++ b/spec/frontend/monitoring/router_spec.js
@@ -20,6 +20,8 @@ const MockApp = {
template: `<router-view :dashboard-props="dashboardProps"/>`,
};
+const provide = { hasManagedPrometheus: false };
+
describe('Monitoring router', () => {
let router;
let store;
@@ -37,6 +39,7 @@ describe('Monitoring router', () => {
localVue,
store,
router,
+ provide,
});
};
diff --git a/spec/frontend/nav/components/top_nav_app_spec.js b/spec/frontend/nav/components/top_nav_app_spec.js
new file mode 100644
index 00000000000..06700ce748e
--- /dev/null
+++ b/spec/frontend/nav/components/top_nav_app_spec.js
@@ -0,0 +1,68 @@
+import { GlNavItemDropdown, GlTooltip } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import TopNavApp from '~/nav/components/top_nav_app.vue';
+import TopNavDropdownMenu from '~/nav/components/top_nav_dropdown_menu.vue';
+import { TEST_NAV_DATA } from '../mock_data';
+
+describe('~/nav/components/top_nav_app.vue', () => {
+ let wrapper;
+
+ const createComponent = (mountFn = shallowMount) => {
+ wrapper = mountFn(TopNavApp, {
+ propsData: {
+ navData: TEST_NAV_DATA,
+ },
+ });
+ };
+
+ const findNavItemDropdown = () => wrapper.findComponent(GlNavItemDropdown);
+ const findMenu = () => wrapper.findComponent(TopNavDropdownMenu);
+ const findTooltip = () => wrapper.findComponent(GlTooltip);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders nav item dropdown', () => {
+ expect(findNavItemDropdown().attributes('href')).toBeUndefined();
+ expect(findNavItemDropdown().attributes()).toMatchObject({
+ icon: 'dot-grid',
+ text: TEST_NAV_DATA.activeTitle,
+ 'no-flip': '',
+ });
+ });
+
+ it('renders top nav dropdown menu', () => {
+ expect(findMenu().props()).toStrictEqual({
+ primary: TEST_NAV_DATA.primary,
+ secondary: TEST_NAV_DATA.secondary,
+ views: TEST_NAV_DATA.views,
+ });
+ });
+
+ it('renders tooltip', () => {
+ expect(findTooltip().attributes()).toMatchObject({
+ 'boundary-padding': '0',
+ placement: 'right',
+ title: TopNavApp.TOOLTIP,
+ });
+ });
+ });
+
+ describe('when full mounted', () => {
+ beforeEach(() => {
+ createComponent(mount);
+ });
+
+ it('has dropdown toggle as tooltip target', () => {
+ const targetFn = findTooltip().props('target');
+
+ expect(targetFn()).toBe(wrapper.find('.js-top-nav-dropdown-toggle').element);
+ });
+ });
+});
diff --git a/spec/frontend/nav/components/top_nav_container_view_spec.js b/spec/frontend/nav/components/top_nav_container_view_spec.js
new file mode 100644
index 00000000000..b08d75f36ce
--- /dev/null
+++ b/spec/frontend/nav/components/top_nav_container_view_spec.js
@@ -0,0 +1,114 @@
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import FrequentItemsApp from '~/frequent_items/components/app.vue';
+import { FREQUENT_ITEMS_PROJECTS } from '~/frequent_items/constants';
+import eventHub from '~/frequent_items/event_hub';
+import TopNavContainerView from '~/nav/components/top_nav_container_view.vue';
+import TopNavMenuItem from '~/nav/components/top_nav_menu_item.vue';
+import VuexModuleProvider from '~/vue_shared/components/vuex_module_provider.vue';
+import { TEST_NAV_DATA } from '../mock_data';
+
+const DEFAULT_PROPS = {
+ frequentItemsDropdownType: FREQUENT_ITEMS_PROJECTS.namespace,
+ frequentItemsVuexModule: FREQUENT_ITEMS_PROJECTS.vuexModule,
+ linksPrimary: TEST_NAV_DATA.primary,
+ linksSecondary: TEST_NAV_DATA.secondary,
+};
+const TEST_OTHER_PROPS = {
+ namespace: 'projects',
+ currentUserName: '',
+ currentItem: {},
+};
+
+describe('~/nav/components/top_nav_container_view.vue', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(TopNavContainerView, {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...TEST_OTHER_PROPS,
+ ...props,
+ },
+ });
+ };
+
+ const findMenuItems = (parent = wrapper) => parent.findAll(TopNavMenuItem);
+ const findMenuItemsModel = (parent = wrapper) =>
+ findMenuItems(parent).wrappers.map((x) => x.props());
+ const findMenuItemGroups = () => wrapper.findAll('[data-testid="menu-item-group"]');
+ const findMenuItemGroupsModel = () => findMenuItemGroups().wrappers.map(findMenuItemsModel);
+ const findFrequentItemsApp = () => {
+ const parent = wrapper.findComponent(VuexModuleProvider);
+
+ return {
+ vuexModule: parent.props('vuexModule'),
+ props: parent.findComponent(FrequentItemsApp).props(),
+ };
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each(['projects', 'groups'])(
+ 'emits frequent items event to event hub (%s)',
+ async (frequentItemsDropdownType) => {
+ const listener = jest.fn();
+ eventHub.$on(`${frequentItemsDropdownType}-dropdownOpen`, listener);
+ createComponent({ frequentItemsDropdownType });
+
+ expect(listener).not.toHaveBeenCalled();
+
+ await nextTick();
+
+ expect(listener).toHaveBeenCalled();
+ },
+ );
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders frequent items app', () => {
+ expect(findFrequentItemsApp()).toEqual({
+ vuexModule: DEFAULT_PROPS.frequentItemsVuexModule,
+ props: TEST_OTHER_PROPS,
+ });
+ });
+
+ it('renders menu item groups', () => {
+ expect(findMenuItemGroupsModel()).toEqual([
+ TEST_NAV_DATA.primary.map((menuItem) => ({ menuItem })),
+ TEST_NAV_DATA.secondary.map((menuItem) => ({ menuItem })),
+ ]);
+ });
+
+ it('only the first group does not have margin top', () => {
+ expect(findMenuItemGroups().wrappers.map((x) => x.classes('gl-mt-3'))).toEqual([false, true]);
+ });
+
+ it('only the first menu item does not have margin top', () => {
+ const actual = findMenuItems(findMenuItemGroups().at(1)).wrappers.map((x) =>
+ x.classes('gl-mt-1'),
+ );
+
+ expect(actual).toEqual([false, ...TEST_NAV_DATA.secondary.slice(1).fill(true)]);
+ });
+ });
+
+ describe('without secondary links', () => {
+ beforeEach(() => {
+ createComponent({
+ linksSecondary: [],
+ });
+ });
+
+ it('renders one menu item group', () => {
+ expect(findMenuItemGroupsModel()).toEqual([
+ TEST_NAV_DATA.primary.map((menuItem) => ({ menuItem })),
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js b/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
new file mode 100644
index 00000000000..d9bba22238a
--- /dev/null
+++ b/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
@@ -0,0 +1,157 @@
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import TopNavDropdownMenu from '~/nav/components/top_nav_dropdown_menu.vue';
+import KeepAliveSlots from '~/vue_shared/components/keep_alive_slots.vue';
+import { TEST_NAV_DATA } from '../mock_data';
+
+const SECONDARY_GROUP_CLASSES = TopNavDropdownMenu.SECONDARY_GROUP_CLASS.split(' ');
+
+describe('~/nav/components/top_nav_dropdown_menu.vue', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(TopNavDropdownMenu, {
+ propsData: {
+ primary: TEST_NAV_DATA.primary,
+ secondary: TEST_NAV_DATA.secondary,
+ views: TEST_NAV_DATA.views,
+ ...props,
+ },
+ });
+ };
+
+ const findMenuItems = (parent = wrapper) => parent.findAll('[data-testid="menu-item"]');
+ const findMenuItemsModel = (parent = wrapper) =>
+ findMenuItems(parent).wrappers.map((x) => ({
+ menuItem: x.props('menuItem'),
+ isActive: x.classes('active'),
+ }));
+ const findMenuItemGroups = () => wrapper.findAll('[data-testid="menu-item-group"]');
+ const findMenuItemGroupsModel = () =>
+ findMenuItemGroups().wrappers.map((x) => ({
+ classes: x.classes(),
+ items: findMenuItemsModel(x),
+ }));
+ const findMenuSidebar = () => wrapper.find('[data-testid="menu-sidebar"]');
+ const findMenuSubview = () => wrapper.findComponent(KeepAliveSlots);
+ const hasFullWidthMenuSidebar = () => findMenuSidebar().classes('gl-w-full');
+
+ const createItemsGroupModelExpectation = ({
+ primary = TEST_NAV_DATA.primary,
+ secondary = TEST_NAV_DATA.secondary,
+ activeIndex = -1,
+ } = {}) => [
+ {
+ classes: [],
+ items: primary.map((menuItem, index) => ({ isActive: index === activeIndex, menuItem })),
+ },
+ {
+ classes: SECONDARY_GROUP_CLASSES,
+ items: secondary.map((menuItem) => ({ isActive: false, menuItem })),
+ },
+ ];
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders menu item groups', () => {
+ expect(findMenuItemGroupsModel()).toEqual(createItemsGroupModelExpectation());
+ });
+
+ it('has full width menu sidebar', () => {
+ expect(hasFullWidthMenuSidebar()).toBe(true);
+ });
+
+ it('renders hidden subview with no slot key', () => {
+ const subview = findMenuSubview();
+
+ expect(subview.isVisible()).toBe(false);
+ expect(subview.props()).toEqual({ slotKey: '' });
+ });
+
+ it('the first menu item in a group does not render margin top', () => {
+ const actual = findMenuItems(findMenuItemGroups().at(0)).wrappers.map((x) =>
+ x.classes('gl-mt-1'),
+ );
+
+ expect(actual).toEqual([false, ...TEST_NAV_DATA.primary.slice(1).fill(true)]);
+ });
+ });
+
+ describe('with pre-initialized active view', () => {
+ const primaryWithActive = [
+ TEST_NAV_DATA.primary[0],
+ {
+ ...TEST_NAV_DATA.primary[1],
+ active: true,
+ },
+ ...TEST_NAV_DATA.primary.slice(2),
+ ];
+
+ beforeEach(() => {
+ createComponent({
+ primary: primaryWithActive,
+ });
+ });
+
+ it('renders menu item groups', () => {
+ expect(findMenuItemGroupsModel()).toEqual(
+ createItemsGroupModelExpectation({ primary: primaryWithActive, activeIndex: 1 }),
+ );
+ });
+
+ it('does not have full width menu sidebar', () => {
+ expect(hasFullWidthMenuSidebar()).toBe(false);
+ });
+
+ it('renders visible subview with slot key', () => {
+ const subview = findMenuSubview();
+
+ expect(subview.isVisible()).toBe(true);
+ expect(subview.props('slotKey')).toBe(primaryWithActive[1].view);
+ });
+
+ it('does not change view if non-view menu item is clicked', async () => {
+ const secondaryLink = findMenuItems().at(primaryWithActive.length);
+
+ // Ensure this doesn't have a view
+ expect(secondaryLink.props('menuItem').view).toBeUndefined();
+
+ secondaryLink.vm.$emit('click');
+
+ await nextTick();
+
+ expect(findMenuSubview().props('slotKey')).toBe(primaryWithActive[1].view);
+ });
+
+ describe('when other view menu item is clicked', () => {
+ let primaryLink;
+
+ beforeEach(async () => {
+ primaryLink = findMenuItems().at(0);
+ primaryLink.vm.$emit('click');
+ await nextTick();
+ });
+
+ it('clicked on link with view', () => {
+ expect(primaryLink.props('menuItem').view).toBeTruthy();
+ });
+
+ it('changes active view', () => {
+ expect(findMenuSubview().props('slotKey')).toBe(primaryWithActive[0].view);
+ });
+
+ it('changes active status on menu item', () => {
+ expect(findMenuItemGroupsModel()).toStrictEqual(
+ createItemsGroupModelExpectation({ primary: primaryWithActive, activeIndex: 0 }),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/nav/components/top_nav_menu_item_spec.js b/spec/frontend/nav/components/top_nav_menu_item_spec.js
new file mode 100644
index 00000000000..579af13d08a
--- /dev/null
+++ b/spec/frontend/nav/components/top_nav_menu_item_spec.js
@@ -0,0 +1,74 @@
+import { GlButton, GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import TopNavMenuItem from '~/nav/components/top_nav_menu_item.vue';
+
+const TEST_MENU_ITEM = {
+ title: 'Cheeseburger',
+ icon: 'search',
+ href: '/pretty/good/burger',
+ view: 'burger-view',
+};
+
+describe('~/nav/components/top_nav_menu_item.vue', () => {
+ let listener;
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(TopNavMenuItem, {
+ propsData: {
+ menuItem: TEST_MENU_ITEM,
+ ...props,
+ },
+ listeners: {
+ click: listener,
+ },
+ });
+ };
+
+ const findButton = () => wrapper.find(GlButton);
+ const findButtonIcons = () =>
+ findButton()
+ .findAllComponents(GlIcon)
+ .wrappers.map((x) => x.props('name'));
+
+ beforeEach(() => {
+ listener = jest.fn();
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders button href and text', () => {
+ const button = findButton();
+
+ expect(button.attributes('href')).toBe(TEST_MENU_ITEM.href);
+ expect(button.text()).toBe(TEST_MENU_ITEM.title);
+ });
+
+ it('passes listeners to button', () => {
+ expect(listener).not.toHaveBeenCalled();
+
+ findButton().vm.$emit('click', 'TEST');
+
+ expect(listener).toHaveBeenCalledWith('TEST');
+ });
+ });
+
+ describe.each`
+ desc | menuItem | expectedIcons
+ ${'default'} | ${TEST_MENU_ITEM} | ${[TEST_MENU_ITEM.icon, 'chevron-right']}
+ ${'with no icon'} | ${{ ...TEST_MENU_ITEM, icon: null }} | ${['chevron-right']}
+ ${'with no view'} | ${{ ...TEST_MENU_ITEM, view: null }} | ${[TEST_MENU_ITEM.icon]}
+ ${'with no icon or view'} | ${{ ...TEST_MENU_ITEM, view: null, icon: null }} | ${[]}
+ `('$desc', ({ menuItem, expectedIcons }) => {
+ beforeEach(() => {
+ createComponent({ menuItem });
+ });
+
+ it(`renders expected icons ${JSON.stringify(expectedIcons)}`, () => {
+ expect(findButtonIcons()).toEqual(expectedIcons);
+ });
+ });
+});
diff --git a/spec/frontend/nav/mock_data.js b/spec/frontend/nav/mock_data.js
new file mode 100644
index 00000000000..2987d8deb16
--- /dev/null
+++ b/spec/frontend/nav/mock_data.js
@@ -0,0 +1,35 @@
+import { range } from 'lodash';
+
+export const TEST_NAV_DATA = {
+ activeTitle: 'Test Active Title',
+ primary: [
+ ...['projects', 'groups'].map((view) => ({
+ id: view,
+ href: null,
+ title: view,
+ view,
+ })),
+ ...range(0, 2).map((idx) => ({
+ id: `primary-link-${idx}`,
+ href: `/path/to/primary/${idx}`,
+ title: `Title ${idx}`,
+ })),
+ ],
+ secondary: range(0, 2).map((idx) => ({
+ id: `secondary-link-${idx}`,
+ href: `/path/to/secondary/${idx}`,
+ title: `SecTitle ${idx}`,
+ })),
+ views: {
+ projects: {
+ namespace: 'projects',
+ currentUserName: '',
+ currentItem: {},
+ },
+ groups: {
+ namespace: 'groups',
+ currentUserName: '',
+ currentItem: {},
+ },
+ },
+};
diff --git a/spec/frontend/notebook/cells/markdown_spec.js b/spec/frontend/notebook/cells/markdown_spec.js
index 219d74595bd..d250ffed1a9 100644
--- a/spec/frontend/notebook/cells/markdown_spec.js
+++ b/spec/frontend/notebook/cells/markdown_spec.js
@@ -27,7 +27,7 @@ describe('Markdown component', () => {
return vm.$nextTick();
});
- it('does not render promot', () => {
+ it('does not render prompt', () => {
expect(vm.$el.querySelector('.prompt span')).toBeNull();
});
@@ -50,6 +50,41 @@ describe('Markdown component', () => {
expect(vm.$el.querySelector('a').getAttribute('href')).toBeNull();
});
+ describe('tables', () => {
+ beforeEach(() => {
+ json = getJSONFixture('blob/notebook/markdown-table.json');
+ });
+
+ it('renders images and text', () => {
+ vm = new Component({
+ propsData: {
+ cell: json.cells[0],
+ },
+ }).$mount();
+
+ return vm.$nextTick().then(() => {
+ const images = vm.$el.querySelectorAll('img');
+ expect(images.length).toBe(5);
+
+ const columns = vm.$el.querySelectorAll('td');
+ expect(columns.length).toBe(6);
+
+ expect(columns[0].textContent).toEqual('Hello ');
+ expect(columns[1].textContent).toEqual('Test ');
+ expect(columns[2].textContent).toEqual('World ');
+ expect(columns[3].textContent).toEqual('Fake ');
+ expect(columns[4].textContent).toEqual('External image: ');
+ expect(columns[5].textContent).toEqual('Empty');
+
+ expect(columns[0].innerHTML).toContain('<img src="data:image/jpeg;base64');
+ expect(columns[1].innerHTML).toContain('<img src="data:image/png;base64');
+ expect(columns[2].innerHTML).toContain('<img src="data:image/jpeg;base64');
+ expect(columns[3].innerHTML).toContain('<img>');
+ expect(columns[4].innerHTML).toContain('<img src="https://www.google.com/');
+ });
+ });
+ });
+
describe('katex', () => {
beforeEach(() => {
json = getJSONFixture('blob/notebook/math.json');
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index b717bab7c3f..b140eea9439 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -437,6 +437,7 @@ describe('issue_comment_form component', () => {
await findCloseReopenButton().trigger('click');
await wrapper.vm.$nextTick;
+ await wrapper.vm.$nextTick;
expect(flash).toHaveBeenCalledWith(
`Something went wrong while closing the ${type}. Please try again later.`,
@@ -472,6 +473,7 @@ describe('issue_comment_form component', () => {
await findCloseReopenButton().trigger('click');
await wrapper.vm.$nextTick;
+ await wrapper.vm.$nextTick;
expect(flash).toHaveBeenCalledWith(
`Something went wrong while reopening the ${type}. Please try again later.`,
@@ -489,6 +491,8 @@ describe('issue_comment_form component', () => {
await findCloseReopenButton().trigger('click');
+ await wrapper.vm.$nextTick();
+
expect(refreshUserMergeRequestCounts).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/notes/old_notes_spec.js b/spec/frontend/notes/old_notes_spec.js
index 432b660c4b3..0cf43b8fd97 100644
--- a/spec/frontend/notes/old_notes_spec.js
+++ b/spec/frontend/notes/old_notes_spec.js
@@ -28,7 +28,7 @@ window.gl = window.gl || {};
gl.utils = gl.utils || {};
gl.utils.disableButtonIfEmptyField = () => {};
-// the following test is unreliable and failing in master 2-3 times a day
+// the following test is unreliable and failing in main 2-3 times a day
// see https://gitlab.com/gitlab-org/gitlab/issues/206906#note_290602581
// eslint-disable-next-line jest/no-disabled-tests
describe.skip('Old Notes (~/notes.js)', () => {
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index f972ff0d2e4..9b7456d54bc 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -253,85 +253,6 @@ describe('Actions Notes Store', () => {
});
});
- describe('fetchData', () => {
- describe('given there are no notes', () => {
- const lastFetchedAt = '13579';
-
- beforeEach(() => {
- axiosMock
- .onGet(notesDataMock.notesPath)
- .replyOnce(200, { notes: [], last_fetched_at: lastFetchedAt });
- });
-
- it('should commit SET_LAST_FETCHED_AT', () =>
- testAction(
- actions.fetchData,
- undefined,
- { notesData: notesDataMock },
- [{ type: 'SET_LAST_FETCHED_AT', payload: lastFetchedAt }],
- [],
- ));
- });
-
- describe('given there are notes', () => {
- const lastFetchedAt = '12358';
-
- beforeEach(() => {
- axiosMock
- .onGet(notesDataMock.notesPath)
- .replyOnce(200, { notes: discussionMock.notes, last_fetched_at: lastFetchedAt });
- });
-
- it('should dispatch updateOrCreateNotes, startTaskList and commit SET_LAST_FETCHED_AT', () =>
- testAction(
- actions.fetchData,
- undefined,
- { notesData: notesDataMock },
- [{ type: 'SET_LAST_FETCHED_AT', payload: lastFetchedAt }],
- [
- { type: 'updateOrCreateNotes', payload: discussionMock.notes },
- { type: 'startTaskList' },
- { type: 'updateResolvableDiscussionsCounts' },
- ],
- ));
- });
-
- describe('paginated notes feature flag enabled', () => {
- const lastFetchedAt = '12358';
-
- beforeEach(() => {
- window.gon = { features: { paginatedNotes: true } };
-
- axiosMock.onGet(notesDataMock.notesPath).replyOnce(200, {
- notes: discussionMock.notes,
- more: false,
- last_fetched_at: lastFetchedAt,
- });
- });
-
- afterEach(() => {
- window.gon = null;
- });
-
- it('should dispatch setFetchingState, setNotesFetchedState, setLoadingState, updateOrCreateNotes, startTaskList and commit SET_LAST_FETCHED_AT', () => {
- return testAction(
- actions.fetchData,
- null,
- { notesData: notesDataMock, isFetching: true },
- [{ type: 'SET_LAST_FETCHED_AT', payload: lastFetchedAt }],
- [
- { type: 'setFetchingState', payload: false },
- { type: 'setNotesFetchedState', payload: true },
- { type: 'setLoadingState', payload: false },
- { type: 'updateOrCreateNotes', payload: discussionMock.notes },
- { type: 'startTaskList' },
- { type: 'updateResolvableDiscussionsCounts' },
- ],
- );
- });
- });
- });
-
describe('poll', () => {
beforeEach((done) => {
axiosMock
diff --git a/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap
index a6bb9e868ee..8a2793c0010 100644
--- a/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap
+++ b/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap
@@ -1,16 +1,16 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`MavenInstallation gradle renders all the messages 1`] = `
+exports[`MavenInstallation groovy renders all the messages 1`] = `
<div>
<installation-title-stub
- options="[object Object],[object Object]"
+ options="[object Object],[object Object],[object Object]"
packagetype="maven"
/>
<code-instruction-stub
class="gl-mb-5"
copytext="Copy Gradle Groovy DSL install command"
- instruction="foo/gradle/install"
+ instruction="foo/gradle/groovy/install"
label="Gradle Groovy DSL install command"
trackingaction="copy_gradle_install_command"
trackinglabel="code_instruction"
@@ -18,7 +18,7 @@ exports[`MavenInstallation gradle renders all the messages 1`] = `
<code-instruction-stub
copytext="Copy add Gradle Groovy DSL repository command"
- instruction="foo/gradle/add/source"
+ instruction="foo/gradle/groovy/add/source"
label="Add Gradle Groovy DSL repository command"
multiline="true"
trackingaction="copy_gradle_add_to_source_command"
@@ -27,10 +27,37 @@ exports[`MavenInstallation gradle renders all the messages 1`] = `
</div>
`;
+exports[`MavenInstallation kotlin renders all the messages 1`] = `
+<div>
+ <installation-title-stub
+ options="[object Object],[object Object],[object Object]"
+ packagetype="maven"
+ />
+
+ <code-instruction-stub
+ class="gl-mb-5"
+ copytext="Copy Gradle Kotlin DSL install command"
+ instruction="foo/gradle/kotlin/install"
+ label="Gradle Kotlin DSL install command"
+ trackingaction="copy_kotlin_install_command"
+ trackinglabel="code_instruction"
+ />
+
+ <code-instruction-stub
+ copytext="Copy add Gradle Kotlin DSL repository command"
+ instruction="foo/gradle/kotlin/add/source"
+ label="Add Gradle Kotlin DSL repository command"
+ multiline="true"
+ trackingaction="copy_kotlin_add_to_source_command"
+ trackinglabel="code_instruction"
+ />
+</div>
+`;
+
exports[`MavenInstallation maven renders all the messages 1`] = `
<div>
<installation-title-stub
- options="[object Object],[object Object]"
+ options="[object Object],[object Object],[object Object]"
packagetype="maven"
/>
diff --git a/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap
index 6903d342d6a..015c7b94dde 100644
--- a/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap
+++ b/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap
@@ -3,26 +3,18 @@
exports[`NpmInstallation renders all the messages 1`] = `
<div>
<installation-title-stub
- options="[object Object]"
+ options="[object Object],[object Object]"
packagetype="npm"
/>
<code-instruction-stub
copytext="Copy npm command"
instruction="npm i @Test/package"
- label="npm command"
+ label=""
trackingaction="copy_npm_install_command"
trackinglabel="code_instruction"
/>
- <code-instruction-stub
- copytext="Copy yarn command"
- instruction="yarn add @Test/package"
- label="yarn command"
- trackingaction="copy_yarn_install_command"
- trackinglabel="code_instruction"
- />
-
<h3
class="gl-font-lg"
>
@@ -32,19 +24,11 @@ exports[`NpmInstallation renders all the messages 1`] = `
<code-instruction-stub
copytext="Copy npm setup command"
instruction="echo @Test:registry=undefined/ >> .npmrc"
- label="npm command"
+ label=""
trackingaction="copy_npm_setup_command"
trackinglabel="code_instruction"
/>
- <code-instruction-stub
- copytext="Copy yarn setup command"
- instruction="echo \\\\\\"@Test:registry\\\\\\" \\\\\\"undefined/\\\\\\" >> .yarnrc"
- label="yarn command"
- trackingaction="copy_yarn_setup_command"
- trackinglabel="code_instruction"
- />
-
<gl-sprintf-stub
message="You may also need to setup authentication using an auth token. %{linkStart}See the documentation%{linkEnd} to find out more."
/>
diff --git a/spec/frontend/packages/details/components/maven_installation_spec.js b/spec/frontend/packages/details/components/maven_installation_spec.js
index d49a7c0b561..4972fe70a3d 100644
--- a/spec/frontend/packages/details/components/maven_installation_spec.js
+++ b/spec/frontend/packages/details/components/maven_installation_spec.js
@@ -17,8 +17,10 @@ describe('MavenInstallation', () => {
const xmlCodeBlock = 'foo/xml';
const mavenCommandStr = 'foo/command';
const mavenSetupXml = 'foo/setup';
- const gradleGroovyInstallCommandText = 'foo/gradle/install';
- const gradleGroovyAddSourceCommandText = 'foo/gradle/add/source';
+ const gradleGroovyInstallCommandText = 'foo/gradle/groovy/install';
+ const gradleGroovyAddSourceCommandText = 'foo/gradle/groovy/add/source';
+ const gradleKotlinInstallCommandText = 'foo/gradle/kotlin/install';
+ const gradleKotlinAddSourceCommandText = 'foo/gradle/kotlin/add/source';
const store = new Vuex.Store({
state: {
@@ -31,6 +33,8 @@ describe('MavenInstallation', () => {
mavenSetupXml: () => mavenSetupXml,
gradleGroovyInstalCommand: () => gradleGroovyInstallCommandText,
gradleGroovyAddSourceCommand: () => gradleGroovyAddSourceCommandText,
+ gradleKotlinInstalCommand: () => gradleKotlinInstallCommandText,
+ gradleKotlinAddSourceCommand: () => gradleKotlinAddSourceCommandText,
},
});
@@ -59,8 +63,9 @@ describe('MavenInstallation', () => {
expect(findInstallationTitle().props()).toMatchObject({
packageType: 'maven',
options: [
- { value: 'maven', label: 'Show Maven commands' },
- { value: 'groovy', label: 'Show Gradle Groovy DSL commands' },
+ { value: 'maven', label: 'Maven XML' },
+ { value: 'groovy', label: 'Gradle Groovy DSL' },
+ { value: 'kotlin', label: 'Gradle Kotlin DSL' },
],
});
});
@@ -117,9 +122,9 @@ describe('MavenInstallation', () => {
});
});
- describe('gradle', () => {
+ describe('groovy', () => {
beforeEach(() => {
- createComponent({ data: { instructionType: 'gradle' } });
+ createComponent({ data: { instructionType: 'groovy' } });
});
it('renders all the messages', () => {
@@ -146,4 +151,34 @@ describe('MavenInstallation', () => {
});
});
});
+
+ describe('kotlin', () => {
+ beforeEach(() => {
+ createComponent({ data: { instructionType: 'kotlin' } });
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the gradle install command', () => {
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
+ instruction: gradleKotlinInstallCommandText,
+ multiline: false,
+ trackingAction: TrackingActions.COPY_KOTLIN_INSTALL_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct gradle command', () => {
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
+ instruction: gradleKotlinAddSourceCommandText,
+ multiline: true,
+ trackingAction: TrackingActions.COPY_KOTLIN_ADD_TO_SOURCE_COMMAND,
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/packages/details/components/npm_installation_spec.js b/spec/frontend/packages/details/components/npm_installation_spec.js
index 09afcd4fd0a..1c49110bdf8 100644
--- a/spec/frontend/packages/details/components/npm_installation_spec.js
+++ b/spec/frontend/packages/details/components/npm_installation_spec.js
@@ -1,4 +1,5 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
import Vuex from 'vuex';
import { registryUrl as nugetPath } from 'jest/packages/details/mock_data';
import { npmPackage as packageEntity } from 'jest/packages/mock_data';
@@ -14,10 +15,13 @@ localVue.use(Vuex);
describe('NpmInstallation', () => {
let wrapper;
+ const npmInstallationCommandLabel = 'npm i @Test/package';
+ const yarnInstallationCommandLabel = 'yarn add @Test/package';
+
const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
- function createComponent() {
+ function createComponent({ data = {} } = {}) {
const store = new Vuex.Store({
state: {
packageEntity,
@@ -32,6 +36,9 @@ describe('NpmInstallation', () => {
wrapper = shallowMount(NpmInstallation, {
localVue,
store,
+ data() {
+ return data;
+ },
});
}
@@ -52,40 +59,61 @@ describe('NpmInstallation', () => {
expect(findInstallationTitle().exists()).toBe(true);
expect(findInstallationTitle().props()).toMatchObject({
packageType: 'npm',
- options: [{ value: 'npm', label: 'Show NPM commands' }],
+ options: [
+ { value: 'npm', label: 'Show NPM commands' },
+ { value: 'yarn', label: 'Show Yarn commands' },
+ ],
});
});
+
+ it('on change event updates the instructions to show', async () => {
+ createComponent();
+
+ expect(findCodeInstructions().at(0).props('instruction')).toBe(npmInstallationCommandLabel);
+ findInstallationTitle().vm.$emit('change', 'yarn');
+
+ await nextTick();
+
+ expect(findCodeInstructions().at(0).props('instruction')).toBe(yarnInstallationCommandLabel);
+ });
});
- describe('installation commands', () => {
- it('renders the correct npm command', () => {
+ describe('npm', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+ it('renders the correct installation command', () => {
expect(findCodeInstructions().at(0).props()).toMatchObject({
- instruction: 'npm i @Test/package',
+ instruction: npmInstallationCommandLabel,
multiline: false,
trackingAction: TrackingActions.COPY_NPM_INSTALL_COMMAND,
});
});
- it('renders the correct yarn command', () => {
+ it('renders the correct setup command', () => {
expect(findCodeInstructions().at(1).props()).toMatchObject({
- instruction: 'yarn add @Test/package',
+ instruction: 'echo @Test:registry=undefined/ >> .npmrc',
multiline: false,
- trackingAction: TrackingActions.COPY_YARN_INSTALL_COMMAND,
+ trackingAction: TrackingActions.COPY_NPM_SETUP_COMMAND,
});
});
});
- describe('setup commands', () => {
- it('renders the correct npm command', () => {
- expect(findCodeInstructions().at(2).props()).toMatchObject({
- instruction: 'echo @Test:registry=undefined/ >> .npmrc',
+ describe('yarn', () => {
+ beforeEach(() => {
+ createComponent({ data: { instructionType: 'yarn' } });
+ });
+
+ it('renders the correct setup command', () => {
+ expect(findCodeInstructions().at(0).props()).toMatchObject({
+ instruction: yarnInstallationCommandLabel,
multiline: false,
- trackingAction: TrackingActions.COPY_NPM_SETUP_COMMAND,
+ trackingAction: TrackingActions.COPY_YARN_INSTALL_COMMAND,
});
});
- it('renders the correct yarn command', () => {
- expect(findCodeInstructions().at(3).props()).toMatchObject({
+ it('renders the correct registry command', () => {
+ expect(findCodeInstructions().at(1).props()).toMatchObject({
instruction: 'echo \\"@Test:registry\\" \\"undefined/\\" >> .yarnrc',
multiline: false,
trackingAction: TrackingActions.COPY_YARN_SETUP_COMMAND,
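
The new change-event test above leans on Vue's nextTick to let the re-render happen before asserting. A minimal sketch of that emit-and-await pattern, using a hypothetical picker component and child name rather than the actual GitLab modules:

    import { shallowMount } from '@vue/test-utils';
    import { nextTick } from 'vue';
    // Hypothetical component with an `instructionType` data field that drives
    // which install command is rendered; used only to illustrate the flow.
    import InstructionPicker from './instruction_picker.vue';

    it('switches the rendered command after a change event', async () => {
      const wrapper = shallowMount(InstructionPicker);

      // Child emits the new instruction type, as InstallationTitle does above.
      wrapper.findComponent({ name: 'InstallationTitle' }).vm.$emit('change', 'yarn');

      // The re-render only happens on the next tick, so await it before asserting.
      await nextTick();

      expect(wrapper.text()).toContain('yarn add');
    });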
diff --git a/spec/frontend/packages/details/components/package_files_spec.js b/spec/frontend/packages/details/components/package_files_spec.js
index b4e62bac8a3..bcf1b6d56f0 100644
--- a/spec/frontend/packages/details/components/package_files_spec.js
+++ b/spec/frontend/packages/details/components/package_files_spec.js
@@ -11,8 +11,10 @@ describe('Package Files', () => {
const findAllRows = () => wrapper.findAll('[data-testid="file-row"');
const findFirstRow = () => findAllRows().at(0);
+ const findSecondRow = () => findAllRows().at(1);
const findFirstRowDownloadLink = () => findFirstRow().find('[data-testid="download-link"');
const findFirstRowCommitLink = () => findFirstRow().find('[data-testid="commit-link"');
+ const findSecondRowCommitLink = () => findSecondRow().find('[data-testid="commit-link"]');
const findFirstRowFileIcon = () => findFirstRow().find(FileIcon);
const findFirstRowCreatedAt = () => findFirstRow().find(TimeAgoTooltip);
@@ -126,5 +128,14 @@ describe('Package Files', () => {
expect(findFirstRowCommitLink().exists()).toBe(false);
});
});
+
+ describe('when only one file lacks an associated pipeline', () => {
+ it('renders the commit link only for the file that has one', () => {
+ createComponent([npmFiles[0], mavenFiles[0]]);
+
+ expect(findFirstRowCommitLink().exists()).toBe(true);
+ expect(findSecondRowCommitLink().exists()).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/packages/details/store/getters_spec.js b/spec/frontend/packages/details/store/getters_spec.js
index 005adece56e..8210511bf8f 100644
--- a/spec/frontend/packages/details/store/getters_spec.js
+++ b/spec/frontend/packages/details/store/getters_spec.js
@@ -19,6 +19,8 @@ import {
groupExists,
gradleGroovyInstalCommand,
gradleGroovyAddSourceCommand,
+ gradleKotlinInstalCommand,
+ gradleKotlinAddSourceCommand,
} from '~/packages/details/store/getters';
import {
conanPackage,
@@ -259,6 +261,24 @@ describe('Getters PackageDetails Store', () => {
});
});
+ describe('gradle kotlin string getters', () => {
+ it('gets the correct gradleKotlinInstalCommand', () => {
+ setupState();
+
+ expect(gradleKotlinInstalCommand(state)).toMatchInlineSnapshot(
+ `"implementation(\\"com.test.app:test-app:1.0-SNAPSHOT\\")"`,
+ );
+ });
+
+ it('gets the correct gradleKotlinAddSourceCommand', () => {
+ setupState();
+
+ expect(gradleKotlinAddSourceCommand(state)).toMatchInlineSnapshot(
+ `"maven(\\"foo/registry\\")"`,
+ );
+ });
+ });
+
describe('check if group', () => {
it('is set', () => {
setupState({ groupListUrl: '/groups/composer/-/packages' });
diff --git a/spec/frontend/packages/list/stores/actions_spec.js b/spec/frontend/packages/list/stores/actions_spec.js
index b5b0177eb4e..52966c1be5e 100644
--- a/spec/frontend/packages/list/stores/actions_spec.js
+++ b/spec/frontend/packages/list/stores/actions_spec.js
@@ -121,6 +121,32 @@ describe('Actions Package list store', () => {
},
);
});
+
+ it('should force the terraform_module type when forceTerraform is true', (done) => {
+ testAction(
+ actions.requestPackagesList,
+ undefined,
+ { config: { isGroupPage: false, resourceId: 1, forceTerraform: true }, sorting, filter },
+ [],
+ [
+ { type: 'setLoading', payload: true },
+ { type: 'receivePackagesListSuccess', payload: { data: 'foo', headers } },
+ { type: 'setLoading', payload: false },
+ ],
+ () => {
+ expect(Api.projectPackages).toHaveBeenCalledWith(1, {
+ params: {
+ page: 1,
+ per_page: 20,
+ sort: sorting.sort,
+ order_by: sorting.orderBy,
+ package_type: 'terraform_module',
+ },
+ });
+ done();
+ },
+ );
+ });
});
describe('receivePackagesListSuccess', () => {
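
For readers who have not met the testAction helper used in the new terraform_module case, here is a stripped-down sketch of its calling convention as used above (action, payload, state, expected mutations, expected dispatched actions, completion callback). The mocked API response and state shape are illustrative assumptions, not the fixtures used in the real spec:

    import testAction from 'helpers/vuex_action_helper';
    import Api from '~/api';
    import * as actions from '~/packages/list/stores/actions';

    it('runs the loading / success / loading-off flow', (done) => {
      // Assumed mock: resolve the project packages call with a tiny payload.
      jest.spyOn(Api, 'projectPackages').mockResolvedValue({ data: 'foo', headers: {} });

      testAction(
        actions.requestPackagesList, // action under test
        undefined,                   // payload
        {
          config: { isGroupPage: false, resourceId: 1 },
          sorting: { sort: 'asc', orderBy: 'name' },
          filter: [],
        },                           // state
        [],                          // expected mutations
        [
          { type: 'setLoading', payload: true },
          { type: 'receivePackagesListSuccess', payload: { data: 'foo', headers: {} } },
          { type: 'setLoading', payload: false },
        ],                           // expected dispatched actions
        done,
      );
    });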
diff --git a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
index 03b98478f3e..f4e617ecafe 100644
--- a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
@@ -34,6 +34,8 @@ exports[`packages_list_row renders 1`] = `
</gl-link-stub>
<!---->
+
+ <!---->
</div>
<!---->
diff --git a/spec/frontend/packages/shared/components/package_list_row_spec.js b/spec/frontend/packages/shared/components/package_list_row_spec.js
index fd54cd0f25d..bd15d48c4eb 100644
--- a/spec/frontend/packages/shared/components/package_list_row_spec.js
+++ b/spec/frontend/packages/shared/components/package_list_row_spec.js
@@ -1,8 +1,11 @@
+import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
import PackagePath from '~/packages/shared/components/package_path.vue';
import PackageTags from '~/packages/shared/components/package_tags.vue';
+import { PACKAGE_ERROR_STATUS } from '~/packages/shared/constants';
import ListItem from '~/vue_shared/components/registry/list_item.vue';
import { packageList } from '../../mock_data';
@@ -20,7 +23,10 @@ describe('packages_list_row', () => {
const findPackagePath = () => wrapper.find(PackagePath);
const findDeleteButton = () => wrapper.find('[data-testid="action-delete"]');
const findPackageIconAndName = () => wrapper.find(PackageIconAndName);
- const findInfrastructureIconAndName = () => wrapper.find(InfrastructureIconAndName);
+ const findInfrastructureIconAndName = () => wrapper.findComponent(InfrastructureIconAndName);
+ const findListItem = () => wrapper.findComponent(ListItem);
+ const findPackageLink = () => wrapper.findComponent(GlLink);
+ const findWarningIcon = () => wrapper.find('[data-testid="warning-icon"]');
const mountComponent = ({
isGroup = false,
@@ -44,6 +50,9 @@ describe('packages_list_row', () => {
showPackageType,
disableDelete,
},
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
});
};
@@ -146,4 +155,31 @@ describe('packages_list_row', () => {
expect(findInfrastructureIconAndName().exists()).toBe(true);
});
});
+
+ describe(`when the package is in ${PACKAGE_ERROR_STATUS} status`, () => {
+ beforeEach(() => {
+ mountComponent({ packageEntity: { ...packageWithoutTags, status: PACKAGE_ERROR_STATUS } });
+ });
+
+ it('list item has a disabled prop', () => {
+ expect(findListItem().props('disabled')).toBe(true);
+ });
+
+ it('details link is disabled', () => {
+ expect(findPackageLink().attributes('disabled')).toBe('true');
+ });
+
+ it('has a warning icon', () => {
+ const icon = findWarningIcon();
+ const tooltip = getBinding(icon.element, 'gl-tooltip');
+ expect(icon.props('icon')).toBe('warning');
+ expect(tooltip.value).toMatchObject({
+ title: 'Invalid Package: failed metadata extraction',
+ });
+ });
+
+ it('delete button is disabled', () => {
+ expect(findDeleteButton().props('disabled')).toBe(true);
+ });
+ });
});
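
The warning-icon tooltip assertion above relies on GitLab's mock-directive helpers. A compact sketch of that pairing, with a hypothetical component standing in for the real list row:

    import { shallowMount } from '@vue/test-utils';
    import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
    // Hypothetical component whose template applies v-gl-tooltip to an icon.
    import WarningBadge from './warning_badge.vue';

    it('exposes the tooltip binding to the test', () => {
      const wrapper = shallowMount(WarningBadge, {
        directives: {
          // Swap in the mock so no real tooltip library is needed under JSDOM.
          GlTooltip: createMockDirective(),
        },
      });

      const icon = wrapper.find('[data-testid="warning-icon"]');
      // getBinding reads back whatever value the template bound to the directive.
      const tooltip = getBinding(icon.element, 'gl-tooltip');

      expect(tooltip.value).toMatchObject({ title: 'Invalid Package: failed metadata extraction' });
    });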
diff --git a/spec/frontend/packages/shared/components/package_path_spec.js b/spec/frontend/packages/shared/components/package_path_spec.js
index 3c9cd3387ba..edbdd55c1d7 100644
--- a/spec/frontend/packages/shared/components/package_path_spec.js
+++ b/spec/frontend/packages/shared/components/package_path_spec.js
@@ -39,48 +39,66 @@ describe('PackagePath', () => {
const pathPieces = path.split('/').slice(1);
const hasTooltip = shouldExist.includes(ELLIPSIS_ICON);
- beforeEach(() => {
- mountComponent({ path });
- });
+ describe('not disabled component', () => {
+ beforeEach(() => {
+ mountComponent({ path });
+ });
- it('should have a base icon', () => {
- expect(findItem(BASE_ICON).exists()).toBe(true);
- });
+ it('should have a base icon', () => {
+ expect(findItem(BASE_ICON).exists()).toBe(true);
+ });
- it('should have a root link', () => {
- const root = findItem(ROOT_LINK);
- expect(root.exists()).toBe(true);
- expect(root.attributes('href')).toBe(rootUrl);
- });
+ it('should have a root link', () => {
+ const root = findItem(ROOT_LINK);
+ expect(root.exists()).toBe(true);
+ expect(root.attributes('href')).toBe(rootUrl);
+ });
- if (hasTooltip) {
- it('should have a tooltip', () => {
- const tooltip = findTooltip(findItem(ELLIPSIS_ICON));
- expect(tooltip).toBeDefined();
- expect(tooltip.value).toMatchObject({
- title: path,
+ if (hasTooltip) {
+ it('should have a tooltip', () => {
+ const tooltip = findTooltip(findItem(ELLIPSIS_ICON));
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toMatchObject({
+ title: path,
+ });
});
- });
- }
+ }
- if (shouldExist.length) {
- it.each(shouldExist)(`should have %s`, (element) => {
- expect(findItem(element).exists()).toBe(true);
- });
- }
+ if (shouldExist.length) {
+ it.each(shouldExist)(`should have %s`, (element) => {
+ expect(findItem(element).exists()).toBe(true);
+ });
+ }
- if (shouldNotExist.length) {
- it.each(shouldNotExist)(`should not have %s`, (element) => {
- expect(findItem(element).exists()).toBe(false);
+ if (shouldNotExist.length) {
+ it.each(shouldNotExist)(`should not have %s`, (element) => {
+ expect(findItem(element).exists()).toBe(false);
+ });
+ }
+
+ if (shouldExist.includes(LEAF_LINK)) {
+ it('the last link should be the last piece of the path', () => {
+ const leaf = findItem(LEAF_LINK);
+ expect(leaf.attributes('href')).toBe(`/${path}`);
+ expect(leaf.text()).toBe(pathPieces[pathPieces.length - 1]);
+ });
+ }
+ });
+
+ describe('disabled component', () => {
+ beforeEach(() => {
+ mountComponent({ path, disabled: true });
});
- }
- if (shouldExist.includes(LEAF_LINK)) {
- it('the last link should be the last piece of the path', () => {
- const leaf = findItem(LEAF_LINK);
- expect(leaf.attributes('href')).toBe(`/${path}`);
- expect(leaf.text()).toBe(pathPieces[pathPieces.length - 1]);
+ it('root link is disabled', () => {
+ expect(findItem(ROOT_LINK).attributes('disabled')).toBe('true');
});
- }
+
+ if (shouldExist.includes(LEAF_LINK)) {
+ it('the last link is disabled', () => {
+ expect(findItem(LEAF_LINK).attributes('disabled')).toBe('true');
+ });
+ }
+ });
});
});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/__snapshots__/settings_titles_spec.js.snap b/spec/frontend/packages_and_registries/settings/group/components/__snapshots__/settings_titles_spec.js.snap
new file mode 100644
index 00000000000..f2087733d2b
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/group/components/__snapshots__/settings_titles_spec.js.snap
@@ -0,0 +1,18 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`settings_titles renders properly 1`] = `
+<div>
+ <h5
+ class="gl-border-b-solid gl-border-b-1 gl-border-gray-200"
+ >
+
+ foo
+
+ </h5>
+
+ <p>
+ bar
+ </p>
+
+</div>
+`;
diff --git a/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js
new file mode 100644
index 00000000000..0bbb1ce3436
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/group/components/duplicates_settings_spec.js
@@ -0,0 +1,146 @@
+import { GlSprintf, GlToggle, GlFormGroup, GlFormInput } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import component from '~/packages_and_registries/settings/group/components/duplicates_settings.vue';
+
+import {
+ DUPLICATES_TOGGLE_LABEL,
+ DUPLICATES_ALLOWED_ENABLED,
+ DUPLICATES_ALLOWED_DISABLED,
+ DUPLICATES_SETTING_EXCEPTION_TITLE,
+ DUPLICATES_SETTINGS_EXCEPTION_LEGEND,
+} from '~/packages_and_registries/settings/group/constants';
+
+describe('Duplicates Settings', () => {
+ let wrapper;
+
+ const defaultProps = {
+ duplicatesAllowed: false,
+ duplicateExceptionRegex: 'foo',
+ modelNames: {
+ allowed: 'allowedModel',
+ exception: 'exceptionModel',
+ },
+ };
+
+ const mountComponent = (propsData = defaultProps) => {
+ wrapper = shallowMount(component, {
+ propsData,
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findToggle = () => wrapper.findComponent(GlToggle);
+ const findToggleLabel = () => wrapper.find('[data-testid="toggle-label"]');
+
+ const findInputGroup = () => wrapper.findComponent(GlFormGroup);
+ const findInput = () => wrapper.findComponent(GlFormInput);
+
+ it('has a toggle', () => {
+ mountComponent();
+
+ expect(findToggle().exists()).toBe(true);
+ expect(findToggle().props()).toMatchObject({
+ label: DUPLICATES_TOGGLE_LABEL,
+ value: defaultProps.duplicatesAllowed,
+ });
+ });
+
+ it('toggle emits an update event', () => {
+ mountComponent();
+
+ findToggle().vm.$emit('change', false);
+
+ expect(wrapper.emitted('update')).toStrictEqual([
+ [{ [defaultProps.modelNames.allowed]: false }],
+ ]);
+ });
+
+ describe('when the duplicates are disabled', () => {
+ it('the toggle has the disabled message', () => {
+ mountComponent();
+
+ expect(findToggleLabel().exists()).toBe(true);
+ expect(findToggleLabel().text()).toMatchInterpolatedText(DUPLICATES_ALLOWED_DISABLED);
+ });
+
+ it('shows a form group with an input field', () => {
+ mountComponent();
+
+ expect(findInputGroup().exists()).toBe(true);
+
+ expect(findInputGroup().attributes()).toMatchObject({
+ 'label-for': 'maven-duplicated-settings-regex-input',
+ label: DUPLICATES_SETTING_EXCEPTION_TITLE,
+ description: DUPLICATES_SETTINGS_EXCEPTION_LEGEND,
+ });
+ });
+
+ it('shows an input field', () => {
+ mountComponent();
+
+ expect(findInput().exists()).toBe(true);
+
+ expect(findInput().attributes()).toMatchObject({
+ id: 'maven-duplicated-settings-regex-input',
+ value: defaultProps.duplicateExceptionRegex,
+ });
+ });
+
+ it('input change event emits an update event', () => {
+ mountComponent();
+
+ findInput().vm.$emit('change', 'bar');
+
+ expect(wrapper.emitted('update')).toStrictEqual([
+ [{ [defaultProps.modelNames.exception]: 'bar' }],
+ ]);
+ });
+
+ describe('valid state', () => {
+ it('form group has correct props', () => {
+ mountComponent();
+
+ expect(findInputGroup().attributes()).toMatchObject({
+ state: 'true',
+ 'invalid-feedback': '',
+ });
+ });
+ });
+
+ describe('invalid state', () => {
+ it('form group has correct props', () => {
+ const propsWithError = {
+ ...defaultProps,
+ duplicateExceptionRegexError: 'some error string',
+ };
+
+ mountComponent(propsWithError);
+
+ expect(findInputGroup().attributes()).toMatchObject({
+ 'invalid-feedback': propsWithError.duplicateExceptionRegexError,
+ });
+ });
+ });
+ });
+
+ describe('when the duplicates are enabled', () => {
+ it('has the correct toggle label', () => {
+ mountComponent({ ...defaultProps, duplicatesAllowed: true });
+
+ expect(findToggleLabel().exists()).toBe(true);
+ expect(findToggleLabel().text()).toMatchInterpolatedText(DUPLICATES_ALLOWED_ENABLED);
+ });
+
+ it('hides the form input group', () => {
+ mountComponent({ ...defaultProps, duplicatesAllowed: true });
+
+ expect(findInputGroup().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/generic_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/generic_settings_spec.js
new file mode 100644
index 00000000000..4eafeedd55e
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/group/components/generic_settings_spec.js
@@ -0,0 +1,54 @@
+import { shallowMount } from '@vue/test-utils';
+import GenericSettings from '~/packages_and_registries/settings/group/components/generic_settings.vue';
+import SettingsTitles from '~/packages_and_registries/settings/group/components/settings_titles.vue';
+
+describe('generic_settings', () => {
+ let wrapper;
+
+ const mountComponent = () => {
+ wrapper = shallowMount(GenericSettings, {
+ scopedSlots: {
+ default: '<div data-testid="default-slot">{{props.modelNames}}</div>',
+ },
+ });
+ };
+
+ const findSettingsTitle = () => wrapper.findComponent(SettingsTitles);
+ const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('title component', () => {
+ it('has a title component', () => {
+ mountComponent();
+
+ expect(findSettingsTitle().exists()).toBe(true);
+ });
+
+ it('passes the correct props', () => {
+ mountComponent();
+
+ expect(findSettingsTitle().props()).toMatchObject({
+ title: 'Generic',
+ subTitle: 'Settings for Generic packages',
+ });
+ });
+ });
+
+ describe('default slot', () => {
+ it('accepts a default slot', () => {
+ mountComponent();
+
+ expect(findDefaultSlot().exists()).toBe(true);
+ });
+
+ it('binds model names', () => {
+ mountComponent();
+
+ expect(findDefaultSlot().text()).toContain('genericDuplicatesAllowed');
+ expect(findDefaultSlot().text()).toContain('genericDuplicateExceptionRegex');
+ });
+ });
+});
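
The scoped-slot assertions in the new generic_settings and maven_settings specs work because @vue/test-utils accepts a string template whose `props` object is the slot scope. A tiny sketch of that mechanism with a hypothetical provider component (the `modelNames` value is an assumption for the example, not the real component's data):

    import { shallowMount } from '@vue/test-utils';
    // Hypothetical component that renders <slot :modelNames="modelNames" />
    // with modelNames = { allowed: 'allowedModel' }.
    import SlotProvider from './slot_provider.vue';

    it('exposes the slot scope to the string template', () => {
      const wrapper = shallowMount(SlotProvider, {
        scopedSlots: {
          // `props` here is the slot scope object provided by the child.
          default: '<div data-testid="default-slot">{{ props.modelNames.allowed }}</div>',
        },
      });

      expect(wrapper.find('[data-testid="default-slot"]').text()).toBe('allowedModel');
    });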
diff --git a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
index be0d7114e6e..14ee3f3e3b8 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/group_settings_app_spec.js
@@ -3,6 +3,8 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import DuplicatesSettings from '~/packages_and_registries/settings/group/components/duplicates_settings.vue';
+import GenericSettings from '~/packages_and_registries/settings/group/components/generic_settings.vue';
import component from '~/packages_and_registries/settings/group/components/group_settings_app.vue';
import MavenSettings from '~/packages_and_registries/settings/group/components/maven_settings.vue';
import {
@@ -63,6 +65,8 @@ describe('Group Settings App', () => {
stubs: {
GlSprintf,
SettingsBlock,
+ MavenSettings,
+ GenericSettings,
},
mocks: {
$toast: {
@@ -78,14 +82,17 @@ describe('Group Settings App', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- const findSettingsBlock = () => wrapper.find(SettingsBlock);
+ const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
const findDescription = () => wrapper.find('[data-testid="description"');
- const findLink = () => wrapper.find(GlLink);
- const findMavenSettings = () => wrapper.find(MavenSettings);
- const findAlert = () => wrapper.find(GlAlert);
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findMavenSettings = () => wrapper.findComponent(MavenSettings);
+ const findMavenDuplicatedSettings = () => findMavenSettings().findComponent(DuplicatesSettings);
+ const findGenericSettings = () => wrapper.findComponent(GenericSettings);
+ const findGenericDuplicatedSettings = () =>
+ findGenericSettings().findComponent(DuplicatesSettings);
const waitForApolloQueryAndRender = async () => {
await waitForPromises();
@@ -93,7 +100,7 @@ describe('Group Settings App', () => {
};
const emitSettingsUpdate = (override) => {
- findMavenSettings().vm.$emit('update', {
+ findMavenDuplicatedSettings().vm.$emit('update', {
mavenDuplicateExceptionRegex: ')',
...override,
});
@@ -152,7 +159,7 @@ describe('Group Settings App', () => {
it('assigns duplication allowness and exception props', async () => {
mountComponent();
- expect(findMavenSettings().props('loading')).toBe(true);
+ expect(findMavenDuplicatedSettings().props('loading')).toBe(true);
await waitForApolloQueryAndRender();
@@ -161,10 +168,10 @@ describe('Group Settings App', () => {
mavenDuplicateExceptionRegex,
} = groupPackageSettingsMock.data.group.packageSettings;
- expect(findMavenSettings().props()).toMatchObject({
- mavenDuplicatesAllowed,
- mavenDuplicateExceptionRegex,
- mavenDuplicateExceptionRegexError: '',
+ expect(findMavenDuplicatedSettings().props()).toMatchObject({
+ duplicatesAllowed: mavenDuplicatesAllowed,
+ duplicateExceptionRegex: mavenDuplicateExceptionRegex,
+ duplicateExceptionRegexError: '',
loading: false,
});
});
@@ -183,6 +190,49 @@ describe('Group Settings App', () => {
});
});
+ describe('generic settings', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findGenericSettings().exists()).toBe(true);
+ });
+
+ it('assigns duplication allowness and exception props', async () => {
+ mountComponent();
+
+ expect(findGenericDuplicatedSettings().props('loading')).toBe(true);
+
+ await waitForApolloQueryAndRender();
+
+ const {
+ genericDuplicatesAllowed,
+ genericDuplicateExceptionRegex,
+ } = groupPackageSettingsMock.data.group.packageSettings;
+
+ expect(findGenericDuplicatedSettings().props()).toMatchObject({
+ duplicatesAllowed: genericDuplicatesAllowed,
+ duplicateExceptionRegex: genericDuplicateExceptionRegex,
+ duplicateExceptionRegexError: '',
+ loading: false,
+ });
+ });
+
+ it('on update event calls the mutation', async () => {
+ const mutationResolver = jest.fn().mockResolvedValue(groupPackageSettingsMutationMock());
+ mountComponent({ mutationResolver });
+
+ await waitForApolloQueryAndRender();
+
+ findMavenDuplicatedSettings().vm.$emit('update', {
+ genericDuplicateExceptionRegex: ')',
+ });
+
+ expect(mutationResolver).toHaveBeenCalledWith({
+ input: { genericDuplicateExceptionRegex: ')', namespacePath: 'foo_group_path' },
+ });
+ });
+ });
+
describe('settings update', () => {
describe('success state', () => {
it('shows a success alert', async () => {
@@ -200,26 +250,26 @@ describe('Group Settings App', () => {
});
it('has an optimistic response', async () => {
- const mavenDuplicateExceptionRegex = 'latest[master]something';
+ const mavenDuplicateExceptionRegex = 'latest[main]something';
mountComponent();
await waitForApolloQueryAndRender();
- expect(findMavenSettings().props('mavenDuplicateExceptionRegex')).toBe('');
+ expect(findMavenDuplicatedSettings().props('duplicateExceptionRegex')).toBe('');
emitSettingsUpdate({ mavenDuplicateExceptionRegex });
// wait for apollo to update the model with the optimistic response
await wrapper.vm.$nextTick();
- expect(findMavenSettings().props('mavenDuplicateExceptionRegex')).toBe(
+ expect(findMavenDuplicatedSettings().props('duplicateExceptionRegex')).toBe(
mavenDuplicateExceptionRegex,
);
// wait for the call to resolve
await waitForPromises();
- expect(findMavenSettings().props('mavenDuplicateExceptionRegex')).toBe(
+ expect(findMavenDuplicatedSettings().props('duplicateExceptionRegex')).toBe(
mavenDuplicateExceptionRegex,
);
});
@@ -245,7 +295,7 @@ describe('Group Settings App', () => {
await waitForApolloQueryAndRender();
// errors are bound to the component
- expect(findMavenSettings().props('mavenDuplicateExceptionRegexError')).toBe(
+ expect(findMavenDuplicatedSettings().props('duplicateExceptionRegexError')).toBe(
groupPackageSettingsMutationErrorMock.errors[0].extensions.problems[0].message,
);
@@ -258,7 +308,7 @@ describe('Group Settings App', () => {
await wrapper.vm.$nextTick();
// errors are reset on mutation call
- expect(findMavenSettings().props('mavenDuplicateExceptionRegexError')).toBe('');
+ expect(findMavenDuplicatedSettings().props('duplicateExceptionRegexError')).toBe('');
});
it.each`
diff --git a/spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js
index 859d3587223..22644b97b43 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js
@@ -1,156 +1,54 @@
-import { GlSprintf, GlToggle, GlFormGroup, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import component from '~/packages_and_registries/settings/group/components/maven_settings.vue';
+import MavenSettings from '~/packages_and_registries/settings/group/components/maven_settings.vue';
+import SettingsTitles from '~/packages_and_registries/settings/group/components/settings_titles.vue';
-import {
- MAVEN_TITLE,
- MAVEN_SETTINGS_SUBTITLE,
- MAVEN_DUPLICATES_ALLOWED_DISABLED,
- MAVEN_DUPLICATES_ALLOWED_ENABLED,
- MAVEN_SETTING_EXCEPTION_TITLE,
- MAVEN_SETTINGS_EXCEPTION_LEGEND,
-} from '~/packages_and_registries/settings/group/constants';
-
-describe('Maven Settings', () => {
+describe('maven_settings', () => {
let wrapper;
- const defaultProps = {
- mavenDuplicatesAllowed: false,
- mavenDuplicateExceptionRegex: 'foo',
- };
-
- const mountComponent = (propsData = defaultProps) => {
- wrapper = shallowMount(component, {
- propsData,
- stubs: {
- GlSprintf,
+ const mountComponent = () => {
+ wrapper = shallowMount(MavenSettings, {
+ scopedSlots: {
+ default: '<div data-testid="default-slot">{{props.modelNames}}</div>',
},
});
};
+ const findSettingsTitle = () => wrapper.findComponent(SettingsTitles);
+ const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
+
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- const findTitle = () => wrapper.find('h5');
- const findSubTitle = () => wrapper.find('p');
- const findToggle = () => wrapper.find(GlToggle);
- const findToggleLabel = () => wrapper.find('[data-testid="toggle-label"');
-
- const findInputGroup = () => wrapper.find(GlFormGroup);
- const findInput = () => wrapper.find(GlFormInput);
-
- it('has a title', () => {
- mountComponent();
-
- expect(findTitle().exists()).toBe(true);
- expect(findTitle().text()).toBe(MAVEN_TITLE);
- });
-
- it('has a subtitle', () => {
- mountComponent();
-
- expect(findSubTitle().exists()).toBe(true);
- expect(findSubTitle().text()).toBe(MAVEN_SETTINGS_SUBTITLE);
- });
-
- it('has a toggle', () => {
- mountComponent();
-
- expect(findToggle().exists()).toBe(true);
- expect(findToggle().props()).toMatchObject({
- label: component.i18n.MAVEN_TOGGLE_LABEL,
- value: defaultProps.mavenDuplicatesAllowed,
- });
- });
-
- it('toggle emits an update event', () => {
- mountComponent();
-
- findToggle().vm.$emit('change', false);
-
- expect(wrapper.emitted('update')).toEqual([[{ mavenDuplicatesAllowed: false }]]);
- });
-
- describe('when the duplicates are disabled', () => {
- it('the toggle has the disabled message', () => {
+ describe('title component', () => {
+ it('has a title component', () => {
mountComponent();
- expect(findToggleLabel().exists()).toBe(true);
- expect(findToggleLabel().text()).toMatchInterpolatedText(MAVEN_DUPLICATES_ALLOWED_DISABLED);
+ expect(findSettingsTitle().exists()).toBe(true);
});
- it('shows a form group with an input field', () => {
+ it('passes the correct props', () => {
mountComponent();
- expect(findInputGroup().exists()).toBe(true);
-
- expect(findInputGroup().attributes()).toMatchObject({
- 'label-for': 'maven-duplicated-settings-regex-input',
- label: MAVEN_SETTING_EXCEPTION_TITLE,
- description: MAVEN_SETTINGS_EXCEPTION_LEGEND,
+ expect(findSettingsTitle().props()).toMatchObject({
+ title: 'Maven',
+ subTitle: 'Settings for Maven packages',
});
});
+ });
- it('shows an input field', () => {
+ describe('default slot', () => {
+ it('accepts a default slot', () => {
mountComponent();
- expect(findInput().exists()).toBe(true);
-
- expect(findInput().attributes()).toMatchObject({
- id: 'maven-duplicated-settings-regex-input',
- value: defaultProps.mavenDuplicateExceptionRegex,
- });
+ expect(findDefaultSlot().exists()).toBe(true);
});
- it('input change event emits an update event', () => {
+ it('binds model names', () => {
mountComponent();
- findInput().vm.$emit('change', 'bar');
-
- expect(wrapper.emitted('update')).toEqual([[{ mavenDuplicateExceptionRegex: 'bar' }]]);
- });
-
- describe('valid state', () => {
- it('form group has correct props', () => {
- mountComponent();
-
- expect(findInputGroup().attributes()).toMatchObject({
- state: 'true',
- 'invalid-feedback': '',
- });
- });
- });
-
- describe('invalid state', () => {
- it('form group has correct props', () => {
- const propsWithError = {
- ...defaultProps,
- mavenDuplicateExceptionRegexError: 'some error string',
- };
-
- mountComponent(propsWithError);
-
- expect(findInputGroup().attributes()).toMatchObject({
- 'invalid-feedback': propsWithError.mavenDuplicateExceptionRegexError,
- });
- });
- });
- });
-
- describe('when the duplicates are enabled', () => {
- it('has the correct toggle label', () => {
- mountComponent({ ...defaultProps, mavenDuplicatesAllowed: true });
-
- expect(findToggleLabel().exists()).toBe(true);
- expect(findToggleLabel().text()).toMatchInterpolatedText(MAVEN_DUPLICATES_ALLOWED_ENABLED);
- });
-
- it('hides the form input group', () => {
- mountComponent({ ...defaultProps, mavenDuplicatesAllowed: true });
-
- expect(findInputGroup().exists()).toBe(false);
+ expect(findDefaultSlot().text()).toContain('mavenDuplicatesAllowed');
+ expect(findDefaultSlot().text()).toContain('mavenDuplicateExceptionRegex');
});
});
});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/settings_titles_spec.js b/spec/frontend/packages_and_registries/settings/group/components/settings_titles_spec.js
new file mode 100644
index 00000000000..a61edad8685
--- /dev/null
+++ b/spec/frontend/packages_and_registries/settings/group/components/settings_titles_spec.js
@@ -0,0 +1,25 @@
+import { shallowMount } from '@vue/test-utils';
+import SettingsTitles from '~/packages_and_registries/settings/group/components/settings_titles.vue';
+
+describe('settings_titles', () => {
+ let wrapper;
+
+ const mountComponent = () => {
+ wrapper = shallowMount(SettingsTitles, {
+ propsData: {
+ title: 'foo',
+ subTitle: 'bar',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders properly', () => {
+ mountComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js b/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js
index e1a46f97318..03133bf1158 100644
--- a/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/graphl/utils/cache_update_spec.js
@@ -9,7 +9,7 @@ describe('Package and Registries settings group cache updates', () => {
updateNamespacePackageSettings: {
packageSettings: {
mavenDuplicatesAllowed: false,
- mavenDuplicateExceptionRegex: 'latest[master]something',
+ mavenDuplicateExceptionRegex: 'latest[main]something',
},
},
},
diff --git a/spec/frontend/packages_and_registries/settings/group/mock_data.js b/spec/frontend/packages_and_registries/settings/group/mock_data.js
index 777c0898de0..65119e288a1 100644
--- a/spec/frontend/packages_and_registries/settings/group/mock_data.js
+++ b/spec/frontend/packages_and_registries/settings/group/mock_data.js
@@ -4,6 +4,8 @@ export const groupPackageSettingsMock = {
packageSettings: {
mavenDuplicatesAllowed: true,
mavenDuplicateExceptionRegex: '',
+ genericDuplicatesAllowed: true,
+ genericDuplicateExceptionRegex: '',
},
},
},
@@ -14,7 +16,9 @@ export const groupPackageSettingsMutationMock = (override) => ({
updateNamespacePackageSettings: {
packageSettings: {
mavenDuplicatesAllowed: true,
- mavenDuplicateExceptionRegex: 'latest[master]something',
+ mavenDuplicateExceptionRegex: 'latest[main]something',
+ genericDuplicatesAllowed: true,
+ genericDuplicateExceptionRegex: 'latest[main]somethingGeneric',
},
errors: [],
...override,
@@ -26,20 +30,20 @@ export const groupPackageSettingsMutationErrorMock = {
errors: [
{
message:
- 'Variable $input of type UpdateNamespacePackageSettingsInput! was provided invalid value for mavenDuplicateExceptionRegex (latest[master]somethingj)) is an invalid regexp: unexpected ): latest[master]somethingj)))',
+ 'Variable $input of type UpdateNamespacePackageSettingsInput! was provided invalid value for mavenDuplicateExceptionRegex (latest[main]somethingj)) is an invalid regexp: unexpected ): latest[main]somethingj)))',
locations: [{ line: 1, column: 41 }],
extensions: {
value: {
namespacePath: 'gitlab-org',
- mavenDuplicateExceptionRegex: 'latest[master]something))',
+ mavenDuplicateExceptionRegex: 'latest[main]something))',
},
problems: [
{
path: ['mavenDuplicateExceptionRegex'],
explanation:
- 'latest[master]somethingj)) is an invalid regexp: unexpected ): latest[master]something))',
+ 'latest[main]somethingj)) is an invalid regexp: unexpected ): latest[main]something))',
message:
- 'latest[master]somethingj)) is an invalid regexp: unexpected ): latest[master]something))',
+ 'latest[main]somethingj)) is an invalid regexp: unexpected ): latest[main]something))',
},
],
},
diff --git a/spec/frontend/registry/settings/__snapshots__/utils_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap
index 7062773b46b..7062773b46b 100644
--- a/spec/frontend/registry/settings/__snapshots__/utils_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/__snapshots__/utils_spec.js.snap
diff --git a/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
index 7a52b4a5d0f..7a52b4a5d0f 100644
--- a/spec/frontend/registry/settings/components/__snapshots__/settings_form_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/settings_form_spec.js.snap
diff --git a/spec/frontend/registry/settings/components/expiration_dropdown_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_dropdown_spec.js
index f777f7ec9de..c56244a9138 100644
--- a/spec/frontend/registry/settings/components/expiration_dropdown_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_dropdown_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { GlFormGroup, GlFormSelect } from 'jest/registry/shared/stubs';
-import component from '~/registry/settings/components/expiration_dropdown.vue';
+import component from '~/packages_and_registries/settings/project/components/expiration_dropdown.vue';
describe('ExpirationDropdown', () => {
let wrapper;
diff --git a/spec/frontend/registry/settings/components/expiration_input_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_input_spec.js
index b91599a2789..dd876d1d295 100644
--- a/spec/frontend/registry/settings/components/expiration_input_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_input_spec.js
@@ -1,8 +1,8 @@
import { GlSprintf, GlFormInput, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { GlFormGroup } from 'jest/registry/shared/stubs';
-import component from '~/registry/settings/components/expiration_input.vue';
-import { NAME_REGEX_LENGTH } from '~/registry/settings/constants';
+import component from '~/packages_and_registries/settings/project/components/expiration_input.vue';
+import { NAME_REGEX_LENGTH } from '~/packages_and_registries/settings/project/constants';
describe('ExpirationInput', () => {
let wrapper;
diff --git a/spec/frontend/registry/settings/components/expiration_run_text_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_run_text_spec.js
index 753bb10ad08..854830391c5 100644
--- a/spec/frontend/registry/settings/components/expiration_run_text_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_run_text_spec.js
@@ -1,8 +1,11 @@
import { GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { GlFormGroup } from 'jest/registry/shared/stubs';
-import component from '~/registry/settings/components/expiration_run_text.vue';
-import { NEXT_CLEANUP_LABEL, NOT_SCHEDULED_POLICY_TEXT } from '~/registry/settings/constants';
+import component from '~/packages_and_registries/settings/project/components/expiration_run_text.vue';
+import {
+ NEXT_CLEANUP_LABEL,
+ NOT_SCHEDULED_POLICY_TEXT,
+} from '~/packages_and_registries/settings/project/constants';
describe('ExpirationToggle', () => {
let wrapper;
diff --git a/spec/frontend/registry/settings/components/expiration_toggle_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_toggle_spec.js
index 7598f6adc89..3a3eb089b43 100644
--- a/spec/frontend/registry/settings/components/expiration_toggle_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/expiration_toggle_spec.js
@@ -1,11 +1,11 @@
import { GlToggle, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { GlFormGroup } from 'jest/registry/shared/stubs';
-import component from '~/registry/settings/components/expiration_toggle.vue';
+import component from '~/packages_and_registries/settings/project/components/expiration_toggle.vue';
import {
ENABLED_TOGGLE_DESCRIPTION,
DISABLED_TOGGLE_DESCRIPTION,
-} from '~/registry/settings/constants';
+} from '~/packages_and_registries/settings/project/constants';
describe('ExpirationToggle', () => {
let wrapper;
diff --git a/spec/frontend/registry/settings/components/registry_settings_app_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
index fd53efa884f..a725941f7f6 100644
--- a/spec/frontend/registry/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/registry_settings_app_spec.js
@@ -2,14 +2,14 @@ import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
-import component from '~/registry/settings/components/registry_settings_app.vue';
-import SettingsForm from '~/registry/settings/components/settings_form.vue';
+import component from '~/packages_and_registries/settings/project/components/registry_settings_app.vue';
+import SettingsForm from '~/packages_and_registries/settings/project/components/settings_form.vue';
import {
FETCH_SETTINGS_ERROR_MESSAGE,
UNAVAILABLE_FEATURE_INTRO_TEXT,
UNAVAILABLE_USER_FEATURE_TEXT,
-} from '~/registry/settings/constants';
-import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.query.graphql';
+} from '~/packages_and_registries/settings/project/constants';
+import expirationPolicyQuery from '~/packages_and_registries/settings/project/graphql/queries/get_expiration_policy.query.graphql';
import {
expirationPolicyPayload,
diff --git a/spec/frontend/registry/settings/components/settings_form_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
index ad94da6ca66..7e5383d7ff1 100644
--- a/spec/frontend/registry/settings/components/settings_form_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/settings_form_spec.js
@@ -2,15 +2,15 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import component from '~/registry/settings/components/settings_form.vue';
+import { GlCard, GlLoadingIcon } from 'jest/registry/shared/stubs';
+import component from '~/packages_and_registries/settings/project/components/settings_form.vue';
import {
UPDATE_SETTINGS_ERROR_MESSAGE,
UPDATE_SETTINGS_SUCCESS_MESSAGE,
-} from '~/registry/settings/constants';
-import updateContainerExpirationPolicyMutation from '~/registry/settings/graphql/mutations/update_container_expiration_policy.mutation.graphql';
-import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.query.graphql';
+} from '~/packages_and_registries/settings/project/constants';
+import updateContainerExpirationPolicyMutation from '~/packages_and_registries/settings/project/graphql/mutations/update_container_expiration_policy.mutation.graphql';
+import expirationPolicyQuery from '~/packages_and_registries/settings/project/graphql/queries/get_expiration_policy.query.graphql';
import Tracking from '~/tracking';
-import { GlCard, GlLoadingIcon } from '../../shared/stubs';
import { expirationPolicyPayload, expirationPolicyMutationPayload } from '../mock_data';
const localVue = createLocalVue();
diff --git a/spec/frontend/registry/settings/graphql/cache_updated_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/graphql/cache_updated_spec.js
index 73655b6917b..4d6bd65bd93 100644
--- a/spec/frontend/registry/settings/graphql/cache_updated_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/graphql/cache_updated_spec.js
@@ -1,5 +1,5 @@
-import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.query.graphql';
-import { updateContainerExpirationPolicy } from '~/registry/settings/graphql/utils/cache_update';
+import expirationPolicyQuery from '~/packages_and_registries/settings/project/graphql/queries/get_expiration_policy.query.graphql';
+import { updateContainerExpirationPolicy } from '~/packages_and_registries/settings/project/graphql/utils/cache_update';
describe('Registry settings cache update', () => {
let client;
diff --git a/spec/frontend/registry/settings/mock_data.js b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
index 9778f409010..9778f409010 100644
--- a/spec/frontend/registry/settings/mock_data.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
diff --git a/spec/frontend/registry/settings/utils_spec.js b/spec/frontend/packages_and_registries/settings/project/settings/utils_spec.js
index 7bc627908af..4c81671cd46 100644
--- a/spec/frontend/registry/settings/utils_spec.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/utils_spec.js
@@ -2,7 +2,7 @@ import {
formOptionsGenerator,
optionLabelGenerator,
olderThanTranslationGenerator,
-} from '~/registry/settings/utils';
+} from '~/packages_and_registries/settings/project/utils';
describe('Utils', () => {
describe('optionLabelGenerator', () => {
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
index 9f02e5b9432..4c644a0d05f 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
@@ -8,6 +8,10 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
/>
</p>
+ <oncall-schedules-list-stub
+ schedules="schedule1,schedule2"
+ />
+
<p>
<gl-sprintf-stub
message="To confirm, type %{username}"
diff --git a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
index 318b6d16008..93d9ee43179 100644
--- a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
+++ b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
@@ -1,6 +1,7 @@
import { GlButton, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import DeleteUserModal from '~/pages/admin/users/components/delete_user_modal.vue';
+import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
import ModalStub from './stubs/modal_stub';
const TEST_DELETE_USER_URL = 'delete-url';
@@ -17,13 +18,14 @@ describe('User Operation confirmation modal', () => {
.filter((w) => w.attributes('variant') === variant && w.attributes('category') === category)
.at(0);
const findForm = () => wrapper.find('form');
- const findUsernameInput = () => wrapper.find(GlFormInput);
+ const findUsernameInput = () => wrapper.findComponent(GlFormInput);
const findPrimaryButton = () => findButton('danger', 'primary');
const findSecondaryButton = () => findButton('danger', 'secondary');
const findAuthenticityToken = () => new FormData(findForm().element).get('authenticity_token');
const getUsername = () => findUsernameInput().attributes('value');
const getMethodParam = () => new FormData(findForm().element).get('_method');
const getFormAction = () => findForm().attributes('action');
+ const findOnCallSchedulesList = () => wrapper.findComponent(OncallSchedulesList);
const setUsername = (username) => {
findUsernameInput().vm.$emit('input', username);
@@ -31,6 +33,7 @@ describe('User Operation confirmation modal', () => {
const username = 'username';
const badUsername = 'bad_username';
+ const oncallSchedules = '["schedule1", "schedule2"]';
const createComponent = (props = {}) => {
wrapper = shallowMount(DeleteUserModal, {
@@ -43,6 +46,7 @@ describe('User Operation confirmation modal', () => {
deleteUserUrl: TEST_DELETE_USER_URL,
blockUserUrl: TEST_BLOCK_USER_URL,
csrfToken: TEST_CSRF,
+ oncallSchedules,
...props,
},
stubs: {
@@ -145,4 +149,19 @@ describe('User Operation confirmation modal', () => {
});
});
});
+
+ describe('Related oncall-schedules list', () => {
+ it('does NOT render the list when user has no related schedules', () => {
+ createComponent({ oncallSchedules: '[]' });
+ expect(findOnCallSchedulesList().exists()).toBe(false);
+ });
+
+ it('renders the list when user has related schedules', () => {
+ createComponent();
+
+ const schedules = findOnCallSchedulesList();
+ expect(schedules.exists()).toBe(true);
+ expect(schedules.props('schedules')).toEqual(JSON.parse(oncallSchedules));
+ });
+ });
});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index 2992c7f0624..6d853120232 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -1,5 +1,5 @@
-import { GlForm, GlFormInputGroup, GlFormInput } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlFormInputGroup, GlFormInput, GlForm } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
import axios from 'axios';
import AxiosMockAdapter from 'axios-mock-adapter';
import { kebabCase } from 'lodash';
@@ -43,8 +43,8 @@ describe('ForkForm component', () => {
axiosMock.onGet(DEFAULT_PROPS.endpoint).replyOnce(statusCode, data);
};
- const createComponent = (props = {}, data = {}) => {
- wrapper = shallowMount(ForkForm, {
+ const createComponentFactory = (mountFn) => (props = {}, data = {}) => {
+ wrapper = mountFn(ForkForm, {
provide: {
newGroupPath: 'some/groups/path',
visibilityHelpPath: 'some/visibility/help/path',
@@ -65,6 +65,9 @@ describe('ForkForm component', () => {
});
};
+ const createComponent = createComponentFactory(shallowMount);
+ const createFullComponent = createComponentFactory(mount);
+
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
window.gon = {
@@ -99,44 +102,6 @@ describe('ForkForm component', () => {
expect(cancelButton.attributes('href')).toBe(projectFullPath);
});
- it('make POST request with project param', async () => {
- jest.spyOn(axios, 'post');
-
- const namespaceId = 20;
-
- mockGetRequest();
- createComponent(
- {},
- {
- selectedNamespace: {
- id: namespaceId,
- },
- },
- );
-
- wrapper.find(GlForm).vm.$emit('submit', { preventDefault: () => {} });
-
- const {
- projectId,
- projectDescription,
- projectName,
- projectPath,
- projectVisibility,
- } = DEFAULT_PROPS;
-
- const url = `/api/${GON_API_VERSION}/projects/${projectId}/fork`;
- const project = {
- description: projectDescription,
- id: projectId,
- name: projectName,
- namespace_id: namespaceId,
- path: projectPath,
- visibility: projectVisibility,
- };
-
- expect(axios.post).toHaveBeenCalledWith(url, project);
- });
-
it('has input with csrf token', () => {
mockGetRequest();
createComponent();
@@ -258,9 +223,7 @@ describe('ForkForm component', () => {
projectVisibility: project,
},
{
- selectedNamespace: {
- visibility: namespace,
- },
+ form: { fields: { namespace: { value: { visibility: namespace } } } },
},
);
@@ -274,34 +237,101 @@ describe('ForkForm component', () => {
describe('onSubmit', () => {
beforeEach(() => {
jest.spyOn(urlUtility, 'redirectTo').mockImplementation();
+
+ mockGetRequest();
+ createFullComponent(
+ {},
+ {
+ namespaces: MOCK_NAMESPACES_RESPONSE,
+ form: {
+ state: true,
+ },
+ },
+ );
});
- it('redirect to POST web_url response', async () => {
- const webUrl = `new/fork-project`;
+ const selectedMockNamespaceIndex = 1;
+ const namespaceId = MOCK_NAMESPACES_RESPONSE[selectedMockNamespaceIndex].id;
- jest.spyOn(axios, 'post').mockResolvedValue({ data: { web_url: webUrl } });
+ const fillForm = async () => {
+ const namespaceOptions = findForkUrlInput().findAll('option');
- mockGetRequest();
- createComponent();
+ await namespaceOptions.at(selectedMockNamespaceIndex + 1).setSelected();
+ };
- await wrapper.vm.onSubmit();
+ const submitForm = async () => {
+ await fillForm();
+ const form = wrapper.find(GlForm);
- expect(urlUtility.redirectTo).toHaveBeenCalledWith(webUrl);
+ await form.trigger('submit');
+ await wrapper.vm.$nextTick();
+ };
+
+ describe('with invalid form', () => {
+ it('does not make POST request', async () => {
+ jest.spyOn(axios, 'post');
+
+ expect(axios.post).not.toHaveBeenCalled();
+ });
+
+ it('does not redirect the current page', async () => {
+ await submitForm();
+
+ expect(urlUtility.redirectTo).not.toHaveBeenCalled();
+ });
});
- it('display flash when POST is unsuccessful', async () => {
- const dummyError = 'Fork project failed';
+ describe('with valid form', () => {
+ beforeEach(() => {
+ fillForm();
+ });
- jest.spyOn(axios, 'post').mockRejectedValue(dummyError);
+ it('makes a POST request with the project param', async () => {
+ jest.spyOn(axios, 'post');
+
+ await submitForm();
+
+ const {
+ projectId,
+ projectDescription,
+ projectName,
+ projectPath,
+ projectVisibility,
+ } = DEFAULT_PROPS;
+
+ const url = `/api/${GON_API_VERSION}/projects/${projectId}/fork`;
+ const project = {
+ description: projectDescription,
+ id: projectId,
+ name: projectName,
+ namespace_id: namespaceId,
+ path: projectPath,
+ visibility: projectVisibility,
+ };
- mockGetRequest();
- createComponent();
+ expect(axios.post).toHaveBeenCalledWith(url, project);
+ });
+
+ it('redirects to the web_url in the POST response', async () => {
+ const webUrl = `new/fork-project`;
+ jest.spyOn(axios, 'post').mockResolvedValue({ data: { web_url: webUrl } });
+
+ await submitForm();
+
+ expect(urlUtility.redirectTo).toHaveBeenCalledWith(webUrl);
+ });
+
+ it('displays a flash when POST is unsuccessful', async () => {
+ const dummyError = 'Fork project failed';
+
+ jest.spyOn(axios, 'post').mockRejectedValue(dummyError);
- await wrapper.vm.onSubmit();
+ await submitForm();
- expect(urlUtility.redirectTo).not.toHaveBeenCalled();
- expect(createFlash).toHaveBeenCalledWith({
- message: dummyError,
+ expect(urlUtility.redirectTo).not.toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledWith({
+ message: dummyError,
+ });
});
});
});
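
The createComponentFactory switch above is a small factory-of-factories so the same options can back both shallowMount and mount. A generic sketch of the idea; the component path and options are placeholders, not the fork form's real setup:

    import { mount, shallowMount } from '@vue/test-utils';
    // Placeholder component path; any Vue SFC works the same way here.
    import SomeForm from '~/some/feature/components/some_form.vue';

    // One place to define common options, two entry points: shallow rendering for
    // unit-level specs, full mounting when the spec drives real child elements.
    const createComponentFactory = (mountFn) => (props = {}, data = {}) => {
      return mountFn(SomeForm, {
        propsData: { ...props },
        data() {
          return data;
        },
      });
    };

    const createComponent = createComponentFactory(shallowMount);
    const createFullComponent = createComponentFactory(mount);

    // Usage: createComponent({ endpoint: '/foo' }) keeps children stubbed,
    // while createFullComponent() lets a spec submit the real <form> element.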
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap
index 8b54a06ac7c..350669433f0 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap
+++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap
@@ -44,9 +44,7 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
class="progress-bar"
role="progressbar"
style="width: 22.22222222222222%;"
- >
- <!---->
- </div>
+ />
</div>
</div>
@@ -68,7 +66,7 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
class="learn-gitlab-section-card-header"
>
<img
- src="/assets/learn_gitlab/section_workspace.svg"
+ src="workspace.svg"
/>
<h2
@@ -134,9 +132,16 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
<span>
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Set up CI/CD"
+ data-track-property="Growth::Conversion::Experiment::LearnGitLabA"
href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
>
- Set up CI/CD
+
+ Set up CI/CD
+
</a>
</span>
@@ -148,9 +153,16 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
<span>
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Start a free Ultimate trial"
+ data-track-property="Growth::Conversion::Experiment::LearnGitLabA"
href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
>
- Start a free Ultimate trial
+
+ Start a free Ultimate trial
+
</a>
</span>
@@ -162,9 +174,16 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
<span>
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Add code owners"
+ data-track-property="Growth::Conversion::Experiment::LearnGitLabA"
href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
>
- Add code owners
+
+ Add code owners
+
</a>
</span>
@@ -183,9 +202,16 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
<span>
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Add merge request approval"
+ data-track-property="Growth::Conversion::Experiment::LearnGitLabA"
href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
>
- Add merge request approval
+
+ Add merge request approval
+
</a>
</span>
@@ -218,7 +244,7 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
class="learn-gitlab-section-card-header"
>
<img
- src="/assets/learn_gitlab/section_plan.svg"
+ src="plan.svg"
/>
<h2
@@ -240,9 +266,16 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
<span>
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Create an issue"
+ data-track-property="Growth::Conversion::Experiment::LearnGitLabA"
href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
>
- Create an issue
+
+ Create an issue
+
</a>
</span>
@@ -254,9 +287,16 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
<span>
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Submit a merge request"
+ data-track-property="Growth::Conversion::Experiment::LearnGitLabA"
href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
>
- Submit a merge request
+
+ Submit a merge request
+
</a>
</span>
@@ -282,7 +322,7 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
class="learn-gitlab-section-card-header"
>
<img
- src="/assets/learn_gitlab/section_deploy.svg"
+ src="deploy.svg"
/>
<h2
@@ -304,9 +344,16 @@ exports[`Learn GitLab Design A renders correctly 1`] = `
<span>
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Run a Security scan using CI/CD"
+ data-track-property="Growth::Conversion::Experiment::LearnGitLabA"
href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
>
- Run a Security scan using CI/CD
+
+ Run a Security scan using CI/CD
+
</a>
</span>
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap
index 07c7f2df09e..c9d8ab4566c 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap
+++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap
@@ -44,9 +44,7 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="progress-bar"
role="progressbar"
style="width: 22.22222222222222%;"
- >
- <!---->
- </div>
+ />
</div>
</div>
@@ -110,6 +108,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Invite your colleagues"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
@@ -168,6 +169,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Create or import a repository"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
@@ -218,6 +222,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Set-up CI/CD"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
@@ -268,6 +275,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Try GitLab Ultimate for free"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
@@ -323,6 +333,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Add code owners"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
@@ -378,6 +391,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Enable require merge approvals"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
@@ -444,6 +460,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Create an issue"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
@@ -494,6 +513,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Submit a merge request (MR)"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
@@ -560,6 +582,9 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
<a
class="gl-link"
+ data-track-action="click_link"
+ data-track-label="Run a Security scan using CI/CD"
+ data-track-property="Growth::Activation::Experiment::LearnGitLabB"
href="http://example.com/"
rel="noopener noreferrer"
target="_blank"
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap
index ad8db0822cc..9e00ace761c 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap
+++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap
@@ -11,7 +11,7 @@ exports[`Learn GitLab Section Card renders correctly 1`] = `
class="learn-gitlab-section-card-header"
>
<img
- src="/assets/learn_gitlab/section_workspace.svg"
+ src="workspace.svg"
/>
<h2
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js
index 64ace341038..ac997c1f237 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js
@@ -1,13 +1,13 @@
import { GlProgressBar } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import LearnGitlabA from '~/pages/projects/learn_gitlab/components/learn_gitlab_a.vue';
-import { testActions } from './mock_data';
+import { testActions, testSections } from './mock_data';
describe('Learn GitLab Design A', () => {
let wrapper;
const createWrapper = () => {
- wrapper = mount(LearnGitlabA, { propsData: { actions: testActions } });
+ wrapper = mount(LearnGitlabA, { propsData: { actions: testActions, sections: testSections } });
};
beforeEach(() => {
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js
index de6aca08235..3a511a009a9 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js
@@ -3,6 +3,7 @@ import LearnGitlabSectionCard from '~/pages/projects/learn_gitlab/components/lea
import { testActions } from './mock_data';
const defaultSection = 'workspace';
+const testImage = 'workspace.svg';
describe('Learn GitLab Section Card', () => {
let wrapper;
@@ -14,7 +15,7 @@ describe('Learn GitLab Section Card', () => {
const createWrapper = () => {
wrapper = shallowMount(LearnGitlabSectionCard, {
- propsData: { section: defaultSection, actions: testActions },
+ propsData: { section: defaultSection, actions: testActions, svg: testImage },
});
};
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js b/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
index d6ee2b00c8e..8d6ac737db8 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
@@ -45,3 +45,15 @@ export const testActions = {
svg: 'http://example.com/images/illustration.svg',
},
};
+
+export const testSections = {
+ workspace: {
+ svg: 'workspace.svg',
+ },
+ deploy: {
+ svg: 'deploy.svg',
+ },
+ plan: {
+ svg: 'plan.svg',
+ },
+};
diff --git a/spec/frontend/pages/projects/new/components/app_spec.js b/spec/frontend/pages/projects/new/components/app_spec.js
new file mode 100644
index 00000000000..b604e636243
--- /dev/null
+++ b/spec/frontend/pages/projects/new/components/app_spec.js
@@ -0,0 +1,77 @@
+import { shallowMount } from '@vue/test-utils';
+import { assignGitlabExperiment } from 'helpers/experimentation_helper';
+import App from '~/pages/projects/new/components/app.vue';
+import NewNamespacePage from '~/vue_shared/new_namespace/new_namespace_page.vue';
+
+describe('Experimental new project creation app', () => {
+ let wrapper;
+
+ const findNewNamespacePage = () => wrapper.findComponent(NewNamespacePage);
+
+ const createComponent = (propsData) => {
+ wrapper = shallowMount(App, { propsData });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('new_repo experiment', () => {
+ it('passes new_repo experiment', () => {
+ createComponent();
+
+ expect(findNewNamespacePage().props().experiment).toBe('new_repo');
+ });
+
+ describe('when in the candidate variant', () => {
+ assignGitlabExperiment('new_repo', 'candidate');
+
+ it('has "repository" in the panel title', () => {
+ createComponent();
+
+ expect(findNewNamespacePage().props().panels[0].title).toBe(
+ 'Create blank project/repository',
+ );
+ });
+ });
+
+ describe('when in the control variant', () => {
+ assignGitlabExperiment('new_repo', 'control');
+
+ it('has "project" in the panel title', () => {
+ createComponent();
+
+ expect(findNewNamespacePage().props().panels[0].title).toBe('Create blank project');
+ });
+ });
+ });
+
+ it('passes custom new project guideline text to underlying component', () => {
+ const DEMO_GUIDELINES = 'Demo guidelines';
+ const guidelineSelector = '#new-project-guideline';
+ createComponent({
+ newProjectGuidelines: DEMO_GUIDELINES,
+ });
+
+ expect(wrapper.find(guidelineSelector).text()).toBe(DEMO_GUIDELINES);
+ });
+
+ it.each`
+ isCiCdAvailable | outcome
+ ${false} | ${'do not show CI/CD panel'}
+ ${true} | ${'show CI/CD panel'}
+ `('$outcome when isCiCdAvailable is $isCiCdAvailable', ({ isCiCdAvailable }) => {
+ createComponent({
+ isCiCdAvailable,
+ });
+
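+    // the CI/CD panel ('cicd_for_external_repo') should be present exactly when isCiCdAvailable is true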
+ expect(
+ Boolean(
+ wrapper
+ .findComponent(NewNamespacePage)
+ .props()
+ .panels.find((p) => p.name === 'cicd_for_external_repo'),
+ ),
+ ).toBe(isCiCdAvailable);
+ });
+});
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/new_project_push_tip_popover_spec.js b/spec/frontend/pages/projects/new/components/new_project_push_tip_popover_spec.js
index 1ce16640d4a..d4cf8c78600 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/new_project_push_tip_popover_spec.js
+++ b/spec/frontend/pages/projects/new/components/new_project_push_tip_popover_spec.js
@@ -1,6 +1,6 @@
import { GlPopover, GlFormInputGroup } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import NewProjectPushTipPopover from '~/projects/experiment_new_project_creation/components/new_project_push_tip_popover.vue';
+import NewProjectPushTipPopover from '~/pages/projects/new/components/new_project_push_tip_popover.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
describe('New project push tip popover', () => {
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
index 8ab0b87d2ee..1cac8ef8ee2 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -1,9 +1,16 @@
+import { GlLoadingIcon, GlModal } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import ContentEditor from '~/content_editor/components/content_editor.vue';
import WikiForm from '~/pages/shared/wikis/components/wiki_form.vue';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
describe('WikiForm', () => {
let wrapper;
+ let mock;
const findForm = () => wrapper.find('form');
const findTitle = () => wrapper.find('#wiki_title');
@@ -11,10 +18,28 @@ describe('WikiForm', () => {
const findContent = () => wrapper.find('#wiki_content');
const findMessage = () => wrapper.find('#wiki_message');
const findSubmitButton = () => wrapper.findByTestId('wiki-submit-button');
- const findCancelButton = () => wrapper.findByTestId('wiki-cancel-button');
- const findTitleHelpLink = () => wrapper.findByTestId('wiki-title-help-link');
+ const findCancelButton = () => wrapper.findByRole('link', { name: 'Cancel' });
+ const findUseNewEditorButton = () => wrapper.findByRole('button', { name: 'Use new editor' });
+ const findSwitchToOldEditorButton = () =>
+ wrapper.findByRole('button', { name: 'Switch to old editor' });
+ const findTitleHelpLink = () => wrapper.findByRole('link', { name: 'More Information.' });
const findMarkdownHelpLink = () => wrapper.findByTestId('wiki-markdown-help-link');
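+  // selects the given format in the wiki format <select> and fires a native change event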
+ const setFormat = (value) => {
+ const format = findFormat();
+ format.find(`option[value=${value}]`).setSelected();
+ format.element.dispatchEvent(new Event('change'));
+ };
+
+ const triggerFormSubmit = () => findForm().element.dispatchEvent(new Event('submit'));
+
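+  // fires a 'beforeunload' event on window and returns it so specs can assert on preventDefault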
+ const dispatchBeforeUnload = () => {
+ const e = new Event('beforeunload');
+ jest.spyOn(e, 'preventDefault');
+ window.dispatchEvent(e);
+ return e;
+ };
+
const pageInfoNew = {
persisted: false,
uploadsPath: '/project/path/-/wikis/attachments',
@@ -35,7 +60,10 @@ describe('WikiForm', () => {
path: '/project/path/-/wikis/home',
};
- function createWrapper(persisted = false, pageInfo = {}) {
+ function createWrapper(
+ persisted = false,
+ { pageInfo, glFeatures } = { glFeatures: { wikiContentEditor: false } },
+ ) {
wrapper = extendedWrapper(
mount(
WikiForm,
@@ -51,16 +79,20 @@ describe('WikiForm', () => {
...(persisted ? pageInfoPersisted : pageInfoNew),
...pageInfo,
},
+ glFeatures,
},
},
{ attachToDocument: true },
),
);
-
- jest.spyOn(wrapper.vm, 'onBeforeUnload');
}
+ beforeEach(() => {
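+    // the axios mock intercepts the preview-markdown requests issued when the content editor loads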
+ mock = new MockAdapter(axios);
+ });
+
afterEach(() => {
+ mock.restore();
wrapper.destroy();
wrapper = null;
});
@@ -101,7 +133,7 @@ describe('WikiForm', () => {
`('updates the link help message when format=$value is selected', async ({ value, text }) => {
createWrapper();
- findFormat().find(`option[value=${value}]`).setSelected();
+ setFormat(value);
await wrapper.vm.$nextTick();
@@ -113,9 +145,9 @@ describe('WikiForm', () => {
await wrapper.vm.$nextTick();
- window.dispatchEvent(new Event('beforeunload'));
-
- expect(wrapper.vm.onBeforeUnload).not.toHaveBeenCalled();
+ const e = dispatchBeforeUnload();
+ expect(typeof e.returnValue).not.toBe('string');
+ expect(e.preventDefault).not.toHaveBeenCalled();
});
it.each`
@@ -156,19 +188,18 @@ describe('WikiForm', () => {
});
it('sets before unload warning', () => {
- window.dispatchEvent(new Event('beforeunload'));
+ const e = dispatchBeforeUnload();
- expect(wrapper.vm.onBeforeUnload).toHaveBeenCalled();
+ expect(e.preventDefault).toHaveBeenCalledTimes(1);
});
it('when form submitted, unsets before unload warning', async () => {
- findForm().element.dispatchEvent(new Event('submit'));
+ triggerFormSubmit();
await wrapper.vm.$nextTick();
- window.dispatchEvent(new Event('beforeunload'));
-
- expect(wrapper.vm.onBeforeUnload).not.toHaveBeenCalled();
+ const e = dispatchBeforeUnload();
+ expect(e.preventDefault).not.toHaveBeenCalled();
});
});
@@ -219,4 +250,212 @@ describe('WikiForm', () => {
},
);
});
+
+ describe('when feature flag wikiContentEditor is enabled', () => {
+ beforeEach(() => {
+ createWrapper(true, { glFeatures: { wikiContentEditor: true } });
+ });
+
+ it.each`
+ format | buttonExists
+ ${'markdown'} | ${true}
+ ${'rdoc'} | ${false}
+ `(
+ 'switch to new editor button exists: $buttonExists if format is $format',
+ async ({ format, buttonExists }) => {
+ setFormat(format);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findUseNewEditorButton().exists()).toBe(buttonExists);
+ },
+ );
+
+ const assertOldEditorIsVisible = () => {
+ expect(wrapper.findComponent(ContentEditor).exists()).toBe(false);
+ expect(wrapper.findComponent(MarkdownField).exists()).toBe(true);
+ expect(findSubmitButton().props('disabled')).toBe(false);
+
+ expect(wrapper.text()).not.toContain(
+ "Switching will discard any changes you've made in the new editor.",
+ );
+ expect(wrapper.text()).not.toContain(
+ "This editor is in beta and may not display the page's contents properly.",
+ );
+ };
+
+ it('shows old editor by default', assertOldEditorIsVisible);
+
+ describe('switch format to rdoc', () => {
+ beforeEach(async () => {
+ setFormat('rdoc');
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('continues to show the old editor', assertOldEditorIsVisible);
+
+ describe('switch format back to markdown', () => {
+ beforeEach(async () => {
+        setFormat('markdown');
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it(
+          'still shows the old editor and does not automatically switch to the content editor',
+ assertOldEditorIsVisible,
+ );
+ });
+ });
+
+ describe('clicking "use new editor": editor fails to load', () => {
+ beforeEach(async () => {
+ mock.onPost(/preview-markdown/).reply(400);
+
+ await findUseNewEditorButton().trigger('click');
+
+ // try waiting for content editor to load (but it will never actually load)
+ await waitForPromises();
+ });
+
+ it('editor is shown in a perpetual loading state', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(ContentEditor).exists()).toBe(false);
+ });
+
+ it('disables the submit button', () => {
+ expect(findSubmitButton().props('disabled')).toBe(true);
+ });
+
+ describe('clicking "switch to old editor"', () => {
+ beforeEach(() => {
+ return findSwitchToOldEditorButton().trigger('click');
+ });
+
+ it('switches to old editor directly without showing a modal', () => {
+ expect(wrapper.findComponent(ContentEditor).exists()).toBe(false);
+ expect(wrapper.findComponent(MarkdownField).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('clicking "use new editor": editor loads successfully', () => {
+ beforeEach(() => {
+ mock.onPost(/preview-markdown/).reply(200, { body: '<p>hello <strong>world</strong></p>' });
+
+ findUseNewEditorButton().trigger('click');
+ });
+
+ it('shows a loading indicator for the rich text editor', () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('shows warnings that the rich text editor is in beta and may not work properly', () => {
+ expect(wrapper.text()).toContain(
+ "Switching will discard any changes you've made in the new editor.",
+ );
+ expect(wrapper.text()).toContain(
+ "This editor is in beta and may not display the page's contents properly.",
+ );
+ });
+
+ it('shows the rich text editor when loading finishes', async () => {
+ // wait for content editor to load
+ await waitForPromises();
+
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(ContentEditor).exists()).toBe(true);
+ });
+
+ it('disables the format dropdown', () => {
+ expect(findFormat().element.getAttribute('disabled')).toBeDefined();
+ });
+
+ describe('when wiki content is updated', () => {
+ beforeEach(async () => {
+ // wait for content editor to load
+ await waitForPromises();
+
+ wrapper.vm.contentEditor.tiptapEditor.commands.setContent(
+ '<p>hello __world__ from content editor</p>',
+ true,
+ );
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('sets before unload warning', () => {
+ const e = dispatchBeforeUnload();
+ expect(e.preventDefault).toHaveBeenCalledTimes(1);
+ });
+
+ it('unsets before unload warning on form submit', async () => {
+ triggerFormSubmit();
+
+ await wrapper.vm.$nextTick();
+
+ const e = dispatchBeforeUnload();
+ expect(e.preventDefault).not.toHaveBeenCalled();
+ });
+ });
+
+ it('updates content from content editor on form submit', async () => {
+ // old value
+ expect(findContent().element.value).toBe('My page content');
+
+ // wait for content editor to load
+ await waitForPromises();
+
+ triggerFormSubmit();
+
+ await wrapper.vm.$nextTick();
+
+ expect(findContent().element.value).toBe('hello **world**');
+ });
+
+ describe('clicking "switch to old editor"', () => {
+ let modal;
+
+ beforeEach(async () => {
+ modal = wrapper.findComponent(GlModal);
+ jest.spyOn(modal.vm, 'show');
+
+ findSwitchToOldEditorButton().trigger('click');
+ });
+
+ it('shows a modal confirming the change', () => {
+ expect(modal.vm.show).toHaveBeenCalled();
+ });
+
+ describe('confirming "switch to old editor" in the modal', () => {
+ beforeEach(async () => {
+ wrapper.vm.contentEditor.tiptapEditor.commands.setContent(
+ '<p>hello __world__ from content editor</p>',
+ true,
+ );
+
+ wrapper.findComponent(GlModal).vm.$emit('primary');
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('switches to old editor', () => {
+ expect(wrapper.findComponent(ContentEditor).exists()).toBe(false);
+ expect(wrapper.findComponent(MarkdownField).exists()).toBe(true);
+ });
+
+ it('does not show a warning about content editor', () => {
+ expect(wrapper.text()).not.toContain(
+ "This editor is in beta and may not display the page's contents properly.",
+ );
+ });
+
+ it('the old editor retains its old value and does not use the content from the content editor', () => {
+ expect(findContent().element.value).toBe('My page content');
+ });
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js b/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js
new file mode 100644
index 00000000000..8a4f07c4d88
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js
@@ -0,0 +1,47 @@
+import { getByRole } from '@testing-library/dom';
+import { mount } from '@vue/test-utils';
+import FirstPipelineCard from '~/pipeline_editor/components/drawer/cards/first_pipeline_card.vue';
+import PipelineVisualReference from '~/pipeline_editor/components/drawer/ui/pipeline_visual_reference.vue';
+
+describe('First pipeline card', () => {
+ let wrapper;
+
+ const defaultProvide = {
+ ciExamplesHelpPagePath: '/pipelines/examples',
+ runnerHelpPagePath: '/help/runners',
+ };
+
+ const createComponent = () => {
+ wrapper = mount(FirstPipelineCard, {
+ provide: {
+ ...defaultProvide,
+ },
+ });
+ };
+
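+  // looks up a link by its accessible name (testing-library getByRole) and returns its href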
+ const getLinkByName = (name) => getByRole(wrapper.element, 'link', { name }).href;
+ const findPipelinesLink = () => getLinkByName(/examples and templates/i);
+ const findRunnersLink = () => getLinkByName(/make sure your instance has runners available/i);
+ const findVisualReference = () => wrapper.findComponent(PipelineVisualReference);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the title', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.title);
+ });
+
+ it('renders the content', () => {
+ expect(findVisualReference().exists()).toBe(true);
+ });
+
+ it('renders the links', () => {
+ expect(findRunnersLink()).toContain(defaultProvide.runnerHelpPagePath);
+ expect(findPipelinesLink()).toContain(defaultProvide.ciExamplesHelpPagePath);
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/drawer/cards/getting_started_card_spec.js b/spec/frontend/pipeline_editor/components/drawer/cards/getting_started_card_spec.js
new file mode 100644
index 00000000000..c592e959068
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/drawer/cards/getting_started_card_spec.js
@@ -0,0 +1,26 @@
+import { shallowMount } from '@vue/test-utils';
+import GettingStartedCard from '~/pipeline_editor/components/drawer/cards/getting_started_card.vue';
+
+describe('Getting started card', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(GettingStartedCard);
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the title', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.title);
+ });
+
+ it('renders the content', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.firstParagraph);
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js b/spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js
new file mode 100644
index 00000000000..3c8821d05a7
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/drawer/cards/pipeline_config_reference_card_spec.js
@@ -0,0 +1,51 @@
+import { getByRole } from '@testing-library/dom';
+import { mount } from '@vue/test-utils';
+import PipelineConfigReferenceCard from '~/pipeline_editor/components/drawer/cards/pipeline_config_reference_card.vue';
+
+describe('Pipeline config reference card', () => {
+ let wrapper;
+
+ const defaultProvide = {
+ ciExamplesHelpPagePath: 'help/ci/examples/',
+ ciHelpPagePath: 'help/ci/introduction',
+ needsHelpPagePath: 'help/ci/yaml#needs',
+ ymlHelpPagePath: 'help/ci/yaml',
+ };
+
+ const createComponent = () => {
+ wrapper = mount(PipelineConfigReferenceCard, {
+ provide: {
+ ...defaultProvide,
+ },
+ });
+ };
+
+ const getLinkByName = (name) => getByRole(wrapper.element, 'link', { name }).href;
+ const findCiExamplesLink = () => getLinkByName(/CI\/CD examples and templates/i);
+ const findCiIntroLink = () => getLinkByName(/GitLab CI\/CD concepts/i);
+ const findNeedsLink = () => getLinkByName(/Needs keyword/i);
+ const findYmlSyntaxLink = () => getLinkByName(/.gitlab-ci.yml syntax reference/i);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the title', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.title);
+ });
+
+ it('renders the content', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.firstParagraph);
+ });
+
+ it('renders the links', () => {
+ expect(findCiExamplesLink()).toContain(defaultProvide.ciExamplesHelpPagePath);
+ expect(findCiIntroLink()).toContain(defaultProvide.ciHelpPagePath);
+ expect(findNeedsLink()).toContain(defaultProvide.needsHelpPagePath);
+ expect(findYmlSyntaxLink()).toContain(defaultProvide.ymlHelpPagePath);
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/drawer/cards/visualize_and_lint_card_spec.js b/spec/frontend/pipeline_editor/components/drawer/cards/visualize_and_lint_card_spec.js
new file mode 100644
index 00000000000..bebd2484c1d
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/drawer/cards/visualize_and_lint_card_spec.js
@@ -0,0 +1,26 @@
+import { shallowMount } from '@vue/test-utils';
+import VisualizeAndLintCard from '~/pipeline_editor/components/drawer/cards/visualize_and_lint_card.vue';
+
+describe('Visualize and Lint card', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(VisualizeAndLintCard);
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the title', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.title);
+ });
+
+ it('renders the content', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.firstParagraph);
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js b/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
new file mode 100644
index 00000000000..1b68cd3dc43
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/drawer/pipeline_editor_drawer_spec.js
@@ -0,0 +1,142 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import FirstPipelineCard from '~/pipeline_editor/components/drawer/cards/first_pipeline_card.vue';
+import GettingStartedCard from '~/pipeline_editor/components/drawer/cards/getting_started_card.vue';
+import PipelineConfigReferenceCard from '~/pipeline_editor/components/drawer/cards/pipeline_config_reference_card.vue';
+import VisualizeAndLintCard from '~/pipeline_editor/components/drawer/cards/visualize_and_lint_card.vue';
+import PipelineEditorDrawer from '~/pipeline_editor/components/drawer/pipeline_editor_drawer.vue';
+import { DRAWER_EXPANDED_KEY } from '~/pipeline_editor/constants';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
+
+describe('Pipeline editor drawer', () => {
+ useLocalStorageSpy();
+
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(PipelineEditorDrawer, {
+ stubs: { LocalStorageSync },
+ });
+ };
+
+ const findFirstPipelineCard = () => wrapper.findComponent(FirstPipelineCard);
+ const findGettingStartedCard = () => wrapper.findComponent(GettingStartedCard);
+ const findPipelineConfigReferenceCard = () => wrapper.findComponent(PipelineConfigReferenceCard);
+ const findToggleBtn = () => wrapper.findComponent(GlButton);
+ const findVisualizeAndLintCard = () => wrapper.findComponent(VisualizeAndLintCard);
+
+ const findArrowIcon = () => wrapper.find('[data-testid="toggle-icon"]');
+ const findCollapseText = () => wrapper.find('[data-testid="collapse-text"]');
+ const findDrawerContent = () => wrapper.find('[data-testid="drawer-content"]');
+
+ const clickToggleBtn = async () => findToggleBtn().vm.$emit('click');
+
+ afterEach(() => {
+ wrapper.destroy();
+ localStorage.clear();
+ });
+
+  it('sets the drawer to be opened by default', async () => {
+ createComponent();
+
+ expect(findDrawerContent().exists()).toBe(false);
+
+ await nextTick();
+
+ expect(findDrawerContent().exists()).toBe(true);
+ });
+
+ describe('when the drawer is collapsed', () => {
+ beforeEach(async () => {
+ createComponent();
+ await clickToggleBtn();
+ });
+
+ it('shows the left facing arrow icon', () => {
+ expect(findArrowIcon().props('name')).toBe('chevron-double-lg-left');
+ });
+
+ it('does not show the collapse text', () => {
+ expect(findCollapseText().exists()).toBe(false);
+ });
+
+ it('does not show the drawer content', () => {
+ expect(findDrawerContent().exists()).toBe(false);
+ });
+
+ it('can open the drawer by clicking on the toggle button', async () => {
+ expect(findDrawerContent().exists()).toBe(false);
+
+ await clickToggleBtn();
+
+ expect(findDrawerContent().exists()).toBe(true);
+ });
+ });
+
+ describe('when the drawer is expanded', () => {
+ beforeEach(async () => {
+ createComponent();
+ });
+
+ it('shows the right facing arrow icon', () => {
+ expect(findArrowIcon().props('name')).toBe('chevron-double-lg-right');
+ });
+
+ it('shows the collapse text', () => {
+ expect(findCollapseText().exists()).toBe(true);
+ });
+
+ it('shows the drawer content', () => {
+ expect(findDrawerContent().exists()).toBe(true);
+ });
+
+ it('shows all the introduction cards', () => {
+ expect(findFirstPipelineCard().exists()).toBe(true);
+ expect(findGettingStartedCard().exists()).toBe(true);
+ expect(findPipelineConfigReferenceCard().exists()).toBe(true);
+ expect(findVisualizeAndLintCard().exists()).toBe(true);
+ });
+
+ it('can close the drawer by clicking on the toggle button', async () => {
+ expect(findDrawerContent().exists()).toBe(true);
+
+ await clickToggleBtn();
+
+ expect(findDrawerContent().exists()).toBe(false);
+ });
+ });
+
+ describe('local storage', () => {
+ it('saves the drawer expanded value to local storage', async () => {
+ localStorage.setItem(DRAWER_EXPANDED_KEY, 'false');
+
+ createComponent();
+ await clickToggleBtn();
+
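+      // the spy records both the manual setItem above ('false') and the value synced after toggling ('true')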
+ expect(localStorage.setItem.mock.calls).toEqual([
+ [DRAWER_EXPANDED_KEY, 'false'],
+ [DRAWER_EXPANDED_KEY, 'true'],
+ ]);
+ });
+
+    it('loads the drawer collapsed when local storage is set to `false`', async () => {
+ localStorage.setItem(DRAWER_EXPANDED_KEY, false);
+ createComponent();
+
+ await nextTick();
+
+ expect(findDrawerContent().exists()).toBe(false);
+ });
+
+    it('loads the drawer expanded when local storage is set to `true`', async () => {
+ localStorage.setItem(DRAWER_EXPANDED_KEY, true);
+ createComponent();
+
+ await nextTick();
+
+ expect(findDrawerContent().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/drawer/ui/demo_job_pill_spec.js b/spec/frontend/pipeline_editor/components/drawer/ui/demo_job_pill_spec.js
new file mode 100644
index 00000000000..edd2b45569a
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/drawer/ui/demo_job_pill_spec.js
@@ -0,0 +1,27 @@
+import { shallowMount } from '@vue/test-utils';
+import DemoJobPill from '~/pipeline_editor/components/drawer/ui/demo_job_pill.vue';
+
+describe('Demo job pill', () => {
+ let wrapper;
+ const jobName = 'my-build-job';
+
+ const createComponent = () => {
+ wrapper = shallowMount(DemoJobPill, {
+ propsData: {
+ jobName,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the jobName', () => {
+ expect(wrapper.text()).toContain(jobName);
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/drawer/ui/pipeline_visual_reference_spec.js b/spec/frontend/pipeline_editor/components/drawer/ui/pipeline_visual_reference_spec.js
new file mode 100644
index 00000000000..e4834544484
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/drawer/ui/pipeline_visual_reference_spec.js
@@ -0,0 +1,31 @@
+import { shallowMount } from '@vue/test-utils';
+import DemoJobPill from '~/pipeline_editor/components/drawer/ui/demo_job_pill.vue';
+import PipelineVisualReference from '~/pipeline_editor/components/drawer/ui/pipeline_visual_reference.vue';
+
+describe('Pipeline visual reference', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(PipelineVisualReference);
+ };
+
+ const findAllDemoJobPills = () => wrapper.findAllComponents(DemoJobPill);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders all stage names', () => {
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.stageNames.build);
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.stageNames.test);
+ expect(wrapper.text()).toContain(wrapper.vm.$options.i18n.stageNames.deploy);
+ });
+
+ it('renders all job pills', () => {
+ expect(findAllDemoJobPills()).toHaveLength(4);
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
index 3bf5a291c69..7a5b01fb04a 100644
--- a/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
+++ b/spec/frontend/pipeline_editor/components/editor/text_editor_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { EDITOR_READY_EVENT } from '~/editor/constants';
+import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
import {
mockCiConfigPath,
@@ -59,6 +60,10 @@ describe('Pipeline Editor | Text editor component', () => {
const findEditor = () => wrapper.findComponent(MockEditorLite);
+ beforeEach(() => {
+ EditorLiteExtension.deferRerender = jest.fn();
+ });
+
afterEach(() => {
wrapper.destroy();
wrapper = null;
diff --git a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
index fa937100982..d6763a7de41 100644
--- a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
+++ b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
@@ -1,11 +1,28 @@
-import { GlDropdown, GlDropdownItem, GlIcon } from '@gitlab/ui';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import {
+ GlDropdown,
+ GlDropdownItem,
+ GlInfiniteScroll,
+ GlLoadingIcon,
+ GlSearchBoxByType,
+} from '@gitlab/ui';
+import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BranchSwitcher from '~/pipeline_editor/components/file_nav/branch_switcher.vue';
import { DEFAULT_FAILURE } from '~/pipeline_editor/constants';
-import { mockDefaultBranch, mockProjectBranches, mockProjectFullPath } from '../../mock_data';
+import getAvailableBranches from '~/pipeline_editor/graphql/queries/available_branches.graphql';
+import {
+ mockBranchPaginationLimit,
+ mockDefaultBranch,
+ mockEmptySearchBranches,
+ mockProjectBranches,
+ mockProjectFullPath,
+ mockSearchBranches,
+ mockTotalBranches,
+ mockTotalBranchResults,
+ mockTotalSearchResults,
+} from '../../mock_data';
const localVue = createLocalVue();
localVue.use(VueApollo);
@@ -15,30 +32,64 @@ describe('Pipeline editor branch switcher', () => {
let mockApollo;
let mockAvailableBranchQuery;
- const createComponentWithApollo = () => {
- const resolvers = {
- Query: {
- project: mockAvailableBranchQuery,
+ const createComponent = (
+ { isQueryLoading, mountFn, options } = {
+ isQueryLoading: false,
+ mountFn: shallowMount,
+ options: {},
+ },
+ ) => {
+ wrapper = mountFn(BranchSwitcher, {
+ propsData: {
+ paginationLimit: mockBranchPaginationLimit,
},
- };
-
- mockApollo = createMockApollo([], resolvers);
- wrapper = shallowMount(BranchSwitcher, {
- localVue,
- apolloProvider: mockApollo,
provide: {
projectFullPath: mockProjectFullPath,
+ totalBranches: mockTotalBranches,
+ },
+ mocks: {
+ $apollo: {
+ queries: {
+ availableBranches: {
+ loading: isQueryLoading,
+ },
+ },
+ },
},
data() {
return {
+ branches: ['main'],
currentBranch: mockDefaultBranch,
};
},
+ ...options,
+ });
+ };
+
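+  // unlike createComponent above, this wires a mock Apollo provider so the availableBranches query actually resolves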
+ const createComponentWithApollo = (mountFn = shallowMount) => {
+ const handlers = [[getAvailableBranches, mockAvailableBranchQuery]];
+ mockApollo = createMockApollo(handlers);
+
+ createComponent({
+ mountFn,
+ options: {
+ localVue,
+ apolloProvider: mockApollo,
+ mocks: {},
+ data() {
+ return {
+ currentBranch: mockDefaultBranch,
+ };
+ },
+ },
});
};
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+ const findInfiniteScroll = () => wrapper.findComponent(GlInfiniteScroll);
beforeEach(() => {
mockAvailableBranchQuery = jest.fn();
@@ -48,7 +99,7 @@ describe('Pipeline editor branch switcher', () => {
wrapper.destroy();
});
- describe('while querying', () => {
+ describe('when querying for the first time', () => {
beforeEach(() => {
createComponentWithApollo();
});
@@ -61,41 +112,31 @@ describe('Pipeline editor branch switcher', () => {
describe('after querying', () => {
beforeEach(async () => {
mockAvailableBranchQuery.mockResolvedValue(mockProjectBranches);
- createComponentWithApollo();
+ createComponentWithApollo(mount);
await waitForPromises();
});
- it('query is called with correct variables', async () => {
- expect(mockAvailableBranchQuery).toHaveBeenCalledTimes(1);
- expect(mockAvailableBranchQuery).toHaveBeenCalledWith(
- expect.anything(),
- {
- fullPath: mockProjectFullPath,
- },
- expect.anything(),
- expect.anything(),
- );
+ it('renders search box', () => {
+ expect(findSearchBox().exists()).toBe(true);
});
it('renders list of branches', () => {
expect(findDropdown().exists()).toBe(true);
- expect(findDropdownItems()).toHaveLength(mockProjectBranches.repository.branches.length);
+ expect(findDropdownItems()).toHaveLength(mockTotalBranchResults);
});
- it('renders current branch at the top of the list with a check mark', () => {
- const firstDropdownItem = findDropdownItems().at(0);
- const icon = firstDropdownItem.findComponent(GlIcon);
+ it('renders current branch with a check mark', () => {
+ const defaultBranchInDropdown = findDropdownItems().at(0);
- expect(firstDropdownItem.text()).toBe(mockDefaultBranch);
- expect(icon.exists()).toBe(true);
- expect(icon.props('name')).toBe('check');
+ expect(defaultBranchInDropdown.text()).toBe(mockDefaultBranch);
+ expect(defaultBranchInDropdown.props('isChecked')).toBe(true);
});
it('does not render check mark for other branches', () => {
- const secondDropdownItem = findDropdownItems().at(1);
- const icon = secondDropdownItem.findComponent(GlIcon);
+ const nonDefaultBranch = findDropdownItems().at(1);
- expect(icon.classes()).toContain('gl-visibility-hidden');
+ expect(nonDefaultBranch.text()).not.toBe(mockDefaultBranch);
+ expect(nonDefaultBranch.props('isChecked')).toBe(false);
});
});
@@ -120,4 +161,186 @@ describe('Pipeline editor branch switcher', () => {
]);
});
});
+
+ describe('when switching branches', () => {
+ beforeEach(async () => {
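+      // stub pushState so selecting a branch does not actually navigate the test page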
+ jest.spyOn(window.history, 'pushState').mockImplementation(() => {});
+ mockAvailableBranchQuery.mockResolvedValue(mockProjectBranches);
+ createComponentWithApollo(mount);
+ await waitForPromises();
+ });
+
+ it('updates session history when selecting a different branch', async () => {
+ const branch = findDropdownItems().at(1);
+ await branch.vm.$emit('click');
+
+ expect(window.history.pushState).toHaveBeenCalled();
+ expect(window.history.pushState.mock.calls[0][2]).toContain(`?branch_name=${branch.text()}`);
+ });
+
+ it('does not update session history when selecting current branch', async () => {
+ const branch = findDropdownItems().at(0);
+ await branch.vm.$emit('click');
+
+ expect(branch.text()).toBe(mockDefaultBranch);
+ expect(window.history.pushState).not.toHaveBeenCalled();
+ });
+
+ it('emits the refetchContent event when selecting a different branch', async () => {
+ const branch = findDropdownItems().at(1);
+
+ expect(branch.text()).not.toBe(mockDefaultBranch);
+ expect(wrapper.emitted('refetchContent')).toBeUndefined();
+
+ await branch.vm.$emit('click');
+
+ expect(wrapper.emitted('refetchContent')).toBeDefined();
+ expect(wrapper.emitted('refetchContent')).toHaveLength(1);
+ });
+
+ it('does not emit the refetchContent event when selecting the current branch', async () => {
+ const branch = findDropdownItems().at(0);
+
+ expect(branch.text()).toBe(mockDefaultBranch);
+ expect(wrapper.emitted('refetchContent')).toBeUndefined();
+
+ await branch.vm.$emit('click');
+
+ expect(wrapper.emitted('refetchContent')).toBeUndefined();
+ });
+ });
+
+ describe('when searching', () => {
+ beforeEach(async () => {
+ mockAvailableBranchQuery.mockResolvedValue(mockProjectBranches);
+ createComponentWithApollo(mount);
+ await waitForPromises();
+
+ mockAvailableBranchQuery.mockResolvedValue(mockSearchBranches);
+ });
+
+ describe('with a search term', () => {
+ it('calls query with correct variables', async () => {
+ findSearchBox().vm.$emit('input', 'te');
+ await waitForPromises();
+
+ expect(mockAvailableBranchQuery).toHaveBeenCalledWith({
+ limit: mockTotalBranches, // fetch all branches
+ offset: 0,
+ projectFullPath: mockProjectFullPath,
+ searchPattern: '*te*',
+ });
+ });
+
+ it('fetches new list of branches', async () => {
+ expect(findDropdownItems()).toHaveLength(mockTotalBranchResults);
+
+ findSearchBox().vm.$emit('input', 'te');
+ await waitForPromises();
+
+ expect(findDropdownItems()).toHaveLength(mockTotalSearchResults);
+ });
+
+ it('does not hide dropdown when search result is empty', async () => {
+ mockAvailableBranchQuery.mockResolvedValue(mockEmptySearchBranches);
+ findSearchBox().vm.$emit('input', 'aaaaa');
+ await waitForPromises();
+
+ expect(findDropdown().exists()).toBe(true);
+ expect(findDropdownItems()).toHaveLength(0);
+ });
+ });
+
+ describe('without a search term', () => {
+ beforeEach(async () => {
+ findSearchBox().vm.$emit('input', 'te');
+ await waitForPromises();
+
+ mockAvailableBranchQuery.mockResolvedValue(mockProjectBranches);
+ });
+
+ it('calls query with correct variables', async () => {
+ findSearchBox().vm.$emit('input', '');
+ await waitForPromises();
+
+ expect(mockAvailableBranchQuery).toHaveBeenCalledWith({
+          limit: mockBranchPaginationLimit, // only fetch the first page of branches
+ offset: 0,
+ projectFullPath: mockProjectFullPath,
+ searchPattern: '*',
+ });
+ });
+
+ it('fetches new list of branches', async () => {
+ expect(findDropdownItems()).toHaveLength(mockTotalSearchResults);
+
+ findSearchBox().vm.$emit('input', '');
+ await waitForPromises();
+
+ expect(findDropdownItems()).toHaveLength(mockTotalBranchResults);
+ });
+ });
+ });
+
+ describe('loading icon', () => {
+ test.each`
+ isQueryLoading | isRendered
+ ${true} | ${true}
+ ${false} | ${false}
+ `('checks if query is loading before rendering', ({ isQueryLoading, isRendered }) => {
+ createComponent({ isQueryLoading, mountFn: mount });
+
+ expect(findLoadingIcon().exists()).toBe(isRendered);
+ });
+ });
+
+ describe('when scrolling to the bottom of the list', () => {
+ beforeEach(async () => {
+ mockAvailableBranchQuery.mockResolvedValue(mockProjectBranches);
+ createComponentWithApollo();
+ await waitForPromises();
+ });
+
+ afterEach(() => {
+ mockAvailableBranchQuery.mockClear();
+ });
+
+ describe('when search term is empty', () => {
+ it('fetches more branches', async () => {
+ expect(mockAvailableBranchQuery).toHaveBeenCalledTimes(1);
+
+ findInfiniteScroll().vm.$emit('bottomReached');
+ await waitForPromises();
+
+ expect(mockAvailableBranchQuery).toHaveBeenCalledTimes(2);
+ });
+
+ it('calls the query with the correct variables', async () => {
+ findInfiniteScroll().vm.$emit('bottomReached');
+ await waitForPromises();
+
+ expect(mockAvailableBranchQuery).toHaveBeenCalledWith({
+ limit: mockBranchPaginationLimit,
+ offset: mockBranchPaginationLimit, // offset changed
+ projectFullPath: mockProjectFullPath,
+ searchPattern: '*',
+ });
+ });
+ });
+
+ describe('when search term exists', () => {
+ it('does not fetch more branches', async () => {
+ findSearchBox().vm.$emit('input', 'te');
+ await waitForPromises();
+
+ expect(mockAvailableBranchQuery).toHaveBeenCalledTimes(2);
+ mockAvailableBranchQuery.mockClear();
+
+ findInfiniteScroll().vm.$emit('bottomReached');
+ await waitForPromises();
+
+ expect(mockAvailableBranchQuery).not.toHaveBeenCalled();
+ });
+ });
+ });
});
diff --git a/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js
index 27652bb268b..e1dc08b637f 100644
--- a/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js
+++ b/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js
@@ -7,16 +7,10 @@ import { mockCiYml, mockLintResponse } from '../../mock_data';
describe('Pipeline editor header', () => {
let wrapper;
- const mockProvide = {
- glFeatures: {
- pipelineStatusForPipelineEditor: true,
- },
- };
const createComponent = ({ provide = {}, props = {} } = {}) => {
wrapper = shallowMount(PipelineEditorHeader, {
provide: {
- ...mockProvide,
...provide,
},
propsData: {
@@ -56,18 +50,4 @@ describe('Pipeline editor header', () => {
expect(findValidationSegment().exists()).toBe(true);
});
});
-
- describe('with pipeline status feature flag off', () => {
- beforeEach(() => {
- createComponent({
- provide: {
- glFeatures: { pipelineStatusForPipelineEditor: false },
- },
- });
- });
-
- it('does not render the pipeline status', () => {
- expect(findPipelineStatus().exists()).toBe(false);
- });
- });
});
diff --git a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
index eba853180cd..5cf8d47bc23 100644
--- a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -20,12 +20,6 @@ describe('Pipeline editor tabs component', () => {
const MockTextEditor = {
template: '<div />',
};
- const mockProvide = {
- glFeatures: {
- ciConfigVisualizationTab: true,
- ciConfigMergedTab: true,
- },
- };
const createComponent = ({
props = {},
@@ -44,7 +38,7 @@ describe('Pipeline editor tabs component', () => {
appStatus,
};
},
- provide: { ...mockProvide, ...provide },
+ provide: { ...provide },
stubs: {
TextEditor: MockTextEditor,
EditorTab,
@@ -82,41 +76,24 @@ describe('Pipeline editor tabs component', () => {
});
describe('visualization tab', () => {
- describe('with feature flag on', () => {
- describe('while loading', () => {
- beforeEach(() => {
- createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
- });
-
- it('displays a loading icon if the lint query is loading', () => {
- expect(findLoadingIcon().exists()).toBe(true);
- expect(findPipelineGraph().exists()).toBe(false);
- });
- });
- describe('after loading', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('display the tab and visualization', () => {
- expect(findVisualizationTab().exists()).toBe(true);
- expect(findPipelineGraph().exists()).toBe(true);
- });
+ describe('while loading', () => {
+ beforeEach(() => {
+ createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
});
- });
- describe('with feature flag off', () => {
+ it('displays a loading icon if the lint query is loading', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findPipelineGraph().exists()).toBe(false);
+ });
+ });
+ describe('after loading', () => {
beforeEach(() => {
- createComponent({
- provide: {
- glFeatures: { ciConfigVisualizationTab: false },
- },
- });
+ createComponent();
});
- it('does not display the tab or component', () => {
- expect(findVisualizationTab().exists()).toBe(false);
- expect(findPipelineGraph().exists()).toBe(false);
+      it('displays the tab and visualization', () => {
+ expect(findVisualizationTab().exists()).toBe(true);
+ expect(findPipelineGraph().exists()).toBe(true);
});
});
});
@@ -148,51 +125,39 @@ describe('Pipeline editor tabs component', () => {
});
describe('merged tab', () => {
- describe('with feature flag on', () => {
- describe('while loading', () => {
- beforeEach(() => {
- createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
- });
-
- it('displays a loading icon if the lint query is loading', () => {
- expect(findLoadingIcon().exists()).toBe(true);
- });
+ describe('while loading', () => {
+ beforeEach(() => {
+ createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
});
- describe('when there is a fetch error', () => {
- beforeEach(() => {
- createComponent({ appStatus: EDITOR_APP_STATUS_ERROR });
- });
-
- it('show an error message', () => {
- expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts.loadMergedYaml);
- });
+ it('displays a loading icon if the lint query is loading', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
- it('does not render the `meged_preview` component', () => {
- expect(findMergedPreview().exists()).toBe(false);
- });
+ describe('when there is a fetch error', () => {
+ beforeEach(() => {
+ createComponent({ appStatus: EDITOR_APP_STATUS_ERROR });
});
- describe('after loading', () => {
- beforeEach(() => {
- createComponent();
- });
+      it('shows an error message', () => {
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts.loadMergedYaml);
+ });
- it('display the tab and the merged preview component', () => {
- expect(findMergedTab().exists()).toBe(true);
- expect(findMergedPreview().exists()).toBe(true);
- });
+ it('does not render the `merged_preview` component', () => {
+ expect(findMergedPreview().exists()).toBe(false);
});
});
- describe('with feature flag off', () => {
+
+ describe('after loading', () => {
beforeEach(() => {
- createComponent({ provide: { glFeatures: { ciConfigMergedTab: false } } });
+ createComponent();
});
- it('does not display the merged tab', () => {
- expect(findMergedTab().exists()).toBe(false);
- expect(findMergedPreview().exists()).toBe(false);
+      it('displays the tab and the merged preview component', () => {
+ expect(findMergedTab().exists()).toBe(true);
+ expect(findMergedPreview().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
index b444d9dcfea..76c68e21180 100644
--- a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
+++ b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_empty_state_spec.js
@@ -1,11 +1,13 @@
import { GlButton, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import PipelineEditorFileNav from '~/pipeline_editor/components/file_nav/pipeline_editor_file_nav.vue';
import PipelineEditorEmptyState from '~/pipeline_editor/components/ui/pipeline_editor_empty_state.vue';
describe('Pipeline editor empty state', () => {
let wrapper;
const defaultProvide = {
glFeatures: {
+ pipelineEditorBranchSwitcher: true,
pipelineEditorEmptyStateAction: false,
},
emptyStateIllustrationPath: 'my/svg/path',
@@ -17,6 +19,7 @@ describe('Pipeline editor empty state', () => {
});
};
+ const findFileNav = () => wrapper.findComponent(PipelineEditorFileNav);
const findSvgImage = () => wrapper.find('img');
const findTitle = () => wrapper.find('h1');
const findConfirmButton = () => wrapper.findComponent(GlButton);
@@ -45,6 +48,10 @@ describe('Pipeline editor empty state', () => {
expect(findDescription().html()).toContain(wrapper.vm.$options.i18n.body);
});
+ it('renders the file nav', () => {
+ expect(findFileNav().exists()).toBe(true);
+ });
+
describe('with feature flag off', () => {
    it('does not render a CTA button', () => {
expect(findConfirmButton().exists()).toBe(false);
@@ -75,5 +82,17 @@ describe('Pipeline editor empty state', () => {
await findConfirmButton().vm.$emit('click');
expect(wrapper.emitted(expectedEvent)).toHaveLength(1);
});
+
+ describe('with branch switcher feature flag OFF', () => {
+ it('does not render the file nav', () => {
+ createComponent({
+ provide: {
+ glFeatures: { pipelineEditorBranchSwitcher: false },
+ },
+ });
+
+ expect(findFileNav().exists()).toBe(false);
+ });
+ });
});
});
diff --git a/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js
new file mode 100644
index 00000000000..93ebbc648fe
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/ui/pipeline_editor_messages_spec.js
@@ -0,0 +1,137 @@
+import { GlAlert } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { TEST_HOST } from 'helpers/test_constants';
+import CodeSnippetAlert from '~/pipeline_editor/components/code_snippet_alert/code_snippet_alert.vue';
+import { CODE_SNIPPET_SOURCES } from '~/pipeline_editor/components/code_snippet_alert/constants';
+import PipelineEditorMessages from '~/pipeline_editor/components/ui/pipeline_editor_messages.vue';
+import {
+ COMMIT_FAILURE,
+ COMMIT_SUCCESS,
+ DEFAULT_FAILURE,
+ DEFAULT_SUCCESS,
+ LOAD_FAILURE_UNKNOWN,
+} from '~/pipeline_editor/constants';
+
+describe('Pipeline Editor messages', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(PipelineEditorMessages, {
+ propsData: props,
+ });
+ };
+
+ const findCodeSnippetAlert = () => wrapper.findComponent(CodeSnippetAlert);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
+ describe('success alert', () => {
+ it('shows a message for successful commit type', () => {
+ createComponent({ successType: COMMIT_SUCCESS, showSuccess: true });
+
+ expect(findAlert().text()).toBe(wrapper.vm.$options.successTexts[COMMIT_SUCCESS]);
+ });
+
+ it('does not show alert when there is a successType but visibility is off', () => {
+ createComponent({ successType: COMMIT_SUCCESS, showSuccess: false });
+
+ expect(findAlert().exists()).toBe(false);
+ });
+
+    it('shows a success alert with default copy if `showSuccess` is true and the `successType` is not valid', () => {
+ createComponent({ successType: 'random', showSuccess: true });
+
+ expect(findAlert().text()).toBe(wrapper.vm.$options.successTexts[DEFAULT_SUCCESS]);
+ });
+
+    it('emits `hide-success` event when clicking on the dismiss button', async () => {
+ const expectedEvent = 'hide-success';
+
+ createComponent({ successType: COMMIT_SUCCESS, showSuccess: true });
+ expect(wrapper.emitted(expectedEvent)).not.toBeDefined();
+
+ await findAlert().vm.$emit('dismiss');
+
+ expect(wrapper.emitted(expectedEvent)).toBeDefined();
+ });
+ });
+
+ describe('failure alert', () => {
+ it.each`
+ failureType | message | expectedFailureType
+ ${COMMIT_FAILURE} | ${'failed commit'} | ${COMMIT_FAILURE}
+ ${LOAD_FAILURE_UNKNOWN} | ${'loading failure'} | ${LOAD_FAILURE_UNKNOWN}
+ ${'random'} | ${'error without a specified type'} | ${DEFAULT_FAILURE}
+ `('shows a message for $message', ({ failureType, expectedFailureType }) => {
+ createComponent({ failureType, showFailure: true });
+
+ expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[expectedFailureType]);
+ });
+
+    it('shows failure reasons when there are some', () => {
+ const failureReasons = ['There was a problem', 'ouppps'];
+ createComponent({ failureType: COMMIT_FAILURE, failureReasons, showFailure: true });
+
+ expect(wrapper.html()).toContain(failureReasons[0]);
+ expect(wrapper.html()).toContain(failureReasons[1]);
+ });
+
+    it('does not show an alert when there is a failureType but visibility is off', () => {
+ createComponent({ failureType: 'random', showFailure: false });
+
+ expect(findAlert().exists()).toBe(false);
+ });
+
+    it('emits `hide-failure` event when clicking on the dismiss button', async () => {
+ const expectedEvent = 'hide-failure';
+
+ createComponent({ failureType: COMMIT_FAILURE, showFailure: true });
+ expect(wrapper.emitted(expectedEvent)).not.toBeDefined();
+
+ await findAlert().vm.$emit('dismiss');
+
+ expect(wrapper.emitted(expectedEvent)).toBeDefined();
+ });
+ });
+
+ describe('code snippet alert', () => {
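+    // simulates landing on the page with a ?code_snippet_copied_from= URL param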
+ const setCodeSnippetUrlParam = (value) => {
+ global.jsdom.reconfigure({
+ url: `${TEST_HOST}/?code_snippet_copied_from=${value}`,
+ });
+ };
+
+ it('does not show by default', () => {
+ createComponent();
+
+ expect(findCodeSnippetAlert().exists()).toBe(false);
+ });
+
+ it.each(CODE_SNIPPET_SOURCES)('shows if URL param is %s, and cleans up URL', (source) => {
+ jest.spyOn(window.history, 'replaceState');
+ setCodeSnippetUrlParam(source);
+ createComponent();
+
+ expect(findCodeSnippetAlert().exists()).toBe(true);
+ expect(window.history.replaceState).toHaveBeenCalledWith({}, document.title, `${TEST_HOST}/`);
+ });
+
+ it('does not show if URL param is invalid', () => {
+ setCodeSnippetUrlParam('foo_bar');
+ createComponent();
+
+ expect(findCodeSnippetAlert().exists()).toBe(false);
+ });
+
+ it('disappears on dismiss', async () => {
+ setCodeSnippetUrlParam('api_fuzzing');
+ createComponent();
+ const alert = findCodeSnippetAlert();
+
+ expect(alert.exists()).toBe(true);
+
+ await alert.vm.$emit('dismiss');
+
+ expect(alert.exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap b/spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap
index 8670c44f6f6..ee5a3cb288f 100644
--- a/spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap
+++ b/spec/frontend/pipeline_editor/graphql/__snapshots__/resolvers_spec.js.snap
@@ -17,7 +17,7 @@ Object {
"environment": "prd",
"except": Object {
"refs": Array [
- "master@gitlab-org/gitlab",
+ "main@gitlab-org/gitlab",
"/^release/.*$/@gitlab-org/gitlab",
],
},
@@ -44,7 +44,7 @@ Object {
"environment": "stg",
"except": Object {
"refs": Array [
- "master@gitlab-org/gitlab",
+ "main@gitlab-org/gitlab",
"/^release/.*$/@gitlab-org/gitlab",
],
},
diff --git a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
index f0932fc55d3..d39c0d80296 100644
--- a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
+++ b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
@@ -9,7 +9,6 @@ import {
mockDefaultBranch,
mockLintResponse,
mockProjectFullPath,
- mockProjectBranches,
} from '../mock_data';
jest.mock('~/api', () => {
@@ -47,23 +46,6 @@ describe('~/pipeline_editor/graphql/resolvers', () => {
await expect(result.rawData).resolves.toBe(mockCiYml);
});
});
-
- describe('project', () => {
- it('resolves project data with type names', async () => {
- const result = await resolvers.Query.project();
-
- // eslint-disable-next-line no-underscore-dangle
- expect(result.__typename).toBe('Project');
- });
-
- it('resolves project with available list of branches', async () => {
- const result = await resolvers.Query.project();
-
- expect(result.repository.branches).toHaveLength(
- mockProjectBranches.repository.branches.length,
- );
- });
- });
});
describe('Mutation', () => {
diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js
index 7f651a42231..e08fce3ceb9 100644
--- a/spec/frontend/pipeline_editor/mock_data.js
+++ b/spec/frontend/pipeline_editor/mock_data.js
@@ -4,7 +4,7 @@ import { unwrapStagesWithNeeds } from '~/pipelines/components/unwrapping_utils';
export const mockProjectNamespace = 'user1';
export const mockProjectPath = 'project1';
export const mockProjectFullPath = `${mockProjectNamespace}/${mockProjectPath}`;
-export const mockDefaultBranch = 'master';
+export const mockDefaultBranch = 'main';
export const mockNewMergeRequestPath = '/-/merge_requests/new';
export const mockCommitSha = 'aabbccdd';
export const mockCommitNextSha = 'eeffgghh';
@@ -139,19 +139,54 @@ export const mergeUnwrappedCiConfig = (mergedConfig) => {
};
export const mockProjectBranches = {
- __typename: 'Project',
- repository: {
- __typename: 'Repository',
- branches: [
- { __typename: 'Branch', name: 'master' },
- { __typename: 'Branch', name: 'main' },
- { __typename: 'Branch', name: 'develop' },
- { __typename: 'Branch', name: 'production' },
- { __typename: 'Branch', name: 'test' },
- ],
+ data: {
+ project: {
+ repository: {
+ branchNames: [
+ 'main',
+ 'develop',
+ 'production',
+ 'test',
+ 'better-feature',
+ 'feature-abc',
+ 'update-ci',
+ 'mock-feature',
+ 'test-merge-request',
+ 'staging',
+ ],
+ },
+ },
},
};
+export const mockTotalBranchResults =
+ mockProjectBranches.data.project.repository.branchNames.length;
+
+export const mockSearchBranches = {
+ data: {
+ project: {
+ repository: {
+ branchNames: ['test', 'better-feature', 'update-ci', 'test-merge-request'],
+ },
+ },
+ },
+};
+
+export const mockTotalSearchResults = mockSearchBranches.data.project.repository.branchNames.length;
+
+export const mockEmptySearchBranches = {
+ data: {
+ project: {
+ repository: {
+ branchNames: [],
+ },
+ },
+ },
+};
+
+export const mockBranchPaginationLimit = 10;
+export const mockTotalBranches = 20; // must be greater than mockBranchPaginationLimit to test pagination
+
export const mockProjectPipeline = {
pipeline: {
commitPath: '/-/commit/aabbccdd',
@@ -186,7 +221,7 @@ export const mockLintResponse = {
when: 'on_success',
allow_failure: false,
only: null,
- except: { refs: ['master@gitlab-org/gitlab', '/^release/.*$/@gitlab-org/gitlab'] },
+ except: { refs: ['main@gitlab-org/gitlab', '/^release/.*$/@gitlab-org/gitlab'] },
},
{
name: 'job_2',
@@ -199,7 +234,7 @@ export const mockLintResponse = {
when: 'on_success',
allow_failure: true,
only: { refs: ['web', 'chat', 'pushes'] },
- except: { refs: ['master@gitlab-org/gitlab', '/^release/.*$/@gitlab-org/gitlab'] },
+ except: { refs: ['main@gitlab-org/gitlab', '/^release/.*$/@gitlab-org/gitlab'] },
},
],
};
@@ -242,7 +277,7 @@ export const mockJobs = [
when: 'on_success',
allowFailure: false,
only: { refs: ['branches@gitlab-org/gitlab'] },
- except: { refs: ['master@gitlab-org/gitlab', '/^release/.*$/@gitlab-org/gitlab'] },
+ except: { refs: ['main@gitlab-org/gitlab', '/^release/.*$/@gitlab-org/gitlab'] },
},
];
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index d8e3436479c..c88fe159c0d 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -2,17 +2,15 @@ import { GlAlert, GlButton, GlLoadingIcon, GlTabs } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import httpStatusCodes from '~/lib/utils/http_status';
-import CodeSnippetAlert from '~/pipeline_editor/components/code_snippet_alert/code_snippet_alert.vue';
-import { CODE_SNIPPET_SOURCES } from '~/pipeline_editor/components/code_snippet_alert/constants';
import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
import PipelineEditorEmptyState from '~/pipeline_editor/components/ui/pipeline_editor_empty_state.vue';
-import { COMMIT_SUCCESS, COMMIT_FAILURE, LOAD_FAILURE_UNKNOWN } from '~/pipeline_editor/constants';
+import PipelineEditorMessages from '~/pipeline_editor/components/ui/pipeline_editor_messages.vue';
+import { COMMIT_SUCCESS, COMMIT_FAILURE } from '~/pipeline_editor/constants';
import getCiConfigData from '~/pipeline_editor/graphql/queries/ci_config.graphql';
import PipelineEditorApp from '~/pipeline_editor/pipeline_editor_app.vue';
import PipelineEditorHome from '~/pipeline_editor/pipeline_editor_home.vue';
@@ -56,6 +54,7 @@ describe('Pipeline editor app component', () => {
CommitForm,
PipelineEditorHome,
PipelineEditorTabs,
+ PipelineEditorMessages,
EditorLite: MockEditorLite,
PipelineEditorEmptyState,
},
@@ -92,6 +91,11 @@ describe('Pipeline editor app component', () => {
const options = {
localVue,
+ data() {
+ return {
+ currentBranch: mockDefaultBranch,
+ };
+ },
mocks: {},
apolloProvider: mockApollo,
};
@@ -108,7 +112,6 @@ describe('Pipeline editor app component', () => {
const findEmptyState = () => wrapper.findComponent(PipelineEditorEmptyState);
const findEmptyStateButton = () =>
wrapper.findComponent(PipelineEditorEmptyState).findComponent(GlButton);
- const findCodeSnippetAlert = () => wrapper.findComponent(CodeSnippetAlert);
beforeEach(() => {
mockBlobContentData = jest.fn();
@@ -116,9 +119,6 @@ describe('Pipeline editor app component', () => {
});
afterEach(() => {
- mockBlobContentData.mockReset();
- mockCiConfigData.mockReset();
-
wrapper.destroy();
});
@@ -131,48 +131,6 @@ describe('Pipeline editor app component', () => {
});
});
- describe('code snippet alert', () => {
- const setCodeSnippetUrlParam = (value) => {
- global.jsdom.reconfigure({
- url: `${TEST_HOST}/?code_snippet_copied_from=${value}`,
- });
- };
-
- it('does not show by default', () => {
- createComponent();
-
- expect(findCodeSnippetAlert().exists()).toBe(false);
- });
-
- it.each(CODE_SNIPPET_SOURCES)('shows if URL param is %s, and cleans up URL', (source) => {
- jest.spyOn(window.history, 'replaceState');
- setCodeSnippetUrlParam(source);
- createComponent();
-
- expect(findCodeSnippetAlert().exists()).toBe(true);
- expect(window.history.replaceState).toHaveBeenCalledWith({}, document.title, `${TEST_HOST}/`);
- });
-
- it('does not show if URL param is invalid', () => {
- setCodeSnippetUrlParam('foo_bar');
- createComponent();
-
- expect(findCodeSnippetAlert().exists()).toBe(false);
- });
-
- it('disappears on dismiss', async () => {
- setCodeSnippetUrlParam('api_fuzzing');
- createComponent();
- const alert = findCodeSnippetAlert();
-
- expect(alert.exists()).toBe(true);
-
- await alert.vm.$emit('dismiss');
-
- expect(alert.exists()).toBe(false);
- });
- });
-
describe('when queries are called', () => {
beforeEach(() => {
mockBlobContentData.mockResolvedValue(mockCiYml);
@@ -233,11 +191,14 @@ describe('Pipeline editor app component', () => {
describe('because of a fetching error', () => {
it('shows an unknown error message', async () => {
+ const loadUnknownFailureText = 'The CI configuration was not loaded, please try again.';
+
mockBlobContentData.mockRejectedValueOnce(new Error('My error!'));
await createComponentWithApollo();
expect(findEmptyState().exists()).toBe(false);
- expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[LOAD_FAILURE_UNKNOWN]);
+
+ expect(findAlert().text()).toBe(loadUnknownFailureText);
expect(findEditorHome().exists()).toBe(true);
});
});
@@ -271,6 +232,7 @@ describe('Pipeline editor app component', () => {
describe('when the user commits', () => {
const updateFailureMessage = 'The GitLab CI configuration could not be updated.';
+ const updateSuccessMessage = 'Your changes have been successfully committed.';
describe('and the commit mutation succeeds', () => {
beforeEach(() => {
@@ -281,7 +243,7 @@ describe('Pipeline editor app component', () => {
});
it('shows a confirmation message', () => {
- expect(findAlert().text()).toBe(wrapper.vm.$options.successTexts[COMMIT_SUCCESS]);
+ expect(findAlert().text()).toBe(updateSuccessMessage);
});
it('scrolls to the top of the page to bring attention to the confirmation message', () => {
@@ -337,4 +299,37 @@ describe('Pipeline editor app component', () => {
});
});
});
+
+ describe('when refetching content', () => {
+ it('refetches blob content', async () => {
+ await createComponentWithApollo();
+ jest
+ .spyOn(wrapper.vm.$apollo.queries.initialCiFileContent, 'refetch')
+ .mockImplementation(jest.fn());
+
+ expect(wrapper.vm.$apollo.queries.initialCiFileContent.refetch).toHaveBeenCalledTimes(0);
+
+ await wrapper.vm.refetchContent();
+
+ expect(wrapper.vm.$apollo.queries.initialCiFileContent.refetch).toHaveBeenCalledTimes(1);
+ });
+
+ it('hides the start screen when the refetch returns a CI file', async () => {
+ mockBlobContentData.mockRejectedValue({
+ response: {
+ status: httpStatusCodes.NOT_FOUND,
+ },
+ });
+ await createComponentWithApollo();
+
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findEditorHome().exists()).toBe(false);
+
+ mockBlobContentData.mockResolvedValue(mockCiYml);
+ await wrapper.vm.$apollo.queries.initialCiFileContent.refetch();
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findEditorHome().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
index a1e3d24acfa..7aba336b8e8 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import CommitSection from '~/pipeline_editor/components/commit/commit_section.vue';
+import PipelineEditorDrawer from '~/pipeline_editor/components/drawer/pipeline_editor_drawer.vue';
import PipelineEditorFileNav from '~/pipeline_editor/components/file_nav/pipeline_editor_file_nav.vue';
import PipelineEditorHeader from '~/pipeline_editor/components/header/pipeline_editor_header.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
@@ -13,7 +14,7 @@ import { mockLintResponse, mockCiYml } from './mock_data';
describe('Pipeline editor home wrapper', () => {
let wrapper;
- const createComponent = ({ props = {} } = {}) => {
+ const createComponent = ({ props = {}, glFeatures = {} } = {}) => {
wrapper = shallowMount(PipelineEditorHome, {
propsData: {
ciConfigData: mockLintResponse,
@@ -22,13 +23,20 @@ describe('Pipeline editor home wrapper', () => {
isNewCiConfigFile: false,
...props,
},
+ provide: {
+ glFeatures: {
+ pipelineEditorDrawer: true,
+ ...glFeatures,
+ },
+ },
});
};
- const findPipelineEditorHeader = () => wrapper.findComponent(PipelineEditorHeader);
- const findPipelineEditorTabs = () => wrapper.findComponent(PipelineEditorTabs);
const findCommitSection = () => wrapper.findComponent(CommitSection);
const findFileNav = () => wrapper.findComponent(PipelineEditorFileNav);
+ const findPipelineEditorDrawer = () => wrapper.findComponent(PipelineEditorDrawer);
+ const findPipelineEditorHeader = () => wrapper.findComponent(PipelineEditorHeader);
+ const findPipelineEditorTabs = () => wrapper.findComponent(PipelineEditorTabs);
afterEach(() => {
wrapper.destroy();
@@ -55,6 +63,10 @@ describe('Pipeline editor home wrapper', () => {
it('shows the commit section by default', () => {
expect(findCommitSection().exists()).toBe(true);
});
+
+ it('shows the pipeline drawer', () => {
+ expect(findPipelineEditorDrawer().exists()).toBe(true);
+ });
});
describe('commit form toggle', () => {
@@ -82,4 +94,12 @@ describe('Pipeline editor home wrapper', () => {
expect(findCommitSection().exists()).toBe(true);
});
});
+
+ describe('Pipeline drawer', () => {
+ it('hides the drawer when the feature flag is off', () => {
+ createComponent({ glFeatures: { pipelineEditorDrawer: false } });
+
+ expect(findPipelineEditorDrawer().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
index 7ec5818010a..2a3f4f56f36 100644
--- a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
+++ b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
@@ -1,13 +1,22 @@
import { GlForm, GlSprintf, GlLoadingIcon } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import CreditCardValidationRequiredAlert from 'ee_component/billings/components/cc_validation_required_alert.vue';
+import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
import { redirectTo } from '~/lib/utils/url_utility';
import PipelineNewForm from '~/pipeline_new/components/pipeline_new_form.vue';
import RefsDropdown from '~/pipeline_new/components/refs_dropdown.vue';
-import { mockQueryParams, mockPostParams, mockProjectId, mockError, mockRefs } from '../mock_data';
+import {
+ mockQueryParams,
+ mockPostParams,
+ mockProjectId,
+ mockError,
+ mockRefs,
+ mockCreditCardValidationRequiredError,
+} from '../mock_data';
jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
@@ -17,7 +26,7 @@ const projectRefsEndpoint = '/root/project/refs';
const pipelinesPath = '/root/project/-/pipelines';
const configVariablesPath = '/root/project/-/pipelines/config_variables';
const newPipelinePostResponse = { id: 1 };
-const defaultBranch = 'master';
+const defaultBranch = 'main';
describe('Pipeline New Form', () => {
let wrapper;
@@ -187,13 +196,13 @@ describe('Pipeline New Form', () => {
await waitForPromises();
});
it('variables persist between ref changes', async () => {
- selectBranch('master');
+ selectBranch('main');
await waitForPromises();
- const masterInput = findKeyInputs().at(0);
- masterInput.element.value = 'build_var';
- masterInput.trigger('change');
+ const mainInput = findKeyInputs().at(0);
+ mainInput.element.value = 'build_var';
+ mainInput.trigger('change');
await wrapper.vm.$nextTick();
@@ -207,7 +216,7 @@ describe('Pipeline New Form', () => {
await wrapper.vm.$nextTick();
- selectBranch('master');
+ selectBranch('main');
await waitForPromises();
@@ -376,6 +385,32 @@ describe('Pipeline New Form', () => {
it('re-enables the submit button', () => {
expect(findSubmitButton().props('disabled')).toBe(false);
});
+
+ it('does not show the credit card validation required alert', () => {
+ expect(wrapper.findComponent(CreditCardValidationRequiredAlert).exists()).toBe(false);
+ });
+
+ describe('when the error response is credit card validation required', () => {
+ beforeEach(async () => {
+ mock
+ .onPost(pipelinesPath)
+ .reply(httpStatusCodes.BAD_REQUEST, mockCreditCardValidationRequiredError);
+
+ window.gon = {
+ subscriptions_url: TEST_HOST,
+ payment_form_url: TEST_HOST,
+ };
+
+ findForm().vm.$emit('submit', dummySubmitEvent);
+
+ await waitForPromises();
+ });
+
+ it('shows credit card validation required alert', () => {
+ expect(findErrorAlert().exists()).toBe(false);
+ expect(wrapper.findComponent(CreditCardValidationRequiredAlert).exists()).toBe(true);
+ });
+ });
});
describe('when the error response cannot be handled', () => {
diff --git a/spec/frontend/pipeline_new/components/refs_dropdown_spec.js b/spec/frontend/pipeline_new/components/refs_dropdown_spec.js
index 8dafbf230f9..826f2826d3c 100644
--- a/spec/frontend/pipeline_new/components/refs_dropdown_spec.js
+++ b/spec/frontend/pipeline_new/components/refs_dropdown_spec.js
@@ -10,8 +10,8 @@ import RefsDropdown from '~/pipeline_new/components/refs_dropdown.vue';
import { mockRefs, mockFilteredRefs } from '../mock_data';
const projectRefsEndpoint = '/root/project/refs';
-const refShortName = 'master';
-const refFullName = 'refs/heads/master';
+const refShortName = 'main';
+const refFullName = 'refs/heads/main';
jest.mock('~/flash');
diff --git a/spec/frontend/pipeline_new/mock_data.js b/spec/frontend/pipeline_new/mock_data.js
index 4fb58cb8e62..e99684ff417 100644
--- a/spec/frontend/pipeline_new/mock_data.js
+++ b/spec/frontend/pipeline_new/mock_data.js
@@ -1,5 +1,5 @@
export const mockRefs = {
- Branches: ['master', 'branch-1', 'branch-2'],
+ Branches: ['main', 'branch-1', 'branch-2'],
Tags: ['1.0.0', '1.1.0', '1.2.0'],
};
@@ -40,6 +40,28 @@ export const mockError = {
total_warnings: 7,
};
-export const mockBranchRefs = ['master', 'dev', 'release'];
+export const mockCreditCardValidationRequiredError = {
+ errors: ['Credit card required to be on file in order to create a pipeline'],
+ warnings: [],
+ total_warnings: 0,
+};
+
+export const mockBranchRefs = ['main', 'dev', 'release'];
export const mockTagRefs = ['1.0.0', '1.1.0', '1.2.0'];
+
+export const mockVariables = [
+ {
+ uniqueId: 'var-refs/heads/main2',
+ variable_type: 'env_var',
+ key: 'var_without_value',
+ value: '',
+ },
+ {
+ uniqueId: 'var-refs/heads/main3',
+ variable_type: 'env_var',
+ key: 'var_with_value',
+ value: 'test_value',
+ },
+ { uniqueId: 'var-refs/heads/main4', variable_type: 'env_var', key: '', value: '' },
+];
diff --git a/spec/frontend/pipeline_new/utils/filter_variables_spec.js b/spec/frontend/pipeline_new/utils/filter_variables_spec.js
new file mode 100644
index 00000000000..42bc6244456
--- /dev/null
+++ b/spec/frontend/pipeline_new/utils/filter_variables_spec.js
@@ -0,0 +1,21 @@
+import filterVariables from '~/pipeline_new/utils/filter_variables';
+import { mockVariables } from '../mock_data';
+
+describe('Filter variables utility function', () => {
+ it('filters variables that do not contain a key', () => {
+ const expectedVariables = [
+ {
+ variable_type: 'env_var',
+ key: 'var_without_value',
+ secret_value: '',
+ },
+ {
+ variable_type: 'env_var',
+ key: 'var_with_value',
+ secret_value: 'test_value',
+ },
+ ];
+
+ expect(filterVariables(mockVariables)).toEqual(expectedVariables);
+ });
+});
diff --git a/spec/frontend/pipeline_new/utils/format_refs_spec.js b/spec/frontend/pipeline_new/utils/format_refs_spec.js
index 405a747c3ba..71190f55c16 100644
--- a/spec/frontend/pipeline_new/utils/format_refs_spec.js
+++ b/spec/frontend/pipeline_new/utils/format_refs_spec.js
@@ -5,7 +5,7 @@ import { mockBranchRefs, mockTagRefs } from '../mock_data';
describe('Format refs util', () => {
it('formats branch ref correctly', () => {
expect(formatRefs(mockBranchRefs, BRANCH_REF_TYPE)).toEqual([
- { fullName: 'refs/heads/master', shortName: 'master' },
+ { fullName: 'refs/heads/main', shortName: 'main' },
{ fullName: 'refs/heads/dev', shortName: 'dev' },
{ fullName: 'refs/heads/release', shortName: 'release' },
]);
diff --git a/spec/frontend/pipelines/__snapshots__/parsing_utils_spec.js.snap b/spec/frontend/pipelines/__snapshots__/parsing_utils_spec.js.snap
new file mode 100644
index 00000000000..60625d301c0
--- /dev/null
+++ b/spec/frontend/pipelines/__snapshots__/parsing_utils_spec.js.snap
@@ -0,0 +1,373 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`DAG visualization parsing utilities generateColumnsFromLayersList matches the snapshot 1`] = `
+Array [
+ Object {
+ "groups": Array [
+ Object {
+ "__typename": "CiGroup",
+ "jobs": Array [
+ Object {
+ "__typename": "CiJob",
+ "name": "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
+ "needs": Array [],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1482/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1482",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ ],
+ "name": "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
+ "size": 1,
+ "stageName": "build",
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "group": "success",
+ "icon": "status_success",
+ "label": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiGroup",
+ "jobs": Array [
+ Object {
+ "__typename": "CiJob",
+ "name": "build_b",
+ "needs": Array [],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1515/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1515",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ ],
+ "name": "build_b",
+ "size": 1,
+ "stageName": "build",
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "group": "success",
+ "icon": "status_success",
+ "label": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiGroup",
+ "jobs": Array [
+ Object {
+ "__typename": "CiJob",
+ "name": "build_c",
+ "needs": Array [],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1484/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1484",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ ],
+ "name": "build_c",
+ "size": 1,
+ "stageName": "build",
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "group": "success",
+ "icon": "status_success",
+ "label": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiGroup",
+ "jobs": Array [
+ Object {
+ "__typename": "CiJob",
+ "name": "build_d 1/3",
+ "needs": Array [],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1485/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1485",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiJob",
+ "name": "build_d 2/3",
+ "needs": Array [],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1486/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1486",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiJob",
+ "name": "build_d 3/3",
+ "needs": Array [],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1487/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1487",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ ],
+ "name": "build_d",
+ "size": 3,
+ "stageName": "build",
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "group": "success",
+ "icon": "status_success",
+ "label": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiGroup",
+ "jobs": Array [
+ Object {
+ "__typename": "CiJob",
+ "name": "test_c",
+ "needs": Array [],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": null,
+ "detailsPath": "/root/kinder-pipe/-/pipelines/154",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": null,
+ },
+ },
+ ],
+ "name": "test_c",
+ "size": 1,
+ "stageName": "test",
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "group": "success",
+ "icon": "status_success",
+ "label": null,
+ },
+ },
+ ],
+ "id": "layer-0",
+ "name": "",
+ "status": Object {
+ "action": null,
+ },
+ },
+ Object {
+ "groups": Array [
+ Object {
+ "__typename": "CiGroup",
+ "jobs": Array [
+ Object {
+ "__typename": "CiJob",
+ "name": "test_a",
+ "needs": Array [
+ "build_c",
+ "build_b",
+ "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
+ ],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1514/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1514",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ ],
+ "name": "test_a",
+ "size": 1,
+ "stageName": "test",
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "group": "success",
+ "icon": "status_success",
+ "label": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiGroup",
+ "jobs": Array [
+ Object {
+ "__typename": "CiJob",
+ "name": "test_b 1/2",
+ "needs": Array [
+ "build_d 3/3",
+ "build_d 2/3",
+ "build_d 1/3",
+ "build_b",
+ "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
+ ],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1489/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1489",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiJob",
+ "name": "test_b 2/2",
+ "needs": Array [
+ "build_d 3/3",
+ "build_d 2/3",
+ "build_d 1/3",
+ "build_b",
+ "build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl",
+ ],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": Object {
+ "__typename": "StatusAction",
+ "buttonTitle": "Retry this job",
+ "icon": "retry",
+ "path": "/root/abcd-dag/-/jobs/1490/retry",
+ "title": "Retry",
+ },
+ "detailsPath": "/root/abcd-dag/-/jobs/1490",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": "passed",
+ },
+ },
+ ],
+ "name": "test_b",
+ "size": 2,
+ "stageName": "test",
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "group": "success",
+ "icon": "status_success",
+ "label": "passed",
+ },
+ },
+ Object {
+ "__typename": "CiGroup",
+ "jobs": Array [
+ Object {
+ "__typename": "CiJob",
+ "name": "test_d",
+ "needs": Array [
+ "build_b",
+ ],
+ "scheduledAt": null,
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "action": null,
+ "detailsPath": "/root/abcd-dag/-/pipelines/153",
+ "group": "success",
+ "hasDetails": true,
+ "icon": "status_success",
+ "tooltip": null,
+ },
+ },
+ ],
+ "name": "test_d",
+ "size": 1,
+ "stageName": "test",
+ "status": Object {
+ "__typename": "DetailedStatus",
+ "group": "success",
+ "icon": "status_success",
+ "label": null,
+ },
+ },
+ ],
+ "id": "layer-1",
+ "name": "",
+ "status": Object {
+ "action": null,
+ },
+ },
+]
+`;
diff --git a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
index e43aa2a02f5..b0dbba37b94 100644
--- a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
@@ -4,6 +4,7 @@ import MockAdapter from 'axios-mock-adapter';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
import PipelinesFilteredSearch from '~/pipelines/components/pipelines_list/pipelines_filtered_search.vue';
+import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { users, mockSearch, branches, tags } from '../mock_data';
describe('Pipelines filtered search', () => {
@@ -57,7 +58,7 @@ describe('Pipelines filtered search', () => {
title: 'Trigger author',
unique: true,
projectId: '21',
- operators: [expect.objectContaining({ value: '=' })],
+ operators: OPERATOR_IS_ONLY,
});
expect(findBranchToken()).toMatchObject({
@@ -66,7 +67,7 @@ describe('Pipelines filtered search', () => {
title: 'Branch name',
unique: true,
projectId: '21',
- operators: [expect.objectContaining({ value: '=' })],
+ operators: OPERATOR_IS_ONLY,
});
expect(findStatusToken()).toMatchObject({
@@ -74,7 +75,7 @@ describe('Pipelines filtered search', () => {
icon: 'status',
title: 'Status',
unique: true,
- operators: [expect.objectContaining({ value: '=' })],
+ operators: OPERATOR_IS_ONLY,
});
expect(findTagToken()).toMatchObject({
@@ -82,7 +83,7 @@ describe('Pipelines filtered search', () => {
icon: 'tag',
title: 'Tag name',
unique: true,
- operators: [expect.objectContaining({ value: '=' })],
+ operators: OPERATOR_IS_ONLY,
});
});
@@ -138,7 +139,7 @@ describe('Pipelines filtered search', () => {
describe('Url query params', () => {
const params = {
username: 'deja.green',
- ref: 'master',
+ ref: 'main',
};
beforeEach(() => {
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index e8fb036368a..30914ba99a5 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -22,6 +22,7 @@ describe('graph component', () => {
const defaultProps = {
pipeline: generateResponse(mockPipelineResponse, 'root/fungi-xoxo'),
+ showLinks: false,
viewType: STAGE_VIEW,
configPaths: {
metricsPath: '',
diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
index 8c469966be4..4914a9a1ced 100644
--- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
@@ -15,8 +15,10 @@ import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import PipelineGraphWrapper from '~/pipelines/components/graph/graph_component_wrapper.vue';
import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue';
import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
import * as parsingUtils from '~/pipelines/components/parsing_utils';
-import { mockPipelineResponse } from './mock_data';
+import getUserCallouts from '~/pipelines/graphql/queries/get_user_callouts.query.graphql';
+import { mapCallouts, mockCalloutsResponse, mockPipelineResponse } from './mock_data';
const defaultProvide = {
graphqlResourceEtag: 'frog/amphibirama/etag/',
@@ -30,13 +32,16 @@ describe('Pipeline graph wrapper', () => {
useLocalStorageSpy();
let wrapper;
- const getAlert = () => wrapper.find(GlAlert);
- const getLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const getAlert = () => wrapper.findComponent(GlAlert);
+ const getDependenciesToggle = () => wrapper.find('[data-testid="show-links-toggle"]');
+ const getLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const getLinksLayer = () => wrapper.findComponent(LinksLayer);
const getGraph = () => wrapper.find(PipelineGraph);
const getStageColumnTitle = () => wrapper.find('[data-testid="stage-column-title"]');
const getAllStageColumnGroupsInColumn = () =>
wrapper.find(StageColumnComponent).findAll('[data-testid="stage-column-group"]');
const getViewSelector = () => wrapper.find(GraphViewSelector);
+ const getViewSelectorTip = () => getViewSelector().findComponent(GlAlert);
const createComponent = ({
apolloProvider,
@@ -59,14 +64,22 @@ describe('Pipeline graph wrapper', () => {
};
const createComponentWithApollo = ({
+ calloutsList = [],
+ data = {},
getPipelineDetailsHandler = jest.fn().mockResolvedValue(mockPipelineResponse),
mountFn = shallowMount,
provide = {},
} = {}) => {
- const requestHandlers = [[getPipelineDetails, getPipelineDetailsHandler]];
+ const callouts = mapCallouts(calloutsList);
+ const getUserCalloutsHandler = jest.fn().mockResolvedValue(mockCalloutsResponse(callouts));
+
+ const requestHandlers = [
+ [getPipelineDetails, getPipelineDetailsHandler],
+ [getUserCallouts, getUserCalloutsHandler],
+ ];
const apolloProvider = createMockApollo(requestHandlers);
- createComponent({ apolloProvider, provide, mountFn });
+ createComponent({ apolloProvider, data, provide, mountFn });
};
afterEach(() => {
@@ -74,6 +87,15 @@ describe('Pipeline graph wrapper', () => {
wrapper = null;
});
+ beforeAll(() => {
+ jest.useFakeTimers();
+ });
+
+ afterAll(() => {
+ jest.runOnlyPendingTimers();
+ jest.useRealTimers();
+ });
+
describe('when data is loading', () => {
it('displays the loading icon', () => {
createComponentWithApollo();
@@ -282,6 +304,87 @@ describe('Pipeline graph wrapper', () => {
});
});
+ describe('when pipelineGraphLayersView feature flag is on and layers view is selected', () => {
+ beforeEach(async () => {
+ createComponentWithApollo({
+ provide: {
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ data: {
+ currentViewType: LAYER_VIEW,
+ },
+ mountFn: mount,
+ });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('sets showLinks to true', async () => {
+ /* This spec uses .props for performance reasons. */
+ expect(getLinksLayer().exists()).toBe(true);
+ expect(getLinksLayer().props('showLinks')).toBe(false);
+ expect(getViewSelector().props('type')).toBe(LAYER_VIEW);
+ await getDependenciesToggle().trigger('click');
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ expect(wrapper.findComponent(LinksLayer).props('showLinks')).toBe(true);
+ });
+ });
+
+ describe('when pipelineGraphLayersView feature flag is on, layers view is selected, and links are active', () => {
+ beforeEach(async () => {
+ createComponentWithApollo({
+ provide: {
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ data: {
+ currentViewType: LAYER_VIEW,
+ showLinks: true,
+ },
+ mountFn: mount,
+ });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('shows the hover tip in the view selector', async () => {
+ await getViewSelector().setData({ showLinksActive: true });
+ expect(getViewSelectorTip().exists()).toBe(true);
+ });
+ });
+
+ describe('when hover tip would otherwise show, but it has been previously dismissed', () => {
+ beforeEach(async () => {
+ createComponentWithApollo({
+ provide: {
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ data: {
+ currentViewType: LAYER_VIEW,
+ showLinks: true,
+ },
+ mountFn: mount,
+ calloutsList: ['pipeline_needs_hover_tip'.toUpperCase()],
+ });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('does not show the hover tip', async () => {
+ await getViewSelector().setData({ showLinksActive: true });
+ expect(getViewSelectorTip().exists()).toBe(false);
+ });
+ });
+
describe('when feature flag is on and local storage is set', () => {
beforeEach(async () => {
localStorage.setItem(VIEW_TYPE_KEY, LAYER_VIEW);
@@ -299,10 +402,45 @@ describe('Pipeline graph wrapper', () => {
await wrapper.vm.$nextTick();
});
+ afterEach(() => {
+ localStorage.clear();
+ });
+
it('reads the view type from localStorage when available', () => {
- expect(wrapper.find('[data-testid="pipeline-view-selector"] code').text()).toContain(
- 'needs:',
- );
+ const viewSelectorNeedsSegment = wrapper
+ .findAll('[data-testid="pipeline-view-selector"] > label')
+ .at(1);
+ expect(viewSelectorNeedsSegment.classes()).toContain('active');
+ });
+ });
+
+ describe('when feature flag is on and local storage is set, but the graph does not use needs', () => {
+ beforeEach(async () => {
+ const nonNeedsResponse = { ...mockPipelineResponse };
+ nonNeedsResponse.data.project.pipeline.usesNeeds = false;
+
+ localStorage.setItem(VIEW_TYPE_KEY, LAYER_VIEW);
+
+ createComponentWithApollo({
+ provide: {
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ mountFn: mount,
+ getPipelineDetailsHandler: jest.fn().mockResolvedValue(nonNeedsResponse),
+ });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ localStorage.clear();
+ });
+
+ it('still passes the stage view type to the graph', () => {
+ expect(getGraph().props('viewType')).toBe(STAGE_VIEW);
});
});
diff --git a/spec/frontend/pipelines/graph/graph_view_selector_spec.js b/spec/frontend/pipelines/graph/graph_view_selector_spec.js
new file mode 100644
index 00000000000..5b2a29de443
--- /dev/null
+++ b/spec/frontend/pipelines/graph/graph_view_selector_spec.js
@@ -0,0 +1,189 @@
+import { GlAlert, GlLoadingIcon, GlSegmentedControl } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
+import { LAYER_VIEW, STAGE_VIEW } from '~/pipelines/components/graph/constants';
+import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue';
+
+describe('the graph view selector component', () => {
+ let wrapper;
+
+ const findDependenciesToggle = () => wrapper.find('[data-testid="show-links-toggle"]');
+ const findViewTypeSelector = () => wrapper.findComponent(GlSegmentedControl);
+ const findStageViewLabel = () => findViewTypeSelector().findAll('label').at(0);
+ const findLayersViewLabel = () => findViewTypeSelector().findAll('label').at(1);
+ const findSwitcherLoader = () => wrapper.find('[data-testid="switcher-loading-state"]');
+ const findToggleLoader = () => findDependenciesToggle().find(GlLoadingIcon);
+ const findHoverTip = () => wrapper.findComponent(GlAlert);
+
+ const defaultProps = {
+ showLinks: false,
+ tipPreviouslyDismissed: false,
+ type: STAGE_VIEW,
+ };
+
+ const defaultData = {
+ hoverTipDismissed: false,
+ isToggleLoading: false,
+ isSwitcherLoading: false,
+ showLinksActive: false,
+ };
+
+ const createComponent = ({ data = {}, mountFn = shallowMount, props = {} } = {}) => {
+ wrapper = mountFn(GraphViewSelector, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ data() {
+ return {
+ ...defaultData,
+ ...data,
+ };
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when showing stage view', () => {
+ beforeEach(() => {
+ createComponent({ mountFn: mount });
+ });
+
+ it('shows the Stage view label as active in the selector', () => {
+ expect(findStageViewLabel().classes()).toContain('active');
+ });
+
+ it('does not show the Job dependencies (links) toggle', () => {
+ expect(findDependenciesToggle().exists()).toBe(false);
+ });
+ });
+
+ describe('when showing Job dependencies view', () => {
+ beforeEach(() => {
+ createComponent({
+ mountFn: mount,
+ props: {
+ type: LAYER_VIEW,
+ },
+ });
+ });
+
+ it('shows the Job dependencies view label as active in the selector', () => {
+ expect(findLayersViewLabel().classes()).toContain('active');
+ });
+
+ it('shows the Job dependencies (links) toggle', () => {
+ expect(findDependenciesToggle().exists()).toBe(true);
+ });
+ });
+
+ describe('events', () => {
+ beforeEach(() => {
+ jest.useFakeTimers();
+ createComponent({
+ mountFn: mount,
+ props: {
+ type: LAYER_VIEW,
+ },
+ });
+ });
+
+ it('shows loading state and emits updateViewType when the view type is toggled', async () => {
+ expect(wrapper.emitted().updateViewType).toBeUndefined();
+ expect(findSwitcherLoader().exists()).toBe(false);
+
+ await findStageViewLabel().trigger('click');
+ /*
+ Loading happens before the event is emitted or timers are run.
+ Then we run the timer because the event is emitted in setInterval
+ which is what gives the loader a chance to show up.
+ */
+ expect(findSwitcherLoader().exists()).toBe(true);
+ jest.runOnlyPendingTimers();
+
+ expect(wrapper.emitted().updateViewType).toHaveLength(1);
+ expect(wrapper.emitted().updateViewType).toEqual([[STAGE_VIEW]]);
+ });
+
+ it('shows loading state and emits updateShowLinksState when the show links toggle is clicked', async () => {
+ expect(wrapper.emitted().updateShowLinksState).toBeUndefined();
+ expect(findToggleLoader().exists()).toBe(false);
+
+ await findDependenciesToggle().trigger('click');
+ /*
+ Loading happens before the event is emitted or timers are run.
+ Then we run the timer because the event is emitted in setInterval
+ which is what gives the loader a chance to show up.
+ */
+ expect(findToggleLoader().exists()).toBe(true);
+ jest.runOnlyPendingTimers();
+
+ expect(wrapper.emitted().updateShowLinksState).toHaveLength(1);
+ expect(wrapper.emitted().updateShowLinksState).toEqual([[true]]);
+ });
+ });
+
+ describe('hover tip callout', () => {
+ describe('when links are live and it has not been previously dismissed', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ showLinks: true,
+ },
+ data: {
+ showLinksActive: true,
+ },
+ mountFn: mount,
+ });
+ });
+
+ it('is displayed', () => {
+ expect(findHoverTip().exists()).toBe(true);
+ expect(findHoverTip().text()).toBe(wrapper.vm.$options.i18n.hoverTipText);
+ });
+
+ it('emits dismissHoverTip event when the tip is dismissed', async () => {
+ expect(wrapper.emitted().dismissHoverTip).toBeUndefined();
+ await findHoverTip().find('button').trigger('click');
+ expect(wrapper.emitted().dismissHoverTip).toHaveLength(1);
+ });
+ });
+
+ describe('when links are live and it has been previously dismissed', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ showLinks: true,
+ tipPreviouslyDismissed: true,
+ },
+ data: {
+ showLinksActive: true,
+ },
+ });
+ });
+
+ it('is not displayed', () => {
+ expect(findHoverTip().exists()).toBe(false);
+ });
+ });
+
+ describe('when links are not live', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ showLinks: true,
+ },
+ data: {
+ showLinksActive: false,
+ },
+ });
+ });
+
+ it('is not displayed', () => {
+ expect(findHoverTip().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
index 8aecfc1b649..24cc6e76098 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
@@ -26,6 +26,7 @@ describe('Linked Pipelines Column', () => {
const defaultProps = {
columnTitle: 'Downstream',
linkedPipelines: processedPipeline.downstream,
+ showLinks: false,
type: DOWNSTREAM,
viewType: STAGE_VIEW,
configPaths: {
@@ -120,6 +121,26 @@ describe('Linked Pipelines Column', () => {
});
});
+ describe('when graph does not use needs', () => {
+ beforeEach(() => {
+ const nonNeedsResponse = { ...wrappedPipelineReturn };
+ nonNeedsResponse.data.project.pipeline.usesNeeds = false;
+
+ createComponentWithApollo({
+ props: {
+ viewType: LAYER_VIEW,
+ },
+ getPipelineDetailsHandler: jest.fn().mockResolvedValue(nonNeedsResponse),
+ mountFn: mount,
+ });
+ });
+
+ it('shows the stage view, even when the main graph view type is layers', async () => {
+ await clickExpandButtonAndAwaitTimers();
+ expect(findPipelineGraph().props('viewType')).toBe(STAGE_VIEW);
+ });
+ });
+
describe('downstream', () => {
describe('when successful', () => {
beforeEach(() => {
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js b/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
index 5756a666ff3..eb05669463b 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
+++ b/spec/frontend/pipelines/graph/linked_pipelines_mock_data.js
@@ -3727,8 +3727,8 @@ export default {
scheduled_actions: [],
},
ref: {
- name: 'master',
- path: '/h5bp/html5-boilerplate/commits/master',
+ name: 'main',
+ path: '/h5bp/html5-boilerplate/commits/main',
tag: false,
branch: true,
merge_request: false,
diff --git a/spec/frontend/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js
index cf420f68f37..28fe3b67e7b 100644
--- a/spec/frontend/pipelines/graph/mock_data.js
+++ b/spec/frontend/pipelines/graph/mock_data.js
@@ -8,6 +8,7 @@ export const mockPipelineResponse = {
__typename: 'Pipeline',
id: 163,
iid: '22',
+ complete: true,
usesNeeds: true,
downstream: null,
upstream: null,
@@ -570,6 +571,7 @@ export const wrappedPipelineReturn = {
__typename: 'Pipeline',
id: 'gid://gitlab/Ci::Pipeline/175',
iid: '38',
+ complete: true,
usesNeeds: true,
downstream: {
__typename: 'PipelineConnection',
@@ -669,3 +671,22 @@ export const pipelineWithUpstreamDownstream = (base) => {
return generateResponse(pip, 'root/abcd-dag');
};
+
+export const mapCallouts = (callouts) =>
+ callouts.map((callout) => {
+ return { featureName: callout, __typename: 'UserCallout' };
+ });
+
+export const mockCalloutsResponse = (mappedCallouts) => ({
+ data: {
+ currentUser: {
+ id: 45,
+ __typename: 'User',
+ callouts: {
+ id: 5,
+ __typename: 'UserCalloutConnection',
+ nodes: mappedCallouts,
+ },
+ },
+ },
+});
diff --git a/spec/frontend/pipelines/graph/mock_data_legacy.js b/spec/frontend/pipelines/graph/mock_data_legacy.js
index a4a5d78f906..e1c8b027121 100644
--- a/spec/frontend/pipelines/graph/mock_data_legacy.js
+++ b/spec/frontend/pipelines/graph/mock_data_legacy.js
@@ -221,22 +221,22 @@ export default {
cancelable: false,
},
ref: {
- name: 'master',
- path: '/root/ci-mock/tree/master',
+ name: 'main',
+ path: '/root/ci-mock/tree/main',
tag: false,
branch: true,
},
commit: {
id: '798e5f902592192afaba73f4668ae30e56eae492',
short_id: '798e5f90',
- title: "Merge branch 'new-branch' into 'master'\r",
+ title: "Merge branch 'new-branch' into 'main'\r",
created_at: '2017-04-13T10:25:17.000+01:00',
parent_ids: [
'54d483b1ed156fbbf618886ddf7ab023e24f8738',
'c8e2d38a6c538822e81c57022a6e3a0cfedebbcc',
],
message:
- "Merge branch 'new-branch' into 'master'\r\n\r\nAdd new file\r\n\r\nSee merge request !1",
+ "Merge branch 'new-branch' into 'main'\r\n\r\nAdd new file\r\n\r\nSee merge request !1",
author_name: 'Root',
author_email: 'admin@example.com',
authored_date: '2017-04-13T10:25:17.000+01:00',
diff --git a/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap b/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
index cf2b66dea5f..c67b91ae190 100644
--- a/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
+++ b/spec/frontend/pipelines/graph_shared/__snapshots__/links_inner_spec.js.snap
@@ -1,7 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Links Inner component with a large number of needs matches snapshot and has expected path 1`] = `
-"<div class=\\"gl-display-flex gl-relative\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
+"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
<path d=\\"M202,118L42,118C72,118,72,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
<path d=\\"M202,118L52,118C82,118,82,148,112,148\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
<path d=\\"M222,138L62,138C92,138,92,158,122,158\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
@@ -11,13 +11,13 @@ exports[`Links Inner component with a large number of needs matches snapshot and
`;
exports[`Links Inner component with a parallel need matches snapshot and has expected path 1`] = `
-"<div class=\\"gl-display-flex gl-relative\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
+"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
<path d=\\"M192,108L22,108C52,108,52,118,82,118\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
</svg> </div>"
`;
exports[`Links Inner component with one need matches snapshot and has expected path 1`] = `
-"<div class=\\"gl-display-flex gl-relative\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
+"<div class=\\"gl-display-flex gl-relative\\" totalgroups=\\"10\\"><svg id=\\"link-svg\\" viewBox=\\"0,0,1019,445\\" width=\\"1019px\\" height=\\"445px\\" class=\\"gl-absolute gl-pointer-events-none\\">
<path d=\\"M202,118L42,118C72,118,72,138,102,138\\" stroke-width=\\"2\\" class=\\"gl-fill-transparent gl-transition-duration-slow gl-transition-timing-function-ease gl-stroke-gray-200\\"></path>
</svg> </div>"
`;
diff --git a/spec/frontend/pipelines/graph_shared/links_inner_spec.js b/spec/frontend/pipelines/graph_shared/links_inner_spec.js
index e81f046c1eb..bb1f0965469 100644
--- a/spec/frontend/pipelines/graph_shared/links_inner_spec.js
+++ b/spec/frontend/pipelines/graph_shared/links_inner_spec.js
@@ -1,16 +1,7 @@
import { shallowMount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
import { setHTMLFixture } from 'helpers/fixtures';
-import axios from '~/lib/utils/axios_utils';
-import {
- PIPELINES_DETAIL_LINK_DURATION,
- PIPELINES_DETAIL_LINKS_TOTAL,
- PIPELINES_DETAIL_LINKS_JOB_RATIO,
-} from '~/performance/constants';
-import * as perfUtils from '~/performance/utils';
-import * as Api from '~/pipelines/components/graph_shared/api';
import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
-import * as sentryUtils from '~/pipelines/utils';
+import { parseData } from '~/pipelines/components/parsing_utils';
import { createJobsHash } from '~/pipelines/utils';
import {
jobRect,
@@ -34,8 +25,13 @@ describe('Links Inner component', () => {
let wrapper;
const createComponent = (props) => {
+ const currentPipelineData = props?.pipelineData || defaultProps.pipelineData;
wrapper = shallowMount(LinksInner, {
- propsData: { ...defaultProps, ...props },
+ propsData: {
+ ...defaultProps,
+ ...props,
+ parsedData: parseData(currentPipelineData.flatMap(({ groups }) => groups)),
+ },
});
};
@@ -206,141 +202,4 @@ describe('Links Inner component', () => {
expect(firstLink.classes(hoverColorClass)).toBe(true);
});
});
-
- describe('performance metrics', () => {
- let markAndMeasure;
- let reportToSentry;
- let reportPerformance;
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb());
- markAndMeasure = jest.spyOn(perfUtils, 'performanceMarkAndMeasure');
- reportToSentry = jest.spyOn(sentryUtils, 'reportToSentry');
- reportPerformance = jest.spyOn(Api, 'reportPerformance');
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- describe('with no metrics config object', () => {
- beforeEach(() => {
- setFixtures(pipelineData);
- createComponent({
- pipelineData: pipelineData.stages,
- });
- });
-
- it('is not called', () => {
- expect(markAndMeasure).not.toHaveBeenCalled();
- expect(reportToSentry).not.toHaveBeenCalled();
- expect(reportPerformance).not.toHaveBeenCalled();
- });
- });
-
- describe('with metrics config set to false', () => {
- beforeEach(() => {
- setFixtures(pipelineData);
- createComponent({
- pipelineData: pipelineData.stages,
- metricsConfig: {
- collectMetrics: false,
- metricsPath: '/path/to/metrics',
- },
- });
- });
-
- it('is not called', () => {
- expect(markAndMeasure).not.toHaveBeenCalled();
- expect(reportToSentry).not.toHaveBeenCalled();
- expect(reportPerformance).not.toHaveBeenCalled();
- });
- });
-
- describe('with no metrics path', () => {
- beforeEach(() => {
- setFixtures(pipelineData);
- createComponent({
- pipelineData: pipelineData.stages,
- metricsConfig: {
- collectMetrics: true,
- metricsPath: '',
- },
- });
- });
-
- it('is not called', () => {
- expect(markAndMeasure).not.toHaveBeenCalled();
- expect(reportToSentry).not.toHaveBeenCalled();
- expect(reportPerformance).not.toHaveBeenCalled();
- });
- });
-
- describe('with metrics path and collect set to true', () => {
- const metricsPath = '/root/project/-/ci/prometheus_metrics/histograms.json';
- const duration = 0.0478;
- const numLinks = 1;
- const metricsData = {
- histograms: [
- { name: PIPELINES_DETAIL_LINK_DURATION, value: duration / 1000 },
- { name: PIPELINES_DETAIL_LINKS_TOTAL, value: numLinks },
- {
- name: PIPELINES_DETAIL_LINKS_JOB_RATIO,
- value: numLinks / defaultProps.totalGroups,
- },
- ],
- };
-
- describe('when no duration is obtained', () => {
- beforeEach(() => {
- jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
- return [];
- });
-
- setFixtures(pipelineData);
-
- createComponent({
- pipelineData: pipelineData.stages,
- metricsConfig: {
- collectMetrics: true,
- path: metricsPath,
- },
- });
- });
-
- it('attempts to collect metrics', () => {
- expect(markAndMeasure).toHaveBeenCalled();
- expect(reportPerformance).not.toHaveBeenCalled();
- expect(reportToSentry).not.toHaveBeenCalled();
- });
- });
-
- describe('with duration and no error', () => {
- beforeEach(() => {
- jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
- return [{ duration }];
- });
-
- setFixtures(pipelineData);
-
- createComponent({
- pipelineData: pipelineData.stages,
- metricsConfig: {
- collectMetrics: true,
- path: metricsPath,
- },
- });
- });
-
- it('it calls reportPerformance with expected arguments', () => {
- expect(markAndMeasure).toHaveBeenCalled();
- expect(reportPerformance).toHaveBeenCalled();
- expect(reportPerformance).toHaveBeenCalledWith(metricsPath, metricsData);
- expect(reportToSentry).not.toHaveBeenCalled();
- });
- });
- });
- });
});
diff --git a/spec/frontend/pipelines/graph_shared/links_layer_spec.js b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
index 5e5365eef30..932a19f2f00 100644
--- a/spec/frontend/pipelines/graph_shared/links_layer_spec.js
+++ b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
@@ -1,32 +1,33 @@
-import { GlAlert } from '@gitlab/ui';
-import { fireEvent, within } from '@testing-library/dom';
-import { mount, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
+import {
+ PIPELINES_DETAIL_LINK_DURATION,
+ PIPELINES_DETAIL_LINKS_TOTAL,
+ PIPELINES_DETAIL_LINKS_JOB_RATIO,
+} from '~/performance/constants';
+import * as perfUtils from '~/performance/utils';
+import * as Api from '~/pipelines/components/graph_shared/api';
import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
+import * as sentryUtils from '~/pipelines/utils';
import { generateResponse, mockPipelineResponse } from '../graph/mock_data';
describe('links layer component', () => {
let wrapper;
- const withinComponent = () => within(wrapper.element);
- const findAlert = () => wrapper.find(GlAlert);
- const findShowAnyways = () =>
- withinComponent().getByText(wrapper.vm.$options.i18n.showLinksAnyways);
const findLinksInner = () => wrapper.find(LinksInner);
const pipeline = generateResponse(mockPipelineResponse, 'root/fungi-xoxo');
const containerId = `pipeline-links-container-${pipeline.id}`;
const slotContent = "<div>Ceci n'est pas un graphique</div>";
- const tooManyStages = Array(101)
- .fill(0)
- .flatMap(() => pipeline.stages);
-
const defaultProps = {
containerId,
containerMeasurements: { width: 400, height: 400 },
pipelineId: pipeline.id,
pipelineData: pipeline.stages,
+ showLinks: false,
};
const createComponent = ({ mountFn = shallowMount, props = {} } = {}) => {
@@ -46,10 +47,9 @@ describe('links layer component', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- describe('with data under max stages', () => {
+ describe('with show links off', () => {
beforeEach(() => {
createComponent();
});
@@ -58,62 +58,174 @@ describe('links layer component', () => {
expect(wrapper.html()).toContain(slotContent);
});
+ it('does not render inner links component', () => {
+ expect(findLinksInner().exists()).toBe(false);
+ });
+ });
+
+ describe('with show links on', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ showLinks: true,
+ },
+ });
+ });
+
+ it('renders the default slot', () => {
+ expect(wrapper.html()).toContain(slotContent);
+ });
+
it('renders the inner links component', () => {
expect(findLinksInner().exists()).toBe(true);
});
});
- describe('with more than the max number of stages', () => {
- describe('rendering', () => {
- beforeEach(() => {
- createComponent({ props: { pipelineData: tooManyStages } });
- });
+ describe('with width or height measurement at 0', () => {
+ beforeEach(() => {
+ createComponent({ props: { containerMeasurements: { width: 0, height: 100 } } });
+ });
- it('renders the default slot', () => {
- expect(wrapper.html()).toContain(slotContent);
- });
+ it('renders the default slot', () => {
+ expect(wrapper.html()).toContain(slotContent);
+ });
- it('renders the alert component', () => {
- expect(findAlert().exists()).toBe(true);
- });
+ it('does not render the inner links component', () => {
+ expect(findLinksInner().exists()).toBe(false);
+ });
+ });
- it('does not render the inner links component', () => {
- expect(findLinksInner().exists()).toBe(false);
- });
+ describe('performance metrics', () => {
+ const metricsPath = '/root/project/-/ci/prometheus_metrics/histograms.json';
+ let markAndMeasure;
+ let reportToSentry;
+ let reportPerformance;
+ let mock;
+
+ beforeEach(() => {
+ jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => cb());
+ markAndMeasure = jest.spyOn(perfUtils, 'performanceMarkAndMeasure');
+ reportToSentry = jest.spyOn(sentryUtils, 'reportToSentry');
+ reportPerformance = jest.spyOn(Api, 'reportPerformance');
});
- describe('with width or height measurement at 0', () => {
+ describe('with no metrics config object', () => {
beforeEach(() => {
- createComponent({ props: { containerMeasurements: { width: 0, height: 100 } } });
+ createComponent();
});
- it('renders the default slot', () => {
- expect(wrapper.html()).toContain(slotContent);
+ it('is not called', () => {
+ expect(markAndMeasure).not.toHaveBeenCalled();
+ expect(reportToSentry).not.toHaveBeenCalled();
+ expect(reportPerformance).not.toHaveBeenCalled();
});
+ });
- it('does not render the alert component', () => {
- expect(findAlert().exists()).toBe(false);
+ describe('with metrics config set to false', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ metricsConfig: {
+ collectMetrics: false,
+ metricsPath: '/path/to/metrics',
+ },
+ },
+ });
});
- it('does not render the inner links component', () => {
- expect(findLinksInner().exists()).toBe(false);
+ it('is not called', () => {
+ expect(markAndMeasure).not.toHaveBeenCalled();
+ expect(reportToSentry).not.toHaveBeenCalled();
+ expect(reportPerformance).not.toHaveBeenCalled();
});
});
- describe('interactions', () => {
+ describe('with no metrics path', () => {
beforeEach(() => {
- createComponent({ mountFn: mount, props: { pipelineData: tooManyStages } });
+ createComponent({
+ props: {
+ metricsConfig: {
+ collectMetrics: true,
+ metricsPath: '',
+ },
+ },
+ });
});
- it('renders the disable button', () => {
- expect(findShowAnyways()).not.toBe(null);
+ it('is not called', () => {
+ expect(markAndMeasure).not.toHaveBeenCalled();
+ expect(reportToSentry).not.toHaveBeenCalled();
+ expect(reportPerformance).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('with metrics path and collect set to true', () => {
+ const duration = 875;
+ const numLinks = 7;
+ const totalGroups = 8;
+ const metricsData = {
+ histograms: [
+ { name: PIPELINES_DETAIL_LINK_DURATION, value: duration / 1000 },
+ { name: PIPELINES_DETAIL_LINKS_TOTAL, value: numLinks },
+ {
+ name: PIPELINES_DETAIL_LINKS_JOB_RATIO,
+ value: numLinks / totalGroups,
+ },
+ ],
+ };
+
+ describe('when no duration is obtained', () => {
+ beforeEach(() => {
+ jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
+ return [];
+ });
+
+ createComponent({
+ props: {
+ metricsConfig: {
+ collectMetrics: true,
+ path: metricsPath,
+ },
+ },
+ });
+ });
+
+ it('attempts to collect metrics', () => {
+ expect(markAndMeasure).toHaveBeenCalled();
+ expect(reportPerformance).not.toHaveBeenCalled();
+ expect(reportToSentry).not.toHaveBeenCalled();
+ });
});
- it('shows links when override is clicked', async () => {
- expect(findLinksInner().exists()).toBe(false);
- fireEvent(findShowAnyways(), new MouseEvent('click', { bubbles: true }));
- await wrapper.vm.$nextTick();
- expect(findLinksInner().exists()).toBe(true);
+ describe('with duration and no error', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onPost(metricsPath).reply(200, {});
+
+ jest.spyOn(window.performance, 'getEntriesByName').mockImplementation(() => {
+ return [{ duration }];
+ });
+
+ createComponent({
+ props: {
+ metricsConfig: {
+ collectMetrics: true,
+ path: metricsPath,
+ },
+ },
+ });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('calls reportPerformance with expected arguments', () => {
+ expect(markAndMeasure).toHaveBeenCalled();
+ expect(reportPerformance).toHaveBeenCalled();
+ expect(reportPerformance).toHaveBeenCalledWith(metricsPath, metricsData);
+ expect(reportToSentry).not.toHaveBeenCalled();
+ });
});
});
});
diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/pipelines/mock_data.js
index 337838c41b3..16f15b20824 100644
--- a/spec/frontend/pipelines/mock_data.js
+++ b/spec/frontend/pipelines/mock_data.js
@@ -387,7 +387,7 @@ export const tags = [
protected: false,
},
{
- name: 'master-tag',
+ name: 'main-tag',
message: '',
target: '66673b07efef254dab7d537f0433a40e61cf84fe',
commit: {
@@ -413,10 +413,10 @@ export const tags = [
export const mockSearch = [
{ type: 'username', value: { data: 'root', operator: '=' } },
- { type: 'ref', value: { data: 'master', operator: '=' } },
+ { type: 'ref', value: { data: 'main', operator: '=' } },
{ type: 'status', value: { data: 'pending', operator: '=' } },
];
export const mockBranchesAfterMap = ['branch-1', 'branch-10', 'branch-11'];
-export const mockTagsAfterMap = ['tag-3', 'tag-2', 'tag-1', 'master-tag'];
+export const mockTagsAfterMap = ['tag-3', 'tag-2', 'tag-1', 'main-tag'];
diff --git a/spec/frontend/pipelines/components/dag/parsing_utils_spec.js b/spec/frontend/pipelines/parsing_utils_spec.js
index 84ff83883b7..96748ae9e5c 100644
--- a/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
+++ b/spec/frontend/pipelines/parsing_utils_spec.js
@@ -3,12 +3,15 @@ import {
createNodeDict,
makeLinksFromNodes,
filterByAncestors,
+ generateColumnsFromLayersListBare,
+ listByLayers,
parseData,
removeOrphanNodes,
getMaxNodes,
} from '~/pipelines/components/parsing_utils';
-import { mockParsedGraphQLNodes } from './mock_data';
+import { mockParsedGraphQLNodes } from './components/dag/mock_data';
+import { generateResponse, mockPipelineResponse } from './graph/mock_data';
describe('DAG visualization parsing utilities', () => {
const nodeDict = createNodeDict(mockParsedGraphQLNodes);
@@ -108,4 +111,45 @@ describe('DAG visualization parsing utilities', () => {
expect(getMaxNodes(layerNodes)).toBe(3);
});
});
+
+ describe('generateColumnsFromLayersList', () => {
+ const pipeline = generateResponse(mockPipelineResponse, 'root/fungi-xoxo');
+ const layers = listByLayers(pipeline);
+ const columns = generateColumnsFromLayersListBare(pipeline, layers);
+
+ it('returns stage-like objects with default name, id, and status', () => {
+ columns.forEach((col, idx) => {
+ expect(col).toMatchObject({
+ name: '',
+ status: { action: null },
+ id: `layer-${idx}`,
+ });
+ });
+ });
+
+ it('creates groups that match the list created in listByLayers', () => {
+ columns.forEach((col, idx) => {
+ const groupNames = col.groups.map(({ name }) => name);
+ expect(groupNames).toEqual(layers[idx]);
+ });
+ });
+
+ it('looks up the correct group object', () => {
+ columns.forEach((col) => {
+ col.groups.forEach((group) => {
+ const groupStage = pipeline.stages.find((el) => el.name === group.stageName);
+ const groupObject = groupStage.groups.find((el) => el.name === group.name);
+ expect(group).toBe(groupObject);
+ });
+ });
+ });
+
+ /*
+ Snapshot kept as a fallback: if several of these functions change at once,
+ the tests above could still pass while the generated columns drift from the
+ expected shape.
+ */
+ it('matches the snapshot', () => {
+ expect(columns).toMatchSnapshot();
+ });
+ });
});
diff --git a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
index 258f2bda829..7bac7036f46 100644
--- a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
+++ b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
@@ -1,18 +1,21 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { setHTMLFixture } from 'helpers/fixtures';
import { CI_CONFIG_STATUS_VALID } from '~/pipeline_editor/constants';
import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
import JobPill from '~/pipelines/components/pipeline_graph/job_pill.vue';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import StagePill from '~/pipelines/components/pipeline_graph/stage_pill.vue';
-import { DRAW_FAILURE } from '~/pipelines/constants';
-import { invalidNeedsData, pipelineData, singleStageData } from './mock_data';
+import { pipelineData, singleStageData } from './mock_data';
describe('pipeline graph component', () => {
const defaultProps = { pipelineData };
let wrapper;
+ const containerId = 'pipeline-graph-container-0';
+ setHTMLFixture(`<div id="${containerId}"></div>`);
+
const createComponent = (props = defaultProps) => {
return shallowMount(PipelineGraph, {
propsData: {
@@ -55,18 +58,7 @@ describe('pipeline graph component', () => {
it('renders the graph with no status error', () => {
expect(findAlert().exists()).toBe(false);
expect(findPipelineGraph().exists()).toBe(true);
- });
- });
-
- describe('with error while rendering the links with needs', () => {
- beforeEach(() => {
- wrapper = createComponent({ pipelineData: invalidNeedsData });
- });
-
- it('renders the error that link could not be drawn', () => {
expect(findLinksLayer().exists()).toBe(true);
- expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[DRAW_FAILURE]);
});
});
diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
new file mode 100644
index 00000000000..88b3ef2032a
--- /dev/null
+++ b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
@@ -0,0 +1,112 @@
+import { GlAlert, GlDropdown, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import PipelineMultiActions, {
+ i18n,
+} from '~/pipelines/components/pipelines_list/pipeline_multi_actions.vue';
+
+describe('Pipeline Multi Actions Dropdown', () => {
+ let wrapper;
+ let mockAxios;
+
+ const artifacts = [
+ {
+ name: 'job my-artifact',
+ path: '/download/path',
+ },
+ {
+ name: 'job-2 my-artifact-2',
+ path: '/download/path-two',
+ },
+ ];
+ const artifactItemTestId = 'artifact-item';
+ const artifactsEndpointPlaceholder = ':pipeline_artifacts_id';
+ const artifactsEndpoint = `endpoint/${artifactsEndpointPlaceholder}/artifacts.json`;
+ const pipelineId = 108;
+
+ const createComponent = ({ mockData = {} } = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(PipelineMultiActions, {
+ provide: {
+ artifactsEndpoint,
+ artifactsEndpointPlaceholder,
+ },
+ propsData: {
+ pipelineId,
+ },
+ data() {
+ return {
+ ...mockData,
+ };
+ },
+ stubs: {
+ GlSprintf,
+ },
+ }),
+ );
+ };
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findAllArtifactItems = () => wrapper.findAllByTestId(artifactItemTestId);
+ const findFirstArtifactItem = () => wrapper.findByTestId(artifactItemTestId);
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mockAxios.restore();
+
+ wrapper.destroy();
+ });
+
+ it('should render the dropdown', () => {
+ createComponent();
+
+ expect(findDropdown().exists()).toBe(true);
+ });
+
+ describe('Artifacts', () => {
+ it('should fetch artifacts on dropdown click', async () => {
+ const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
+ mockAxios.onGet(endpoint).replyOnce(200, { artifacts });
+ createComponent();
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(wrapper.vm.artifacts).toEqual(artifacts);
+ });
+
+ it('should render all the provided artifacts', () => {
+ createComponent({ mockData: { artifacts } });
+
+ expect(findAllArtifactItems()).toHaveLength(artifacts.length);
+ });
+
+ it('should render the correct artifact name and path', () => {
+ createComponent({ mockData: { artifacts } });
+
+ expect(findFirstArtifactItem().attributes('href')).toBe(artifacts[0].path);
+ expect(findFirstArtifactItem().text()).toBe(`Download ${artifacts[0].name} artifact`);
+ });
+
+ describe('with a failing request', () => {
+ it('should render an error message', async () => {
+ const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
+ mockAxios.onGet(endpoint).replyOnce(500);
+ createComponent();
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+
+ const error = findAlert();
+ expect(error.exists()).toBe(true);
+ expect(error.text()).toBe(i18n.artifactsFetchErrorMessage);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/pipelines_artifacts_spec.js b/spec/frontend/pipelines/pipelines_artifacts_spec.js
index d4a2db08d97..336255768d7 100644
--- a/spec/frontend/pipelines/pipelines_artifacts_spec.js
+++ b/spec/frontend/pipelines/pipelines_artifacts_spec.js
@@ -1,23 +1,43 @@
-import { GlDropdown, GlDropdownItem, GlSprintf } from '@gitlab/ui';
+import { GlAlert, GlDropdown, GlDropdownItem, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import PipelineArtifacts from '~/pipelines/components/pipelines_list/pipelines_artifacts.vue';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import PipelineArtifacts, {
+ i18n,
+} from '~/pipelines/components/pipelines_list/pipelines_artifacts.vue';
describe('Pipelines Artifacts dropdown', () => {
let wrapper;
+ let mockAxios;
- const createComponent = () => {
+ const artifacts = [
+ {
+ name: 'job my-artifact',
+ path: '/download/path',
+ },
+ {
+ name: 'job-2 my-artifact-2',
+ path: '/download/path-two',
+ },
+ ];
+ const artifactsEndpointPlaceholder = ':pipeline_artifacts_id';
+ const artifactsEndpoint = `endpoint/${artifactsEndpointPlaceholder}/artifacts.json`;
+ const pipelineId = 108;
+
+ const createComponent = ({ mockData = {} } = {}) => {
wrapper = shallowMount(PipelineArtifacts, {
+ provide: {
+ artifactsEndpoint,
+ artifactsEndpointPlaceholder,
+ },
propsData: {
- artifacts: [
- {
- name: 'job my-artifact',
- path: '/download/path',
- },
- {
- name: 'job-2 my-artifact-2',
- path: '/download/path-two',
- },
- ],
+ pipelineId,
+ },
+ data() {
+ return {
+ ...mockData,
+ };
},
stubs: {
GlSprintf,
@@ -25,11 +45,14 @@ describe('Pipelines Artifacts dropdown', () => {
});
};
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findFirstGlDropdownItem = () => wrapper.find(GlDropdownItem);
const findAllGlDropdownItems = () => wrapper.find(GlDropdown).findAll(GlDropdownItem);
beforeEach(() => {
- createComponent();
+ mockAxios = new MockAdapter(axios);
});
afterEach(() => {
@@ -37,13 +60,66 @@ describe('Pipelines Artifacts dropdown', () => {
wrapper = null;
});
+ it('should render the dropdown', () => {
+ createComponent();
+
+ expect(findDropdown().exists()).toBe(true);
+ });
+
+ it('should fetch artifacts on dropdown click', async () => {
+ const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
+ mockAxios.onGet(endpoint).replyOnce(200, { artifacts });
+ createComponent();
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+
+ expect(mockAxios.history.get).toHaveLength(1);
+ expect(wrapper.vm.artifacts).toEqual(artifacts);
+ });
+
it('should render a dropdown with all the provided artifacts', () => {
- expect(findAllGlDropdownItems()).toHaveLength(2);
+ createComponent({ mockData: { artifacts } });
+
+ expect(findAllGlDropdownItems()).toHaveLength(artifacts.length);
});
it('should render a link with the provided path', () => {
- expect(findFirstGlDropdownItem().attributes('href')).toBe('/download/path');
+ createComponent({ mockData: { artifacts } });
- expect(findFirstGlDropdownItem().text()).toBe('Download job my-artifact artifact');
+ expect(findFirstGlDropdownItem().attributes('href')).toBe(artifacts[0].path);
+
+ expect(findFirstGlDropdownItem().text()).toBe(`Download ${artifacts[0].name} artifact`);
+ });
+
+ describe('with a failing request', () => {
+ it('should render an error message', async () => {
+ const endpoint = artifactsEndpoint.replace(artifactsEndpointPlaceholder, pipelineId);
+ mockAxios.onGet(endpoint).replyOnce(500);
+ createComponent();
+ findDropdown().vm.$emit('show');
+ await waitForPromises();
+
+ const error = findAlert();
+ expect(error.exists()).toBe(true);
+ expect(error.text()).toBe(i18n.artifactsFetchErrorMessage);
+ });
+ });
+
+ describe('with no artifacts received', () => {
+ it('should render empty alert message', () => {
+ createComponent({ mockData: { artifacts: [] } });
+
+ const emptyAlert = findAlert();
+ expect(emptyAlert.exists()).toBe(true);
+ expect(emptyAlert.text()).toBe(i18n.noArtifacts);
+ });
+ });
+
+ describe('when artifacts are loading', () => {
+ it('should show loading icon', () => {
+ createComponent({ mockData: { isLoading: true } });
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/pipelines/pipelines_ci_templates_spec.js b/spec/frontend/pipelines/pipelines_ci_templates_spec.js
index d4cf6027ff7..0c37bf2d84a 100644
--- a/spec/frontend/pipelines/pipelines_ci_templates_spec.js
+++ b/spec/frontend/pipelines/pipelines_ci_templates_spec.js
@@ -2,7 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import ExperimentTracking from '~/experimentation/experiment_tracking';
import PipelinesCiTemplate from '~/pipelines/components/pipelines_list/pipelines_ci_templates.vue';
-const addCiYmlPath = "/-/new/master?commit_message='Add%20.gitlab-ci.yml'";
+const addCiYmlPath = "/-/new/main?commit_message='Add%20.gitlab-ci.yml'";
const suggestedCiTemplates = [
{ name: 'Android', logo: '/assets/illustrations/logos/android.svg' },
{ name: 'Bash', logo: '/assets/illustrations/logos/bash.svg' },
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index 84a25f42201..f9b59c5dc48 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -1,3 +1,4 @@
+import '~/commons';
import { GlButton, GlEmptyState, GlFilteredSearch, GlLoadingIcon, GlPagination } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
@@ -6,6 +7,7 @@ import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
+import { getExperimentVariant } from '~/experimentation/utils';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
@@ -19,6 +21,10 @@ import TablePagination from '~/vue_shared/components/pagination/table_pagination
import { stageReply, users, mockSearch, branches } from './mock_data';
jest.mock('~/flash');
+jest.mock('~/experimentation/utils', () => ({
+ ...jest.requireActual('~/experimentation/utils'),
+ getExperimentVariant: jest.fn().mockReturnValue('control'),
+}));
const mockProjectPath = 'twitter/flight';
const mockProjectId = '21';
@@ -41,6 +47,7 @@ describe('Pipelines', () => {
ciLintPath: '/ci/lint',
resetCachePath: `${mockProjectPath}/settings/ci_cd/reset_cache`,
newPipelinePath: `${mockProjectPath}/pipelines/new`,
+ codeQualityPagePath: `${mockProjectPath}/-/new/master?commit_message=Add+.gitlab-ci.yml+and+create+a+code+quality+job&file_name=.gitlab-ci.yml&template=Code-Quality`,
};
const noPermissions = {
@@ -87,7 +94,10 @@ describe('Pipelines', () => {
beforeAll(() => {
origWindowLocation = window.location;
delete window.location;
- window.location = { search: '' };
+ window.location = {
+ search: '',
+ protocol: 'https:',
+ };
});
afterAll(() => {
@@ -289,7 +299,7 @@ describe('Pipelines', () => {
page: '1',
scope: 'all',
username: 'root',
- ref: 'master',
+ ref: 'main',
status: 'pending',
};
@@ -321,7 +331,7 @@ describe('Pipelines', () => {
expect(window.history.pushState).toHaveBeenCalledWith(
expect.anything(),
expect.anything(),
- `${window.location.pathname}?page=1&scope=all&username=root&ref=master&status=pending`,
+ `${window.location.pathname}?page=1&scope=all&username=root&ref=main&status=pending`,
);
});
});
@@ -551,6 +561,19 @@ describe('Pipelines', () => {
);
});
+ describe('when the code_quality_walkthrough experiment is active', () => {
+ beforeAll(() => {
+ getExperimentVariant.mockReturnValue('candidate');
+ });
+
+ it('renders another CTA button', () => {
+ expect(findEmptyState().findComponent(GlButton).text()).toBe('Add a code quality job');
+ expect(findEmptyState().findComponent(GlButton).attributes('href')).toBe(
+ paths.codeQualityPagePath,
+ );
+ });
+ });
+
it('does not render filtered search', () => {
expect(findFilteredSearch().exists()).toBe(false);
});
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index 70e47b98575..68b0dfc018e 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -1,3 +1,4 @@
+import '~/commons';
import { GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -5,11 +6,11 @@ import PipelineMiniGraph from '~/pipelines/components/pipelines_list/pipeline_mi
import PipelineOperations from '~/pipelines/components/pipelines_list/pipeline_operations.vue';
import PipelineTriggerer from '~/pipelines/components/pipelines_list/pipeline_triggerer.vue';
import PipelineUrl from '~/pipelines/components/pipelines_list/pipeline_url.vue';
-import PipelinesStatusBadge from '~/pipelines/components/pipelines_list/pipelines_status_badge.vue';
import PipelinesTable from '~/pipelines/components/pipelines_list/pipelines_table.vue';
import PipelinesTimeago from '~/pipelines/components/pipelines_list/time_ago.vue';
import eventHub from '~/pipelines/event_hub';
+import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
import CommitComponent from '~/vue_shared/components/commit.vue';
jest.mock('~/pipelines/event_hub');
@@ -42,7 +43,7 @@ describe('Pipelines Table', () => {
};
const findGlTable = () => wrapper.findComponent(GlTable);
- const findStatusBadge = () => wrapper.findComponent(PipelinesStatusBadge);
+ const findStatusBadge = () => wrapper.findComponent(CiBadge);
const findPipelineInfo = () => wrapper.findComponent(PipelineUrl);
const findTriggerer = () => wrapper.findComponent(PipelineTriggerer);
const findCommit = () => wrapper.findComponent(CommitComponent);
diff --git a/spec/frontend/pipelines/test_reports/empty_state_spec.js b/spec/frontend/pipelines/test_reports/empty_state_spec.js
new file mode 100644
index 00000000000..ee0f8a90a11
--- /dev/null
+++ b/spec/frontend/pipelines/test_reports/empty_state_spec.js
@@ -0,0 +1,45 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import EmptyState, { i18n } from '~/pipelines/components/test_reports/empty_state.vue';
+
+describe('Test report empty state', () => {
+ let wrapper;
+
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+
+ const createComponent = ({ hasTestReport = true } = {}) => {
+ wrapper = shallowMount(EmptyState, {
+ provide: {
+ emptyStateImagePath: '/image/path',
+ hasTestReport,
+ },
+ stubs: {
+ GlEmptyState,
+ },
+ });
+ };
+
+ describe('when pipeline has a test report', () => {
+ it('should render empty test report message', () => {
+ createComponent();
+
+ expect(findEmptyState().props()).toMatchObject({
+ primaryButtonText: i18n.noTestsButton,
+ description: i18n.noTestsDescription,
+ title: i18n.noTestsTitle,
+ });
+ });
+ });
+
+ describe('when pipeline does not have a test report', () => {
+ it('should render no test report message', () => {
+ createComponent({ hasTestReport: false });
+
+ expect(findEmptyState().props()).toMatchObject({
+ primaryButtonText: i18n.noReportsButton,
+ description: i18n.noReportsDescription,
+ title: i18n.noReportsTitle,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/test_reports/test_case_details_spec.js b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
index e866586a2c3..c995eb864d1 100644
--- a/spec/frontend/pipelines/test_reports/test_case_details_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_case_details_spec.js
@@ -1,5 +1,6 @@
import { GlModal } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import TestCaseDetails from '~/pipelines/components/test_reports/test_case_details.vue';
import CodeBlock from '~/vue_shared/components/code_block.vue';
@@ -13,29 +14,32 @@ describe('Test case details', () => {
formattedTime: '10.04ms',
recent_failures: {
count: 2,
- base_branch: 'master',
+ base_branch: 'main',
},
system_output: 'Line 42 is broken',
};
- const findModal = () => wrapper.find(GlModal);
- const findName = () => wrapper.find('[data-testid="test-case-name"]');
- const findDuration = () => wrapper.find('[data-testid="test-case-duration"]');
- const findRecentFailures = () => wrapper.find('[data-testid="test-case-recent-failures"]');
- const findSystemOutput = () => wrapper.find('[data-testid="test-case-trace"]');
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findName = () => wrapper.findByTestId('test-case-name');
+ const findDuration = () => wrapper.findByTestId('test-case-duration');
+ const findRecentFailures = () => wrapper.findByTestId('test-case-recent-failures');
+ const findAttachmentUrl = () => wrapper.findByTestId('test-case-attachment-url');
+ const findSystemOutput = () => wrapper.findByTestId('test-case-trace');
const createComponent = (testCase = {}) => {
- wrapper = shallowMount(TestCaseDetails, {
- localVue,
- propsData: {
- modalId: 'my-modal',
- testCase: {
- ...defaultTestCase,
- ...testCase,
+ wrapper = extendedWrapper(
+ shallowMount(TestCaseDetails, {
+ localVue,
+ propsData: {
+ modalId: 'my-modal',
+ testCase: {
+ ...defaultTestCase,
+ ...testCase,
+ },
},
- },
- stubs: { CodeBlock, GlModal },
- });
+ stubs: { CodeBlock, GlModal },
+ }),
+ );
};
afterEach(() => {
@@ -91,6 +95,25 @@ describe('Test case details', () => {
});
});
+ describe('when test case has attachment URL', () => {
+ it('renders the attachment URL as a link', () => {
+ const expectedUrl = '/my/path.jpg';
+ createComponent({ attachment_url: expectedUrl });
+ const attachmentUrl = findAttachmentUrl();
+
+ expect(attachmentUrl.exists()).toBe(true);
+ expect(attachmentUrl.attributes('href')).toBe(expectedUrl);
+ });
+ });
+
+ describe('when test case does not have attachment URL', () => {
+ it('does not render the attachment URL', () => {
+ createComponent({ attachment_url: null });
+
+ expect(findAttachmentUrl().exists()).toBe(false);
+ });
+ });
+
describe('when test case has system output', () => {
it('renders the test case system output', () => {
createComponent();
diff --git a/spec/frontend/pipelines/test_reports/test_reports_spec.js b/spec/frontend/pipelines/test_reports/test_reports_spec.js
index da5763ddf8e..e44d59ba888 100644
--- a/spec/frontend/pipelines/test_reports/test_reports_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_reports_spec.js
@@ -2,6 +2,8 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { getJSONFixture } from 'helpers/fixtures';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import EmptyState from '~/pipelines/components/test_reports/empty_state.vue';
import TestReports from '~/pipelines/components/test_reports/test_reports.vue';
import TestSummary from '~/pipelines/components/test_reports/test_summary.vue';
import TestSummaryTable from '~/pipelines/components/test_reports/test_summary_table.vue';
@@ -16,11 +18,11 @@ describe('Test reports app', () => {
const testReports = getJSONFixture('pipelines/test_report.json');
- const loadingSpinner = () => wrapper.find(GlLoadingIcon);
- const testsDetail = () => wrapper.find('[data-testid="tests-detail"]');
- const noTestsToShow = () => wrapper.find('[data-testid="no-tests-to-show"]');
- const testSummary = () => wrapper.find(TestSummary);
- const testSummaryTable = () => wrapper.find(TestSummaryTable);
+ const loadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
+ const testsDetail = () => wrapper.findByTestId('tests-detail');
+ const emptyState = () => wrapper.findComponent(EmptyState);
+ const testSummary = () => wrapper.findComponent(TestSummary);
+ const testSummaryTable = () => wrapper.findComponent(TestSummaryTable);
const actionSpies = {
fetchTestSuite: jest.fn(),
@@ -29,7 +31,7 @@ describe('Test reports app', () => {
removeSelectedSuiteIndex: jest.fn(),
};
- const createComponent = (state = {}) => {
+ const createComponent = ({ state = {} } = {}) => {
store = new Vuex.Store({
state: {
isLoading: false,
@@ -41,10 +43,12 @@ describe('Test reports app', () => {
getters,
});
- wrapper = shallowMount(TestReports, {
- store,
- localVue,
- });
+ wrapper = extendedWrapper(
+ shallowMount(TestReports, {
+ store,
+ localVue,
+ }),
+ );
};
afterEach(() => {
@@ -52,33 +56,28 @@ describe('Test reports app', () => {
});
describe('when component is created', () => {
- beforeEach(() => {
+ it('should call fetchSummary when pipeline has test report', () => {
createComponent();
- });
- it('should call fetchSummary', () => {
expect(actionSpies.fetchSummary).toHaveBeenCalled();
});
});
describe('when loading', () => {
- beforeEach(() => createComponent({ isLoading: true }));
+ beforeEach(() => createComponent({ state: { isLoading: true } }));
it('shows the loading spinner', () => {
- expect(noTestsToShow().exists()).toBe(false);
+ expect(emptyState().exists()).toBe(false);
expect(testsDetail().exists()).toBe(false);
expect(loadingSpinner().exists()).toBe(true);
});
});
describe('when the api returns no data', () => {
- beforeEach(() => createComponent({ testReports: {} }));
-
- it('displays that there are no tests to show', () => {
- const noTests = noTestsToShow();
+ it('displays empty state component', () => {
+ createComponent({ state: { testReports: {} } });
- expect(noTests.exists()).toBe(true);
- expect(noTests.text()).toBe('There are no tests to show.');
+ expect(emptyState().exists()).toBe(true);
});
});
@@ -97,7 +96,7 @@ describe('Test reports app', () => {
describe('when a suite is clicked', () => {
beforeEach(() => {
- createComponent({ hasFullReport: true });
+ createComponent({ state: { hasFullReport: true } });
testSummaryTable().vm.$emit('row-click', 0);
});
@@ -109,7 +108,7 @@ describe('Test reports app', () => {
describe('when clicking back to summary', () => {
beforeEach(() => {
- createComponent({ selectedSuiteIndex: 0 });
+ createComponent({ state: { selectedSuiteIndex: 0 } });
testSummary().vm.$emit('on-back-click');
});
diff --git a/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
index 2e32d62b4bd..2e44f40eda4 100644
--- a/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
+++ b/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
@@ -89,7 +89,7 @@ describe('Pipeline Branch Name Token', () => {
});
it('renders only the branch searched for', () => {
- const mockBranches = ['master'];
+ const mockBranches = ['main'];
createComponent({ stubs }, { branches: mockBranches, loading: false });
expect(findAllFilteredSearchSuggestions()).toHaveLength(mockBranches.length);
diff --git a/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js
index 42c9dfc9ff0..b03dbb73b95 100644
--- a/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js
+++ b/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js
@@ -89,7 +89,7 @@ describe('Pipeline Branch Name Token', () => {
});
it('renders only the tag searched for', () => {
- const mockTags = ['master-tag'];
+ const mockTags = ['main-tag'];
createComponent({ stubs }, { tags: mockTags, loading: false });
expect(findAllFilteredSearchSuggestions()).toHaveLength(mockTags.length);
diff --git a/spec/frontend/project_find_file_spec.js b/spec/frontend/project_find_file_spec.js
index 5919910d791..106b41bcc02 100644
--- a/spec/frontend/project_find_file_spec.js
+++ b/spec/frontend/project_find_file_spec.js
@@ -10,9 +10,9 @@ jest.mock('~/lib/dompurify', () => ({
sanitize: jest.fn((val) => val),
}));
-const BLOB_URL_TEMPLATE = `${TEST_HOST}/namespace/project/blob/master`;
-const FILE_FIND_URL = `${TEST_HOST}/namespace/project/files/master?format=json`;
-const FIND_TREE_URL = `${TEST_HOST}/namespace/project/tree/master`;
+const BLOB_URL_TEMPLATE = `${TEST_HOST}/namespace/project/blob/main`;
+const FILE_FIND_URL = `${TEST_HOST}/namespace/project/files/main?format=json`;
+const FIND_TREE_URL = `${TEST_HOST}/namespace/project/tree/main`;
const TEMPLATE = `<div class="file-finder-holder tree-holder js-file-finder" data-blob-url-template="${BLOB_URL_TEMPLATE}" data-file-find-url="${FILE_FIND_URL}" data-find-tree-url="${FIND_TREE_URL}">
<input class="file-finder-input" id="file_find" />
<div class="tree-content-holder">
diff --git a/spec/frontend/projects/compare/components/app_legacy_spec.js b/spec/frontend/projects/compare/components/app_legacy_spec.js
index 93e96c8b9f7..6fdf4014575 100644
--- a/spec/frontend/projects/compare/components/app_legacy_spec.js
+++ b/spec/frontend/projects/compare/components/app_legacy_spec.js
@@ -7,7 +7,7 @@ jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
const projectCompareIndexPath = 'some/path';
const refsProjectPath = 'some/refs/path';
-const paramsFrom = 'master';
+const paramsFrom = 'main';
const paramsTo = 'some-other-branch';
describe('CompareApp component', () => {
diff --git a/spec/frontend/projects/compare/components/app_spec.js b/spec/frontend/projects/compare/components/app_spec.js
index 6de06e4373c..7989a6f3d74 100644
--- a/spec/frontend/projects/compare/components/app_spec.js
+++ b/spec/frontend/projects/compare/components/app_spec.js
@@ -2,26 +2,19 @@ import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import CompareApp from '~/projects/compare/components/app.vue';
import RevisionCard from '~/projects/compare/components/revision_card.vue';
+import { appDefaultProps as defaultProps } from './mock_data';
jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
-const projectCompareIndexPath = 'some/path';
-const refsProjectPath = 'some/refs/path';
-const paramsFrom = 'master';
-const paramsTo = 'master';
-
describe('CompareApp component', () => {
let wrapper;
+ const findSourceRevisionCard = () => wrapper.find('[data-testid="sourceRevisionCard"]');
+ const findTargetRevisionCard = () => wrapper.find('[data-testid="targetRevisionCard"]');
const createComponent = (props = {}) => {
wrapper = shallowMount(CompareApp, {
propsData: {
- projectCompareIndexPath,
- refsProjectPath,
- paramsFrom,
- paramsTo,
- projectMergeRequestPath: '',
- createMrPath: '',
+ ...defaultProps,
...props,
},
});
@@ -39,16 +32,16 @@ describe('CompareApp component', () => {
it('renders component with prop', () => {
expect(wrapper.props()).toEqual(
expect.objectContaining({
- projectCompareIndexPath,
- refsProjectPath,
- paramsFrom,
- paramsTo,
+ projectCompareIndexPath: defaultProps.projectCompareIndexPath,
+ refsProjectPath: defaultProps.refsProjectPath,
+ paramsFrom: defaultProps.paramsFrom,
+ paramsTo: defaultProps.paramsTo,
}),
);
});
it('contains the correct form attributes', () => {
- expect(wrapper.attributes('action')).toBe(projectCompareIndexPath);
+ expect(wrapper.attributes('action')).toBe(defaultProps.projectCompareIndexPath);
expect(wrapper.attributes('method')).toBe('POST');
});
@@ -87,6 +80,58 @@ describe('CompareApp component', () => {
});
});
+ it('sets the selected project when the "selectProject" event is emitted', async () => {
+ const project = {
+ name: 'some-to-name',
+ id: '1',
+ };
+
+ findTargetRevisionCard().vm.$emit('selectProject', {
+ direction: 'to',
+ project,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(findTargetRevisionCard().props('selectedProject')).toEqual(
+ expect.objectContaining(project),
+ );
+ });
+
+ it('sets the selected revision when the "selectRevision" event is emitted', async () => {
+ const revision = 'some-revision';
+
+ findTargetRevisionCard().vm.$emit('selectRevision', {
+ direction: 'to',
+ revision,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(findSourceRevisionCard().props('paramsBranch')).toBe(revision);
+ });
+
+ describe('swap revisions button', () => {
+ const findSwapRevisionsButton = () => wrapper.find('[data-testid="swapRevisionsButton"]');
+
+ it('renders the swap revisions button', () => {
+ expect(findSwapRevisionsButton().exists()).toBe(true);
+ });
+
+ it('has the correct text', () => {
+ expect(findSwapRevisionsButton().text()).toBe('Swap revisions');
+ });
+
+ it('swaps revisions when clicked', async () => {
+ findSwapRevisionsButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findTargetRevisionCard().props('paramsBranch')).toBe(defaultProps.paramsTo);
+ expect(findSourceRevisionCard().props('paramsBranch')).toBe(defaultProps.paramsFrom);
+ });
+ });
+
describe('merge request buttons', () => {
const findProjectMrButton = () => wrapper.find('[data-testid="projectMrButton"]');
const findCreateMrButton = () => wrapper.find('[data-testid="createMrButton"]');
diff --git a/spec/frontend/projects/compare/components/mock_data.js b/spec/frontend/projects/compare/components/mock_data.js
new file mode 100644
index 00000000000..61309928c26
--- /dev/null
+++ b/spec/frontend/projects/compare/components/mock_data.js
@@ -0,0 +1,37 @@
+const refsProjectPath = 'some/refs/path';
+const paramsName = 'to';
+const paramsBranch = 'main';
+const defaultProject = {
+ name: 'some-to-name',
+ id: '1',
+};
+
+export const appDefaultProps = {
+ projectCompareIndexPath: 'some/path',
+ projectMergeRequestPath: '',
+ projects: [defaultProject],
+ paramsFrom: 'main',
+ paramsTo: 'target/branch',
+ createMrPath: '',
+ refsProjectPath,
+ defaultProject,
+};
+
+export const revisionCardDefaultProps = {
+ selectedProject: defaultProject,
+ paramsBranch,
+ revisionText: 'Source',
+ refsProjectPath,
+ paramsName,
+};
+
+export const repoDropdownDefaultProps = {
+ selectedProject: defaultProject,
+ paramsName,
+};
+
+export const revisionDropdownDefaultProps = {
+ refsProjectPath,
+ paramsBranch,
+ paramsName,
+};
diff --git a/spec/frontend/projects/compare/components/repo_dropdown_spec.js b/spec/frontend/projects/compare/components/repo_dropdown_spec.js
index df8fea8fd32..27a7a32ebca 100644
--- a/spec/frontend/projects/compare/components/repo_dropdown_spec.js
+++ b/spec/frontend/projects/compare/components/repo_dropdown_spec.js
@@ -1,37 +1,17 @@
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RepoDropdown from '~/projects/compare/components/repo_dropdown.vue';
-
-const defaultProps = {
- paramsName: 'to',
-};
-
-const projectToId = '1';
-const projectToName = 'some-to-name';
-const projectFromId = '2';
-const projectFromName = 'some-from-name';
-
-const defaultProvide = {
- projectTo: { id: projectToId, name: projectToName },
- projectsFrom: [
- { id: projectFromId, name: projectFromName },
- { id: 3, name: 'some-from-another-name' },
- ],
-};
+import { revisionCardDefaultProps as defaultProps } from './mock_data';
describe('RepoDropdown component', () => {
let wrapper;
- const createComponent = (props = {}, provide = {}) => {
+ const createComponent = (props = {}) => {
wrapper = shallowMount(RepoDropdown, {
propsData: {
...defaultProps,
...props,
},
- provide: {
- ...defaultProvide,
- ...provide,
- },
});
};
@@ -49,11 +29,11 @@ describe('RepoDropdown component', () => {
});
it('set hidden input', () => {
- expect(findHiddenInput().attributes('value')).toBe(projectToId);
+ expect(findHiddenInput().attributes('value')).toBe(defaultProps.selectedProject.id);
});
it('displays the project name in the disabled dropdown', () => {
- expect(findGlDropdown().props('text')).toBe(projectToName);
+ expect(findGlDropdown().props('text')).toBe(defaultProps.selectedProject.name);
expect(findGlDropdown().props('disabled')).toBe(true);
});
@@ -66,31 +46,39 @@ describe('RepoDropdown component', () => {
describe('Target Revision', () => {
beforeEach(() => {
- createComponent({ paramsName: 'from' });
+ const projects = [
+ {
+ name: 'some-to-name',
+ id: '1',
+ },
+ ];
+
+ createComponent({ paramsName: 'from', projects });
});
it('set hidden input of the selected project', () => {
- expect(findHiddenInput().attributes('value')).toBe(projectToId);
+ expect(findHiddenInput().attributes('value')).toBe(defaultProps.selectedProject.id);
});
it('displays matching project name of the source revision initially in the dropdown', () => {
- expect(findGlDropdown().props('text')).toBe(projectToName);
+ expect(findGlDropdown().props('text')).toBe(defaultProps.selectedProject.name);
});
- it('updates the hiddin input value when onClick method is triggered', async () => {
- const repoId = '100';
+ it('updates the hidden input value when onClick method is triggered', async () => {
+ const repoId = '1';
wrapper.vm.onClick({ id: repoId });
await wrapper.vm.$nextTick();
expect(findHiddenInput().attributes('value')).toBe(repoId);
});
- it('emits `changeTargetProject` event when another target project is selected', async () => {
- const index = 1;
- const { projectsFrom } = defaultProvide;
- findGlDropdown().findAll(GlDropdownItem).at(index).vm.$emit('click');
+ it('emits `selectProject` event when another target project is selected', async () => {
+ findGlDropdown().findAll(GlDropdownItem).at(0).vm.$emit('click');
await wrapper.vm.$nextTick();
- expect(wrapper.emitted('changeTargetProject')[0][0]).toEqual(projectsFrom[index].name);
+ expect(wrapper.emitted('selectProject')[0][0]).toEqual({
+ direction: 'from',
+ project: { id: '1', name: 'some-to-name' },
+ });
});
});
});
diff --git a/spec/frontend/projects/compare/components/revision_card_spec.js b/spec/frontend/projects/compare/components/revision_card_spec.js
index 83f858f4454..57906045337 100644
--- a/spec/frontend/projects/compare/components/revision_card_spec.js
+++ b/spec/frontend/projects/compare/components/revision_card_spec.js
@@ -3,13 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import RepoDropdown from '~/projects/compare/components/repo_dropdown.vue';
import RevisionCard from '~/projects/compare/components/revision_card.vue';
import RevisionDropdown from '~/projects/compare/components/revision_dropdown.vue';
-
-const defaultProps = {
- refsProjectPath: 'some/refs/path',
- revisionText: 'Source',
- paramsName: 'to',
- paramsBranch: 'master',
-};
+import { revisionCardDefaultProps as defaultProps } from './mock_data';
describe('RepoDropdown component', () => {
let wrapper;
diff --git a/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js b/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
index ca208395e82..38e13dc5462 100644
--- a/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
+++ b/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
@@ -9,7 +9,7 @@ const defaultProps = {
refsProjectPath: 'some/refs/path',
revisionText: 'Target',
paramsName: 'from',
- paramsBranch: 'master',
+ paramsBranch: 'main',
};
jest.mock('~/flash');
diff --git a/spec/frontend/projects/compare/components/revision_dropdown_spec.js b/spec/frontend/projects/compare/components/revision_dropdown_spec.js
index aab9607ceae..118bb68585e 100644
--- a/spec/frontend/projects/compare/components/revision_dropdown_spec.js
+++ b/spec/frontend/projects/compare/components/revision_dropdown_spec.js
@@ -1,15 +1,10 @@
-import { GlDropdown, GlSearchBoxByType } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import RevisionDropdown from '~/projects/compare/components/revision_dropdown.vue';
-
-const defaultProps = {
- refsProjectPath: 'some/refs/path',
- paramsName: 'from',
- paramsBranch: 'master',
-};
+import { revisionDropdownDefaultProps as defaultProps } from './mock_data';
jest.mock('~/flash');
@@ -142,4 +137,17 @@ describe('RevisionDropdown component', () => {
expect(findGlDropdown().props('text')).toBe(defaultProps.paramsBranch);
});
});
+
+ it('emits `selectRevision` event when another revision is selected', async () => {
+ createComponent();
+ wrapper.vm.branches = ['some-branch'];
+ await wrapper.vm.$nextTick();
+
+ findGlDropdown().findAll(GlDropdownItem).at(0).vm.$emit('click');
+
+ expect(wrapper.emitted('selectRevision')[0][0]).toEqual({
+ direction: 'to',
+ revision: 'some-branch',
+ });
+ });
});
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js b/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js
deleted file mode 100644
index 204e7a7c394..00000000000
--- a/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js
+++ /dev/null
@@ -1,144 +0,0 @@
-import { GlBreadcrumb } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { assignGitlabExperiment } from 'helpers/experimentation_helper';
-import App from '~/projects/experiment_new_project_creation/components/app.vue';
-import LegacyContainer from '~/projects/experiment_new_project_creation/components/legacy_container.vue';
-import WelcomePage from '~/projects/experiment_new_project_creation/components/welcome.vue';
-
-describe('Experimental new project creation app', () => {
- let wrapper;
-
- const createComponent = (propsData) => {
- wrapper = shallowMount(App, { propsData });
- };
-
- afterEach(() => {
- wrapper.destroy();
- window.location.hash = '';
- wrapper = null;
- });
-
- const findWelcomePage = () => wrapper.findComponent(WelcomePage);
- const findPanel = (panelName) =>
- findWelcomePage()
- .props()
- .panels.find((p) => p.name === panelName);
- const findPanelHeader = () => wrapper.find('h4');
-
- describe('new_repo experiment', () => {
- describe('when in the candidate variant', () => {
- assignGitlabExperiment('new_repo', 'candidate');
-
- it('has "repository" in the panel title', () => {
- createComponent();
-
- expect(findPanel('blank_project').title).toBe('Create blank project/repository');
- });
-
- describe('when hash is not empty on load', () => {
- beforeEach(() => {
- window.location.hash = '#blank_project';
- createComponent();
- });
-
- it('renders "project/repository"', () => {
- expect(findPanelHeader().text()).toBe('Create blank project/repository');
- });
- });
- });
-
- describe('when in the control variant', () => {
- assignGitlabExperiment('new_repo', 'control');
-
- it('has "project" in the panel title', () => {
- createComponent();
-
- expect(findPanel('blank_project').title).toBe('Create blank project');
- });
-
- describe('when hash is not empty on load', () => {
- beforeEach(() => {
- window.location.hash = '#blank_project';
- createComponent();
- });
-
- it('renders "project"', () => {
- expect(findPanelHeader().text()).toBe('Create blank project');
- });
- });
- });
- });
-
- describe('with empty hash', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders welcome page', () => {
- expect(wrapper.find(WelcomePage).exists()).toBe(true);
- });
-
- it('does not render breadcrumbs', () => {
- expect(wrapper.find(GlBreadcrumb).exists()).toBe(false);
- });
- });
-
- it('renders blank project container if there are errors', () => {
- createComponent({ hasErrors: true });
- expect(wrapper.find(WelcomePage).exists()).toBe(false);
- expect(wrapper.find(LegacyContainer).exists()).toBe(true);
- });
-
- describe('when hash is not empty on load', () => {
- beforeEach(() => {
- window.location.hash = '#blank_project';
- createComponent();
- });
-
- it('renders relevant container', () => {
- expect(wrapper.find(WelcomePage).exists()).toBe(false);
- expect(wrapper.find(LegacyContainer).exists()).toBe(true);
- });
-
- it('renders breadcrumbs', () => {
- expect(wrapper.find(GlBreadcrumb).exists()).toBe(true);
- });
- });
-
- describe('display custom new project guideline text', () => {
- beforeEach(() => {
- window.location.hash = '#blank_project';
- });
-
- it('does not render new project guideline if undefined', () => {
- createComponent();
- expect(wrapper.find('div#new-project-guideline').exists()).toBe(false);
- });
-
- it('render new project guideline if defined', () => {
- const guidelineSelector = 'div#new-project-guideline';
-
- createComponent({
- newProjectGuidelines: '<h4>Internal Guidelines</h4><p>lorem ipsum</p>',
- });
- expect(wrapper.find(guidelineSelector).exists()).toBe(true);
- expect(wrapper.find(guidelineSelector).html()).toContain('<h4>Internal Guidelines</h4>');
- expect(wrapper.find(guidelineSelector).html()).toContain('<p>lorem ipsum</p>');
- });
- });
-
- it('renders relevant container when hash changes', () => {
- createComponent();
- expect(wrapper.find(WelcomePage).exists()).toBe(true);
-
- window.location.hash = '#blank_project';
- const ev = document.createEvent('HTMLEvents');
- ev.initEvent('hashchange', false, false);
- window.dispatchEvent(ev);
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(WelcomePage).exists()).toBe(false);
- expect(wrapper.find(LegacyContainer).exists()).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/projects/pipelines/charts/components/app_spec.js b/spec/frontend/projects/pipelines/charts/components/app_spec.js
index 0cf05d4ac37..987a215eb4c 100644
--- a/spec/frontend/projects/pipelines/charts/components/app_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/app_spec.js
@@ -54,8 +54,8 @@ describe('ProjectsPipelinesChartsApp', () => {
expect(findGlTabs().exists()).toBe(true);
expect(findGlTabAtIndex(0).attributes('title')).toBe('Pipelines');
- expect(findGlTabAtIndex(1).attributes('title')).toBe('Deployments');
- expect(findGlTabAtIndex(2).attributes('title')).toBe('Lead Time');
+ expect(findGlTabAtIndex(1).attributes('title')).toBe('Deployment frequency');
+ expect(findGlTabAtIndex(2).attributes('title')).toBe('Lead time');
});
it('renders the pipeline charts', () => {
@@ -75,7 +75,7 @@ describe('ProjectsPipelinesChartsApp', () => {
setWindowLocation(`${TEST_HOST}/gitlab-org/gitlab-test/-/pipelines/charts`);
mergeUrlParams.mockImplementation(({ chart }, path) => {
- expect(chart).toBe('deployments');
+ expect(chart).toBe('deployment-frequency');
expect(path).toBe(window.location.pathname);
chartsPath = `${path}?chart=${chart}`;
return chartsPath;
@@ -114,12 +114,12 @@ describe('ProjectsPipelinesChartsApp', () => {
describe('when provided with a query param', () => {
it.each`
- chart | tab
- ${'lead-time'} | ${'2'}
- ${'deployments'} | ${'1'}
- ${'pipelines'} | ${'0'}
- ${'fake'} | ${'0'}
- ${''} | ${'0'}
+ chart | tab
+ ${'lead-time'} | ${'2'}
+ ${'deployment-frequency'} | ${'1'}
+ ${'pipelines'} | ${'0'}
+ ${'fake'} | ${'0'}
+ ${''} | ${'0'}
`('shows the correct tab for URL parameter "$chart"', ({ chart, tab }) => {
setWindowLocation(`${TEST_HOST}/gitlab-org/gitlab-test/-/pipelines/charts?chart=${chart}`);
getParameterValues.mockImplementation((name) => {
@@ -152,7 +152,7 @@ describe('ProjectsPipelinesChartsApp', () => {
getParameterValues.mockImplementationOnce((name) => {
expect(name).toBe('chart');
- return ['deployments'];
+ return ['deployment-frequency'];
});
popstateHandler();
diff --git a/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_area_chart_spec.js b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_area_chart_spec.js
index 64f80300237..2b523467379 100644
--- a/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_area_chart_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_area_chart_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import CiCdAnalyticsAreaChart from '~/projects/pipelines/charts/components/ci_cd_analytics_area_chart.vue';
+import CiCdAnalyticsAreaChart from '~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_area_chart.vue';
import { transformedAreaChartData } from '../mock_data';
describe('CiCdAnalyticsAreaChart', () => {
diff --git a/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
index 037530ddd48..9adc6dba51e 100644
--- a/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
@@ -1,8 +1,8 @@
import { GlSegmentedControl } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import CiCdAnalyticsAreaChart from '~/projects/pipelines/charts/components/ci_cd_analytics_area_chart.vue';
-import CiCdAnalyticsCharts from '~/projects/pipelines/charts/components/ci_cd_analytics_charts.vue';
+import CiCdAnalyticsAreaChart from '~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_area_chart.vue';
+import CiCdAnalyticsCharts from '~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue';
import { transformedAreaChartData, chartOptions } from '../mock_data';
const DEFAULT_PROPS = {
@@ -26,7 +26,7 @@ const DEFAULT_PROPS = {
],
};
-describe('~/projects/pipelines/charts/components/ci_cd_analytics_charts.vue', () => {
+describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', () => {
let wrapper;
const createWrapper = (props = {}) =>
diff --git a/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js b/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
index c5cfe783569..b5ee62f2042 100644
--- a/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/pipeline_charts_spec.js
@@ -2,11 +2,11 @@ import { GlColumnChart } from '@gitlab/ui/dist/charts';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
-import CiCdAnalyticsCharts from '~/projects/pipelines/charts/components/ci_cd_analytics_charts.vue';
import PipelineCharts from '~/projects/pipelines/charts/components/pipeline_charts.vue';
import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
import getPipelineCountByStatus from '~/projects/pipelines/charts/graphql/queries/get_pipeline_count_by_status.query.graphql';
import getProjectPipelineStatistics from '~/projects/pipelines/charts/graphql/queries/get_project_pipeline_statistics.query.graphql';
+import CiCdAnalyticsCharts from '~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue';
import { mockPipelineCount, mockPipelineStatistics } from '../mock_data';
const projectPath = 'gitlab-org/gitlab';
diff --git a/spec/frontend/ref/stores/mutations_spec.js b/spec/frontend/ref/stores/mutations_spec.js
index 11d4fe0e206..de1d5c557ce 100644
--- a/spec/frontend/ref/stores/mutations_spec.js
+++ b/spec/frontend/ref/stores/mutations_spec.js
@@ -108,7 +108,7 @@ describe('Ref selector Vuex store mutations', () => {
const response = {
data: [
{
- name: 'master',
+ name: 'main',
default: true,
// everything except "name" and "default" should be stripped
@@ -130,7 +130,7 @@ describe('Ref selector Vuex store mutations', () => {
expect(state.matches.branches).toEqual({
list: [
{
- name: 'master',
+ name: 'main',
default: true,
},
{
diff --git a/spec/frontend/registry/explorer/components/details_page/details_header_spec.js b/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
index b50ed87a563..632f506f4ae 100644
--- a/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/details_header_spec.js
@@ -1,7 +1,10 @@
import { GlButton, GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
import { useFakeDate } from 'helpers/fake_date';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import waitForPromises from 'helpers/wait_for_promises';
import component from '~/registry/explorer/components/details_page/details_header.vue';
import {
UNSCHEDULED_STATUS,
@@ -16,15 +19,18 @@ import {
ROOT_IMAGE_TEXT,
ROOT_IMAGE_TOOLTIP,
} from '~/registry/explorer/constants';
+import getContainerRepositoryTagCountQuery from '~/registry/explorer/graphql/queries/get_container_repository_tags_count.query.graphql';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
+import { imageTagsCountMock } from '../../mock_data';
describe('Details Header', () => {
let wrapper;
+ let apolloProvider;
+ let localVue;
const defaultImage = {
name: 'foo',
updatedAt: '2020-11-03T13:29:21Z',
- tagsCount: 10,
canDelete: true,
project: {
visibility: 'public',
@@ -51,12 +57,31 @@ describe('Details Header', () => {
await wrapper.vm.$nextTick();
};
- const mountComponent = (propsData = { image: defaultImage }) => {
+ const mountComponent = ({
+ propsData = { image: defaultImage },
+ resolver = jest.fn().mockResolvedValue(imageTagsCountMock()),
+ $apollo = undefined,
+ } = {}) => {
+ const mocks = {};
+
+ if ($apollo) {
+ mocks.$apollo = $apollo;
+ } else {
+ localVue = createLocalVue();
+ localVue.use(VueApollo);
+
+ const requestHandlers = [[getContainerRepositoryTagCountQuery, resolver]];
+ apolloProvider = createMockApollo(requestHandlers);
+ }
+
wrapper = shallowMount(component, {
+ localVue,
+ apolloProvider,
propsData,
directives: {
GlTooltip: createMockDirective(),
},
+ mocks,
stubs: {
TitleArea,
},
@@ -64,41 +89,48 @@ describe('Details Header', () => {
};
afterEach(() => {
+ // if we want to mix createMockApollo and manual mocks we need to reset everything
wrapper.destroy();
+ apolloProvider = undefined;
+ localVue = undefined;
wrapper = null;
});
+
describe('image name', () => {
describe('missing image name', () => {
- it('root image ', () => {
- mountComponent({ image: { ...defaultImage, name: '' } });
+ beforeEach(() => {
+ mountComponent({ propsData: { image: { ...defaultImage, name: '' } } });
+
+ return waitForPromises();
+ });
+ it('root image ', () => {
expect(findTitle().text()).toBe(ROOT_IMAGE_TEXT);
});
it('has an icon', () => {
- mountComponent({ image: { ...defaultImage, name: '' } });
-
expect(findInfoIcon().exists()).toBe(true);
expect(findInfoIcon().props('name')).toBe('information-o');
});
it('has a tooltip', () => {
- mountComponent({ image: { ...defaultImage, name: '' } });
-
const tooltip = getBinding(findInfoIcon().element, 'gl-tooltip');
expect(tooltip.value).toBe(ROOT_IMAGE_TOOLTIP);
});
});
describe('with image name present', () => {
- it('shows image.name ', () => {
+ beforeEach(() => {
mountComponent();
+
+ return waitForPromises();
+ });
+
+ it('shows image.name ', () => {
expect(findTitle().text()).toContain('foo');
});
it('has no icon', () => {
- mountComponent();
-
expect(findInfoIcon().exists()).toBe(false);
});
});
@@ -111,16 +143,10 @@ describe('Details Header', () => {
expect(findDeleteButton().exists()).toBe(true);
});
- it('is hidden while loading', () => {
- mountComponent({ image: defaultImage, metadataLoading: true });
-
- expect(findDeleteButton().exists()).toBe(false);
- });
-
it('has the correct text', () => {
mountComponent();
- expect(findDeleteButton().text()).toBe('Delete');
+ expect(findDeleteButton().text()).toBe('Delete image repository');
});
it('has the correct props', () => {
@@ -149,7 +175,7 @@ describe('Details Header', () => {
`(
'when canDelete is $canDelete and disabled is $disabled is $isDisabled that the button is disabled',
({ canDelete, disabled, isDisabled }) => {
- mountComponent({ image: { ...defaultImage, canDelete }, disabled });
+ mountComponent({ propsData: { image: { ...defaultImage, canDelete }, disabled } });
expect(findDeleteButton().props('disabled')).toBe(isDisabled);
},
@@ -158,15 +184,32 @@ describe('Details Header', () => {
describe('metadata items', () => {
describe('tags count', () => {
+ it('displays "-- tags" while loading', async () => {
+ // here we are forced to mock apollo because `waitForMetadataItems` waits
+ // for two ticks, de facto allowing the promise to resolve, so there is
+ // no way to catch the component as both rendered and in loading state
+ mountComponent({ $apollo: { queries: { containerRepository: { loading: true } } } });
+
+ await waitForMetadataItems();
+
+ expect(findTagsCount().props('text')).toBe('-- tags');
+ });
+
it('when there is more than one tag has the correct text', async () => {
mountComponent();
+
+ await waitForPromises();
await waitForMetadataItems();
- expect(findTagsCount().props('text')).toBe('10 tags');
+ expect(findTagsCount().props('text')).toBe('13 tags');
});
it('when there is one tag has the correct text', async () => {
- mountComponent({ image: { ...defaultImage, tagsCount: 1 } });
+ mountComponent({
+ resolver: jest.fn().mockResolvedValue(imageTagsCountMock({ tagsCount: 1 })),
+ });
+
+ await waitForPromises();
await waitForMetadataItems();
expect(findTagsCount().props('text')).toBe('1 tag');
@@ -208,11 +251,13 @@ describe('Details Header', () => {
'when the status is $status the text is $text and the tooltip is $tooltip',
async ({ status, text, tooltip }) => {
mountComponent({
- image: {
- ...defaultImage,
- expirationPolicyCleanupStatus: status,
- project: {
- containerExpirationPolicy: { enabled: true, nextRunAt: '2021-01-03T14:29:21Z' },
+ propsData: {
+ image: {
+ ...defaultImage,
+ expirationPolicyCleanupStatus: status,
+ project: {
+ containerExpirationPolicy: { enabled: true, nextRunAt: '2021-01-03T14:29:21Z' },
+ },
},
},
});
@@ -242,7 +287,9 @@ describe('Details Header', () => {
expect(findLastUpdatedAndVisibility().props('icon')).toBe('eye');
});
it('shows an eye slashed when the project is not public', async () => {
- mountComponent({ image: { ...defaultImage, project: { visibility: 'private' } } });
+ mountComponent({
+ propsData: { image: { ...defaultImage, project: { visibility: 'private' } } },
+ });
await waitForMetadataItems();
expect(findLastUpdatedAndVisibility().props('icon')).toBe('eye-slash');
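The details_header_spec.js changes above resolve the tag count through a mocked Apollo query instead of the removed `tagsCount` prop, while the loading state is still asserted through a hand-written `$apollo` stub, as the inline comment explains. A minimal sketch of the two mounting paths, assuming the same helpers and import paths used in the diff (the trimmed-down `propsData` is illustrative only):

import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import component from '~/registry/explorer/components/details_page/details_header.vue';
import getContainerRepositoryTagCountQuery from '~/registry/explorer/graphql/queries/get_container_repository_tags_count.query.graphql';
import { imageTagsCountMock } from '../../mock_data';

// Resolved-data path: the jest resolver answers the tag-count query, so a test
// that awaits pending promises sees the fetched count rendered.
const mountWithResolver = (resolver = jest.fn().mockResolvedValue(imageTagsCountMock())) => {
  const localVue = createLocalVue();
  localVue.use(VueApollo);
  return shallowMount(component, {
    localVue,
    apolloProvider: createMockApollo([[getContainerRepositoryTagCountQuery, resolver]]),
    propsData: { image: { name: 'foo', canDelete: true, project: { visibility: 'public' } } },
  });
};

// Loading path: stubbing $apollo directly keeps the query reported as loading,
// which an instantly-resolving mock provider cannot do once the test awaits a tick.
const mountLoading = () =>
  shallowMount(component, {
    mocks: { $apollo: { queries: { containerRepository: { loading: true } } } },
    propsData: { image: { name: 'foo', canDelete: true, project: { visibility: 'public' } } },
  });

This mirrors the `mountComponent` options in the spec; because the two paths coexist in one file, the `afterEach` block above resets `apolloProvider` and `localVue` so the mocks do not leak between tests.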
diff --git a/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
index 8b70f84c1bd..dc9063bde2c 100644
--- a/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
@@ -1,5 +1,6 @@
import { GlFormCheckbox, GlSprintf, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import DeleteButton from '~/registry/explorer/components/delete_button.vue';
@@ -72,8 +73,15 @@ describe('tags list row', () => {
expect(findCheckbox().exists()).toBe(false);
});
- it('is disabled when the digest is missing', () => {
- mountComponent({ tag: { ...tag, digest: null } });
+ it.each`
+ digest | disabled
+ ${'foo'} | ${true}
+ ${null} | ${false}
+ ${null} | ${true}
`('is disabled when the digest is $digest and disabled is $disabled', ({ digest, disabled }) => {
+ mountComponent({ tag: { ...tag, digest }, disabled });
+
expect(findCheckbox().attributes('disabled')).toBe('true');
});
@@ -141,6 +149,12 @@ describe('tags list row', () => {
title: tag.location,
});
});
+
+ it('is disabled when the component is disabled', () => {
+ mountComponent({ ...defaultProps, disabled: true });
+
+ expect(findClipboardButton().attributes('disabled')).toBe('true');
+ });
});
describe('warning icon', () => {
@@ -266,15 +280,19 @@ describe('tags list row', () => {
});
it.each`
- canDelete | digest
- ${true} | ${null}
- ${false} | ${'foo'}
- ${false} | ${null}
- `('is disabled when canDelete is $canDelete and digest is $digest', ({ canDelete, digest }) => {
- mountComponent({ ...defaultProps, tag: { ...tag, canDelete, digest } });
-
- expect(findDeleteButton().attributes('disabled')).toBe('true');
- });
+ canDelete | digest | disabled
+ ${true} | ${null} | ${true}
+ ${false} | ${'foo'} | ${true}
+ ${false} | ${null} | ${true}
+ ${true} | ${'foo'} | ${true}
+ `(
+ 'is disabled when canDelete is $canDelete and digest is $digest and disabled is $disabled',
+ ({ canDelete, digest, disabled }) => {
+ mountComponent({ ...defaultProps, tag: { ...tag, canDelete, digest }, disabled });
+
+ expect(findDeleteButton().attributes('disabled')).toBe('true');
+ },
+ );
it('delete event emits delete', () => {
mountComponent();
@@ -287,13 +305,10 @@ describe('tags list row', () => {
describe('details rows', () => {
describe('when the tag has a digest', () => {
- beforeEach(() => {
+ it('has 3 details rows', async () => {
mountComponent();
+ await nextTick();
- return wrapper.vm.$nextTick();
- });
-
- it('has 3 details rows', () => {
expect(findDetailsRows().length).toBe(3);
});
@@ -303,17 +318,37 @@ describe('tags list row', () => {
${'manifest detail'} | ${findManifestDetail} | ${'Manifest digest: sha256:2cf3d2fdac1b04a14301d47d51cb88dcd26714c74f91440eeee99ce399089062'} | ${'log'} | ${true}
${'configuration detail'} | ${findConfigurationDetail} | ${'Configuration digest: sha256:c2613843ab33aabf847965442b13a8b55a56ae28837ce182627c0716eb08c02b'} | ${'cloud-gear'} | ${true}
`('$name details row', ({ finderFunction, text, icon, clipboard }) => {
- it(`has ${text} as text`, () => {
+ it(`has ${text} as text`, async () => {
+ mountComponent();
+ await nextTick();
+
expect(finderFunction().text()).toMatchInterpolatedText(text);
});
- it(`has the ${icon} icon`, () => {
+ it(`has the ${icon} icon`, async () => {
+ mountComponent();
+ await nextTick();
+
expect(finderFunction().props('icon')).toBe(icon);
});
- it(`is ${clipboard} that clipboard button exist`, () => {
- expect(finderFunction().find(ClipboardButton).exists()).toBe(clipboard);
- });
+ if (clipboard) {
+ it(`clipboard button exists`, async () => {
+ mountComponent();
+ await nextTick();
+
+ expect(finderFunction().find(ClipboardButton).exists()).toBe(clipboard);
+ });
+
+ it('is disabled when the component is disabled', async () => {
+ mountComponent({ ...defaultProps, disabled: true });
+ await nextTick();
+
+ expect(finderFunction().findComponent(ClipboardButton).attributes('disabled')).toBe(
+ 'true',
+ );
+ });
+ }
});
});
@@ -321,7 +356,7 @@ describe('tags list row', () => {
it('hides the details rows', async () => {
mountComponent({ tag: { ...tag, digest: null } });
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findDetailsRows().length).toBe(0);
});
});
diff --git a/spec/frontend/registry/explorer/components/details_page/tags_list_spec.js b/spec/frontend/registry/explorer/components/details_page/tags_list_spec.js
index dc6760a17bd..51934cd074d 100644
--- a/spec/frontend/registry/explorer/components/details_page/tags_list_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/tags_list_spec.js
@@ -1,22 +1,55 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlButton, GlKeysetPagination } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import EmptyTagsState from '~/registry/explorer/components/details_page/empty_state.vue';
import component from '~/registry/explorer/components/details_page/tags_list.vue';
import TagsListRow from '~/registry/explorer/components/details_page/tags_list_row.vue';
+import TagsLoader from '~/registry/explorer/components/details_page/tags_loader.vue';
import { TAGS_LIST_TITLE, REMOVE_TAGS_BUTTON_TITLE } from '~/registry/explorer/constants/index';
-import { tagsMock } from '../../mock_data';
+import getContainerRepositoryTagsQuery from '~/registry/explorer/graphql/queries/get_container_repository_tags.query.graphql';
+import { tagsMock, imageTagsMock, tagsPageInfo } from '../../mock_data';
+
+const localVue = createLocalVue();
describe('Tags List', () => {
let wrapper;
+ let apolloProvider;
const tags = [...tagsMock];
const readOnlyTags = tags.map((t) => ({ ...t, canDelete: false }));
const findTagsListRow = () => wrapper.findAll(TagsListRow);
const findDeleteButton = () => wrapper.find(GlButton);
const findListTitle = () => wrapper.find('[data-testid="list-title"]');
+ const findPagination = () => wrapper.find(GlKeysetPagination);
+ const findEmptyState = () => wrapper.find(EmptyTagsState);
+ const findTagsLoader = () => wrapper.find(TagsLoader);
+
+ const waitForApolloRequestRender = async () => {
+ await waitForPromises();
+ await nextTick();
+ };
+
+ const mountComponent = ({
+ propsData = { isMobile: false, id: 1 },
+ resolver = jest.fn().mockResolvedValue(imageTagsMock()),
+ } = {}) => {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [[getContainerRepositoryTagsQuery, resolver]];
- const mountComponent = (propsData = { tags, isMobile: false }) => {
+ apolloProvider = createMockApollo(requestHandlers);
wrapper = shallowMount(component, {
+ localVue,
+ apolloProvider,
propsData,
+ provide() {
+ return {
+ config: {},
+ };
+ },
});
};
@@ -26,15 +59,19 @@ describe('Tags List', () => {
});
describe('List title', () => {
- it('exists', () => {
+ it('exists', async () => {
mountComponent();
+ await waitForApolloRequestRender();
+
expect(findListTitle().exists()).toBe(true);
});
- it('has the correct text', () => {
+ it('has the correct text', async () => {
mountComponent();
+ await waitForApolloRequestRender();
+
expect(findListTitle().text()).toBe(TAGS_LIST_TITLE);
});
});
@@ -48,21 +85,29 @@ describe('Tags List', () => {
${readOnlyTags} | ${true} | ${false}
`(
'is $isVisible that delete button exists when tags is $inputTags and isMobile is $isMobile',
- ({ inputTags, isMobile, isVisible }) => {
- mountComponent({ tags: inputTags, isMobile });
+ async ({ inputTags, isMobile, isVisible }) => {
+ mountComponent({
+ propsData: { tags: inputTags, isMobile, id: 1 },
+ resolver: jest.fn().mockResolvedValue(imageTagsMock(inputTags)),
+ });
+
+ await waitForApolloRequestRender();
expect(findDeleteButton().exists()).toBe(isVisible);
},
);
- it('has the correct text', () => {
+ it('has the correct text', async () => {
mountComponent();
+ await waitForApolloRequestRender();
+
expect(findDeleteButton().text()).toBe(REMOVE_TAGS_BUTTON_TITLE);
});
- it('has the correct props', () => {
+ it('has the correct props', async () => {
mountComponent();
+ await waitForApolloRequestRender();
expect(findDeleteButton().attributes()).toMatchObject({
category: 'secondary',
@@ -79,35 +124,44 @@ describe('Tags List', () => {
`(
'is $buttonDisabled that the button is disabled when the component disabled state is $disabled and is $doSelect that the user selected a tag',
async ({ disabled, buttonDisabled, doSelect }) => {
- mountComponent({ tags, disabled, isMobile: false });
+ mountComponent({ propsData: { tags, disabled, isMobile: false, id: 1 } });
+
+ await waitForApolloRequestRender();
if (doSelect) {
findTagsListRow().at(0).vm.$emit('select');
- await wrapper.vm.$nextTick();
+ await nextTick();
}
expect(findDeleteButton().attributes('disabled')).toBe(buttonDisabled);
},
);
- it('click event emits a deleted event with selected items', () => {
+ it('click event emits a deleted event with selected items', async () => {
mountComponent();
- findTagsListRow().at(0).vm.$emit('select');
+ await waitForApolloRequestRender();
+
+ findTagsListRow().at(0).vm.$emit('select');
findDeleteButton().vm.$emit('click');
- expect(wrapper.emitted('delete')).toEqual([[{ 'beta-24753': true }]]);
+
+ expect(wrapper.emitted('delete')[0][0][0].name).toBe(tags[0].name);
});
});
describe('list rows', () => {
- it('one row exist for each tag', () => {
+ it('one row exists for each tag', async () => {
mountComponent();
+ await waitForApolloRequestRender();
+
expect(findTagsListRow()).toHaveLength(tags.length);
});
- it('the correct props are bound to it', () => {
- mountComponent({ tags, disabled: true });
+ it('the correct props are bound to it', async () => {
+ mountComponent({ propsData: { disabled: true, id: 1 } });
+
+ await waitForApolloRequestRender();
const rows = findTagsListRow();
@@ -120,16 +174,138 @@ describe('Tags List', () => {
describe('events', () => {
it('select event update the selected items', async () => {
mountComponent();
+
+ await waitForApolloRequestRender();
+
findTagsListRow().at(0).vm.$emit('select');
- await wrapper.vm.$nextTick();
+
+ await nextTick();
+
expect(findTagsListRow().at(0).attributes('selected')).toBe('true');
});
- it('delete event emit a delete event', () => {
+ it('delete event emits a delete event', async () => {
mountComponent();
+
+ await waitForApolloRequestRender();
+
findTagsListRow().at(0).vm.$emit('delete');
- expect(wrapper.emitted('delete')).toEqual([[{ 'beta-24753': true }]]);
+ expect(wrapper.emitted('delete')[0][0][0].name).toBe(tags[0].name);
+ });
+ });
+ });
+
+ describe('when the list of tags is empty', () => {
+ const resolver = jest.fn().mockResolvedValue(imageTagsMock([]));
+
+ it('has the empty state', async () => {
+ mountComponent({ resolver });
+
+ await waitForApolloRequestRender();
+
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('does not show the loader', async () => {
+ mountComponent({ resolver });
+
+ await waitForApolloRequestRender();
+
+ expect(findTagsLoader().exists()).toBe(false);
+ });
+
+ it('does not show the list', async () => {
+ mountComponent({ resolver });
+
+ await waitForApolloRequestRender();
+
+ expect(findTagsListRow().exists()).toBe(false);
+ expect(findListTitle().exists()).toBe(false);
+ });
+ });
+
+ describe('pagination', () => {
+ it('exists', async () => {
+ mountComponent();
+
+ await waitForApolloRequestRender();
+
+ expect(findPagination().exists()).toBe(true);
+ });
+
+ it('is hidden when loading', () => {
+ mountComponent();
+
+ expect(findPagination().exists()).toBe(false);
+ });
+
+ it('is hidden when there are no more pages', async () => {
+ mountComponent({ resolver: jest.fn().mockResolvedValue(imageTagsMock([])) });
+
+ await waitForApolloRequestRender();
+
+ expect(findPagination().exists()).toBe(false);
+ });
+
+ it('is wired to the correct pagination props', async () => {
+ mountComponent();
+
+ await waitForApolloRequestRender();
+
+ expect(findPagination().props()).toMatchObject({
+ hasNextPage: tagsPageInfo.hasNextPage,
+ hasPreviousPage: tagsPageInfo.hasPreviousPage,
});
});
+
+ it('fetch next page when user clicks next', async () => {
+ const resolver = jest.fn().mockResolvedValue(imageTagsMock());
+ mountComponent({ resolver });
+
+ await waitForApolloRequestRender();
+
+ findPagination().vm.$emit('next');
+
+ expect(resolver).toHaveBeenCalledWith(
+ expect.objectContaining({ after: tagsPageInfo.endCursor }),
+ );
+ });
+
+ it('fetch previous page when user clicks prev', async () => {
+ const resolver = jest.fn().mockResolvedValue(imageTagsMock());
+ mountComponent({ resolver });
+
+ await waitForApolloRequestRender();
+
+ findPagination().vm.$emit('prev');
+
+ expect(resolver).toHaveBeenCalledWith(
+ expect.objectContaining({ first: null, before: tagsPageInfo.startCursor }),
+ );
+ });
+ });
+
+ describe('loading state', () => {
+ it.each`
+ isImageLoading | queryExecuting | loadingVisible
+ ${true} | ${true} | ${true}
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${true}
+ ${false} | ${false} | ${false}
+ `(
+ 'when isImageLoading is $isImageLoading and the query executing state is $queryExecuting, it is $loadingVisible that the loader is shown',
+ async ({ isImageLoading, queryExecuting, loadingVisible }) => {
+ mountComponent({ propsData: { isImageLoading, isMobile: false, id: 1 } });
+
+ if (!queryExecuting) {
+ await waitForApolloRequestRender();
+ }
+
+ expect(findTagsLoader().exists()).toBe(loadingVisible);
+ expect(findTagsListRow().exists()).toBe(!loadingVisible);
+ expect(findListTitle().exists()).toBe(!loadingVisible);
+ expect(findPagination().exists()).toBe(!loadingVisible);
+ },
+ );
});
});
diff --git a/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
index 6c897b983f7..323d7b177e7 100644
--- a/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/image_list_row_spec.js
@@ -25,10 +25,11 @@ describe('Image List Row', () => {
const findDetailsLink = () => wrapper.find('[data-testid="details-link"]');
const findTagsCount = () => wrapper.find('[data-testid="tags-count"]');
- const findDeleteBtn = () => wrapper.find(DeleteButton);
- const findClipboardButton = () => wrapper.find(ClipboardButton);
+ const findDeleteBtn = () => wrapper.findComponent(DeleteButton);
+ const findClipboardButton = () => wrapper.findComponent(ClipboardButton);
const findWarningIcon = () => wrapper.find('[data-testid="warning-icon"]');
- const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findListItemComponent = () => wrapper.findComponent(ListItem);
const mountComponent = (props) => {
wrapper = shallowMount(Component, {
@@ -52,20 +53,28 @@ describe('Image List Row', () => {
wrapper = null;
});
- describe('main tooltip', () => {
- it(`the title is ${ROW_SCHEDULED_FOR_DELETION}`, () => {
- mountComponent();
+ describe('list item component', () => {
+ describe('tooltip', () => {
+ it(`the title is ${ROW_SCHEDULED_FOR_DELETION}`, () => {
+ mountComponent();
+
+ const tooltip = getBinding(wrapper.element, 'gl-tooltip');
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value.title).toBe(ROW_SCHEDULED_FOR_DELETION);
+ });
- const tooltip = getBinding(wrapper.element, 'gl-tooltip');
- expect(tooltip).toBeDefined();
- expect(tooltip.value.title).toBe(ROW_SCHEDULED_FOR_DELETION);
+ it('is disabled when item is being deleted', () => {
+ mountComponent({ item: { ...item, status: IMAGE_DELETE_SCHEDULED_STATUS } });
+
+ const tooltip = getBinding(wrapper.element, 'gl-tooltip');
+ expect(tooltip.value.disabled).toBe(false);
+ });
});
- it('is disabled when item is being deleted', () => {
+ it('is disabled when the item is in deleting status', () => {
mountComponent({ item: { ...item, status: IMAGE_DELETE_SCHEDULED_STATUS } });
- const tooltip = getBinding(wrapper.element, 'gl-tooltip');
- expect(tooltip.value.disabled).toBe(false);
+ expect(findListItemComponent().props('disabled')).toBe(true);
});
});
@@ -118,6 +127,20 @@ describe('Image List Row', () => {
},
);
});
+
+ describe('when the item is deleting', () => {
+ beforeEach(() => {
+ mountComponent({ item: { ...item, status: IMAGE_DELETE_SCHEDULED_STATUS } });
+ });
+
+ it('the router link is disabled', () => {
+ // we check the event prop as it is the only workaround to disable a router link
+ expect(findDetailsLink().props('event')).toBe('');
+ });
+ it('the clipboard button is disabled', () => {
+ expect(findClipboardButton().attributes('disabled')).toBe('true');
+ });
+ });
});
describe('delete button', () => {
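The new "when the item is deleting" block above relies on the workaround named in its comment: `<router-link>` has no `disabled` prop in Vue Router 3, so navigation is turned off by clearing the link's trigger event, and the spec observes this by asserting that `props('event')` is an empty string. A hedged sketch of the component side of that assertion (the component name, route, and option layout below are illustrative, not copied from image_list_row.vue):

// Illustrative only: not the actual image_list_row.vue implementation.
// With `event` bound to an empty string the link still renders, but no DOM
// event triggers navigation, which is exactly what the spec asserts on.
export default {
  name: 'DetailsLinkSketch', // hypothetical component name
  props: {
    deleting: { type: Boolean, default: false },
  },
  computed: {
    linkEvent() {
      // '' disables navigation; 'click' restores the default trigger.
      return this.deleting ? '' : 'click';
    },
  },
  template: `
    <router-link :to="{ name: 'details' }" :event="linkEvent" data-testid="details-link">
      <slot></slot>
    </router-link>
  `,
};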
diff --git a/spec/frontend/registry/explorer/mock_data.js b/spec/frontend/registry/explorer/mock_data.js
index f4453912db4..fe258dcd4e8 100644
--- a/spec/frontend/registry/explorer/mock_data.js
+++ b/spec/frontend/registry/explorer/mock_data.js
@@ -113,7 +113,6 @@ export const containerRepositoryMock = {
canDelete: true,
createdAt: '2020-11-03T13:29:21Z',
updatedAt: '2020-11-03T13:29:21Z',
- tagsCount: 13,
expirationPolicyStartedAt: null,
expirationPolicyCleanupStatus: 'UNSCHEDULED',
project: {
@@ -161,6 +160,30 @@ export const tagsMock = [
},
];
+export const imageTagsMock = (nodes = tagsMock) => ({
+ data: {
+ containerRepository: {
+ id: containerRepositoryMock.id,
+ tags: {
+ nodes,
+ pageInfo: { ...tagsPageInfo },
+ __typename: 'ContainerRepositoryTagConnection',
+ },
+ __typename: 'ContainerRepositoryDetails',
+ },
+ },
+});
+
+export const imageTagsCountMock = (override) => ({
+ data: {
+ containerRepository: {
+ id: containerRepositoryMock.id,
+ tagsCount: 13,
+ ...override,
+ },
+ },
+});
+
export const graphQLImageDetailsMock = (override) => ({
data: {
containerRepository: {
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
index 76baf4f72c9..022f6e71fe6 100644
--- a/spec/frontend/registry/explorer/pages/details_spec.js
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -28,12 +28,10 @@ import Tracking from '~/tracking';
import {
graphQLImageDetailsMock,
- graphQLImageDetailsEmptyTagsMock,
graphQLDeleteImageRepositoryTagsMock,
containerRepositoryMock,
graphQLEmptyImageDetailsMock,
tagsMock,
- tagsPageInfo,
} from '../mock_data';
import { DeleteModal } from '../stubs';
@@ -72,12 +70,6 @@ describe('Details Page', () => {
await wrapper.vm.$nextTick();
};
- const tagsArrayToSelectedTags = (tags) =>
- tags.reduce((acc, c) => {
- acc[c.name] = true;
- return acc;
- }, {});
-
const mountComponent = ({
resolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock()),
mutationResolver = jest.fn().mockResolvedValue(graphQLDeleteImageRepositoryTagsMock),
@@ -138,12 +130,6 @@ describe('Details Page', () => {
expect(findTagsList().exists()).toBe(false);
});
-
- it('does not show pagination', () => {
- mountComponent();
-
- expect(findPagination().exists()).toBe(false);
- });
});
describe('when the image does not exist', () => {
@@ -167,34 +153,6 @@ describe('Details Page', () => {
});
});
- describe('when the list of tags is empty', () => {
- const resolver = jest.fn().mockResolvedValue(graphQLImageDetailsEmptyTagsMock);
-
- it('has the empty state', async () => {
- mountComponent({ resolver });
-
- await waitForApolloRequestRender();
-
- expect(findEmptyState().exists()).toBe(true);
- });
-
- it('does not show the loader', async () => {
- mountComponent({ resolver });
-
- await waitForApolloRequestRender();
-
- expect(findTagsLoader().exists()).toBe(false);
- });
-
- it('does not show the list', async () => {
- mountComponent({ resolver });
-
- await waitForApolloRequestRender();
-
- expect(findTagsList().exists()).toBe(false);
- });
- });
-
describe('list', () => {
it('exists', async () => {
mountComponent();
@@ -211,7 +169,6 @@ describe('Details Page', () => {
expect(findTagsList().props()).toMatchObject({
isMobile: false,
- tags: cleanTags,
});
});
@@ -224,7 +181,7 @@ describe('Details Page', () => {
await waitForApolloRequestRender();
[tagToBeDeleted] = cleanTags;
- findTagsList().vm.$emit('delete', { [tagToBeDeleted.name]: true });
+ findTagsList().vm.$emit('delete', [tagToBeDeleted]);
});
it('open the modal', async () => {
@@ -244,7 +201,7 @@ describe('Details Page', () => {
await waitForApolloRequestRender();
- findTagsList().vm.$emit('delete', tagsArrayToSelectedTags(cleanTags));
+ findTagsList().vm.$emit('delete', cleanTags);
});
it('open the modal', () => {
@@ -260,61 +217,6 @@ describe('Details Page', () => {
});
});
- describe('pagination', () => {
- it('exists', async () => {
- mountComponent();
-
- await waitForApolloRequestRender();
-
- expect(findPagination().exists()).toBe(true);
- });
-
- it('is hidden when there are no more pages', async () => {
- mountComponent({ resolver: jest.fn().mockResolvedValue(graphQLImageDetailsEmptyTagsMock) });
-
- await waitForApolloRequestRender();
-
- expect(findPagination().exists()).toBe(false);
- });
-
- it('is wired to the correct pagination props', async () => {
- mountComponent();
-
- await waitForApolloRequestRender();
-
- expect(findPagination().props()).toMatchObject({
- hasNextPage: tagsPageInfo.hasNextPage,
- hasPreviousPage: tagsPageInfo.hasPreviousPage,
- });
- });
-
- it('fetch next page when user clicks next', async () => {
- const resolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock());
- mountComponent({ resolver });
-
- await waitForApolloRequestRender();
-
- findPagination().vm.$emit('next');
-
- expect(resolver).toHaveBeenCalledWith(
- expect.objectContaining({ after: tagsPageInfo.endCursor }),
- );
- });
-
- it('fetch previous page when user clicks prev', async () => {
- const resolver = jest.fn().mockResolvedValue(graphQLImageDetailsMock());
- mountComponent({ resolver });
-
- await waitForApolloRequestRender();
-
- findPagination().vm.$emit('prev');
-
- expect(resolver).toHaveBeenCalledWith(
- expect.objectContaining({ first: null, before: tagsPageInfo.startCursor }),
- );
- });
- });
-
describe('modal', () => {
it('exists', async () => {
mountComponent();
@@ -349,7 +251,7 @@ describe('Details Page', () => {
});
describe('when one item is selected to be deleted', () => {
it('calls apollo mutation with the right parameters', async () => {
- findTagsList().vm.$emit('delete', { [cleanTags[0].name]: true });
+ findTagsList().vm.$emit('delete', [cleanTags[0]]);
await wrapper.vm.$nextTick();
@@ -363,7 +265,7 @@ describe('Details Page', () => {
describe('when more than one item is selected to be deleted', () => {
it('calls apollo mutation with the right parameters', async () => {
- findTagsList().vm.$emit('delete', { ...tagsArrayToSelectedTags(tagsMock) });
+ findTagsList().vm.$emit('delete', tagsMock);
await wrapper.vm.$nextTick();
@@ -390,7 +292,6 @@ describe('Details Page', () => {
await waitForApolloRequestRender();
expect(findDetailsHeader().props()).toMatchObject({
- metadataLoading: false,
image: {
name: containerRepositoryMock.name,
project: {
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index c9f84be97c4..cad593b76ea 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -129,6 +129,68 @@ Object {
}
`;
+exports[`releases/util.js convertOneReleaseForEditingGraphQLResponse matches snapshot 1`] = `
+Object {
+ "data": Object {
+ "_links": Object {
+ "self": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
+ "selfUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
+ },
+ "assets": Object {
+ "count": undefined,
+ "links": Array [
+ Object {
+ "id": "gid://gitlab/Releases::Link/13",
+ "linkType": "image",
+ "name": "Image",
+ "url": "https://example.com/image",
+ },
+ Object {
+ "id": "gid://gitlab/Releases::Link/12",
+ "linkType": "package",
+ "name": "Package",
+ "url": "https://example.com/package",
+ },
+ Object {
+ "id": "gid://gitlab/Releases::Link/11",
+ "linkType": "runbook",
+ "name": "Runbook",
+ "url": "http://localhost/releases-namespace/releases-project/runbook",
+ },
+ Object {
+ "id": "gid://gitlab/Releases::Link/10",
+ "linkType": "other",
+ "name": "linux-amd64 binaries",
+ "url": "https://downloads.example.com/bin/gitlab-linux-amd64",
+ },
+ ],
+ "sources": Array [],
+ },
+ "author": undefined,
+ "description": "Best. Release. **Ever.** :rocket:",
+ "evidences": Array [],
+ "milestones": Array [
+ Object {
+ "issueStats": Object {},
+ "stats": undefined,
+ "title": "12.3",
+ "webPath": undefined,
+ "webUrl": undefined,
+ },
+ Object {
+ "issueStats": Object {},
+ "stats": undefined,
+ "title": "12.4",
+ "webPath": undefined,
+ "webUrl": undefined,
+ },
+ ],
+ "name": "The first release",
+ "tagName": "v1.1",
+ },
+}
+`;
+
exports[`releases/util.js convertOneReleaseGraphQLResponse matches snapshot 1`] = `
Object {
"data": Object {
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index 7955b079cbc..3a28020c284 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -1,210 +1,231 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { range as rge } from 'lodash';
+import { shallowMount } from '@vue/test-utils';
+import { merge } from 'lodash';
+import Vue from 'vue';
import Vuex from 'vuex';
-import { getJSONFixture } from 'helpers/fixtures';
-import waitForPromises from 'helpers/wait_for_promises';
-import api from '~/api';
-import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import ReleasesApp from '~/releases/components/app_index.vue';
+import { getParameterByName } from '~/lib/utils/common_utils';
+import AppIndex from '~/releases/components/app_index.vue';
+import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
import ReleasesPagination from '~/releases/components/releases_pagination.vue';
-import createStore from '~/releases/stores';
-import createIndexModule from '~/releases/stores/modules/index';
-import { pageInfoHeadersWithoutPagination, pageInfoHeadersWithPagination } from '../mock_data';
+import ReleasesSort from '~/releases/components/releases_sort.vue';
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
- getParameterByName: jest.fn().mockImplementation((paramName) => {
- return `${paramName}_param_value`;
- }),
+ getParameterByName: jest.fn(),
}));
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
-const release = getJSONFixture('api/releases/release.json');
-const releases = [release];
-
-describe('Releases App ', () => {
+describe('app_index.vue', () => {
let wrapper;
- let fetchReleaseSpy;
-
- const paginatedReleases = rge(21).map((index) => ({
- ...convertObjectPropsToCamelCase(release, { deep: true }),
- tagName: `${index}.00`,
- }));
-
- const defaultInitialState = {
- projectId: 'gitlab-ce',
- projectPath: 'gitlab-org/gitlab-ce',
- documentationPath: 'help/releases',
- illustrationPath: 'illustration/path',
+ let fetchReleasesSpy;
+ let urlParams;
+
+ const createComponent = (storeUpdates) => {
+ wrapper = shallowMount(AppIndex, {
+ store: new Vuex.Store({
+ modules: {
+ index: merge(
+ {
+ namespaced: true,
+ actions: {
+ fetchReleases: fetchReleasesSpy,
+ },
+ state: {
+ isLoading: true,
+ releases: [],
+ },
+ },
+ storeUpdates,
+ ),
+ },
+ }),
+ });
};
- const createComponent = (stateUpdates = {}) => {
- const indexModule = createIndexModule({
- ...defaultInitialState,
- ...stateUpdates,
+ beforeEach(() => {
+ fetchReleasesSpy = jest.fn();
+ getParameterByName.mockImplementation((paramName) => urlParams[paramName]);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ // Finders
+ const findLoadingIndicator = () => wrapper.find(ReleaseSkeletonLoader);
+ const findEmptyState = () => wrapper.find('[data-testid="empty-state"]');
+ const findSuccessState = () => wrapper.find('[data-testid="success-state"]');
+ const findPagination = () => wrapper.find(ReleasesPagination);
+ const findSortControls = () => wrapper.find(ReleasesSort);
+ const findNewReleaseButton = () => wrapper.find('[data-testid="new-release-button"]');
+
+ // Expectations
+ const expectLoadingIndicator = (shouldExist) => {
+ it(`${shouldExist ? 'renders' : 'does not render'} a loading indicator`, () => {
+ expect(findLoadingIndicator().exists()).toBe(shouldExist);
});
+ };
- fetchReleaseSpy = jest.spyOn(indexModule.actions, 'fetchReleases');
+ const expectEmptyState = (shouldExist) => {
+ it(`${shouldExist ? 'renders' : 'does not render'} an empty state`, () => {
+ expect(findEmptyState().exists()).toBe(shouldExist);
+ });
+ };
- const store = createStore({
- modules: { index: indexModule },
- featureFlags: {
- graphqlReleaseData: true,
- graphqlReleasesPage: false,
- graphqlMilestoneStats: true,
- },
+ const expectSuccessState = (shouldExist) => {
+ it(`${shouldExist ? 'renders' : 'does not render'} the success state`, () => {
+ expect(findSuccessState().exists()).toBe(shouldExist);
});
+ };
- wrapper = shallowMount(ReleasesApp, {
- store,
- localVue,
+ const expectPagination = (shouldExist) => {
+ it(`${shouldExist ? 'renders' : 'does not render'} the pagination controls`, () => {
+ expect(findPagination().exists()).toBe(shouldExist);
});
};
- afterEach(() => {
- wrapper.destroy();
- });
+ const expectNewReleaseButton = (shouldExist) => {
+ it(`${shouldExist ? 'renders' : 'does not render'} the "New release" button`, () => {
+ expect(findNewReleaseButton().exists()).toBe(shouldExist);
+ });
+ };
+ // Tests
describe('on startup', () => {
- beforeEach(() => {
- jest
- .spyOn(api, 'releases')
- .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
+ it.each`
+ before | after
+ ${null} | ${null}
+ ${'before_param_value'} | ${null}
+ ${null} | ${'after_param_value'}
+ `(
'calls fetchReleases with the correct parameters based on the current query parameters: before: $before, after: $after',
+ ({ before, after }) => {
+ urlParams = { before, after };
+
+ createComponent();
+
+ expect(fetchReleasesSpy).toHaveBeenCalledTimes(1);
+ expect(fetchReleasesSpy).toHaveBeenCalledWith(expect.anything(), urlParams);
+ },
+ );
+ });
+ describe('when the request to fetch releases has not yet completed', () => {
+ beforeEach(() => {
createComponent();
});
- it('calls fetchRelease with the page, before, and after parameters', () => {
- expect(fetchReleaseSpy).toHaveBeenCalledTimes(1);
- expect(fetchReleaseSpy).toHaveBeenCalledWith(expect.anything(), {
- page: 'page_param_value',
- before: 'before_param_value',
- after: 'after_param_value',
- });
- });
+ expectLoadingIndicator(true);
+ expectEmptyState(false);
+ expectSuccessState(false);
+ expectPagination(false);
});
- describe('while loading', () => {
+ describe('when the request fails', () => {
beforeEach(() => {
- jest
- .spyOn(api, 'releases')
- // Need to defer the return value here to the next stack,
- // otherwise the loading state disappears before our test even starts.
- .mockImplementation(() => waitForPromises().then(() => ({ data: [], headers: {} })));
-
- createComponent();
+ createComponent({
+ state: {
+ isLoading: false,
+ hasError: true,
+ },
+ });
});
- it('renders loading icon', () => {
- expect(wrapper.find('.js-loading').exists()).toBe(true);
- expect(wrapper.find('.js-empty-state').exists()).toBe(false);
- expect(wrapper.find('.js-success-state').exists()).toBe(false);
- expect(wrapper.find(ReleasesPagination).exists()).toBe(false);
- });
+ expectLoadingIndicator(false);
+ expectEmptyState(false);
+ expectSuccessState(false);
+ expectPagination(true);
});
- describe('with successful request', () => {
+ describe('when the request succeeds but returns no releases', () => {
beforeEach(() => {
- jest
- .spyOn(api, 'releases')
- .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
-
- createComponent();
+ createComponent({
+ state: {
+ isLoading: false,
+ },
+ });
});
- it('renders success state', () => {
- expect(wrapper.find('.js-loading').exists()).toBe(false);
- expect(wrapper.find('.js-empty-state').exists()).toBe(false);
- expect(wrapper.find('.js-success-state').exists()).toBe(true);
- expect(wrapper.find(ReleasesPagination).exists()).toBe(true);
- });
+ expectLoadingIndicator(false);
+ expectEmptyState(true);
+ expectSuccessState(false);
+ expectPagination(true);
});
- describe('with successful request and pagination', () => {
+ describe('when the request succeeds and includes at least one release', () => {
beforeEach(() => {
- jest
- .spyOn(api, 'releases')
- .mockResolvedValue({ data: paginatedReleases, headers: pageInfoHeadersWithPagination });
-
- createComponent();
+ createComponent({
+ state: {
+ isLoading: false,
+ releases: [{}],
+ },
+ });
});
- it('renders success state', () => {
- expect(wrapper.find('.js-loading').exists()).toBe(false);
- expect(wrapper.find('.js-empty-state').exists()).toBe(false);
- expect(wrapper.find('.js-success-state').exists()).toBe(true);
- expect(wrapper.find(ReleasesPagination).exists()).toBe(true);
- });
+ expectLoadingIndicator(false);
+ expectEmptyState(false);
+ expectSuccessState(true);
+ expectPagination(true);
});
- describe('with empty request', () => {
+ describe('sorting', () => {
beforeEach(() => {
- jest.spyOn(api, 'releases').mockResolvedValue({ data: [], headers: {} });
-
createComponent();
});
- it('renders empty state', () => {
- expect(wrapper.find('.js-loading').exists()).toBe(false);
- expect(wrapper.find('.js-empty-state').exists()).toBe(true);
- expect(wrapper.find('.js-success-state').exists()).toBe(false);
+ it('renders the sort controls', () => {
+ expect(findSortControls().exists()).toBe(true);
});
- });
- describe('"New release" button', () => {
- const findNewReleaseButton = () => wrapper.find('.js-new-release-btn');
+ it('calls the fetchReleases store method when the sort is updated', () => {
+ fetchReleasesSpy.mockClear();
- beforeEach(() => {
- jest.spyOn(api, 'releases').mockResolvedValue({ data: [], headers: {} });
+ findSortControls().vm.$emit('sort:changed');
+
+ expect(fetchReleasesSpy).toHaveBeenCalledTimes(1);
});
+ });
- describe('when the user is allowed to create a new Release', () => {
- const newReleasePath = 'path/to/new/release';
+ describe('"New release" button', () => {
+ describe('when the user is allowed to create releases', () => {
+ const newReleasePath = 'path/to/new/release/page';
beforeEach(() => {
- createComponent({ newReleasePath });
+ createComponent({ state: { newReleasePath } });
});
- it('renders the "New release" button', () => {
- expect(findNewReleaseButton().exists()).toBe(true);
- });
+ expectNewReleaseButton(true);
- it('renders the "New release" button with the correct href', () => {
+ it('renders the button with the correct href', () => {
expect(findNewReleaseButton().attributes('href')).toBe(newReleasePath);
});
});
- describe('when the user is not allowed to create a new Release', () => {
- beforeEach(() => createComponent());
-
- it('does not render the "New release" button', () => {
- expect(findNewReleaseButton().exists()).toBe(false);
+ describe('when the user is not allowed to create releases', () => {
+ beforeEach(() => {
+ createComponent();
});
+
+ expectNewReleaseButton(false);
});
});
- describe('when the back button is pressed', () => {
+ describe("when the browser's back button is pressed", () => {
beforeEach(() => {
- jest
- .spyOn(api, 'releases')
- .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
+ urlParams = {
+ before: 'before_param_value',
+ };
createComponent();
- fetchReleaseSpy.mockClear();
+ fetchReleasesSpy.mockClear();
window.dispatchEvent(new PopStateEvent('popstate'));
});
- it('calls fetchRelease with the page parameter', () => {
- expect(fetchReleaseSpy).toHaveBeenCalledTimes(1);
- expect(fetchReleaseSpy).toHaveBeenCalledWith(expect.anything(), {
- page: 'page_param_value',
- before: 'before_param_value',
- after: 'after_param_value',
- });
+ it('calls the fetchReleases store method with the parameters from the URL query', () => {
+ expect(fetchReleasesSpy).toHaveBeenCalledTimes(1);
+ expect(fetchReleasesSpy).toHaveBeenCalledWith(expect.anything(), urlParams);
});
});
});
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
index 425cb9d0059..7ea7a6ffe94 100644
--- a/spec/frontend/releases/components/app_show_spec.js
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -7,12 +7,12 @@ import createFlash from '~/flash';
import ReleaseShowApp from '~/releases/components/app_show.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
-import oneReleaseQuery from '~/releases/queries/one_release.query.graphql';
+import oneReleaseQuery from '~/releases/graphql/queries/one_release.query.graphql';
jest.mock('~/flash');
const oneReleaseQueryResponse = getJSONFixture(
- 'graphql/releases/queries/one_release.query.graphql.json',
+ 'graphql/releases/graphql/queries/one_release.query.graphql.json',
);
Vue.use(VueApollo);
diff --git a/spec/frontend/releases/components/releases_pagination_graphql_spec.js b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
deleted file mode 100644
index 5b2dd4bc784..00000000000
--- a/spec/frontend/releases/components/releases_pagination_graphql_spec.js
+++ /dev/null
@@ -1,175 +0,0 @@
-import { mount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import { historyPushState } from '~/lib/utils/common_utils';
-import ReleasesPaginationGraphql from '~/releases/components/releases_pagination_graphql.vue';
-import createStore from '~/releases/stores';
-import createIndexModule from '~/releases/stores/modules/index';
-
-jest.mock('~/lib/utils/common_utils', () => ({
- ...jest.requireActual('~/lib/utils/common_utils'),
- historyPushState: jest.fn(),
-}));
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('~/releases/components/releases_pagination_graphql.vue', () => {
- let wrapper;
- let indexModule;
-
- const cursors = {
- startCursor: 'startCursor',
- endCursor: 'endCursor',
- };
-
- const projectPath = 'my/project';
-
- const createComponent = (pageInfo) => {
- indexModule = createIndexModule({ projectPath });
-
- indexModule.state.graphQlPageInfo = pageInfo;
-
- indexModule.actions.fetchReleases = jest.fn();
-
- wrapper = mount(ReleasesPaginationGraphql, {
- store: createStore({
- modules: {
- index: indexModule,
- },
- featureFlags: {},
- }),
- localVue,
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findPrevButton = () => wrapper.find('[data-testid="prevButton"]');
- const findNextButton = () => wrapper.find('[data-testid="nextButton"]');
-
- const expectDisabledPrev = () => {
- expect(findPrevButton().attributes().disabled).toBe('disabled');
- };
- const expectEnabledPrev = () => {
- expect(findPrevButton().attributes().disabled).toBe(undefined);
- };
- const expectDisabledNext = () => {
- expect(findNextButton().attributes().disabled).toBe('disabled');
- };
- const expectEnabledNext = () => {
- expect(findNextButton().attributes().disabled).toBe(undefined);
- };
-
- describe('when there is only one page of results', () => {
- beforeEach(() => {
- createComponent({
- hasPreviousPage: false,
- hasNextPage: false,
- });
- });
-
- it('does not render anything', () => {
- expect(wrapper.html()).toBe('');
- });
- });
-
- describe('when there is a next page, but not a previous page', () => {
- beforeEach(() => {
- createComponent({
- hasPreviousPage: false,
- hasNextPage: true,
- });
- });
-
- it('renders a disabled "Prev" button', () => {
- expectDisabledPrev();
- });
-
- it('renders an enabled "Next" button', () => {
- expectEnabledNext();
- });
- });
-
- describe('when there is a previous page, but not a next page', () => {
- beforeEach(() => {
- createComponent({
- hasPreviousPage: true,
- hasNextPage: false,
- });
- });
-
- it('renders a enabled "Prev" button', () => {
- expectEnabledPrev();
- });
-
- it('renders an disabled "Next" button', () => {
- expectDisabledNext();
- });
- });
-
- describe('when there is both a previous page and a next page', () => {
- beforeEach(() => {
- createComponent({
- hasPreviousPage: true,
- hasNextPage: true,
- });
- });
-
- it('renders a enabled "Prev" button', () => {
- expectEnabledPrev();
- });
-
- it('renders an enabled "Next" button', () => {
- expectEnabledNext();
- });
- });
-
- describe('button behavior', () => {
- beforeEach(() => {
- createComponent({
- hasPreviousPage: true,
- hasNextPage: true,
- ...cursors,
- });
- });
-
- describe('next button behavior', () => {
- beforeEach(() => {
- findNextButton().trigger('click');
- });
-
- it('calls fetchReleases with the correct after cursor', () => {
- expect(indexModule.actions.fetchReleases.mock.calls).toEqual([
- [expect.anything(), { after: cursors.endCursor }],
- ]);
- });
-
- it('calls historyPushState with the new URL', () => {
- expect(historyPushState.mock.calls).toEqual([
- [expect.stringContaining(`?after=${cursors.endCursor}`)],
- ]);
- });
- });
-
- describe('previous button behavior', () => {
- beforeEach(() => {
- findPrevButton().trigger('click');
- });
-
- it('calls fetchReleases with the correct before cursor', () => {
- expect(indexModule.actions.fetchReleases.mock.calls).toEqual([
- [expect.anything(), { before: cursors.startCursor }],
- ]);
- });
-
- it('calls historyPushState with the new URL', () => {
- expect(historyPushState.mock.calls).toEqual([
- [expect.stringContaining(`?before=${cursors.startCursor}`)],
- ]);
- });
- });
- });
-});
diff --git a/spec/frontend/releases/components/releases_pagination_rest_spec.js b/spec/frontend/releases/components/releases_pagination_rest_spec.js
deleted file mode 100644
index 7d45176967b..00000000000
--- a/spec/frontend/releases/components/releases_pagination_rest_spec.js
+++ /dev/null
@@ -1,72 +0,0 @@
-import { GlPagination } from '@gitlab/ui';
-import { mount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import * as commonUtils from '~/lib/utils/common_utils';
-import ReleasesPaginationRest from '~/releases/components/releases_pagination_rest.vue';
-import createStore from '~/releases/stores';
-import createIndexModule from '~/releases/stores/modules/index';
-
-commonUtils.historyPushState = jest.fn();
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('~/releases/components/releases_pagination_rest.vue', () => {
- let wrapper;
- let indexModule;
-
- const projectId = 19;
-
- const createComponent = (pageInfo) => {
- indexModule = createIndexModule({ projectId });
-
- indexModule.state.restPageInfo = pageInfo;
-
- indexModule.actions.fetchReleases = jest.fn();
-
- wrapper = mount(ReleasesPaginationRest, {
- store: createStore({
- modules: {
- index: indexModule,
- },
- featureFlags: {},
- }),
- localVue,
- });
- };
-
- const findGlPagination = () => wrapper.find(GlPagination);
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('when a page number is clicked', () => {
- const newPage = 2;
-
- beforeEach(() => {
- createComponent({
- perPage: 20,
- page: 1,
- total: 40,
- totalPages: 2,
- nextPage: 2,
- });
-
- findGlPagination().vm.$emit('input', newPage);
- });
-
- it('calls fetchReleases with the correct page', () => {
- expect(indexModule.actions.fetchReleases.mock.calls).toEqual([
- [expect.anything(), { page: newPage }],
- ]);
- });
-
- it('calls historyPushState with the new URL', () => {
- expect(commonUtils.historyPushState.mock.calls).toEqual([
- [expect.stringContaining(`?page=${newPage}`)],
- ]);
- });
- });
-});
diff --git a/spec/frontend/releases/components/releases_pagination_spec.js b/spec/frontend/releases/components/releases_pagination_spec.js
index 1d47da31f38..2d08f72ad8b 100644
--- a/spec/frontend/releases/components/releases_pagination_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_spec.js
@@ -1,23 +1,46 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlKeysetPagination } from '@gitlab/ui';
+import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import { historyPushState } from '~/lib/utils/common_utils';
import ReleasesPagination from '~/releases/components/releases_pagination.vue';
-import ReleasesPaginationGraphql from '~/releases/components/releases_pagination_graphql.vue';
-import ReleasesPaginationRest from '~/releases/components/releases_pagination_rest.vue';
+import createStore from '~/releases/stores';
+import createIndexModule from '~/releases/stores/modules/index';
+
+jest.mock('~/lib/utils/common_utils', () => ({
+ ...jest.requireActual('~/lib/utils/common_utils'),
+ historyPushState: jest.fn(),
+}));
const localVue = createLocalVue();
localVue.use(Vuex);
describe('~/releases/components/releases_pagination.vue', () => {
let wrapper;
+ let indexModule;
- const createComponent = (useGraphQLEndpoint) => {
- const store = new Vuex.Store({
- getters: {
- useGraphQLEndpoint: () => useGraphQLEndpoint,
- },
- });
+ const cursors = {
+ startCursor: 'startCursor',
+ endCursor: 'endCursor',
+ };
+
+ const projectPath = 'my/project';
+
+ const createComponent = (pageInfo) => {
+ indexModule = createIndexModule({ projectPath });
+
+ indexModule.state.pageInfo = pageInfo;
- wrapper = shallowMount(ReleasesPagination, { store, localVue });
+ indexModule.actions.fetchReleases = jest.fn();
+
+ wrapper = mount(ReleasesPagination, {
+ store: createStore({
+ modules: {
+ index: indexModule,
+ },
+ featureFlags: {},
+ }),
+ localVue,
+ });
};
afterEach(() => {
@@ -25,28 +48,130 @@ describe('~/releases/components/releases_pagination.vue', () => {
wrapper = null;
});
- const findRestPagination = () => wrapper.find(ReleasesPaginationRest);
- const findGraphQlPagination = () => wrapper.find(ReleasesPaginationGraphql);
+ const findGlKeysetPagination = () => wrapper.findComponent(GlKeysetPagination);
+ const findPrevButton = () => findGlKeysetPagination().find('[data-testid="prevButton"]');
+ const findNextButton = () => findGlKeysetPagination().find('[data-testid="nextButton"]');
+
+ const expectDisabledPrev = () => {
+ expect(findPrevButton().attributes().disabled).toBe('disabled');
+ };
+ const expectEnabledPrev = () => {
+ expect(findPrevButton().attributes().disabled).toBe(undefined);
+ };
+ const expectDisabledNext = () => {
+ expect(findNextButton().attributes().disabled).toBe('disabled');
+ };
+ const expectEnabledNext = () => {
+ expect(findNextButton().attributes().disabled).toBe(undefined);
+ };
+
+ describe('when there is only one page of results', () => {
+ beforeEach(() => {
+ createComponent({
+ hasPreviousPage: false,
+ hasNextPage: false,
+ });
+ });
- describe('when one of necessary feature flags is disabled', () => {
+ it('does not render a GlKeysetPagination', () => {
+ expect(findGlKeysetPagination().exists()).toBe(false);
+ });
+ });
+
+ describe('when there is a next page, but not a previous page', () => {
beforeEach(() => {
- createComponent(false);
+ createComponent({
+ hasPreviousPage: false,
+ hasNextPage: true,
+ });
});
- it('renders the REST pagination component', () => {
- expect(findRestPagination().exists()).toBe(true);
- expect(findGraphQlPagination().exists()).toBe(false);
+ it('renders a disabled "Prev" button', () => {
+ expectDisabledPrev();
+ });
+
+ it('renders an enabled "Next" button', () => {
+ expectEnabledNext();
});
});
- describe('when all the necessary feature flags are enabled', () => {
+ describe('when there is a previous page, but not a next page', () => {
beforeEach(() => {
- createComponent(true);
+ createComponent({
+ hasPreviousPage: true,
+ hasNextPage: false,
+ });
});
- it('renders the GraphQL pagination component', () => {
- expect(findGraphQlPagination().exists()).toBe(true);
- expect(findRestPagination().exists()).toBe(false);
+ it('renders an enabled "Prev" button', () => {
+ expectEnabledPrev();
+ });
+
+ it('renders a disabled "Next" button', () => {
+ expectDisabledNext();
+ });
+ });
+
+ describe('when there is both a previous page and a next page', () => {
+ beforeEach(() => {
+ createComponent({
+ hasPreviousPage: true,
+ hasNextPage: true,
+ });
+ });
+
+ it('renders an enabled "Prev" button', () => {
+ expectEnabledPrev();
+ });
+
+ it('renders an enabled "Next" button', () => {
+ expectEnabledNext();
+ });
+ });
+
+ describe('button behavior', () => {
+ beforeEach(() => {
+ createComponent({
+ hasPreviousPage: true,
+ hasNextPage: true,
+ ...cursors,
+ });
+ });
+
+ describe('next button behavior', () => {
+ beforeEach(() => {
+ findNextButton().trigger('click');
+ });
+
+ it('calls fetchReleases with the correct after cursor', () => {
+ expect(indexModule.actions.fetchReleases.mock.calls).toEqual([
+ [expect.anything(), { after: cursors.endCursor }],
+ ]);
+ });
+
+ it('calls historyPushState with the new URL', () => {
+ expect(historyPushState.mock.calls).toEqual([
+ [expect.stringContaining(`?after=${cursors.endCursor}`)],
+ ]);
+ });
+ });
+
+ describe('previous button behavior', () => {
+ beforeEach(() => {
+ findPrevButton().trigger('click');
+ });
+
+ it('calls fetchReleases with the correct before cursor', () => {
+ expect(indexModule.actions.fetchReleases.mock.calls).toEqual([
+ [expect.anything(), { before: cursors.startCursor }],
+ ]);
+ });
+
+ it('calls historyPushState with the new URL', () => {
+ expect(historyPushState.mock.calls).toEqual([
+ [expect.stringContaining(`?before=${cursors.startCursor}`)],
+ ]);
+ });
});
});
});
diff --git a/spec/frontend/releases/components/tag_field_new_spec.js b/spec/frontend/releases/components/tag_field_new_spec.js
index f1608ca31b4..114e46ce64b 100644
--- a/spec/frontend/releases/components/tag_field_new_spec.js
+++ b/spec/frontend/releases/components/tag_field_new_spec.js
@@ -10,29 +10,35 @@ const TEST_PROJECT_ID = '1234';
const TEST_CREATE_FROM = 'test-create-from';
const NONEXISTENT_TAG_NAME = 'nonexistent-tag';
-// A mock version of the RefSelector component that simulates
-// a scenario where the users has searched for "nonexistent-tag"
-// and the component has found no tags that match.
-const RefSelectorStub = Vue.component('RefSelectorStub', {
- data() {
- return {
- footerSlotProps: {
- isLoading: false,
- matches: {
- tags: { totalCount: 0 },
- },
- query: NONEXISTENT_TAG_NAME,
- },
- };
- },
- template: '<div><slot name="footer" v-bind="footerSlotProps"></slot></div>',
-});
-
describe('releases/components/tag_field_new', () => {
let store;
let wrapper;
+ let RefSelectorStub;
+
+ const createComponent = (
+ mountFn = shallowMount,
+ { searchQuery } = { searchQuery: NONEXISTENT_TAG_NAME },
+ ) => {
+ // A mock version of the RefSelector component that just renders the
+ // #footer slot, so that the content inside this slot can be tested.
+ RefSelectorStub = Vue.component('RefSelectorStub', {
+ data() {
+ return {
+ footerSlotProps: {
+ isLoading: false,
+ matches: {
+ tags: {
+ totalCount: 1,
+ list: [{ name: TEST_TAG_NAME }],
+ },
+ },
+ query: searchQuery,
+ },
+ };
+ },
+ template: '<div><slot name="footer" v-bind="footerSlotProps"></slot></div>',
+ });
- const createComponent = (mountFn = shallowMount) => {
wrapper = mountFn(TagFieldNew, {
store,
stubs: {
@@ -84,8 +90,6 @@ describe('releases/components/tag_field_new', () => {
describe('when the user selects a new tag name', () => {
beforeEach(async () => {
findCreateNewTagOption().vm.$emit('click');
-
- await wrapper.vm.$nextTick();
});
it("updates the store's release.tagName property", () => {
@@ -102,8 +106,6 @@ describe('releases/components/tag_field_new', () => {
beforeEach(async () => {
findTagNameDropdown().vm.$emit('input', updatedTagName);
-
- await wrapper.vm.$nextTick();
});
it("updates the store's release.tagName property", () => {
@@ -116,6 +118,28 @@ describe('releases/components/tag_field_new', () => {
});
});
+ describe('"Create tag" option', () => {
+ describe('when the search query exactly matches one of the search results', () => {
+ beforeEach(async () => {
+ createComponent(mount, { searchQuery: TEST_TAG_NAME });
+ });
+
+ it('does not show the "Create tag" option', () => {
+ expect(findCreateNewTagOption().exists()).toBe(false);
+ });
+ });
+
+ describe('when the search query does not exactly match one of the search results', () => {
+ beforeEach(async () => {
+ createComponent(mount, { searchQuery: NONEXISTENT_TAG_NAME });
+ });
+
+ it('shows the "Create tag" option', () => {
+ expect(findCreateNewTagOption().exists()).toBe(true);
+ });
+ });
+ });
+
describe('validation', () => {
beforeEach(() => {
createComponent(mount);
@@ -176,8 +200,6 @@ describe('releases/components/tag_field_new', () => {
const updatedCreateFrom = 'update-create-from';
findCreateFromDropdown().vm.$emit('input', updatedCreateFrom);
- await wrapper.vm.$nextTick();
-
expect(store.state.editNew.createFrom).toBe(updatedCreateFrom);
});
});
diff --git a/spec/frontend/releases/stores/getters_spec.js b/spec/frontend/releases/stores/getters_spec.js
deleted file mode 100644
index 01e10567cf0..00000000000
--- a/spec/frontend/releases/stores/getters_spec.js
+++ /dev/null
@@ -1,22 +0,0 @@
-import * as getters from '~/releases/stores/getters';
-
-describe('~/releases/stores/getters.js', () => {
- it.each`
- graphqlReleaseData | graphqlReleasesPage | graphqlMilestoneStats | result
- ${false} | ${false} | ${false} | ${false}
- ${false} | ${false} | ${true} | ${false}
- ${false} | ${true} | ${false} | ${false}
- ${false} | ${true} | ${true} | ${false}
- ${true} | ${false} | ${false} | ${false}
- ${true} | ${false} | ${true} | ${false}
- ${true} | ${true} | ${false} | ${false}
- ${true} | ${true} | ${true} | ${true}
- `(
- 'returns $result with feature flag values graphqlReleaseData=$graphqlReleaseData, graphqlReleasesPage=$graphqlReleasesPage, and graphqlMilestoneStats=$graphqlMilestoneStats',
- ({ result: expectedResult, ...featureFlags }) => {
- const actualResult = getters.useGraphQLEndpoint({ featureFlags });
-
- expect(actualResult).toBe(expectedResult);
- },
- );
-});
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index b116d601ca4..688ec4c0a50 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,18 +1,16 @@
-import axios from 'axios';
-import MockAdapter from 'axios-mock-adapter';
import { cloneDeep } from 'lodash';
import { getJSONFixture } from 'helpers/fixtures';
import testAction from 'helpers/vuex_action_helper';
-import api from '~/api';
import { deprecatedCreateFlash as createFlash } from '~/flash';
-import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import httpStatus from '~/lib/utils/http_status';
import { redirectTo } from '~/lib/utils/url_utility';
import { ASSET_LINK_TYPE } from '~/releases/constants';
+import createReleaseAssetLinkMutation from '~/releases/graphql/mutations/create_release_link.mutation.graphql';
+import deleteReleaseAssetLinkMutation from '~/releases/graphql/mutations/delete_release_link.mutation.graphql';
+import updateReleaseMutation from '~/releases/graphql/mutations/update_release.mutation.graphql';
import * as actions from '~/releases/stores/modules/edit_new/actions';
import * as types from '~/releases/stores/modules/edit_new/mutation_types';
import createState from '~/releases/stores/modules/edit_new/state';
-import { releaseToApiJson, apiJsonToRelease } from '~/releases/util';
+import { gqClient, convertOneReleaseGraphQLResponse } from '~/releases/util';
jest.mock('~/flash');
@@ -21,12 +19,21 @@ jest.mock('~/lib/utils/url_utility', () => ({
joinPaths: jest.requireActual('~/lib/utils/url_utility').joinPaths,
}));
-const originalRelease = getJSONFixture('api/releases/release.json');
+jest.mock('~/releases/util', () => ({
+ ...jest.requireActual('~/releases/util'),
+ gqClient: {
+ query: jest.fn(),
+ mutate: jest.fn(),
+ },
+}));
+
+const originalOneReleaseForEditingQueryResponse = getJSONFixture(
+ 'graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json',
+);
describe('Release edit/new actions', () => {
let state;
- let release;
- let mock;
+ let releaseResponse;
let error;
const setupState = (updates = {}) => {
@@ -34,38 +41,26 @@ describe('Release edit/new actions', () => {
isExistingRelease: true,
};
- const rootState = {
- featureFlags: {
- graphqlIndividualReleasePage: false,
- },
- };
-
state = {
...createState({
projectId: '18',
- tagName: release.tag_name,
+ tagName: releaseResponse.tag_name,
releasesPagePath: 'path/to/releases/page',
markdownDocsPath: 'path/to/markdown/docs',
markdownPreviewPath: 'path/to/markdown/preview',
}),
...getters,
- ...rootState,
...updates,
};
};
beforeEach(() => {
- release = cloneDeep(originalRelease);
- mock = new MockAdapter(axios);
+ releaseResponse = cloneDeep(originalOneReleaseForEditingQueryResponse);
gon.api_version = 'v4';
- error = { message: 'An error occurred' };
+ error = new Error('Yikes!');
createFlash.mockClear();
});
- afterEach(() => {
- mock.restore();
- });
-
describe('when creating a new release', () => {
beforeEach(() => {
setupState({ isExistingRelease: false });
@@ -118,15 +113,9 @@ describe('Release edit/new actions', () => {
beforeEach(setupState);
describe('fetchRelease', () => {
- let getReleaseUrl;
-
- beforeEach(() => {
- getReleaseUrl = `/api/v4/projects/${state.projectId}/releases/${state.tagName}`;
- });
-
describe('when the network request to the Release API is successful', () => {
beforeEach(() => {
- mock.onGet(getReleaseUrl).replyOnce(httpStatus.OK, release);
+ gqClient.query.mockResolvedValue(releaseResponse);
});
it(`commits ${types.REQUEST_RELEASE} and then commits ${types.RECEIVE_RELEASE_SUCCESS} with the converted release object`, () => {
@@ -136,15 +125,15 @@ describe('Release edit/new actions', () => {
},
{
type: types.RECEIVE_RELEASE_SUCCESS,
- payload: apiJsonToRelease(release, { deep: true }),
+ payload: convertOneReleaseGraphQLResponse(releaseResponse).data,
},
]);
});
});
- describe('when the network request to the Release API fails', () => {
+ describe('when the GraphQL network request fails', () => {
beforeEach(() => {
- mock.onGet(getReleaseUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ gqClient.query.mockRejectedValue(error);
});
it(`commits ${types.REQUEST_RELEASE} and then commits ${types.RECEIVE_RELEASE_ERROR} with an error object`, () => {
@@ -282,44 +271,50 @@ describe('Release edit/new actions', () => {
describe('receiveSaveReleaseSuccess', () => {
it(`commits ${types.RECEIVE_SAVE_RELEASE_SUCCESS}`, () =>
- testAction(actions.receiveSaveReleaseSuccess, release, state, [
+ testAction(actions.receiveSaveReleaseSuccess, releaseResponse, state, [
{ type: types.RECEIVE_SAVE_RELEASE_SUCCESS },
]));
it("redirects to the release's dedicated page", () => {
- actions.receiveSaveReleaseSuccess({ commit: jest.fn(), state }, release);
+ const { selfUrl } = releaseResponse.data.project.release.links;
+ actions.receiveSaveReleaseSuccess({ commit: jest.fn(), state }, selfUrl);
expect(redirectTo).toHaveBeenCalledTimes(1);
- expect(redirectTo).toHaveBeenCalledWith(release._links.self);
+ expect(redirectTo).toHaveBeenCalledWith(selfUrl);
});
});
describe('createRelease', () => {
- let createReleaseUrl;
let releaseLinksToCreate;
beforeEach(() => {
- const camelCasedRelease = convertObjectPropsToCamelCase(release);
+ const { data: release } = convertOneReleaseGraphQLResponse(
+ originalOneReleaseForEditingQueryResponse,
+ );
- releaseLinksToCreate = camelCasedRelease.assets.links.slice(0, 1);
+ releaseLinksToCreate = release.assets.links.slice(0, 1);
setupState({
- release: camelCasedRelease,
+ release,
releaseLinksToCreate,
});
-
- createReleaseUrl = `/api/v4/projects/${state.projectId}/releases`;
});
- describe('when the network request to the Release API is successful', () => {
+ describe('when the GraphQL request is successful', () => {
+ const selfUrl = 'url/to/self';
+
beforeEach(() => {
- const expectedRelease = releaseToApiJson({
- ...state.release,
- assets: {
- links: releaseLinksToCreate,
+ gqClient.mutate.mockResolvedValue({
+ data: {
+ releaseCreate: {
+ release: {
+ links: {
+ selfUrl,
+ },
+ },
+ errors: [],
+ },
},
});
-
- mock.onPost(createReleaseUrl, expectedRelease).replyOnce(httpStatus.CREATED, release);
});
it(`dispatches "receiveSaveReleaseSuccess" with the converted release object`, () => {
@@ -331,16 +326,16 @@ describe('Release edit/new actions', () => {
[
{
type: 'receiveSaveReleaseSuccess',
- payload: apiJsonToRelease(release, { deep: true }),
+ payload: selfUrl,
},
],
);
});
});
- describe('when the network request to the Release API fails', () => {
+ describe('when the GraphQL network request fails', () => {
beforeEach(() => {
- mock.onPost(createReleaseUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ gqClient.mutate.mockRejectedValue(error);
});
it(`commits ${types.RECEIVE_SAVE_RELEASE_ERROR} with an error object`, () => {
@@ -358,7 +353,7 @@ describe('Release edit/new actions', () => {
.then(() => {
expect(createFlash).toHaveBeenCalledTimes(1);
expect(createFlash).toHaveBeenCalledWith(
- 'Something went wrong while creating a new release',
+ 'Something went wrong while creating a new release.',
);
});
});
@@ -369,112 +364,209 @@ describe('Release edit/new actions', () => {
let getters;
let dispatch;
let commit;
- let callOrder;
+ let release;
beforeEach(() => {
getters = {
releaseLinksToDelete: [{ id: '1' }, { id: '2' }],
- releaseLinksToCreate: [{ id: 'new-link-1' }, { id: 'new-link-2' }],
+ releaseLinksToCreate: [
+ { id: 'new-link-1', name: 'Link 1', url: 'https://example.com/1', linkType: 'Other' },
+ { id: 'new-link-2', name: 'Link 2', url: 'https://example.com/2', linkType: 'Package' },
+ ],
+ releaseUpdateMutatationVariables: {},
};
+ release = convertOneReleaseGraphQLResponse(releaseResponse).data;
+
setupState({
- release: convertObjectPropsToCamelCase(release),
+ release,
...getters,
});
dispatch = jest.fn();
commit = jest.fn();
- callOrder = [];
- jest.spyOn(api, 'updateRelease').mockImplementation(() => {
- callOrder.push('updateRelease');
- return Promise.resolve({ data: release });
- });
- jest.spyOn(api, 'deleteReleaseLink').mockImplementation(() => {
- callOrder.push('deleteReleaseLink');
- return Promise.resolve();
- });
- jest.spyOn(api, 'createReleaseLink').mockImplementation(() => {
- callOrder.push('createReleaseLink');
- return Promise.resolve();
+ gqClient.mutate.mockResolvedValue({
+ data: {
+ releaseUpdate: {
+ errors: [],
+ },
+ releaseAssetLinkDelete: {
+ errors: [],
+ },
+ releaseAssetLinkCreate: {
+ errors: [],
+ },
+ },
});
});
describe('when the network request to the Release API is successful', () => {
- it('dispatches receiveSaveReleaseSuccess', () => {
- return actions.updateRelease({ commit, dispatch, state, getters }).then(() => {
- expect(dispatch.mock.calls).toEqual([
- ['receiveSaveReleaseSuccess', apiJsonToRelease(release)],
- ]);
- });
+ it('dispatches receiveSaveReleaseSuccess', async () => {
+ await actions.updateRelease({ commit, dispatch, state, getters });
+ expect(dispatch.mock.calls).toEqual([['receiveSaveReleaseSuccess', release._links.self]]);
});
- it('updates the Release, then deletes all existing links, and then recreates new links', () => {
- return actions.updateRelease({ dispatch, state, getters }).then(() => {
- expect(callOrder).toEqual([
- 'updateRelease',
- 'deleteReleaseLink',
- 'deleteReleaseLink',
- 'createReleaseLink',
- 'createReleaseLink',
- ]);
+ it('updates the Release, then deletes all existing links, and then recreates new links', async () => {
+ await actions.updateRelease({ commit, dispatch, state, getters });
- expect(api.updateRelease.mock.calls).toEqual([
- [
- state.projectId,
- state.tagName,
- releaseToApiJson({
- ...state.release,
- assets: {
- links: getters.releaseLinksToCreate,
- },
- }),
- ],
- ]);
+ // First, update the release
+ expect(gqClient.mutate.mock.calls[0]).toEqual([
+ {
+ mutation: updateReleaseMutation,
+ variables: getters.releaseUpdateMutatationVariables,
+ },
+ ]);
- expect(api.deleteReleaseLink).toHaveBeenCalledTimes(
- getters.releaseLinksToDelete.length,
- );
- getters.releaseLinksToDelete.forEach((link) => {
- expect(api.deleteReleaseLink).toHaveBeenCalledWith(
- state.projectId,
- state.tagName,
- link.id,
- );
- });
+ // Then, delete the first asset link
+ expect(gqClient.mutate.mock.calls[1]).toEqual([
+ {
+ mutation: deleteReleaseAssetLinkMutation,
+ variables: { input: { id: getters.releaseLinksToDelete[0].id } },
+ },
+ ]);
- expect(api.createReleaseLink).toHaveBeenCalledTimes(
- getters.releaseLinksToCreate.length,
- );
- getters.releaseLinksToCreate.forEach((link) => {
- expect(api.createReleaseLink).toHaveBeenCalledWith(
- state.projectId,
- state.tagName,
- link,
- );
- });
- });
+ // And the second
+ expect(gqClient.mutate.mock.calls[2]).toEqual([
+ {
+ mutation: deleteReleaseAssetLinkMutation,
+ variables: { input: { id: getters.releaseLinksToDelete[1].id } },
+ },
+ ]);
+
+ // Recreate the first asset link
+ expect(gqClient.mutate.mock.calls[3]).toEqual([
+ {
+ mutation: createReleaseAssetLinkMutation,
+ variables: {
+ input: {
+ projectPath: state.projectPath,
+ tagName: state.tagName,
+ name: getters.releaseLinksToCreate[0].name,
+ url: getters.releaseLinksToCreate[0].url,
+ linkType: getters.releaseLinksToCreate[0].linkType.toUpperCase(),
+ },
+ },
+ },
+ ]);
+
+ // And finally, recreate the second
+ expect(gqClient.mutate.mock.calls[4]).toEqual([
+ {
+ mutation: createReleaseAssetLinkMutation,
+ variables: {
+ input: {
+ projectPath: state.projectPath,
+ tagName: state.tagName,
+ name: getters.releaseLinksToCreate[1].name,
+ url: getters.releaseLinksToCreate[1].url,
+ linkType: getters.releaseLinksToCreate[1].linkType.toUpperCase(),
+ },
+ },
+ },
+ ]);
});
});
- describe('when the network request to the Release API fails', () => {
+ describe('when the GraphQL network request fails', () => {
beforeEach(() => {
- jest.spyOn(api, 'updateRelease').mockRejectedValue(error);
+ gqClient.mutate.mockRejectedValue(error);
});
- it('dispatches requestUpdateRelease and receiveUpdateReleaseError with an error object', () => {
- return actions.updateRelease({ commit, dispatch, state, getters }).then(() => {
- expect(commit.mock.calls).toEqual([[types.RECEIVE_SAVE_RELEASE_ERROR, error]]);
- });
+ it(`commits ${types.RECEIVE_SAVE_RELEASE_ERROR} with an error object`, async () => {
+ await actions.updateRelease({ commit, dispatch, state, getters });
+
+ expect(commit.mock.calls).toEqual([[types.RECEIVE_SAVE_RELEASE_ERROR, error]]);
+ });
+
+ it('shows a flash message', async () => {
+ await actions.updateRelease({ commit, dispatch, state, getters });
+
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith(
+ 'Something went wrong while saving the release details.',
+ );
});
+ });
+
+ describe('when the GraphQL mutation returns errors-as-data', () => {
+ const expectCorrectErrorHandling = () => {
+ it(`commits ${types.RECEIVE_SAVE_RELEASE_ERROR} with an error object`, async () => {
+ await actions.updateRelease({ commit, dispatch, state, getters });
+
+ expect(commit.mock.calls).toEqual([
+ [types.RECEIVE_SAVE_RELEASE_ERROR, expect.any(Error)],
+ ]);
+ });
+
+ it('shows a flash message', async () => {
+ await actions.updateRelease({ commit, dispatch, state, getters });
- it('shows a flash message', () => {
- return actions.updateRelease({ commit, dispatch, state, getters }).then(() => {
expect(createFlash).toHaveBeenCalledTimes(1);
expect(createFlash).toHaveBeenCalledWith(
- 'Something went wrong while saving the release details',
+ 'Something went wrong while saving the release details.',
);
});
+ };
+
+ describe('when the releaseUpdate mutation returns errors-as-data', () => {
+ beforeEach(() => {
+ gqClient.mutate.mockResolvedValue({
+ data: {
+ releaseUpdate: {
+ errors: ['Something went wrong!'],
+ },
+ releaseAssetLinkDelete: {
+ errors: [],
+ },
+ releaseAssetLinkCreate: {
+ errors: [],
+ },
+ },
+ });
+ });
+
+ expectCorrectErrorHandling();
+ });
+
+ describe('when the releaseAssetLinkDelete mutation returns errors-as-data', () => {
+ beforeEach(() => {
+ gqClient.mutate.mockResolvedValue({
+ data: {
+ releaseUpdate: {
+ errors: [],
+ },
+ releaseAssetLinkDelete: {
+ errors: ['Something went wrong!'],
+ },
+ releaseAssetLinkCreate: {
+ errors: [],
+ },
+ },
+ });
+ });
+
+ expectCorrectErrorHandling();
+ });
+
+ describe('when the releaseAssetLinkCreate mutation returns errors-as-data', () => {
+ beforeEach(() => {
+ gqClient.mutate.mockResolvedValue({
+ data: {
+ releaseUpdate: {
+ errors: [],
+ },
+ releaseAssetLinkDelete: {
+ errors: [],
+ },
+ releaseAssetLinkCreate: {
+ errors: ['Something went wrong!'],
+ },
+ },
+ });
+ });
+
+ expectCorrectErrorHandling();
});
});
});
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 1449c064d77..66f24ac9559 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -257,4 +257,93 @@ describe('Release edit/new getters', () => {
});
});
});
+
+ describe.each([
+ [
+ 'returns all the data needed for the releaseUpdate GraphQL mutation',
+ {
+ projectPath: 'projectPath',
+ release: {
+ tagName: 'release.tagName',
+ name: 'release.name',
+ description: 'release.description',
+ milestones: ['release.milestone[0].title'],
+ },
+ },
+ {
+ projectPath: 'projectPath',
+ tagName: 'release.tagName',
+ name: 'release.name',
+ description: 'release.description',
+ milestones: ['release.milestone[0].title'],
+ },
+ ],
+ [
+ 'trims whitespace from the release name',
+ { release: { name: ' name \t\n' } },
+ { name: 'name' },
+ ],
+ [
+ 'returns the name as null if the name is nothing but whitespace',
+ { release: { name: ' \t\n' } },
+ { name: null },
+ ],
+ ['returns the name as null if the name is undefined', { release: {} }, { name: null }],
+ [
+ 'returns just the milestone titles even if the release includes full milestone objects',
+ { release: { milestones: [{ title: 'release.milestone[0].title' }] } },
+ { milestones: ['release.milestone[0].title'] },
+ ],
+ ])('releaseUpdateMutatationVariables', (description, state, expectedVariables) => {
+ it(description, () => {
+ const expectedVariablesObject = { input: expect.objectContaining(expectedVariables) };
+
+ const actualVariables = getters.releaseUpdateMutatationVariables(state);
+
+ expect(actualVariables).toEqual(expectedVariablesObject);
+ });
+ });
+
+ describe('releaseCreateMutatationVariables', () => {
+ it('returns all the data needed for the releaseCreate GraphQL mutation', () => {
+ const state = {
+ createFrom: 'main',
+ };
+
+ const otherGetters = {
+ releaseUpdateMutatationVariables: {
+ input: {
+ name: 'release.name',
+ },
+ },
+ releaseLinksToCreate: [
+ {
+ name: 'link.name',
+ url: 'link.url',
+ linkType: 'link.linkType',
+ },
+ ],
+ };
+
+ const expectedVariables = {
+ input: {
+ name: 'release.name',
+ ref: 'main',
+ assets: {
+ links: [
+ {
+ name: 'link.name',
+ url: 'link.url',
+ linkType: 'LINK.LINKTYPE',
+ },
+ ],
+ },
+ },
+ };
+
+ const actualVariables = getters.releaseCreateMutatationVariables(state, otherGetters);
+
+ expect(actualVariables).toEqual(expectedVariables);
+ });
+ });
});
diff --git a/spec/frontend/releases/stores/modules/list/actions_spec.js b/spec/frontend/releases/stores/modules/list/actions_spec.js
index 4dc996174bc..af520c2eb20 100644
--- a/spec/frontend/releases/stores/modules/list/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/list/actions_spec.js
@@ -1,43 +1,29 @@
import { cloneDeep } from 'lodash';
import { getJSONFixture } from 'helpers/fixtures';
import testAction from 'helpers/vuex_action_helper';
-import api from '~/api';
-import {
- normalizeHeaders,
- parseIntPagination,
- convertObjectPropsToCamelCase,
-} from '~/lib/utils/common_utils';
import { PAGE_SIZE } from '~/releases/constants';
-import allReleasesQuery from '~/releases/queries/all_releases.query.graphql';
+import allReleasesQuery from '~/releases/graphql/queries/all_releases.query.graphql';
import {
fetchReleases,
- fetchReleasesGraphQl,
- fetchReleasesRest,
receiveReleasesError,
setSorting,
} from '~/releases/stores/modules/index/actions';
import * as types from '~/releases/stores/modules/index/mutation_types';
import createState from '~/releases/stores/modules/index/state';
import { gqClient, convertAllReleasesGraphQLResponse } from '~/releases/util';
-import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
-
-const originalRelease = getJSONFixture('api/releases/release.json');
-const originalReleases = [originalRelease];
const originalGraphqlReleasesResponse = getJSONFixture(
- 'graphql/releases/queries/all_releases.query.graphql.json',
+ 'graphql/releases/graphql/queries/all_releases.query.graphql.json',
);
describe('Releases State actions', () => {
let mockedState;
- let releases;
let graphqlReleasesResponse;
const projectPath = 'root/test-project';
const projectId = 19;
const before = 'testBeforeCursor';
const after = 'testAfterCursor';
- const page = 2;
beforeEach(() => {
mockedState = {
@@ -47,57 +33,10 @@ describe('Releases State actions', () => {
}),
};
- releases = convertObjectPropsToCamelCase(originalReleases, { deep: true });
graphqlReleasesResponse = cloneDeep(originalGraphqlReleasesResponse);
});
- describe('when all the necessary GraphQL feature flags are enabled', () => {
- beforeEach(() => {
- mockedState.useGraphQLEndpoint = true;
- });
-
- describe('fetchReleases', () => {
- it('dispatches fetchReleasesGraphQl with before and after parameters', () => {
- return testAction(
- fetchReleases,
- { before, after, page },
- mockedState,
- [],
- [
- {
- type: 'fetchReleasesGraphQl',
- payload: { before, after },
- },
- ],
- );
- });
- });
- });
-
- describe('when at least one of the GraphQL feature flags is disabled', () => {
- beforeEach(() => {
- mockedState.useGraphQLEndpoint = false;
- });
-
- describe('fetchReleases', () => {
- it('dispatches fetchReleasesRest with a page parameter', () => {
- return testAction(
- fetchReleases,
- { before, after, page },
- mockedState,
- [],
- [
- {
- type: 'fetchReleasesRest',
- payload: { page },
- },
- ],
- );
- });
- });
- });
-
- describe('fetchReleasesGraphQl', () => {
+ describe('fetchReleases', () => {
describe('GraphQL query variables', () => {
let vuexParams;
@@ -109,7 +48,7 @@ describe('Releases State actions', () => {
describe('when neither a before nor an after parameter is provided', () => {
beforeEach(() => {
- fetchReleasesGraphQl(vuexParams, { before: undefined, after: undefined });
+ fetchReleases(vuexParams, { before: undefined, after: undefined });
});
it('makes a GraphQl query with a first variable', () => {
@@ -122,7 +61,7 @@ describe('Releases State actions', () => {
describe('when only a before parameter is provided', () => {
beforeEach(() => {
- fetchReleasesGraphQl(vuexParams, { before, after: undefined });
+ fetchReleases(vuexParams, { before, after: undefined });
});
it('makes a GraphQl query with last and before variables', () => {
@@ -135,7 +74,7 @@ describe('Releases State actions', () => {
describe('when only an after parameter is provided', () => {
beforeEach(() => {
- fetchReleasesGraphQl(vuexParams, { before: undefined, after });
+ fetchReleases(vuexParams, { before: undefined, after });
});
it('makes a GraphQl query with first and after variables', () => {
@@ -148,12 +87,12 @@ describe('Releases State actions', () => {
describe('when both before and after parameters are provided', () => {
it('throws an error', () => {
- const callFetchReleasesGraphQl = () => {
- fetchReleasesGraphQl(vuexParams, { before, after });
+ const callFetchReleases = () => {
+ fetchReleases(vuexParams, { before, after });
};
- expect(callFetchReleasesGraphQl).toThrowError(
- 'Both a `before` and an `after` parameter were provided to fetchReleasesGraphQl. These parameters cannot be used together.',
+ expect(callFetchReleases).toThrowError(
+ 'Both a `before` and an `after` parameter were provided to fetchReleases. These parameters cannot be used together.',
);
});
});
@@ -171,7 +110,7 @@ describe('Releases State actions', () => {
mockedState.sorting.sort = sort;
mockedState.sorting.orderBy = orderBy;
- fetchReleasesGraphQl(vuexParams, { before: undefined, after: undefined });
+ fetchReleases(vuexParams, { before: undefined, after: undefined });
expect(gqClient.query).toHaveBeenCalledWith({
query: allReleasesQuery,
@@ -191,7 +130,7 @@ describe('Releases State actions', () => {
const convertedResponse = convertAllReleasesGraphQLResponse(graphqlReleasesResponse);
return testAction(
- fetchReleasesGraphQl,
+ fetchReleases,
{},
mockedState,
[
@@ -202,7 +141,7 @@ describe('Releases State actions', () => {
type: types.RECEIVE_RELEASES_SUCCESS,
payload: {
data: convertedResponse.data,
- graphQlPageInfo: convertedResponse.paginationInfo,
+ pageInfo: convertedResponse.paginationInfo,
},
},
],
@@ -218,90 +157,7 @@ describe('Releases State actions', () => {
it(`commits ${types.REQUEST_RELEASES} and dispatch receiveReleasesError`, () => {
return testAction(
- fetchReleasesGraphQl,
- {},
- mockedState,
- [
- {
- type: types.REQUEST_RELEASES,
- },
- ],
- [
- {
- type: 'receiveReleasesError',
- },
- ],
- );
- });
- });
- });
-
- describe('fetchReleasesRest', () => {
- describe('REST query parameters', () => {
- let vuexParams;
-
- beforeEach(() => {
- jest
- .spyOn(api, 'releases')
- .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
-
- vuexParams = { dispatch: jest.fn(), commit: jest.fn(), state: mockedState };
- });
-
- describe('when a page parameter is provided', () => {
- beforeEach(() => {
- fetchReleasesRest(vuexParams, { page: 2 });
- });
-
- it('makes a REST query with a page query parameter', () => {
- expect(api.releases).toHaveBeenCalledWith(projectId, {
- page,
- order_by: 'released_at',
- sort: 'desc',
- });
- });
- });
- });
-
- describe('when the request is successful', () => {
- beforeEach(() => {
- jest
- .spyOn(api, 'releases')
- .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
- });
-
- it(`commits ${types.REQUEST_RELEASES} and ${types.RECEIVE_RELEASES_SUCCESS}`, () => {
- return testAction(
- fetchReleasesRest,
- {},
- mockedState,
- [
- {
- type: types.REQUEST_RELEASES,
- },
- {
- type: types.RECEIVE_RELEASES_SUCCESS,
- payload: {
- data: convertObjectPropsToCamelCase(releases, { deep: true }),
- restPageInfo: parseIntPagination(
- normalizeHeaders(pageInfoHeadersWithoutPagination),
- ),
- },
- },
- ],
- [],
- );
- });
- });
-
- describe('when the request fails', () => {
- beforeEach(() => {
- jest.spyOn(api, 'releases').mockRejectedValue(new Error('Something went wrong!'));
- });
-
- it(`commits ${types.REQUEST_RELEASES} and dispatch receiveReleasesError`, () => {
- return testAction(
- fetchReleasesRest,
+ fetchReleases,
{},
mockedState,
[
diff --git a/spec/frontend/releases/stores/modules/list/mutations_spec.js b/spec/frontend/releases/stores/modules/list/mutations_spec.js
index 8b35ba5d7ac..08d803b3c2c 100644
--- a/spec/frontend/releases/stores/modules/list/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/list/mutations_spec.js
@@ -1,28 +1,25 @@
import { getJSONFixture } from 'helpers/fixtures';
-import { parseIntPagination, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import * as types from '~/releases/stores/modules/index/mutation_types';
import mutations from '~/releases/stores/modules/index/mutations';
import createState from '~/releases/stores/modules/index/state';
import { convertAllReleasesGraphQLResponse } from '~/releases/util';
-import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
const originalRelease = getJSONFixture('api/releases/release.json');
const originalReleases = [originalRelease];
const graphqlReleasesResponse = getJSONFixture(
- 'graphql/releases/queries/all_releases.query.graphql.json',
+ 'graphql/releases/graphql/queries/all_releases.query.graphql.json',
);
describe('Releases Store Mutations', () => {
let stateCopy;
- let restPageInfo;
- let graphQlPageInfo;
+ let pageInfo;
let releases;
beforeEach(() => {
stateCopy = createState({});
- restPageInfo = parseIntPagination(pageInfoHeadersWithoutPagination);
- graphQlPageInfo = convertAllReleasesGraphQLResponse(graphqlReleasesResponse).paginationInfo;
+ pageInfo = convertAllReleasesGraphQLResponse(graphqlReleasesResponse).paginationInfo;
releases = convertObjectPropsToCamelCase(originalReleases, { deep: true });
});
@@ -37,8 +34,7 @@ describe('Releases Store Mutations', () => {
describe('RECEIVE_RELEASES_SUCCESS', () => {
beforeEach(() => {
mutations[types.RECEIVE_RELEASES_SUCCESS](stateCopy, {
- restPageInfo,
- graphQlPageInfo,
+ pageInfo,
data: releases,
});
});
@@ -55,20 +51,15 @@ describe('Releases Store Mutations', () => {
expect(stateCopy.releases).toEqual(releases);
});
- it('sets restPageInfo', () => {
- expect(stateCopy.restPageInfo).toEqual(restPageInfo);
- });
-
- it('sets graphQlPageInfo', () => {
- expect(stateCopy.graphQlPageInfo).toEqual(graphQlPageInfo);
+ it('sets pageInfo', () => {
+ expect(stateCopy.pageInfo).toEqual(pageInfo);
});
});
describe('RECEIVE_RELEASES_ERROR', () => {
it('resets data', () => {
mutations[types.RECEIVE_RELEASES_SUCCESS](stateCopy, {
- restPageInfo,
- graphQlPageInfo,
+ pageInfo,
data: releases,
});
@@ -76,8 +67,7 @@ describe('Releases Store Mutations', () => {
expect(stateCopy.isLoading).toEqual(false);
expect(stateCopy.releases).toEqual([]);
- expect(stateCopy.restPageInfo).toEqual({});
- expect(stateCopy.graphQlPageInfo).toEqual({});
+ expect(stateCopy.pageInfo).toEqual({});
});
});
diff --git a/spec/frontend/releases/util_spec.js b/spec/frontend/releases/util_spec.js
index fd00a524628..36e7be369d3 100644
--- a/spec/frontend/releases/util_spec.js
+++ b/spec/frontend/releases/util_spec.js
@@ -1,121 +1,22 @@
import { cloneDeep } from 'lodash';
import { getJSONFixture } from 'helpers/fixtures';
import {
- releaseToApiJson,
- apiJsonToRelease,
convertGraphQLRelease,
convertAllReleasesGraphQLResponse,
convertOneReleaseGraphQLResponse,
} from '~/releases/util';
const originalAllReleasesQueryResponse = getJSONFixture(
- 'graphql/releases/queries/all_releases.query.graphql.json',
+ 'graphql/releases/graphql/queries/all_releases.query.graphql.json',
);
const originalOneReleaseQueryResponse = getJSONFixture(
- 'graphql/releases/queries/one_release.query.graphql.json',
+ 'graphql/releases/graphql/queries/one_release.query.graphql.json',
+);
+const originalOneReleaseForEditingQueryResponse = getJSONFixture(
+ 'graphql/releases/graphql/queries/one_release_for_editing.query.graphql.json',
);
describe('releases/util.js', () => {
- describe('releaseToApiJson', () => {
- it('converts a release JavaScript object into JSON that the Release API can accept', () => {
- const release = {
- tagName: 'tag-name',
- name: 'Release name',
- description: 'Release description',
- milestones: ['13.2', '13.3'],
- assets: {
- links: [{ url: 'https://gitlab.example.com/link', linkType: 'other' }],
- },
- };
-
- const expectedJson = {
- tag_name: 'tag-name',
- ref: null,
- name: 'Release name',
- description: 'Release description',
- milestones: ['13.2', '13.3'],
- assets: {
- links: [{ url: 'https://gitlab.example.com/link', link_type: 'other' }],
- },
- };
-
- expect(releaseToApiJson(release)).toEqual(expectedJson);
- });
-
- describe('when createFrom is provided', () => {
- it('adds the provided createFrom ref to the JSON as a "ref" property', () => {
- const createFrom = 'main';
-
- const release = {};
-
- const expectedJson = {
- ref: createFrom,
- };
-
- expect(releaseToApiJson(release, createFrom)).toMatchObject(expectedJson);
- });
- });
-
- describe('release.name', () => {
- it.each`
- input | output
- ${null} | ${null}
- ${''} | ${null}
- ${' \t\n\r\n'} | ${null}
- ${' Release name '} | ${'Release name'}
- `('converts a name like `$input` to `$output`', ({ input, output }) => {
- const release = { name: input };
-
- const expectedJson = {
- name: output,
- };
-
- expect(releaseToApiJson(release)).toMatchObject(expectedJson);
- });
- });
-
- describe('when milestones contains full milestone objects', () => {
- it('converts the milestone objects into titles', () => {
- const release = {
- milestones: [{ title: '13.2' }, { title: '13.3' }, '13.4'],
- };
-
- const expectedJson = { milestones: ['13.2', '13.3', '13.4'] };
-
- expect(releaseToApiJson(release)).toMatchObject(expectedJson);
- });
- });
- });
-
- describe('apiJsonToRelease', () => {
- it('converts JSON received from the Release API into an object usable by the Vue application', () => {
- const json = {
- tag_name: 'tag-name',
- assets: {
- links: [
- {
- link_type: 'other',
- },
- ],
- },
- };
-
- const expectedRelease = {
- tagName: 'tag-name',
- assets: {
- links: [
- {
- linkType: 'other',
- },
- ],
- },
- milestones: [],
- };
-
- expect(apiJsonToRelease(json)).toEqual(expectedRelease);
- });
- });
-
describe('convertGraphQLRelease', () => {
let releaseFromResponse;
let convertedRelease;
@@ -135,6 +36,26 @@ describe('releases/util.js', () => {
expect(convertedRelease.assets.links[0].linkType).toBeUndefined();
});
+
+ it('handles assets that have no links', () => {
+ expect(convertedRelease.assets.links[0]).not.toBeUndefined();
+
+ delete releaseFromResponse.assets.links;
+
+ convertedRelease = convertGraphQLRelease(releaseFromResponse);
+
+ expect(convertedRelease.assets.links).toEqual([]);
+ });
+
+ it('handles assets that have no sources', () => {
+ expect(convertedRelease.assets.sources[0]).not.toBeUndefined();
+
+ delete releaseFromResponse.assets.sources;
+
+ convertedRelease = convertGraphQLRelease(releaseFromResponse);
+
+ expect(convertedRelease.assets.sources).toEqual([]);
+ });
});
describe('_links', () => {
@@ -160,6 +81,33 @@ describe('releases/util.js', () => {
expect(convertedRelease.commit).toBeUndefined();
});
});
+
+ describe('milestones', () => {
+ it("handles releases that don't have any milestone stats", () => {
+ expect(convertedRelease.milestones[0].issueStats).not.toBeUndefined();
+
+ releaseFromResponse.milestones.nodes = releaseFromResponse.milestones.nodes.map((n) => ({
+ ...n,
+ stats: undefined,
+ }));
+
+ convertedRelease = convertGraphQLRelease(releaseFromResponse);
+
+ expect(convertedRelease.milestones[0].issueStats).toEqual({});
+ });
+ });
+
+ describe('evidences', () => {
+ it("handles releases that don't have any evidences", () => {
+ expect(convertedRelease.evidences).not.toBeUndefined();
+
+ delete releaseFromResponse.evidences;
+
+ convertedRelease = convertGraphQLRelease(releaseFromResponse);
+
+ expect(convertedRelease.evidences).toEqual([]);
+ });
+ });
});
describe('convertAllReleasesGraphQLResponse', () => {
@@ -173,4 +121,12 @@ describe('releases/util.js', () => {
expect(convertOneReleaseGraphQLResponse(originalOneReleaseQueryResponse)).toMatchSnapshot();
});
});
+
+ describe('convertOneReleaseForEditingGraphQLResponse', () => {
+ it('matches snapshot', () => {
+ expect(
+ convertOneReleaseGraphQLResponse(originalOneReleaseForEditingQueryResponse),
+ ).toMatchSnapshot();
+ });
+ });
});
diff --git a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
index f0b23bb7b58..b8299d44f13 100644
--- a/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
+++ b/spec/frontend/reports/codequality_report/grouped_codequality_reports_app_spec.js
@@ -3,7 +3,7 @@ import Vuex from 'vuex';
import CodequalityIssueBody from '~/reports/codequality_report/components/codequality_issue_body.vue';
import GroupedCodequalityReportsApp from '~/reports/codequality_report/grouped_codequality_reports_app.vue';
import { getStoreConfig } from '~/reports/codequality_report/store';
-import { mockParsedHeadIssues, mockParsedBaseIssues } from './mock_data';
+import { parsedReportIssues } from './mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -80,7 +80,7 @@ describe('Grouped code quality reports app', () => {
describe('with issues', () => {
describe('with new issues', () => {
beforeEach(() => {
- mockStore.state.newIssues = [mockParsedHeadIssues[0]];
+ mockStore.state.newIssues = parsedReportIssues.newIssues;
mockStore.state.resolvedIssues = [];
});
@@ -89,14 +89,14 @@ describe('Grouped code quality reports app', () => {
});
it('renders custom codequality issue body', () => {
- expect(findIssueBody().props('issue')).toEqual(mockParsedHeadIssues[0]);
+ expect(findIssueBody().props('issue')).toEqual(parsedReportIssues.newIssues[0]);
});
});
describe('with resolved issues', () => {
beforeEach(() => {
mockStore.state.newIssues = [];
- mockStore.state.resolvedIssues = [mockParsedBaseIssues[0]];
+ mockStore.state.resolvedIssues = parsedReportIssues.resolvedIssues;
});
it('renders summary text', () => {
@@ -104,14 +104,14 @@ describe('Grouped code quality reports app', () => {
});
it('renders custom codequality issue body', () => {
- expect(findIssueBody().props('issue')).toEqual(mockParsedBaseIssues[0]);
+ expect(findIssueBody().props('issue')).toEqual(parsedReportIssues.resolvedIssues[0]);
});
});
describe('with new and resolved issues', () => {
beforeEach(() => {
- mockStore.state.newIssues = [mockParsedHeadIssues[0]];
- mockStore.state.resolvedIssues = [mockParsedBaseIssues[0]];
+ mockStore.state.newIssues = parsedReportIssues.newIssues;
+ mockStore.state.resolvedIssues = parsedReportIssues.resolvedIssues;
});
it('renders summary text', () => {
@@ -121,7 +121,7 @@ describe('Grouped code quality reports app', () => {
});
it('renders custom codequality issue body', () => {
- expect(findIssueBody().props('issue')).toEqual(mockParsedHeadIssues[0]);
+ expect(findIssueBody().props('issue')).toEqual(parsedReportIssues.newIssues[0]);
});
});
});
diff --git a/spec/frontend/reports/codequality_report/mock_data.js b/spec/frontend/reports/codequality_report/mock_data.js
index c5cecb34509..2c994116db6 100644
--- a/spec/frontend/reports/codequality_report/mock_data.js
+++ b/spec/frontend/reports/codequality_report/mock_data.js
@@ -1,94 +1,3 @@
-export const headIssues = [
- {
- check_name: 'Rubocop/Lint/UselessAssignment',
- description: 'Insecure Dependency',
- location: {
- path: 'lib/six.rb',
- lines: {
- begin: 6,
- end: 7,
- },
- },
- fingerprint: 'e879dd9bbc0953cad5037cde7ff0f627',
- },
- {
- categories: ['Security'],
- check_name: 'Insecure Dependency',
- description: 'Insecure Dependency',
- location: {
- path: 'Gemfile.lock',
- lines: {
- begin: 22,
- end: 22,
- },
- },
- fingerprint: 'ca2e59451e98ae60ba2f54e3857c50e5',
- },
-];
-
-export const mockParsedHeadIssues = [
- {
- ...headIssues[1],
- name: 'Insecure Dependency',
- path: 'lib/six.rb',
- urlPath: 'headPath/lib/six.rb#L6',
- line: 6,
- },
-];
-
-export const baseIssues = [
- {
- categories: ['Security'],
- check_name: 'Insecure Dependency',
- description: 'Insecure Dependency',
- location: {
- path: 'Gemfile.lock',
- lines: {
- begin: 22,
- end: 22,
- },
- },
- fingerprint: 'ca2e59451e98ae60ba2f54e3857c50e5',
- },
- {
- categories: ['Security'],
- check_name: 'Insecure Dependency',
- description: 'Insecure Dependency',
- location: {
- path: 'Gemfile.lock',
- lines: {
- begin: 21,
- end: 21,
- },
- },
- fingerprint: 'ca2354534dee94ae60ba2f54e3857c50e5',
- },
-];
-
-export const mockParsedBaseIssues = [
- {
- ...baseIssues[1],
- name: 'Insecure Dependency',
- path: 'Gemfile.lock',
- line: 21,
- urlPath: 'basePath/Gemfile.lock#L21',
- },
-];
-
-export const issueDiff = [
- {
- categories: ['Security'],
- check_name: 'Insecure Dependency',
- description: 'Insecure Dependency',
- fingerprint: 'ca2e59451e98ae60ba2f54e3857c50e5',
- line: 6,
- location: { lines: { begin: 22, end: 22 }, path: 'Gemfile.lock' },
- name: 'Insecure Dependency',
- path: 'lib/six.rb',
- urlPath: 'headPath/lib/six.rb#L6',
- },
-];
-
export const reportIssues = {
status: 'failed',
new_errors: [
diff --git a/spec/frontend/reports/codequality_report/store/actions_spec.js b/spec/frontend/reports/codequality_report/store/actions_spec.js
index a2b256448ef..1b83d071d17 100644
--- a/spec/frontend/reports/codequality_report/store/actions_spec.js
+++ b/spec/frontend/reports/codequality_report/store/actions_spec.js
@@ -5,30 +5,7 @@ import axios from '~/lib/utils/axios_utils';
import createStore from '~/reports/codequality_report/store';
import * as actions from '~/reports/codequality_report/store/actions';
import * as types from '~/reports/codequality_report/store/mutation_types';
-import {
- headIssues,
- baseIssues,
- mockParsedHeadIssues,
- mockParsedBaseIssues,
- reportIssues,
- parsedReportIssues,
-} from '../mock_data';
-
-// mock codequality comparison worker
-jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker', () =>
- jest.fn().mockImplementation(() => {
- return {
- addEventListener: (eventName, callback) => {
- callback({
- data: {
- newIssues: [mockParsedHeadIssues[0]],
- resolvedIssues: [mockParsedBaseIssues[0]],
- },
- });
- },
- };
- }),
-);
+import { reportIssues, parsedReportIssues } from '../mock_data';
describe('Codequality Reports actions', () => {
let localState;
@@ -43,9 +20,6 @@ describe('Codequality Reports actions', () => {
it('should commit SET_PATHS mutation', (done) => {
const paths = {
basePath: 'basePath',
- headPath: 'headPath',
- baseBlobPath: 'baseBlobPath',
- headBlobPath: 'headBlobPath',
reportsPath: 'reportsPath',
helpPath: 'codequalityHelpPath',
};
@@ -63,119 +37,64 @@ describe('Codequality Reports actions', () => {
describe('fetchReports', () => {
let mock;
- let diffFeatureFlagEnabled;
- describe('with codequalityBackendComparison feature flag enabled', () => {
- beforeEach(() => {
- diffFeatureFlagEnabled = true;
- localState.reportsPath = `${TEST_HOST}/codequality_reports.json`;
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- describe('on success', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
- mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, reportIssues);
-
- testAction(
- actions.fetchReports,
- diffFeatureFlagEnabled,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [
- {
- payload: parsedReportIssues,
- type: 'receiveReportsSuccess',
- },
- ],
- done,
- );
- });
- });
-
- describe('on error', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
- mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(500);
-
- testAction(
- actions.fetchReports,
- diffFeatureFlagEnabled,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError', payload: expect.any(Error) }],
- done,
- );
- });
- });
+ beforeEach(() => {
+ localState.reportsPath = `${TEST_HOST}/codequality_reports.json`;
+ localState.basePath = '/base/path';
+ mock = new MockAdapter(axios);
});
- describe('with codequalityBackendComparison feature flag disabled', () => {
- beforeEach(() => {
- diffFeatureFlagEnabled = false;
- localState.headPath = `${TEST_HOST}/head.json`;
- localState.basePath = `${TEST_HOST}/base.json`;
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.restore();
- });
+ afterEach(() => {
+ mock.restore();
+ });
- describe('on success', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
- mock.onGet(`${TEST_HOST}/head.json`).reply(200, headIssues);
- mock.onGet(`${TEST_HOST}/base.json`).reply(200, baseIssues);
-
- testAction(
- actions.fetchReports,
- diffFeatureFlagEnabled,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [
- {
- payload: {
- newIssues: [mockParsedHeadIssues[0]],
- resolvedIssues: [mockParsedBaseIssues[0]],
- },
- type: 'receiveReportsSuccess',
- },
- ],
- done,
- );
- });
+ describe('on success', () => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => {
+ mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, reportIssues);
+
+ testAction(
+ actions.fetchReports,
+ null,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [
+ {
+ payload: parsedReportIssues,
+ type: 'receiveReportsSuccess',
+ },
+ ],
+ done,
+ );
});
+ });
- describe('on error', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
- mock.onGet(`${TEST_HOST}/head.json`).reply(500);
-
- testAction(
- actions.fetchReports,
- diffFeatureFlagEnabled,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError', payload: expect.any(Error) }],
- done,
- );
- });
+ describe('on error', () => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
+ mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(500);
+
+ testAction(
+ actions.fetchReports,
+ null,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [{ type: 'receiveReportsError', payload: expect.any(Error) }],
+ done,
+ );
});
+ });
- describe('with no base path', () => {
- it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
- localState.basePath = null;
-
- testAction(
- actions.fetchReports,
- diffFeatureFlagEnabled,
- localState,
- [{ type: types.REQUEST_REPORTS }],
- [{ type: 'receiveReportsError' }],
- done,
- );
- });
+ describe('with no base path', () => {
+ it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => {
+ localState.basePath = null;
+
+ testAction(
+ actions.fetchReports,
+ null,
+ localState,
+ [{ type: types.REQUEST_REPORTS }],
+ [{ type: 'receiveReportsError' }],
+ done,
+ );
});
});
});
diff --git a/spec/frontend/reports/codequality_report/store/mutations_spec.js b/spec/frontend/reports/codequality_report/store/mutations_spec.js
index 05a16cd6f82..9d4c05afd36 100644
--- a/spec/frontend/reports/codequality_report/store/mutations_spec.js
+++ b/spec/frontend/reports/codequality_report/store/mutations_spec.js
@@ -13,23 +13,17 @@ describe('Codequality Reports mutations', () => {
describe('SET_PATHS', () => {
it('sets paths to given values', () => {
const basePath = 'base.json';
- const headPath = 'head.json';
- const baseBlobPath = 'base/blob/path/';
- const headBlobPath = 'head/blob/path/';
+ const reportsPath = 'reports.json';
const helpPath = 'help.html';
mutations.SET_PATHS(localState, {
basePath,
- headPath,
- baseBlobPath,
- headBlobPath,
+ reportsPath,
helpPath,
});
expect(localState.basePath).toEqual(basePath);
- expect(localState.headPath).toEqual(headPath);
- expect(localState.baseBlobPath).toEqual(baseBlobPath);
- expect(localState.headBlobPath).toEqual(headBlobPath);
+ expect(localState.reportsPath).toEqual(reportsPath);
expect(localState.helpPath).toEqual(helpPath);
});
});
diff --git a/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js b/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js
deleted file mode 100644
index 389e9b4a1f6..00000000000
--- a/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js
+++ /dev/null
@@ -1,153 +0,0 @@
-import {
- parseCodeclimateMetrics,
- doCodeClimateComparison,
-} from '~/reports/codequality_report/store/utils/codequality_comparison';
-import {
- baseIssues,
- mockParsedHeadIssues,
- mockParsedBaseIssues,
- reportIssues,
- parsedReportIssues,
-} from '../../mock_data';
-
-jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker', () => {
- let mockPostMessageCallback;
- return jest.fn().mockImplementation(() => {
- return {
- addEventListener: (_, callback) => {
- mockPostMessageCallback = callback;
- },
- postMessage: (data) => {
- if (!data.headIssues) return mockPostMessageCallback({ data: {} });
- if (!data.baseIssues) throw new Error();
- const key = 'fingerprint';
- return mockPostMessageCallback({
- data: {
- newIssues: data.headIssues.filter(
- (item) => !data.baseIssues.find((el) => el[key] === item[key]),
- ),
- resolvedIssues: data.baseIssues.filter(
- (item) => !data.headIssues.find((el) => el[key] === item[key]),
- ),
- },
- });
- },
- };
- });
-});
-
-describe('Codequality report store utils', () => {
- let result;
-
- describe('parseCodeclimateMetrics', () => {
- it('should parse the issues from codeclimate artifacts', () => {
- [result] = parseCodeclimateMetrics(baseIssues, 'path');
-
- expect(result.name).toEqual(baseIssues[0].check_name);
- expect(result.path).toEqual(baseIssues[0].location.path);
- expect(result.line).toEqual(baseIssues[0].location.lines.begin);
- });
-
- it('should parse the issues from backend codequality diff', () => {
- [result] = parseCodeclimateMetrics(reportIssues.new_errors, 'path');
-
- expect(result.name).toEqual(parsedReportIssues.newIssues[0].name);
- expect(result.path).toEqual(parsedReportIssues.newIssues[0].path);
- expect(result.line).toEqual(parsedReportIssues.newIssues[0].line);
- });
-
- describe('when an issue has no location or path', () => {
- const issue = { description: 'Insecure Dependency' };
-
- beforeEach(() => {
- [result] = parseCodeclimateMetrics([issue], 'path');
- });
-
- it('is parsed', () => {
- expect(result.name).toEqual(issue.description);
- });
- });
-
- describe('when an issue has a path but no line', () => {
- const issue = { description: 'Insecure Dependency', location: { path: 'Gemfile.lock' } };
-
- beforeEach(() => {
- [result] = parseCodeclimateMetrics([issue], 'path');
- });
-
- it('is parsed', () => {
- expect(result.name).toEqual(issue.description);
- expect(result.path).toEqual(issue.location.path);
- expect(result.urlPath).toEqual(`path/${issue.location.path}`);
- });
- });
-
- describe('when an issue has a line nested in positions', () => {
- const issue = {
- description: 'Insecure Dependency',
- location: {
- path: 'Gemfile.lock',
- positions: { begin: { line: 84 } },
- },
- };
-
- beforeEach(() => {
- [result] = parseCodeclimateMetrics([issue], 'path');
- });
-
- it('is parsed', () => {
- expect(result.name).toEqual(issue.description);
- expect(result.path).toEqual(issue.location.path);
- expect(result.urlPath).toEqual(
- `path/${issue.location.path}#L${issue.location.positions.begin.line}`,
- );
- });
- });
-
- describe('with an empty issue array', () => {
- beforeEach(() => {
- result = parseCodeclimateMetrics([], 'path');
- });
-
- it('returns an empty array', () => {
- expect(result).toEqual([]);
- });
- });
- });
-
- describe('doCodeClimateComparison', () => {
- describe('when the comparison worker finds changed issues', () => {
- beforeEach(async () => {
- result = await doCodeClimateComparison(mockParsedHeadIssues, mockParsedBaseIssues);
- });
-
- it('returns the new and resolved issues', () => {
- expect(result.resolvedIssues[0]).toEqual(mockParsedBaseIssues[0]);
- expect(result.newIssues[0]).toEqual(mockParsedHeadIssues[0]);
- });
- });
-
- describe('when the comparison worker finds no changed issues', () => {
- beforeEach(async () => {
- result = await doCodeClimateComparison([], []);
- });
-
- it('returns the empty issue arrays', () => {
- expect(result.newIssues).toEqual([]);
- expect(result.resolvedIssues).toEqual([]);
- });
- });
-
- describe('when the comparison worker is given malformed data', () => {
- it('rejects the promise', () => {
- return expect(doCodeClimateComparison(null)).rejects.toEqual({});
- });
- });
-
- describe('when the comparison worker encounters an error', () => {
- it('rejects the promise and throws an error', () => {
- return expect(doCodeClimateComparison([], null)).rejects.toThrow();
- });
- });
- });
-});
diff --git a/spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js b/spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js
new file mode 100644
index 00000000000..ba95294ab0a
--- /dev/null
+++ b/spec/frontend/reports/codequality_report/store/utils/codequality_parser_spec.js
@@ -0,0 +1,74 @@
+import { reportIssues, parsedReportIssues } from 'jest/reports/codequality_report/mock_data';
+import { parseCodeclimateMetrics } from '~/reports/codequality_report/store/utils/codequality_parser';
+
+describe('Codequality report store utils', () => {
+ let result;
+
+ describe('parseCodeclimateMetrics', () => {
+ it('should parse the issues from backend codequality diff', () => {
+ [result] = parseCodeclimateMetrics(reportIssues.new_errors, 'path');
+
+ expect(result.name).toEqual(parsedReportIssues.newIssues[0].name);
+ expect(result.path).toEqual(parsedReportIssues.newIssues[0].path);
+ expect(result.line).toEqual(parsedReportIssues.newIssues[0].line);
+ });
+
+ describe('when an issue has no location or path', () => {
+ const issue = { description: 'Insecure Dependency' };
+
+ beforeEach(() => {
+ [result] = parseCodeclimateMetrics([issue], 'path');
+ });
+
+ it('is parsed', () => {
+ expect(result.name).toEqual(issue.description);
+ });
+ });
+
+ describe('when an issue has a path but no line', () => {
+ const issue = { description: 'Insecure Dependency', location: { path: 'Gemfile.lock' } };
+
+ beforeEach(() => {
+ [result] = parseCodeclimateMetrics([issue], 'path');
+ });
+
+ it('is parsed', () => {
+ expect(result.name).toEqual(issue.description);
+ expect(result.path).toEqual(issue.location.path);
+ expect(result.urlPath).toEqual(`path/${issue.location.path}`);
+ });
+ });
+
+ describe('when an issue has a line nested in positions', () => {
+ const issue = {
+ description: 'Insecure Dependency',
+ location: {
+ path: 'Gemfile.lock',
+ positions: { begin: { line: 84 } },
+ },
+ };
+
+ beforeEach(() => {
+ [result] = parseCodeclimateMetrics([issue], 'path');
+ });
+
+ it('is parsed', () => {
+ expect(result.name).toEqual(issue.description);
+ expect(result.path).toEqual(issue.location.path);
+ expect(result.urlPath).toEqual(
+ `path/${issue.location.path}#L${issue.location.positions.begin.line}`,
+ );
+ });
+ });
+
+ describe('with an empty issue array', () => {
+ beforeEach(() => {
+ result = parseCodeclimateMetrics([], 'path');
+ });
+
+ it('returns an empty array', () => {
+ expect(result).toEqual([]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js b/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
index 55bb7dbe5c0..d29048d640c 100644
--- a/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
+++ b/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
@@ -279,9 +279,7 @@ describe('Grouped test reports app', () => {
});
it('renders the recent failures count on the test case', () => {
- expect(findIssueRecentFailures().text()).toBe(
- 'Failed 8 times in master in the last 14 days',
- );
+ expect(findIssueRecentFailures().text()).toBe('Failed 8 times in main in the last 14 days');
});
});
diff --git a/spec/frontend/reports/grouped_test_report/store/mutations_spec.js b/spec/frontend/reports/grouped_test_report/store/mutations_spec.js
index d8642a9b440..b2890d7285f 100644
--- a/spec/frontend/reports/grouped_test_report/store/mutations_spec.js
+++ b/spec/frontend/reports/grouped_test_report/store/mutations_spec.js
@@ -52,7 +52,7 @@ describe('Reports Store Mutations', () => {
system_output: "Failure/Error: is_expected.to eq('gitlab')",
recent_failures: {
count: 4,
- base_branch: 'master',
+ base_branch: 'main',
},
},
],
diff --git a/spec/frontend/reports/mock_data/mock_data.js b/spec/frontend/reports/mock_data/mock_data.js
index 68c7439df47..2599b0ac365 100644
--- a/spec/frontend/reports/mock_data/mock_data.js
+++ b/spec/frontend/reports/mock_data/mock_data.js
@@ -7,7 +7,7 @@ export const failedIssue = {
"Failure/Error: is_expected.to eq(3)\n\n expected: 3\n got: -1\n\n (compared using ==)\n./spec/test_spec.rb:12:in `block (4 levels) in \u003ctop (required)\u003e'",
recent_failures: {
count: 3,
- base_branch: 'master',
+ base_branch: 'main',
},
};
diff --git a/spec/frontend/reports/mock_data/recent_failures_report.json b/spec/frontend/reports/mock_data/recent_failures_report.json
index bc86d788ee2..c4a5fb78dcd 100644
--- a/spec/frontend/reports/mock_data/recent_failures_report.json
+++ b/spec/frontend/reports/mock_data/recent_failures_report.json
@@ -12,7 +12,7 @@
"system_output": "Failure/Error: is_expected.to eq(3)\n\n expected: 3\n got: -1\n\n (compared using ==)\n./spec/test_spec.rb:12:in `block (4 levels) in <top (required)>'",
"recent_failures": {
"count": 8,
- "base_branch": "master"
+ "base_branch": "main"
}
},
{
@@ -38,7 +38,7 @@
"execution_time": 0.000562,
"recent_failures": {
"count": 3,
- "base_branch": "master"
+ "base_branch": "main"
}
}
],
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index b662a1d20a9..f03df8cf2ac 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -1,14 +1,17 @@
import { GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import BlobContent from '~/blob/components/blob_content.vue';
import BlobHeader from '~/blob/components/blob_header.vue';
import BlobContentViewer from '~/repository/components/blob_content_viewer.vue';
+import BlobHeaderEdit from '~/repository/components/blob_header_edit.vue';
let wrapper;
-const mockData = {
+const simpleMockData = {
name: 'some_file.js',
size: 123,
- rawBlob: 'raw content',
+ rawSize: 123,
+ rawTextBlob: 'raw content',
type: 'text',
fileType: 'text',
tooLarge: false,
@@ -25,62 +28,160 @@ const mockData = {
lockLink: 'some_file.js/lock',
canModifyBlob: true,
forkPath: 'some_file.js/fork',
- simpleViewer: {},
- richViewer: {},
+ simpleViewer: {
+ fileType: 'text',
+ tooLarge: false,
+ type: 'simple',
+ renderError: null,
+ },
+ richViewer: null,
+};
+const richMockData = {
+ ...simpleMockData,
+ richViewer: {
+ fileType: 'markup',
+ tooLarge: false,
+ type: 'rich',
+ renderError: null,
+ },
};
-function factory(path, loading = false) {
- wrapper = shallowMount(BlobContentViewer, {
+const createFactory = (mountFn) => (
+ { props = {}, mockData = {}, stubs = {} } = {},
+ loading = false,
+) => {
+ wrapper = mountFn(BlobContentViewer, {
propsData: {
- path,
+ path: 'some_file.js',
+ projectPath: 'some/path',
+ ...props,
},
mocks: {
$apollo: {
queries: {
- blobInfo: {
+ project: {
loading,
},
},
},
},
+ stubs,
});
- wrapper.setData({ blobInfo: mockData });
-}
+ wrapper.setData(mockData);
+};
+
+const factory = createFactory(shallowMount);
+const fullFactory = createFactory(mount);
describe('Blob content viewer component', () => {
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findBlobHeader = () => wrapper.find(BlobHeader);
+ const findBlobHeaderEdit = () => wrapper.find(BlobHeaderEdit);
const findBlobContent = () => wrapper.find(BlobContent);
afterEach(() => {
wrapper.destroy();
});
- beforeEach(() => {
- factory('some_file.js');
- });
-
it('renders a GlLoadingIcon component', () => {
- factory('some_file.js', true);
+ factory({ mockData: { blobInfo: simpleMockData } }, true);
expect(findLoadingIcon().exists()).toBe(true);
});
- it('renders a BlobHeader component', () => {
- expect(findBlobHeader().exists()).toBe(true);
+ describe('simple viewer', () => {
+ beforeEach(() => {
+ factory({ mockData: { blobInfo: simpleMockData } });
+ });
+
+ it('renders a BlobHeader component', () => {
+ expect(findBlobHeader().props('activeViewerType')).toEqual('simple');
+ expect(findBlobHeader().props('hasRenderError')).toEqual(false);
+ expect(findBlobHeader().props('hideViewerSwitcher')).toEqual(true);
+ expect(findBlobHeader().props('blob')).toEqual(simpleMockData);
+ });
+
+ it('renders a BlobContent component', () => {
+ expect(findBlobContent().props('loading')).toEqual(false);
+ expect(findBlobContent().props('content')).toEqual('raw content');
+ expect(findBlobContent().props('isRawContent')).toBe(true);
+ expect(findBlobContent().props('activeViewer')).toEqual({
+ fileType: 'text',
+ tooLarge: false,
+ type: 'simple',
+ renderError: null,
+ });
+ });
+ });
+
+ describe('rich viewer', () => {
+ beforeEach(() => {
+ factory({
+ mockData: { blobInfo: richMockData, activeViewerType: 'rich' },
+ });
+ });
+
+ it('renders a BlobHeader component', () => {
+ expect(findBlobHeader().props('activeViewerType')).toEqual('rich');
+ expect(findBlobHeader().props('hasRenderError')).toEqual(false);
+ expect(findBlobHeader().props('hideViewerSwitcher')).toEqual(false);
+ expect(findBlobHeader().props('blob')).toEqual(richMockData);
+ });
+
+ it('renders a BlobContent component', () => {
+ expect(findBlobContent().props('loading')).toEqual(false);
+ expect(findBlobContent().props('content')).toEqual('raw content');
+ expect(findBlobContent().props('isRawContent')).toBe(true);
+ expect(findBlobContent().props('activeViewer')).toEqual({
+ fileType: 'markup',
+ tooLarge: false,
+ type: 'rich',
+ renderError: null,
+ });
+ });
+
+    it('updates the viewer type when a viewer-changed event is emitted', async () => {
+ expect(findBlobContent().props('activeViewer')).toEqual(
+ expect.objectContaining({
+ type: 'rich',
+ }),
+ );
+ expect(findBlobHeader().props('activeViewerType')).toEqual('rich');
+
+ findBlobHeader().vm.$emit('viewer-changed', 'simple');
+ await nextTick();
+
+ expect(findBlobHeader().props('activeViewerType')).toEqual('simple');
+ expect(findBlobContent().props('activeViewer')).toEqual(
+ expect.objectContaining({
+ type: 'simple',
+ }),
+ );
+ });
});
- it('renders a BlobContent component', () => {
- expect(findBlobContent().exists()).toBe(true);
+ describe('BlobHeader action slot', () => {
+ it('renders BlobHeaderEdit button in simple viewer', async () => {
+ fullFactory({
+ mockData: { blobInfo: simpleMockData },
+ stubs: {
+ BlobContent: true,
+ },
+ });
+ await nextTick();
+ expect(findBlobHeaderEdit().props('editPath')).toEqual('some_file.js/edit');
+ });
- expect(findBlobContent().props('loading')).toEqual(false);
- expect(findBlobContent().props('content')).toEqual('raw content');
- expect(findBlobContent().props('isRawContent')).toBe(true);
- expect(findBlobContent().props('activeViewer')).toEqual({
- fileType: 'text',
- tooLarge: false,
- type: 'text',
+ it('renders BlobHeaderEdit button in rich viewer', async () => {
+ fullFactory({
+ mockData: { blobInfo: richMockData },
+ stubs: {
+ BlobContent: true,
+ },
+ });
+ await nextTick();
+ expect(findBlobHeaderEdit().props('editPath')).toEqual('some_file.js/edit');
});
});
});
diff --git a/spec/frontend/repository/components/table/index_spec.js b/spec/frontend/repository/components/table/index_spec.js
index af263f43d7d..e9e51abaf0f 100644
--- a/spec/frontend/repository/components/table/index_spec.js
+++ b/spec/frontend/repository/components/table/index_spec.js
@@ -55,8 +55,8 @@ describe('Repository table component', () => {
it.each`
path | ref
- ${'/'} | ${'master'}
- ${'app/assets'} | ${'master'}
+ ${'/'} | ${'main'}
+ ${'app/assets'} | ${'main'}
${'/'} | ${'test'}
`('renders table caption for $ref in $path', ({ path, ref }) => {
factory({ path });
diff --git a/spec/frontend/repository/components/table/parent_row_spec.js b/spec/frontend/repository/components/table/parent_row_spec.js
index cf1ed272634..9daae8c36ef 100644
--- a/spec/frontend/repository/components/table/parent_row_spec.js
+++ b/spec/frontend/repository/components/table/parent_row_spec.js
@@ -12,7 +12,7 @@ function factory(path, loadingPath) {
vm = shallowMount(ParentRow, {
propsData: {
- commitRef: 'master',
+ commitRef: 'main',
path,
loadingPath,
},
@@ -32,10 +32,10 @@ describe('Repository parent row component', () => {
it.each`
path | to
- ${'app'} | ${'/-/tree/master/'}
- ${'app/assets'} | ${'/-/tree/master/app'}
- ${'app/assets#/test'} | ${'/-/tree/master/app/assets%23'}
- ${'app/assets#/test/world'} | ${'/-/tree/master/app/assets%23/test'}
+ ${'app'} | ${'/-/tree/main/'}
+ ${'app/assets'} | ${'/-/tree/main/app'}
+ ${'app/assets#/test'} | ${'/-/tree/main/app/assets%23'}
+ ${'app/assets#/test/world'} | ${'/-/tree/main/app/assets%23/test'}
`('renders link in $path to $to', ({ path, to }) => {
factory(path);
@@ -50,7 +50,7 @@ describe('Repository parent row component', () => {
vm.find('td').trigger('click');
expect($router.push).toHaveBeenCalledWith({
- path: '/-/tree/master/app',
+ path: '/-/tree/main/app',
});
});
@@ -62,7 +62,7 @@ describe('Repository parent row component', () => {
vm.find('a').trigger('click');
expect($router.push).not.toHaveBeenCalledWith({
- path: '/-/tree/master/app',
+ path: '/-/tree/main/app',
});
});
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index 3ebffbedcdb..6ba6f993db1 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -30,7 +30,7 @@ function factory(propsData = {}) {
},
});
- vm.setData({ escapedRef: 'master' });
+ vm.setData({ escapedRef: 'main' });
}
describe('Repository table row component', () => {
@@ -115,7 +115,7 @@ describe('Repository table row component', () => {
return vm.vm.$nextTick().then(() => {
expect(vm.find({ ref: 'link' }).props('to')).toEqual({
- path: `/-/tree/master/${encodeURIComponent(path)}`,
+ path: `/-/tree/main/${encodeURIComponent(path)}`,
});
});
});
@@ -130,7 +130,7 @@ describe('Repository table row component', () => {
});
return vm.vm.$nextTick().then(() => {
- expect(vm.find('.tree-item-link').props('to')).toEqual({ path: '/-/tree/master/test%23' });
+ expect(vm.find('.tree-item-link').props('to')).toEqual({ path: '/-/tree/main/test%23' });
});
});
diff --git a/spec/frontend/repository/components/upload_blob_modal_spec.js b/spec/frontend/repository/components/upload_blob_modal_spec.js
index 935ed08f67a..ec85d5666fb 100644
--- a/spec/frontend/repository/components/upload_blob_modal_spec.js
+++ b/spec/frontend/repository/components/upload_blob_modal_spec.js
@@ -20,8 +20,8 @@ jest.mock('~/lib/utils/url_utility', () => ({
const initialProps = {
modalId: 'upload-blob',
commitMessage: 'Upload New File',
- targetBranch: 'master',
- originalBranch: 'master',
+ targetBranch: 'main',
+ originalBranch: 'main',
canPushCode: true,
path: 'new_upload',
};
@@ -111,7 +111,7 @@ describe('UploadBlobModal', () => {
if (canPushCode) {
describe('when changing the branch name', () => {
it('displays the MR toggle', async () => {
- wrapper.setData({ target: 'Not master' });
+ wrapper.setData({ target: 'Not main' });
await wrapper.vm.$nextTick();
diff --git a/spec/frontend/repository/log_tree_spec.js b/spec/frontend/repository/log_tree_spec.js
index ddc95feccd6..a842053caad 100644
--- a/spec/frontend/repository/log_tree_spec.js
+++ b/spec/frontend/repository/log_tree_spec.js
@@ -53,7 +53,7 @@ describe('fetchLogsTree', () => {
client = {
readQuery: () => ({
projectPath: 'gitlab-org/gitlab-foss',
- escapedRef: 'master',
+ escapedRef: 'main',
commits: [],
}),
writeQuery: jest.fn(),
@@ -71,7 +71,7 @@ describe('fetchLogsTree', () => {
it('calls axios get', () =>
fetchLogsTree(client, '', '0', resolver).then(() => {
- expect(axios.get).toHaveBeenCalledWith('/gitlab-org/gitlab-foss/-/refs/master/logs_tree/', {
+ expect(axios.get).toHaveBeenCalledWith('/gitlab-org/gitlab-foss/-/refs/main/logs_tree/', {
params: { format: 'json', offset: '0' },
});
}));
@@ -114,7 +114,7 @@ describe('fetchLogsTree', () => {
query: expect.anything(),
data: {
projectPath: 'gitlab-org/gitlab-foss',
- escapedRef: 'master',
+ escapedRef: 'main',
commits: [
expect.objectContaining({
__typename: 'LogTreeCommit',
diff --git a/spec/frontend/repository/pages/blob_spec.js b/spec/frontend/repository/pages/blob_spec.js
index 3e7ead4ad00..41ab4d616b8 100644
--- a/spec/frontend/repository/pages/blob_spec.js
+++ b/spec/frontend/repository/pages/blob_spec.js
@@ -11,7 +11,9 @@ describe('Repository blob page component', () => {
const path = 'file.js';
beforeEach(() => {
- wrapper = shallowMount(BlobPage, { propsData: { path } });
+ wrapper = shallowMount(BlobPage, {
+ propsData: { path, projectPath: 'some/path' },
+ });
});
afterEach(() => {
diff --git a/spec/frontend/repository/router_spec.js b/spec/frontend/repository/router_spec.js
index 3354b2315fc..bb82fa706fd 100644
--- a/spec/frontend/repository/router_spec.js
+++ b/spec/frontend/repository/router_spec.js
@@ -5,14 +5,14 @@ import createRouter from '~/repository/router';
describe('Repository router spec', () => {
it.each`
- path | branch | component | componentName
- ${'/'} | ${'master'} | ${IndexPage} | ${'IndexPage'}
- ${'/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
- ${'/tree/feat(test)'} | ${'feat(test)'} | ${TreePage} | ${'TreePage'}
- ${'/-/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
- ${'/-/tree/master/app/assets'} | ${'master'} | ${TreePage} | ${'TreePage'}
- ${'/-/tree/123/app/assets'} | ${'master'} | ${null} | ${'null'}
- ${'/-/blob/master/file.md'} | ${'master'} | ${BlobPage} | ${'BlobPage'}
+ path | branch | component | componentName
+ ${'/'} | ${'main'} | ${IndexPage} | ${'IndexPage'}
+ ${'/tree/main'} | ${'main'} | ${TreePage} | ${'TreePage'}
+ ${'/tree/feat(test)'} | ${'feat(test)'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/main'} | ${'main'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/main/app/assets'} | ${'main'} | ${TreePage} | ${'TreePage'}
+ ${'/-/tree/123/app/assets'} | ${'main'} | ${null} | ${'null'}
+ ${'/-/blob/main/file.md'} | ${'main'} | ${BlobPage} | ${'BlobPage'}
`('sets component as $componentName for path "$path"', ({ path, component, branch }) => {
const router = createRouter('', branch);
diff --git a/spec/frontend/repository/utils/title_spec.js b/spec/frontend/repository/utils/title_spec.js
index a1213c13be8..d5206bdea92 100644
--- a/spec/frontend/repository/utils/title_spec.js
+++ b/spec/frontend/repository/utils/title_spec.js
@@ -8,9 +8,9 @@ describe('setTitle', () => {
${'app/assets'} | ${'app/assets'}
${'app/assets/javascripts'} | ${'app/assets/javascripts'}
`('sets document title as $title for $path', ({ path, title }) => {
- setTitle(path, 'master', 'GitLab Org / GitLab');
+ setTitle(path, 'main', 'GitLab Org / GitLab');
- expect(document.title).toEqual(`${title} · master · GitLab Org / GitLab · GitLab`);
+ expect(document.title).toEqual(`${title} · main · GitLab Org / GitLab · GitLab`);
});
});
diff --git a/spec/frontend/runner/components/runner_type_badge_spec.js b/spec/frontend/runner/components/runner_type_badge_spec.js
new file mode 100644
index 00000000000..8e52d3398bd
--- /dev/null
+++ b/spec/frontend/runner/components/runner_type_badge_spec.js
@@ -0,0 +1,40 @@
+import { GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import RunnerTypeBadge from '~/runner/components/runner_type_badge.vue';
+import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
+
+describe('RunnerTypeBadge', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(RunnerTypeBadge, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ type | text | variant
+ ${INSTANCE_TYPE} | ${'shared'} | ${'success'}
+ ${GROUP_TYPE} | ${'group'} | ${'success'}
+ ${PROJECT_TYPE} | ${'specific'} | ${'info'}
+  `('displays $type runner as "$text" with a $variant variant', ({ type, text, variant }) => {
+ createComponent({ props: { type } });
+
+ expect(findBadge().text()).toBe(text);
+ expect(findBadge().props('variant')).toBe(variant);
+ });
+
+ it('does not display a badge when type is unknown', () => {
+ createComponent({ props: { type: 'AN_UNKNOWN_VALUE' } });
+
+ expect(findBadge().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/runner/runner_detail/runner_detail_app_spec.js b/spec/frontend/runner/runner_detail/runner_detail_app_spec.js
deleted file mode 100644
index 5caa37c8cb3..00000000000
--- a/spec/frontend/runner/runner_detail/runner_detail_app_spec.js
+++ /dev/null
@@ -1,29 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import RunnerDetailsApp from '~/runner/runner_details/runner_details_app.vue';
-
-const mockRunnerId = '55';
-
-describe('RunnerDetailsApp', () => {
- let wrapper;
-
- const createComponent = (props) => {
- wrapper = shallowMount(RunnerDetailsApp, {
- propsData: {
- runnerId: mockRunnerId,
- ...props,
- },
- });
- };
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('displays the runner id', () => {
- expect(wrapper.text()).toContain('Runner #55');
- });
-});
diff --git a/spec/frontend/runner/runner_detail/runner_details_app_spec.js b/spec/frontend/runner/runner_detail/runner_details_app_spec.js
new file mode 100644
index 00000000000..c61cb647ae6
--- /dev/null
+++ b/spec/frontend/runner/runner_detail/runner_details_app_spec.js
@@ -0,0 +1,71 @@
+import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import RunnerTypeBadge from '~/runner/components/runner_type_badge.vue';
+import { INSTANCE_TYPE } from '~/runner/constants';
+import getRunnerQuery from '~/runner/graphql/get_runner.query.graphql';
+import RunnerDetailsApp from '~/runner/runner_details/runner_details_app.vue';
+
+const mockRunnerId = '55';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('RunnerDetailsApp', () => {
+ let wrapper;
+ let mockRunnerQuery;
+
+ const findRunnerTypeBadge = () => wrapper.findComponent(RunnerTypeBadge);
+
+ const createComponentWithApollo = ({ props = {}, mountFn = shallowMount } = {}) => {
+ const handlers = [[getRunnerQuery, mockRunnerQuery]];
+
+ wrapper = mountFn(RunnerDetailsApp, {
+ localVue,
+ apolloProvider: createMockApollo(handlers),
+ propsData: {
+ runnerId: mockRunnerId,
+ ...props,
+ },
+ });
+
+ return waitForPromises();
+ };
+
+ beforeEach(async () => {
+ mockRunnerQuery = jest.fn().mockResolvedValue({
+ data: {
+ runner: {
+ id: `gid://gitlab/Ci::Runner/${mockRunnerId}`,
+ runnerType: INSTANCE_TYPE,
+ __typename: 'CiRunner',
+ },
+ },
+ });
+ });
+
+ afterEach(() => {
+ mockRunnerQuery.mockReset();
+ wrapper.destroy();
+ });
+
+  it('requests the runner with the correct GraphQL ID', async () => {
+ await createComponentWithApollo();
+
+ expect(mockRunnerQuery).toHaveBeenCalledWith({ id: `gid://gitlab/Ci::Runner/${mockRunnerId}` });
+ });
+
+ it('displays the runner id', async () => {
+ await createComponentWithApollo();
+
+ expect(wrapper.text()).toContain('Runner #55');
+ });
+
+ it('displays the runner type', async () => {
+ await createComponentWithApollo({ mountFn: mount });
+
+ expect(findRunnerTypeBadge().text()).toBe('shared');
+ });
+});
diff --git a/spec/frontend/security_configuration/components/feature_card_spec.js b/spec/frontend/security_configuration/components/feature_card_spec.js
new file mode 100644
index 00000000000..c69e135012e
--- /dev/null
+++ b/spec/frontend/security_configuration/components/feature_card_spec.js
@@ -0,0 +1,245 @@
+import { GlIcon } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import FeatureCard from '~/security_configuration/components/feature_card.vue';
+import ManageViaMr from '~/vue_shared/security_configuration/components/manage_via_mr.vue';
+import { makeFeature } from './utils';
+
+describe('FeatureCard component', () => {
+ let feature;
+ let wrapper;
+
+ const createComponent = (propsData) => {
+ wrapper = extendedWrapper(
+ mount(FeatureCard, {
+ propsData,
+ stubs: {
+ ManageViaMr: true,
+ },
+ }),
+ );
+ };
+
+ const findLinks = ({ text, href }) =>
+ wrapper.findAll(`a[href="${href}"]`).filter((link) => link.text() === text);
+
+ const findEnableLinks = () =>
+ findLinks({
+ text: `Enable ${feature.shortName ?? feature.name}`,
+ href: feature.configurationPath,
+ });
+ const findConfigureLinks = () =>
+ findLinks({
+ text: `Configure ${feature.shortName ?? feature.name}`,
+ href: feature.configurationPath,
+ });
+ const findManageViaMr = () => wrapper.findComponent(ManageViaMr);
+ const findConfigGuideLinks = () =>
+ findLinks({ text: 'Configuration guide', href: feature.configurationHelpPath });
+
+ const findSecondarySection = () => wrapper.findByTestId('secondary-feature');
+
+ const expectAction = (action) => {
+ const expectEnableAction = action === 'enable';
+ const expectConfigureAction = action === 'configure';
+ const expectCreateMrAction = action === 'create-mr';
+ const expectGuideAction = action === 'guide';
+
+ const enableLinks = findEnableLinks();
+ expect(enableLinks.exists()).toBe(expectEnableAction);
+ if (expectEnableAction) {
+ expect(enableLinks).toHaveLength(1);
+ expect(enableLinks.at(0).props('category')).toBe('primary');
+ }
+
+ const configureLinks = findConfigureLinks();
+ expect(configureLinks.exists()).toBe(expectConfigureAction);
+ if (expectConfigureAction) {
+ expect(configureLinks).toHaveLength(1);
+ expect(configureLinks.at(0).props('category')).toBe('secondary');
+ }
+
+ const manageViaMr = findManageViaMr();
+ expect(manageViaMr.exists()).toBe(expectCreateMrAction);
+ if (expectCreateMrAction) {
+ expect(manageViaMr.props('feature')).toBe(feature);
+ }
+
+ const configGuideLinks = findConfigGuideLinks();
+ expect(configGuideLinks.exists()).toBe(expectGuideAction);
+ if (expectGuideAction) {
+ expect(configGuideLinks).toHaveLength(1);
+ }
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ feature = undefined;
+ });
+
+ describe('basic structure', () => {
+ beforeEach(() => {
+ feature = makeFeature();
+ createComponent({ feature });
+ });
+
+ it('shows the name', () => {
+ expect(wrapper.text()).toContain(feature.name);
+ });
+
+ it('shows the description', () => {
+ expect(wrapper.text()).toContain(feature.description);
+ });
+
+ it('shows the help link', () => {
+ const links = findLinks({ text: 'Learn more', href: feature.helpPath });
+ expect(links.exists()).toBe(true);
+ expect(links).toHaveLength(1);
+ });
+ });
+
+ describe('status', () => {
+ describe.each`
+ context | available | configured | expectedStatus
+ ${'a configured feature'} | ${true} | ${true} | ${'Enabled'}
+ ${'an unconfigured feature'} | ${true} | ${false} | ${'Not enabled'}
+ ${'an available feature with unknown status'} | ${true} | ${undefined} | ${''}
+ ${'an unavailable feature'} | ${false} | ${false} | ${'Available with Ultimate'}
+ ${'an unavailable feature with unknown status'} | ${false} | ${undefined} | ${'Available with Ultimate'}
+ `('given $context', ({ available, configured, expectedStatus }) => {
+ beforeEach(() => {
+ feature = makeFeature({ available, configured });
+ createComponent({ feature });
+ });
+
+ it(`shows the status "${expectedStatus}"`, () => {
+ expect(wrapper.findByTestId('feature-status').text()).toBe(expectedStatus);
+ });
+
+ if (configured) {
+ it('shows a success icon', () => {
+ expect(wrapper.findComponent(GlIcon).props('name')).toBe('check-circle-filled');
+ });
+ }
+ });
+ });
+
+ describe('actions', () => {
+ describe.each`
+ context | available | configured | configurationPath | canEnableByMergeRequest | action
+ ${'unavailable'} | ${false} | ${false} | ${null} | ${false} | ${null}
+ ${'available'} | ${true} | ${false} | ${null} | ${false} | ${'guide'}
+ ${'configured'} | ${true} | ${true} | ${null} | ${false} | ${'guide'}
+ ${'available, can enable by MR'} | ${true} | ${false} | ${null} | ${true} | ${'create-mr'}
+ ${'configured, can enable by MR'} | ${true} | ${true} | ${null} | ${true} | ${'guide'}
+ ${'available with config path'} | ${true} | ${false} | ${'foo'} | ${false} | ${'enable'}
+ ${'available with config path, can enable by MR'} | ${true} | ${false} | ${'foo'} | ${true} | ${'enable'}
+ ${'configured with config path'} | ${true} | ${true} | ${'foo'} | ${false} | ${'configure'}
+ ${'configured with config path, can enable by MR'} | ${true} | ${true} | ${'foo'} | ${true} | ${'configure'}
+ `(
+ 'given $context feature',
+ ({ available, configured, configurationPath, canEnableByMergeRequest, action }) => {
+ beforeEach(() => {
+ feature = makeFeature({
+ available,
+ configured,
+ configurationPath,
+ canEnableByMergeRequest,
+ });
+ createComponent({ feature });
+ });
+
+ it(`shows ${action} action`, () => {
+ expectAction(action);
+ });
+ },
+ );
+ });
+
+ describe('secondary feature', () => {
+ describe('basic structure', () => {
+ describe('given no secondary', () => {
+ beforeEach(() => {
+ feature = makeFeature();
+ createComponent({ feature });
+ });
+
+ it('does not show a secondary feature', () => {
+ expect(findSecondarySection().exists()).toBe(false);
+ });
+ });
+
+ describe('given a secondary', () => {
+ beforeEach(() => {
+ feature = makeFeature({
+ secondary: {
+ name: 'secondary name',
+ description: 'secondary description',
+ configurationText: 'manage secondary',
+ },
+ });
+ createComponent({ feature });
+ });
+
+ it('shows a secondary feature', () => {
+ const secondaryText = findSecondarySection().text();
+ expect(secondaryText).toContain(feature.secondary.name);
+ expect(secondaryText).toContain(feature.secondary.description);
+ });
+ });
+ });
+
+ describe('actions', () => {
+ describe('given available feature with secondary', () => {
+ beforeEach(() => {
+ feature = makeFeature({
+ available: true,
+ secondary: {
+ name: 'secondary name',
+ description: 'secondary description',
+ configurationPath: '/secondary',
+ configurationText: 'manage secondary',
+ },
+ });
+ createComponent({ feature });
+ });
+
+ it('shows the secondary action', () => {
+ const links = findLinks({
+ text: feature.secondary.configurationText,
+ href: feature.secondary.configurationPath,
+ });
+ expect(links.exists()).toBe(true);
+ expect(links).toHaveLength(1);
+ });
+ });
+
+ describe.each`
+ context | available | secondaryConfigPath
+ ${'available feature without config path'} | ${true} | ${null}
+ ${'unavailable feature with config path'} | ${false} | ${'/secondary'}
+ `('given $context', ({ available, secondaryConfigPath }) => {
+ beforeEach(() => {
+ feature = makeFeature({
+ available,
+ secondary: {
+ name: 'secondary name',
+ description: 'secondary description',
+ configurationPath: secondaryConfigPath,
+ configurationText: 'manage secondary',
+ },
+ });
+ createComponent({ feature });
+ });
+
+ it('does not show the secondary action', () => {
+ const links = findLinks({
+ text: feature.secondary.configurationText,
+ href: feature.secondary.configurationPath,
+ });
+ expect(links.exists()).toBe(false);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/security_configuration/components/utils.js b/spec/frontend/security_configuration/components/utils.js
new file mode 100644
index 00000000000..a4e992afb15
--- /dev/null
+++ b/spec/frontend/security_configuration/components/utils.js
@@ -0,0 +1,8 @@
+export const makeFeature = (changes = {}) => ({
+ name: 'Foo Feature',
+ description: 'Lorem ipsum Foo',
+ type: 'foo_scanning',
+ helpPath: '/help/foo',
+ configurationHelpPath: '/help/foo#configure',
+ ...changes,
+});
diff --git a/spec/frontend/security_configuration/configuration_table_spec.js b/spec/frontend/security_configuration/configuration_table_spec.js
index a1789052c92..fbd72265c4b 100644
--- a/spec/frontend/security_configuration/configuration_table_spec.js
+++ b/spec/frontend/security_configuration/configuration_table_spec.js
@@ -1,7 +1,7 @@
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import ConfigurationTable from '~/security_configuration/components/configuration_table.vue';
-import { scanners, UPGRADE_CTA } from '~/security_configuration/components/scanners_constants';
+import { scanners, UPGRADE_CTA } from '~/security_configuration/components/constants';
import {
REPORT_TYPE_SAST,
@@ -12,7 +12,13 @@ describe('Configuration Table Component', () => {
let wrapper;
const createComponent = () => {
- wrapper = extendedWrapper(mount(ConfigurationTable, {}));
+ wrapper = extendedWrapper(
+ mount(ConfigurationTable, {
+ provide: {
+ projectPath: 'testProjectPath',
+ },
+ }),
+ );
};
const findHelpLinks = () => wrapper.findAll('[data-testid="help-link"]');
@@ -30,8 +36,10 @@ describe('Configuration Table Component', () => {
expect(wrapper.text()).toContain(scanner.name);
expect(wrapper.text()).toContain(scanner.description);
if (scanner.type === REPORT_TYPE_SAST) {
- expect(wrapper.findByTestId(scanner.type).text()).toBe('Configure via merge request');
- } else if (scanner.type !== REPORT_TYPE_SECRET_DETECTION) {
+ expect(wrapper.findByTestId(scanner.type).text()).toBe('Configure via Merge Request');
+ } else if (scanner.type === REPORT_TYPE_SECRET_DETECTION) {
+ expect(wrapper.findByTestId(scanner.type).exists()).toBe(false);
+ } else {
expect(wrapper.findByTestId(scanner.type).text()).toMatchInterpolatedText(UPGRADE_CTA);
}
});
diff --git a/spec/frontend/security_configuration/manage_sast_spec.js b/spec/frontend/security_configuration/manage_sast_spec.js
deleted file mode 100644
index 15a57210246..00000000000
--- a/spec/frontend/security_configuration/manage_sast_spec.js
+++ /dev/null
@@ -1,136 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import { redirectTo } from '~/lib/utils/url_utility';
-import ManageSast from '~/security_configuration/components/manage_sast.vue';
-import configureSastMutation from '~/security_configuration/graphql/configure_sast.mutation.graphql';
-
-jest.mock('~/lib/utils/url_utility', () => ({
- redirectTo: jest.fn(),
-}));
-
-Vue.use(VueApollo);
-
-describe('Manage Sast Component', () => {
- let wrapper;
-
- const findButton = () => wrapper.findComponent(GlButton);
- const successHandler = async () => {
- return {
- data: {
- configureSast: {
- successPath: 'testSuccessPath',
- errors: [],
- __typename: 'ConfigureSastPayload',
- },
- },
- };
- };
-
- const noSuccessPathHandler = async () => {
- return {
- data: {
- configureSast: {
- successPath: '',
- errors: [],
- __typename: 'ConfigureSastPayload',
- },
- },
- };
- };
-
- const errorHandler = async () => {
- return {
- data: {
- configureSast: {
- successPath: 'testSuccessPath',
- errors: ['foo'],
- __typename: 'ConfigureSastPayload',
- },
- },
- };
- };
-
- const pendingHandler = () => new Promise(() => {});
-
- function createMockApolloProvider(handler) {
- const requestHandlers = [[configureSastMutation, handler]];
-
- return createMockApollo(requestHandlers);
- }
-
- function createComponent(options = {}) {
- const { mockApollo } = options;
- wrapper = extendedWrapper(
- mount(ManageSast, {
- apolloProvider: mockApollo,
- }),
- );
- }
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('should render Button with correct text', () => {
- createComponent();
- expect(findButton().text()).toContain('Configure via merge request');
- });
-
- describe('given a successful response', () => {
- beforeEach(() => {
- const mockApollo = createMockApolloProvider(successHandler);
- createComponent({ mockApollo });
- });
-
- it('should call redirect helper with correct value', async () => {
- await wrapper.trigger('click');
- await waitForPromises();
- expect(redirectTo).toHaveBeenCalledTimes(1);
- expect(redirectTo).toHaveBeenCalledWith('testSuccessPath');
- // This is done for UX reasons. If the loading prop is set to false
- // on success, then there's a period where the button is clickable
- // again. Instead, we want the button to display a loading indicator
- // for the remainder of the lifetime of the page (i.e., until the
- // browser can start painting the new page it's been redirected to).
- expect(findButton().props().loading).toBe(true);
- });
- });
-
- describe('given a pending response', () => {
- beforeEach(() => {
- const mockApollo = createMockApolloProvider(pendingHandler);
- createComponent({ mockApollo });
- });
-
- it('renders spinner correctly', async () => {
- expect(findButton().props('loading')).toBe(false);
- await wrapper.trigger('click');
- await waitForPromises();
- expect(findButton().props('loading')).toBe(true);
- });
- });
-
- describe.each`
- handler | message
- ${noSuccessPathHandler} | ${'SAST merge request creation mutation failed'}
- ${errorHandler} | ${'foo'}
- `('given an error response', ({ handler, message }) => {
- beforeEach(() => {
- const mockApollo = createMockApolloProvider(handler);
- createComponent({ mockApollo });
- });
-
- it('should catch and emit error', async () => {
- await wrapper.trigger('click');
- await waitForPromises();
- expect(wrapper.emitted('error')).toEqual([[message]]);
- expect(findButton().props('loading')).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/security_configuration/upgrade_spec.js b/spec/frontend/security_configuration/upgrade_spec.js
index 1f0cc795fc5..20bb38aa469 100644
--- a/spec/frontend/security_configuration/upgrade_spec.js
+++ b/spec/frontend/security_configuration/upgrade_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { UPGRADE_CTA } from '~/security_configuration/components/scanners_constants';
+import { UPGRADE_CTA } from '~/security_configuration/components/constants';
import Upgrade from '~/security_configuration/components/upgrade.vue';
const TEST_URL = 'http://www.example.test';
diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
index 403f9509f84..82fc06e1166 100644
--- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
@@ -23,7 +23,6 @@ describe('SetStatusModalWrapper', () => {
currentEmoji: defaultEmoji,
currentMessage: defaultMessage,
defaultEmoji,
- canSetUserAvailability: true,
};
const createComponent = (props = {}) => {
@@ -278,16 +277,4 @@ describe('SetStatusModalWrapper', () => {
});
});
});
-
- describe('with canSetUserAvailability=false', () => {
- beforeEach(async () => {
- mockEmoji = await initEmojiMock();
- wrapper = createComponent({ canSetUserAvailability: false });
- return initModal();
- });
-
- it('hides the set availability checkbox', () => {
- expect(findAvailabilityCheckbox().exists()).toBe(false);
- });
- });
});
diff --git a/spec/frontend/sidebar/assignees_realtime_spec.js b/spec/frontend/sidebar/assignees_realtime_spec.js
index f0a6fa40d67..ecf33d6de37 100644
--- a/spec/frontend/sidebar/assignees_realtime_spec.js
+++ b/spec/frontend/sidebar/assignees_realtime_spec.js
@@ -1,41 +1,44 @@
-import ActionCable from '@rails/actioncable';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import AssigneesRealtime from '~/sidebar/components/assignees/assignees_realtime.vue';
-import { assigneesQueries } from '~/sidebar/constants';
+import issuableAssigneesSubscription from '~/sidebar/queries/issuable_assignees.subscription.graphql';
import SidebarMediator from '~/sidebar/sidebar_mediator';
-import Mock from './mock_data';
+import getIssueAssigneesQuery from '~/vue_shared/components/sidebar/queries/get_issue_assignees.query.graphql';
+import Mock, { issuableQueryResponse, subscriptionNullResponse } from './mock_data';
-jest.mock('@rails/actioncable', () => {
- const mockConsumer = {
- subscriptions: { create: jest.fn().mockReturnValue({ unsubscribe: jest.fn() }) },
- };
- return {
- createConsumer: jest.fn().mockReturnValue(mockConsumer),
- };
-});
+const localVue = createLocalVue();
+localVue.use(VueApollo);
describe('Assignees Realtime', () => {
let wrapper;
let mediator;
+ let fakeApollo;
+
+ const issuableQueryHandler = jest.fn().mockResolvedValue(issuableQueryResponse);
+ const subscriptionInitialHandler = jest.fn().mockResolvedValue(subscriptionNullResponse);
- const createComponent = (issuableType = 'issue') => {
+ const createComponent = ({
+ issuableType = 'issue',
+ issuableId = 1,
+ subscriptionHandler = subscriptionInitialHandler,
+ } = {}) => {
+ fakeApollo = createMockApollo([
+ [getIssueAssigneesQuery, issuableQueryHandler],
+ [issuableAssigneesSubscription, subscriptionHandler],
+ ]);
wrapper = shallowMount(AssigneesRealtime, {
propsData: {
- issuableIid: '1',
- mediator,
- projectPath: 'path/to/project',
issuableType,
- },
- mocks: {
- $apollo: {
- query: assigneesQueries[issuableType].query,
- queries: {
- workspace: {
- refetch: jest.fn(),
- },
- },
+ issuableId,
+ queryVariables: {
+ issuableIid: '1',
+ projectPath: 'path/to/project',
},
+ mediator,
},
+ apolloProvider: fakeApollo,
+ localVue,
});
};
@@ -45,59 +48,24 @@ describe('Assignees Realtime', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
+ fakeApollo = null;
SidebarMediator.singleton = null;
});
- describe('when handleFetchResult is called from smart query', () => {
- it('sets assignees to the store', () => {
- const data = {
- workspace: {
- issuable: {
- assignees: {
- nodes: [{ id: 'gid://gitlab/Environments/123', avatarUrl: 'url' }],
- },
- },
- },
- };
- const expected = [{ id: 123, avatar_url: 'url', avatarUrl: 'url' }];
- createComponent();
+ it('calls the query with correct variables', () => {
+ createComponent();
- wrapper.vm.handleFetchResult({ data });
-
- expect(mediator.store.assignees).toEqual(expected);
+ expect(issuableQueryHandler).toHaveBeenCalledWith({
+ issuableIid: '1',
+ projectPath: 'path/to/project',
});
});
- describe('when mounted', () => {
- it('calls create subscription', () => {
- const cable = ActionCable.createConsumer();
-
- createComponent();
-
- return wrapper.vm.$nextTick().then(() => {
- expect(cable.subscriptions.create).toHaveBeenCalledTimes(1);
- expect(cable.subscriptions.create).toHaveBeenCalledWith(
- {
- channel: 'IssuesChannel',
- iid: wrapper.props('issuableIid'),
- project_path: wrapper.props('projectPath'),
- },
- { received: wrapper.vm.received },
- );
- });
- });
- });
-
- describe('when subscription is recieved', () => {
- it('refetches the GraphQL project query', () => {
- createComponent();
-
- wrapper.vm.received({ event: 'updated' });
+ it('calls the subscription with correct variable for issue', () => {
+ createComponent();
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.$apollo.queries.workspace.refetch).toHaveBeenCalledTimes(1);
- });
+ expect(subscriptionInitialHandler).toHaveBeenCalledWith({
+ issuableId: 'gid://gitlab/Issue/1',
});
});
});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
index 824f6d49c65..0e052abffeb 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
@@ -1,27 +1,20 @@
import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { cloneDeep } from 'lodash';
import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
-import searchUsersQuery from '~/graphql_shared/queries/users_search.query.graphql';
import { IssuableType } from '~/issue_show/constants';
import SidebarAssigneesRealtime from '~/sidebar/components/assignees/assignees_realtime.vue';
import IssuableAssignees from '~/sidebar/components/assignees/issuable_assignees.vue';
import SidebarAssigneesWidget from '~/sidebar/components/assignees/sidebar_assignees_widget.vue';
import SidebarInviteMembers from '~/sidebar/components/assignees/sidebar_invite_members.vue';
import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
-import { ASSIGNEES_DEBOUNCE_DELAY } from '~/sidebar/constants';
-import MultiSelectDropdown from '~/vue_shared/components/sidebar/multiselect_dropdown.vue';
-import getIssueParticipantsQuery from '~/vue_shared/components/sidebar/queries/get_issue_participants.query.graphql';
+import getIssueAssigneesQuery from '~/vue_shared/components/sidebar/queries/get_issue_assignees.query.graphql';
import updateIssueAssigneesMutation from '~/vue_shared/components/sidebar/queries/update_issue_assignees.mutation.graphql';
-import {
- issuableQueryResponse,
- searchQueryResponse,
- updateIssueAssigneesMutationResponse,
-} from '../../mock_data';
+import UserSelect from '~/vue_shared/components/user_select/user_select.vue';
+import { issuableQueryResponse, updateIssueAssigneesMutationResponse } from '../../mock_data';
jest.mock('~/flash');
@@ -50,31 +43,19 @@ describe('Sidebar assignees widget', () => {
const findAssignees = () => wrapper.findComponent(IssuableAssignees);
const findRealtimeAssignees = () => wrapper.findComponent(SidebarAssigneesRealtime);
const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
- const findDropdown = () => wrapper.findComponent(MultiSelectDropdown);
const findInviteMembersLink = () => wrapper.findComponent(SidebarInviteMembers);
- const findSearchField = () => wrapper.findComponent(GlSearchBoxByType);
-
- const findParticipantsLoading = () => wrapper.find('[data-testid="loading-participants"]');
- const findSelectedParticipants = () => wrapper.findAll('[data-testid="selected-participant"]');
- const findUnselectedParticipants = () =>
- wrapper.findAll('[data-testid="unselected-participant"]');
- const findCurrentUser = () => wrapper.findAll('[data-testid="current-user"]');
- const findUnassignLink = () => wrapper.find('[data-testid="unassign"]');
- const findEmptySearchResults = () => wrapper.find('[data-testid="empty-results"]');
+ const findUserSelect = () => wrapper.findComponent(UserSelect);
const expandDropdown = () => wrapper.vm.$refs.toggle.expand();
const createComponent = ({
- search = '',
issuableQueryHandler = jest.fn().mockResolvedValue(issuableQueryResponse),
- searchQueryHandler = jest.fn().mockResolvedValue(searchQueryResponse),
updateIssueAssigneesMutationHandler = updateIssueAssigneesMutationSuccess,
props = {},
provide = {},
} = {}) => {
fakeApollo = createMockApollo([
- [getIssueParticipantsQuery, issuableQueryHandler],
- [searchUsersQuery, searchQueryHandler],
+ [getIssueAssigneesQuery, issuableQueryHandler],
[updateIssueAssigneesMutation, updateIssueAssigneesMutationHandler],
]);
wrapper = shallowMount(SidebarAssigneesWidget, {
@@ -82,15 +63,11 @@ describe('Sidebar assignees widget', () => {
apolloProvider: fakeApollo,
propsData: {
iid: '1',
+ issuableId: 0,
fullPath: '/mygroup/myProject',
+ allowMultipleAssignees: true,
...props,
},
- data() {
- return {
- search,
- selected: [],
- };
- },
provide: {
canUpdate: true,
rootPath: '/',
@@ -98,7 +75,7 @@ describe('Sidebar assignees widget', () => {
},
stubs: {
SidebarEditableItem,
- MultiSelectDropdown,
+ UserSelect,
GlSearchBoxByType,
GlDropdown,
},
@@ -148,19 +125,6 @@ describe('Sidebar assignees widget', () => {
expect(findEditableItem().props('title')).toBe('Assignee');
});
-
- describe('when expanded', () => {
- it('renders a loading spinner if participants are loading', () => {
- createComponent({
- props: {
- initialAssignees,
- },
- });
- expandDropdown();
-
- expect(findParticipantsLoading().exists()).toBe(true);
- });
- });
});
describe('without passed initial assignees', () => {
@@ -198,7 +162,7 @@ describe('Sidebar assignees widget', () => {
findAssignees().vm.$emit('assign-self');
expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
- assigneeUsernames: 'root',
+ assigneeUsernames: ['root'],
fullPath: '/mygroup/myProject',
iid: '1',
});
@@ -220,7 +184,7 @@ describe('Sidebar assignees widget', () => {
findAssignees().vm.$emit('assign-self');
expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
- assigneeUsernames: 'root',
+ assigneeUsernames: ['root'],
fullPath: '/mygroup/myProject',
iid: '1',
});
@@ -245,16 +209,20 @@ describe('Sidebar assignees widget', () => {
]);
});
- it('renders current user if they are not in participants or assignees', async () => {
- gon.current_username = 'random';
- gon.current_user_fullname = 'Mr Random';
- gon.current_user_avatar_url = '/random';
-
+ it('does not trigger mutation or fire event when editing and exiting without making changes', async () => {
createComponent();
+
await waitForPromises();
- expandDropdown();
- expect(findCurrentUser().exists()).toBe(true);
+ findEditableItem().vm.$emit('open');
+
+ await waitForPromises();
+
+ findEditableItem().vm.$emit('close');
+
+ expect(findEditableItem().props('isDirty')).toBe(false);
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledTimes(0);
+ expect(wrapper.emitted('assignees-updated')).toBe(undefined);
});
describe('when expanded', () => {
@@ -264,27 +232,18 @@ describe('Sidebar assignees widget', () => {
expandDropdown();
});
- it('collapses the widget on multiselect dropdown toggle event', async () => {
- findDropdown().vm.$emit('toggle');
+ it('collapses the widget on user select toggle event', async () => {
+ findUserSelect().vm.$emit('toggle');
await nextTick();
- expect(findDropdown().isVisible()).toBe(false);
+ expect(findUserSelect().isVisible()).toBe(false);
});
- it('renders participants list with correct amount of selected and unselected', async () => {
- expect(findSelectedParticipants()).toHaveLength(1);
- expect(findUnselectedParticipants()).toHaveLength(2);
- });
-
- it('does not render current user if they are in participants', () => {
- expect(findCurrentUser().exists()).toBe(false);
- });
-
- it('unassigns all participants when clicking on `Unassign`', () => {
- findUnassignLink().vm.$emit('click');
+ it('calls an update mutation with correct variables on User Select input event', () => {
+ findUserSelect().vm.$emit('input', [{ username: 'root' }]);
findEditableItem().vm.$emit('close');
expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
- assigneeUsernames: [],
+ assigneeUsernames: ['root'],
fullPath: '/mygroup/myProject',
iid: '1',
});
@@ -293,68 +252,38 @@ describe('Sidebar assignees widget', () => {
describe('when multiselect is disabled', () => {
beforeEach(async () => {
- createComponent({ props: { multipleAssignees: false } });
+ createComponent({ props: { allowMultipleAssignees: false } });
await waitForPromises();
expandDropdown();
});
- it('adds a single assignee when clicking on unselected user', async () => {
- findUnselectedParticipants().at(0).vm.$emit('click');
+ it('closes a dropdown after User Select input event', async () => {
+ findUserSelect().vm.$emit('input', [{ username: 'root' }]);
expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
assigneeUsernames: ['root'],
fullPath: '/mygroup/myProject',
iid: '1',
});
- });
- it('removes an assignee when clicking on selected user', () => {
- findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
+ await waitForPromises();
- expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
- assigneeUsernames: [],
- fullPath: '/mygroup/myProject',
- iid: '1',
- });
+ expect(findUserSelect().isVisible()).toBe(false);
});
});
describe('when multiselect is enabled', () => {
beforeEach(async () => {
- createComponent({ props: { multipleAssignees: true } });
+ createComponent({ props: { allowMultipleAssignees: true } });
await waitForPromises();
expandDropdown();
});
- it('adds a few assignees after clicking on unselected users and closing a dropdown', () => {
- findUnselectedParticipants().at(0).vm.$emit('click');
- findUnselectedParticipants().at(1).vm.$emit('click');
- findEditableItem().vm.$emit('close');
-
- expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
- assigneeUsernames: ['francina.skiles', 'root', 'johndoe'],
- fullPath: '/mygroup/myProject',
- iid: '1',
- });
- });
-
- it('removes an assignee when clicking on selected user and then closing dropdown', () => {
- findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
-
- findEditableItem().vm.$emit('close');
-
- expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
- assigneeUsernames: [],
- fullPath: '/mygroup/myProject',
- iid: '1',
- });
- });
-
it('does not call a mutation when clicking on participants until dropdown is closed', () => {
- findUnselectedParticipants().at(0).vm.$emit('click');
- findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
+ findUserSelect().vm.$emit('input', [{ username: 'root' }]);
expect(updateIssueAssigneesMutationSuccess).not.toHaveBeenCalled();
+ expect(findUserSelect().isVisible()).toBe(true);
});
});
@@ -363,7 +292,7 @@ describe('Sidebar assignees widget', () => {
await waitForPromises();
expandDropdown();
- findUnassignLink().vm.$emit('click');
+ findUserSelect().vm.$emit('input', []);
findEditableItem().vm.$emit('close');
await waitForPromises();
@@ -372,95 +301,6 @@ describe('Sidebar assignees widget', () => {
message: 'An error occurred while updating assignees.',
});
});
-
- describe('when searching', () => {
- it('does not show loading spinner when debounce timer is still running', async () => {
- createComponent({ search: 'roo' });
- await waitForPromises();
- expandDropdown();
-
- expect(findParticipantsLoading().exists()).toBe(false);
- });
-
- it('shows loading spinner when searching for users', async () => {
- createComponent({ search: 'roo' });
- await waitForPromises();
- expandDropdown();
- jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
- await nextTick();
-
- expect(findParticipantsLoading().exists()).toBe(true);
- });
-
- it('renders a list of found users and external participants matching search term', async () => {
- const responseCopy = cloneDeep(issuableQueryResponse);
- responseCopy.data.workspace.issuable.participants.nodes.push({
- id: 'gid://gitlab/User/5',
- avatarUrl: '/someavatar',
- name: 'Roodie',
- username: 'roodie',
- webUrl: '/roodie',
- status: null,
- });
-
- const issuableQueryHandler = jest.fn().mockResolvedValue(responseCopy);
-
- createComponent({ issuableQueryHandler });
- await waitForPromises();
- expandDropdown();
-
- findSearchField().vm.$emit('input', 'roo');
- await nextTick();
-
- jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
- await nextTick();
- await waitForPromises();
-
- expect(findUnselectedParticipants()).toHaveLength(3);
- });
-
- it('renders a list of found users only if no external participants match search term', async () => {
- createComponent({ search: 'roo' });
- await waitForPromises();
- expandDropdown();
- jest.advanceTimersByTime(250);
- await nextTick();
- await waitForPromises();
-
- expect(findUnselectedParticipants()).toHaveLength(2);
- });
-
- it('shows a message about no matches if search returned an empty list', async () => {
- const responseCopy = cloneDeep(searchQueryResponse);
- responseCopy.data.workspace.users.nodes = [];
-
- createComponent({
- search: 'roo',
- searchQueryHandler: jest.fn().mockResolvedValue(responseCopy),
- });
- await waitForPromises();
- expandDropdown();
- jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
- await nextTick();
- await waitForPromises();
-
- expect(findUnselectedParticipants()).toHaveLength(0);
- expect(findEmptySearchResults().exists()).toBe(true);
- });
-
- it('shows an error if search query was rejected', async () => {
- createComponent({ search: 'roo', searchQueryHandler: mockError });
- await waitForPromises();
- expandDropdown();
- jest.advanceTimersByTime(250);
- await nextTick();
- await waitForPromises();
-
- expect(createFlash).toHaveBeenCalledWith({
- message: 'An error occurred while searching users.',
- });
- });
- });
});
describe('when user is not signed in', () => {
@@ -469,11 +309,6 @@ describe('Sidebar assignees widget', () => {
createComponent();
});
- it('does not show current user in the dropdown', () => {
- expandDropdown();
- expect(findCurrentUser().exists()).toBe(false);
- });
-
it('passes signedIn prop as false to IssuableAssignees', () => {
expect(findAssignees().props('signedIn')).toBe(false);
});
@@ -507,17 +342,17 @@ describe('Sidebar assignees widget', () => {
expect(findEditableItem().props('isDirty')).toBe(false);
});
- it('passes truthy `isDirty` prop if selected users list was changed', async () => {
+ it('passes truthy `isDirty` prop after User Select component emitted an input event', async () => {
expandDropdown();
expect(findEditableItem().props('isDirty')).toBe(false);
- findUnselectedParticipants().at(0).vm.$emit('click');
+ findUserSelect().vm.$emit('input', []);
await nextTick();
expect(findEditableItem().props('isDirty')).toBe(true);
});
it('passes falsy `isDirty` prop after dropdown is closed', async () => {
expandDropdown();
- findUnselectedParticipants().at(0).vm.$emit('click');
+ findUserSelect().vm.$emit('input', []);
findEditableItem().vm.$emit('close');
await waitForPromises();
expect(findEditableItem().props('isDirty')).toBe(false);
@@ -530,7 +365,7 @@ describe('Sidebar assignees widget', () => {
expect(findInviteMembersLink().exists()).toBe(false);
});
- it('does not render invite members link if `directlyInviteMembers` and `indirectlyInviteMembers` were not passed', async () => {
+ it('does not render invite members link if `directlyInviteMembers` was not passed', async () => {
createComponent();
await waitForPromises();
expect(findInviteMembersLink().exists()).toBe(false);
@@ -545,14 +380,4 @@ describe('Sidebar assignees widget', () => {
await waitForPromises();
expect(findInviteMembersLink().exists()).toBe(true);
});
-
- it('renders invite members link if `indirectlyInviteMembers` is true', async () => {
- createComponent({
- provide: {
- indirectlyInviteMembers: true,
- },
- });
- await waitForPromises();
- expect(findInviteMembersLink().exists()).toBe(true);
- });
});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js
index 06f7da3d1ab..cfbe7227915 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js
@@ -1,25 +1,14 @@
import { shallowMount } from '@vue/test-utils';
-import InviteMemberModal from '~/invite_member/components/invite_member_modal.vue';
-import InviteMemberTrigger from '~/invite_member/components/invite_member_trigger.vue';
import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
import SidebarInviteMembers from '~/sidebar/components/assignees/sidebar_invite_members.vue';
-const testProjectMembersPath = 'test-path';
-
describe('Sidebar invite members component', () => {
let wrapper;
const findDirectInviteLink = () => wrapper.findComponent(InviteMembersTrigger);
- const findIndirectInviteLink = () => wrapper.findComponent(InviteMemberTrigger);
- const findInviteModal = () => wrapper.findComponent(InviteMemberModal);
- const createComponent = ({ directlyInviteMembers = false } = {}) => {
- wrapper = shallowMount(SidebarInviteMembers, {
- provide: {
- directlyInviteMembers,
- projectMembersPath: testProjectMembersPath,
- },
- });
+ const createComponent = () => {
+ wrapper = shallowMount(SidebarInviteMembers);
};
afterEach(() => {
@@ -28,32 +17,11 @@ describe('Sidebar invite members component', () => {
describe('when directly inviting members', () => {
beforeEach(() => {
- createComponent({ directlyInviteMembers: true });
+ createComponent();
});
it('renders a direct link to project members path', () => {
expect(findDirectInviteLink().exists()).toBe(true);
});
-
- it('does not render invite members trigger and modal components', () => {
- expect(findIndirectInviteLink().exists()).toBe(false);
- expect(findInviteModal().exists()).toBe(false);
- });
- });
-
- describe('when indirectly inviting members', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('does not render a direct link to project members path', () => {
- expect(findDirectInviteLink().exists()).toBe(false);
- });
-
- it('does not render invite members trigger and modal components', () => {
- expect(findIndirectInviteLink().exists()).toBe(true);
- expect(findInviteModal().exists()).toBe(true);
- expect(findInviteModal().props('membersPath')).toBe(testProjectMembersPath);
- });
});
});
diff --git a/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js b/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js
new file mode 100644
index 00000000000..91cbcc6cc27
--- /dev/null
+++ b/spec/frontend/sidebar/components/date/sidebar_date_widget_spec.js
@@ -0,0 +1,183 @@
+import { GlDatepicker } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import SidebarDateWidget from '~/sidebar/components/date/sidebar_date_widget.vue';
+import SidebarFormattedDate from '~/sidebar/components/date/sidebar_formatted_date.vue';
+import SidebarInheritDate from '~/sidebar/components/date/sidebar_inherit_date.vue';
+import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
+import epicStartDateQuery from '~/sidebar/queries/epic_start_date.query.graphql';
+import issueDueDateQuery from '~/sidebar/queries/issue_due_date.query.graphql';
+import { issuableDueDateResponse, issuableStartDateResponse } from '../../mock_data';
+
+jest.mock('~/flash');
+
+Vue.use(VueApollo);
+
+describe('Sidebar date Widget', () => {
+ let wrapper;
+ let fakeApollo;
+ const date = '2021-04-15';
+
+ const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
+ const findPopoverIcon = () => wrapper.find('[data-testid="inherit-date-popover"]');
+ const findDatePicker = () => wrapper.find(GlDatepicker);
+
+ const createComponent = ({
+ dueDateQueryHandler = jest.fn().mockResolvedValue(issuableDueDateResponse()),
+ startDateQueryHandler = jest.fn().mockResolvedValue(issuableStartDateResponse()),
+ canInherit = false,
+ dateType = undefined,
+ issuableType = 'issue',
+ } = {}) => {
+ fakeApollo = createMockApollo([
+ [issueDueDateQuery, dueDateQueryHandler],
+ [epicStartDateQuery, startDateQueryHandler],
+ ]);
+
+ wrapper = shallowMount(SidebarDateWidget, {
+ apolloProvider: fakeApollo,
+ provide: {
+ canUpdate: true,
+ },
+ propsData: {
+ fullPath: 'group/project',
+ iid: '1',
+ issuableType,
+ canInherit,
+ dateType,
+ },
+ stubs: {
+ SidebarEditableItem,
+ GlDatepicker,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ it('passes a `loading` prop as true to editable item when query is loading', () => {
+ createComponent();
+
+ expect(findEditableItem().props('loading')).toBe(true);
+ });
+
+ it('dateType is due date by default', () => {
+ createComponent();
+
+ expect(wrapper.text()).toContain('Due date');
+ });
+
+ it('does not display icon popover by default', () => {
+ createComponent();
+
+ expect(findPopoverIcon().exists()).toBe(false);
+ });
+
+ it('does not render GlDatePicker', () => {
+ createComponent();
+
+ expect(findDatePicker().exists()).toBe(false);
+ });
+
+ describe('when issuable has no due date', () => {
+ beforeEach(async () => {
+ createComponent({
+ dueDateQueryHandler: jest.fn().mockResolvedValue(issuableDueDateResponse(null)),
+ });
+ await waitForPromises();
+ });
+
+ it('passes a `loading` prop as false to editable item', () => {
+ expect(findEditableItem().props('loading')).toBe(false);
+ });
+
+ it('emits `dueDateUpdated` event with a `null` payload', () => {
+ expect(wrapper.emitted('dueDateUpdated')).toEqual([[null]]);
+ });
+ });
+
+ describe('when issue has due date', () => {
+ beforeEach(async () => {
+ createComponent({
+ dueDateQueryHandler: jest.fn().mockResolvedValue(issuableDueDateResponse(date)),
+ });
+ await waitForPromises();
+ });
+
+ it('passes a `loading` prop as false to editable item', () => {
+ expect(findEditableItem().props('loading')).toBe(false);
+ });
+
+ it('emits `dueDateUpdated` event with the date payload', () => {
+ expect(wrapper.emitted('dueDateUpdated')).toEqual([[date]]);
+ });
+
+ it('uses a correct prop to set the initial date for GlDatePicker', () => {
+ expect(findDatePicker().props()).toMatchObject({
+ value: null,
+ autocomplete: 'off',
+ defaultDate: expect.any(Object),
+ });
+ });
+
+ it('renders GlDatePicker', async () => {
+ expect(findDatePicker().exists()).toBe(true);
+ });
+ });
+
+ it.each`
+ canInherit | component | componentName | expected
+ ${true} | ${SidebarFormattedDate} | ${'SidebarFormattedDate'} | ${false}
+ ${true} | ${SidebarInheritDate} | ${'SidebarInheritDate'} | ${true}
+ ${false} | ${SidebarFormattedDate} | ${'SidebarFormattedDate'} | ${true}
+ ${false} | ${SidebarInheritDate} | ${'SidebarInheritDate'} | ${false}
+ `(
+ 'when canInherit is $canInherit, $componentName display is $expected',
+ ({ canInherit, component, expected }) => {
+ createComponent({ canInherit });
+
+ expect(wrapper.find(component).exists()).toBe(expected);
+ },
+ );
+
+ it('displays a flash message when query is rejected', async () => {
+ createComponent({
+ dueDateQueryHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
+ });
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalled();
+ });
+
+ it.each`
+ dateType | text | event | mockedResponse | issuableType | queryHandler
+ ${'dueDate'} | ${'Due date'} | ${'dueDateUpdated'} | ${issuableDueDateResponse} | ${'issue'} | ${'dueDateQueryHandler'}
+ ${'startDate'} | ${'Start date'} | ${'startDateUpdated'} | ${issuableStartDateResponse} | ${'epic'} | ${'startDateQueryHandler'}
+ `(
+ 'when dateType is $dateType, component renders $text and emits $event',
+ async ({ dateType, text, event, mockedResponse, issuableType, queryHandler }) => {
+ createComponent({
+ dateType,
+ issuableType,
+ [queryHandler]: jest.fn().mockResolvedValue(mockedResponse(date)),
+ });
+ await waitForPromises();
+
+ expect(wrapper.text()).toContain(text);
+ expect(wrapper.emitted(event)).toEqual([[date]]);
+ },
+ );
+
+ it('displays icon popover when issuable can inherit date', () => {
+ createComponent({ canInherit: true });
+
+ expect(findPopoverIcon().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js b/spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js
new file mode 100644
index 00000000000..1eda4ea977f
--- /dev/null
+++ b/spec/frontend/sidebar/components/date/sidebar_formatted_date_spec.js
@@ -0,0 +1,62 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import SidebarFormattedDate from '~/sidebar/components/date/sidebar_formatted_date.vue';
+
+describe('SidebarFormattedDate', () => {
+ let wrapper;
+ const findFormattedDate = () => wrapper.find("[data-testid='sidebar-date-value']");
+ const findRemoveButton = () => wrapper.find(GlButton);
+
+ const createComponent = ({ hasDate = true } = {}) => {
+ wrapper = shallowMount(SidebarFormattedDate, {
+ provide: {
+ canUpdate: true,
+ },
+ propsData: {
+ formattedDate: 'Apr 15, 2021',
+ hasDate,
+ issuableType: 'issue',
+ resetText: 'remove',
+ isLoading: false,
+ canDelete: true,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays formatted date', () => {
+ expect(findFormattedDate().text()).toBe('Apr 15, 2021');
+ });
+
+ describe('when issue has due date', () => {
+ it('displays remove button', () => {
+ expect(findRemoveButton().exists()).toBe(true);
+      expect(findRemoveButton().text()).toBe('remove');
+ });
+
+ it('emits reset-date event on click on remove button', () => {
+ findRemoveButton().vm.$emit('click');
+
+ expect(wrapper.emitted('reset-date')).toEqual([[undefined]]);
+ });
+ });
+
+ describe('when issuable has no due date', () => {
+ beforeEach(() => {
+ createComponent({
+ hasDate: false,
+ });
+ });
+
+ it('does not display remove button', () => {
+ expect(findRemoveButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/date/sidebar_inherit_date_spec.js b/spec/frontend/sidebar/components/date/sidebar_inherit_date_spec.js
new file mode 100644
index 00000000000..4d38eba8035
--- /dev/null
+++ b/spec/frontend/sidebar/components/date/sidebar_inherit_date_spec.js
@@ -0,0 +1,53 @@
+import { GlFormRadio } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import SidebarFormattedDate from '~/sidebar/components/date/sidebar_formatted_date.vue';
+import SidebarInheritDate from '~/sidebar/components/date/sidebar_inherit_date.vue';
+
+describe('SidebarInheritDate', () => {
+ let wrapper;
+ const findFixedFormattedDate = () => wrapper.findAll(SidebarFormattedDate).at(0);
+ const findInheritFormattedDate = () => wrapper.findAll(SidebarFormattedDate).at(1);
+ const findFixedRadio = () => wrapper.findAll(GlFormRadio).at(0);
+ const findInheritRadio = () => wrapper.findAll(GlFormRadio).at(1);
+
+ const createComponent = () => {
+ wrapper = shallowMount(SidebarInheritDate, {
+ provide: {
+ canUpdate: true,
+ },
+ propsData: {
+ issuable: {
+ dueDate: '2021-04-15',
+ dueDateIsFixed: true,
+ dueDateFixed: '2021-04-15',
+ dueDateFromMilestones: '2021-05-15',
+ },
+ isLoading: false,
+ dateType: 'dueDate',
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays formatted fixed and inherited dates with radio buttons', () => {
+ expect(wrapper.findAll(SidebarFormattedDate)).toHaveLength(2);
+ expect(wrapper.findAll(GlFormRadio)).toHaveLength(2);
+ expect(findFixedFormattedDate().props('formattedDate')).toBe('Apr 15, 2021');
+ expect(findInheritFormattedDate().props('formattedDate')).toBe('May 15, 2021');
+ expect(findFixedRadio().text()).toBe('Fixed:');
+ expect(findInheritRadio().text()).toBe('Inherited:');
+ });
+
+ it('emits set-date event on click on radio button', () => {
+ findFixedRadio().vm.$emit('input', true);
+
+ expect(wrapper.emitted('set-date')).toEqual([[true]]);
+ });
+});
diff --git a/spec/frontend/sidebar/components/due_date/sidebar_due_date_widget_spec.js b/spec/frontend/sidebar/components/due_date/sidebar_due_date_widget_spec.js
deleted file mode 100644
index f58ceb0f1be..00000000000
--- a/spec/frontend/sidebar/components/due_date/sidebar_due_date_widget_spec.js
+++ /dev/null
@@ -1,106 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
-import SidebarDueDateWidget from '~/sidebar/components/due_date/sidebar_due_date_widget.vue';
-import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
-import issueDueDateQuery from '~/sidebar/queries/issue_due_date.query.graphql';
-import { issueDueDateResponse } from '../../mock_data';
-
-jest.mock('~/flash');
-
-Vue.use(VueApollo);
-
-describe('Sidebar Due date Widget', () => {
- let wrapper;
- let fakeApollo;
- const date = '2021-04-15';
-
- const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
- const findFormattedDueDate = () => wrapper.find("[data-testid='sidebar-duedate-value']");
-
- const createComponent = ({
- dueDateQueryHandler = jest.fn().mockResolvedValue(issueDueDateResponse()),
- } = {}) => {
- fakeApollo = createMockApollo([[issueDueDateQuery, dueDateQueryHandler]]);
-
- wrapper = shallowMount(SidebarDueDateWidget, {
- apolloProvider: fakeApollo,
- provide: {
- fullPath: 'group/project',
- iid: '1',
- canUpdate: true,
- },
- propsData: {
- issuableType: 'issue',
- },
- stubs: {
- SidebarEditableItem,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- fakeApollo = null;
- });
-
- it('passes a `loading` prop as true to editable item when query is loading', () => {
- createComponent();
-
- expect(findEditableItem().props('loading')).toBe(true);
- });
-
- describe('when issue has no due date', () => {
- beforeEach(async () => {
- createComponent({
- dueDateQueryHandler: jest.fn().mockResolvedValue(issueDueDateResponse(null)),
- });
- await waitForPromises();
- });
-
- it('passes a `loading` prop as false to editable item', () => {
- expect(findEditableItem().props('loading')).toBe(false);
- });
-
- it('dueDate is null by default', () => {
- expect(findFormattedDueDate().text()).toBe('None');
- });
-
- it('emits `dueDateUpdated` event with a `null` payload', () => {
- expect(wrapper.emitted('dueDateUpdated')).toEqual([[null]]);
- });
- });
-
- describe('when issue has due date', () => {
- beforeEach(async () => {
- createComponent({
- dueDateQueryHandler: jest.fn().mockResolvedValue(issueDueDateResponse(date)),
- });
- await waitForPromises();
- });
-
- it('passes a `loading` prop as false to editable item', () => {
- expect(findEditableItem().props('loading')).toBe(false);
- });
-
- it('has dueDate', () => {
- expect(findFormattedDueDate().text()).toBe('Apr 15, 2021');
- });
-
- it('emits `dueDateUpdated` event with the date payload', () => {
- expect(wrapper.emitted('dueDateUpdated')).toEqual([[date]]);
- });
- });
-
- it('displays a flash message when query is rejected', async () => {
- createComponent({
- dueDateQueryHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
- });
- await waitForPromises();
-
- expect(createFlash).toHaveBeenCalled();
- });
-});
diff --git a/spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js b/spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js
new file mode 100644
index 00000000000..57b9a10b23e
--- /dev/null
+++ b/spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js
@@ -0,0 +1,89 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import Participants from '~/sidebar/components/participants/participants.vue';
+import SidebarParticipantsWidget from '~/sidebar/components/participants/sidebar_participants_widget.vue';
+import epicParticipantsQuery from '~/sidebar/queries/epic_participants.query.graphql';
+import { epicParticipantsResponse } from '../../mock_data';
+
+Vue.use(VueApollo);
+
+describe('Sidebar Participants Widget', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const findParticipants = () => wrapper.findComponent(Participants);
+
+ const createComponent = ({
+ participantsQueryHandler = jest.fn().mockResolvedValue(epicParticipantsResponse()),
+ } = {}) => {
+ fakeApollo = createMockApollo([[epicParticipantsQuery, participantsQueryHandler]]);
+
+ wrapper = shallowMount(SidebarParticipantsWidget, {
+ apolloProvider: fakeApollo,
+ propsData: {
+ fullPath: 'group',
+ iid: '1',
+ issuableType: 'epic',
+ },
+ stubs: {
+ Participants,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ it('passes a `loading` prop as true to child component when query is loading', () => {
+ createComponent();
+
+ expect(findParticipants().props('loading')).toBe(true);
+ });
+
+ describe('when participants are loaded', () => {
+ beforeEach(() => {
+ createComponent({
+ participantsQueryHandler: jest.fn().mockResolvedValue(epicParticipantsResponse()),
+ });
+ return waitForPromises();
+ });
+
+ it('passes a `loading` prop as false to editable item', () => {
+ expect(findParticipants().props('loading')).toBe(false);
+ });
+
+ it('passes participants to child component', () => {
+ expect(findParticipants().props('participants')).toEqual(
+ epicParticipantsResponse().data.workspace.issuable.participants.nodes,
+ );
+ });
+ });
+
+ describe('when error occurs', () => {
+ it('emits error event with correct parameters', async () => {
+ const mockError = new Error('mayday');
+
+ createComponent({
+ participantsQueryHandler: jest.fn().mockRejectedValue(mockError),
+ });
+
+ await waitForPromises();
+
+ const [
+ [
+ {
+ message,
+ error: { networkError },
+ },
+ ],
+ ] = wrapper.emitted('fetch-error');
+ expect(message).toBe(wrapper.vm.$options.i18n.fetchingError);
+ expect(networkError).toEqual(mockError);
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js b/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
new file mode 100644
index 00000000000..549ab99c6af
--- /dev/null
+++ b/spec/frontend/sidebar/components/subscriptions/sidebar_subscriptions_widget_spec.js
@@ -0,0 +1,131 @@
+import { GlIcon, GlToggle } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
+import SidebarSubscriptionWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue';
+import issueSubscribedQuery from '~/sidebar/queries/issue_subscribed.query.graphql';
+import { issueSubscriptionsResponse } from '../../mock_data';
+
+jest.mock('~/flash');
+
+Vue.use(VueApollo);
+
+describe('Sidebar Subscriptions Widget', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
+ const findToggle = () => wrapper.findComponent(GlToggle);
+ const findIcon = () => wrapper.findComponent(GlIcon);
+
+ const createComponent = ({
+ subscriptionsQueryHandler = jest.fn().mockResolvedValue(issueSubscriptionsResponse()),
+ } = {}) => {
+ fakeApollo = createMockApollo([[issueSubscribedQuery, subscriptionsQueryHandler]]);
+
+ wrapper = shallowMount(SidebarSubscriptionWidget, {
+ apolloProvider: fakeApollo,
+ provide: {
+ canUpdate: true,
+ },
+ propsData: {
+ fullPath: 'group/project',
+ iid: '1',
+ issuableType: 'issue',
+ },
+ stubs: {
+ SidebarEditableItem,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ it('passes a `loading` prop as true to editable item when query is loading', () => {
+ createComponent();
+
+ expect(findEditableItem().props('loading')).toBe(true);
+ });
+
+ describe('when user is not subscribed to the issue', () => {
+ beforeEach(() => {
+ createComponent();
+ return waitForPromises();
+ });
+
+ it('passes a `loading` prop as false to editable item', () => {
+ expect(findEditableItem().props('loading')).toBe(false);
+ });
+
+ it('toggle is unchecked', () => {
+ expect(findToggle().props('value')).toBe(false);
+ });
+
+ it('emits `subscribedUpdated` event with a `false` payload', () => {
+ expect(wrapper.emitted('subscribedUpdated')).toEqual([[false]]);
+ });
+ });
+
+ describe('when user is subscribed to the issue', () => {
+ beforeEach(() => {
+ createComponent({
+ subscriptionsQueryHandler: jest.fn().mockResolvedValue(issueSubscriptionsResponse(true)),
+ });
+ return waitForPromises();
+ });
+
+ it('passes a `loading` prop as false to editable item', () => {
+ expect(findEditableItem().props('loading')).toBe(false);
+ });
+
+ it('toggle is checked', () => {
+ expect(findToggle().props('value')).toBe(true);
+ });
+
+ it('emits `subscribedUpdated` event with a `true` payload', () => {
+ expect(wrapper.emitted('subscribedUpdated')).toEqual([[true]]);
+ });
+ });
+
+ describe('when emails are disabled', () => {
+ it('toggle is disabled and off when user is subscribed', async () => {
+ createComponent({
+ subscriptionsQueryHandler: jest
+ .fn()
+ .mockResolvedValue(issueSubscriptionsResponse(true, true)),
+ });
+ await waitForPromises();
+
+ expect(findIcon().props('name')).toBe('notifications-off');
+ expect(findToggle().props('disabled')).toBe(true);
+ });
+
+ it('toggle is disabled and off when user is not subscribed', async () => {
+ createComponent({
+ subscriptionsQueryHandler: jest
+ .fn()
+ .mockResolvedValue(issueSubscriptionsResponse(false, true)),
+ });
+ await waitForPromises();
+
+ expect(findIcon().props('name')).toBe('notifications-off');
+ expect(findToggle().props('disabled')).toBe(true);
+ });
+ });
+
+ it('displays a flash message when query is rejected', async () => {
+ createComponent({
+ subscriptionsQueryHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
+ });
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/sidebar/components/time_tracking/mock_data.js b/spec/frontend/sidebar/components/time_tracking/mock_data.js
new file mode 100644
index 00000000000..862bcbe861e
--- /dev/null
+++ b/spec/frontend/sidebar/components/time_tracking/mock_data.js
@@ -0,0 +1,102 @@
+export const getIssueTimelogsQueryResponse = {
+ data: {
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/148',
+ title:
+ 'Est perferendis dicta expedita ipsum adipisci laudantium omnis consequatur consequatur et.',
+ timelogs: {
+ nodes: [
+ {
+ __typename: 'Timelog',
+ timeSpent: 14400,
+ user: {
+ name: 'John Doe18',
+ __typename: 'UserCore',
+ },
+ spentAt: '2020-05-01T00:00:00Z',
+ note: {
+ body: 'I paired with @root on this last week.',
+ __typename: 'Note',
+ },
+ },
+ {
+ __typename: 'Timelog',
+ timeSpent: 1800,
+ user: {
+ name: 'Administrator',
+ __typename: 'UserCore',
+ },
+ spentAt: '2021-05-07T13:19:01Z',
+ note: null,
+ },
+ {
+ __typename: 'Timelog',
+ timeSpent: 14400,
+ user: {
+ name: 'Administrator',
+ __typename: 'UserCore',
+ },
+ spentAt: '2021-05-01T00:00:00Z',
+ note: {
+ body: 'I did some work on this last week.',
+ __typename: 'Note',
+ },
+ },
+ ],
+ __typename: 'TimelogConnection',
+ },
+ },
+ },
+};
+
+export const getMrTimelogsQueryResponse = {
+ data: {
+ issuable: {
+ __typename: 'MergeRequest',
+ id: 'gid://gitlab/MergeRequest/29',
+ title: 'Esse amet perspiciatis voluptas et sed praesentium debitis repellat.',
+ timelogs: {
+ nodes: [
+ {
+ __typename: 'Timelog',
+ timeSpent: 1800,
+ user: {
+ name: 'Administrator',
+ __typename: 'UserCore',
+ },
+ spentAt: '2021-05-07T14:44:55Z',
+ note: {
+ body: 'Thirty minutes!',
+ __typename: 'Note',
+ },
+ },
+ {
+ __typename: 'Timelog',
+ timeSpent: 3600,
+ user: {
+ name: 'Administrator',
+ __typename: 'UserCore',
+ },
+ spentAt: '2021-05-07T14:44:39Z',
+ note: null,
+ },
+ {
+ __typename: 'Timelog',
+ timeSpent: 300,
+ user: {
+ name: 'Administrator',
+ __typename: 'UserCore',
+ },
+ spentAt: '2021-03-10T00:00:00Z',
+ note: {
+ body: 'A note with some time',
+ __typename: 'Note',
+ },
+ },
+ ],
+ __typename: 'TimelogConnection',
+ },
+ },
+ },
+};
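
A note on the mock data above: the `timeSpent` values are in seconds, and they are what the `report_spec.js` changes that follow assert against as "8h 30m" / "1d 30m" column headers. The sketch below is illustrative only, not code from this commit; the helper name and the 8-hour-workday rule are assumptions.

```javascript
// Illustrative sketch (not part of this diff): the issue timelogs above carry
// 14400 + 1800 + 14400 seconds = 30600 s = 8h 30m, which is why report_spec.js
// expects "8h 30m" with limitToHours: true and "1d 30m" otherwise
// (assuming GitLab's conventional 8-hour workday).
const toHumanReadable = (seconds, { limitToHours = false } = {}) => {
  const totalMinutes = Math.floor(seconds / 60);
  const totalHours = Math.floor(totalMinutes / 60);
  const minutes = totalMinutes % 60;

  if (limitToHours) {
    return `${totalHours}h ${minutes}m`; // "8h 30m"
  }

  const days = Math.floor(totalHours / 8); // assumed 8-hour workday
  const hours = totalHours % 8;
  return [days && `${days}d`, hours && `${hours}h`, minutes && `${minutes}m`]
    .filter(Boolean)
    .join(' '); // "1d 30m"
};

const totalSeconds = [14400, 1800, 14400].reduce((sum, s) => sum + s, 0); // 30600
console.log(toHumanReadable(totalSeconds)); // "1d 30m"
console.log(toHumanReadable(totalSeconds, { limitToHours: true })); // "8h 30m"
```
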
diff --git a/spec/frontend/sidebar/components/time_tracking/report_spec.js b/spec/frontend/sidebar/components/time_tracking/report_spec.js
new file mode 100644
index 00000000000..0aa5aa2f691
--- /dev/null
+++ b/spec/frontend/sidebar/components/time_tracking/report_spec.js
@@ -0,0 +1,125 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { getAllByRole, getByRole } from '@testing-library/dom';
+import { shallowMount, createLocalVue, mount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import Report from '~/sidebar/components/time_tracking/report.vue';
+import getIssueTimelogsQuery from '~/vue_shared/components/sidebar/queries/get_issue_timelogs.query.graphql';
+import getMrTimelogsQuery from '~/vue_shared/components/sidebar/queries/get_mr_timelogs.query.graphql';
+import { getIssueTimelogsQueryResponse, getMrTimelogsQueryResponse } from './mock_data';
+
+jest.mock('~/flash');
+
+describe('Issuable Time Tracking Report', () => {
+ const localVue = createLocalVue();
+ localVue.use(VueApollo);
+ let wrapper;
+ let fakeApollo;
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const successIssueQueryHandler = jest.fn().mockResolvedValue(getIssueTimelogsQueryResponse);
+ const successMrQueryHandler = jest.fn().mockResolvedValue(getMrTimelogsQueryResponse);
+
+ const mountComponent = ({
+ queryHandler = successIssueQueryHandler,
+ issuableType = 'issue',
+ mountFunction = shallowMount,
+ limitToHours = false,
+ } = {}) => {
+ fakeApollo = createMockApollo([
+ [getIssueTimelogsQuery, queryHandler],
+ [getMrTimelogsQuery, queryHandler],
+ ]);
+ wrapper = mountFunction(Report, {
+ provide: {
+ issuableId: 1,
+ issuableType,
+ },
+ propsData: { limitToHours },
+ localVue,
+ apolloProvider: fakeApollo,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ it('should render loading spinner', () => {
+ mountComponent();
+
+ expect(findLoadingIcon()).toExist();
+ });
+
+ it('should render error message on reject', async () => {
+ mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalled();
+ });
+
+ describe('for issue', () => {
+ beforeEach(() => {
+ mountComponent({ mountFunction: mount });
+ });
+
+ it('calls correct query', () => {
+ expect(successIssueQueryHandler).toHaveBeenCalled();
+ });
+
+ it('renders correct results', async () => {
+ await waitForPromises();
+
+ expect(getAllByRole(wrapper.element, 'row', { name: /John Doe18/i })).toHaveLength(1);
+ expect(getAllByRole(wrapper.element, 'row', { name: /Administrator/i })).toHaveLength(2);
+ });
+ });
+
+ describe('for merge request', () => {
+ beforeEach(() => {
+ mountComponent({
+ queryHandler: successMrQueryHandler,
+ issuableType: 'merge_request',
+ mountFunction: mount,
+ });
+ });
+
+ it('calls correct query', () => {
+ expect(successMrQueryHandler).toHaveBeenCalled();
+ });
+
+ it('renders correct results', async () => {
+ await waitForPromises();
+
+ expect(getAllByRole(wrapper.element, 'row', { name: /Administrator/i })).toHaveLength(3);
+ });
+ });
+
+ describe('observes `limit display of time tracking units to hours` setting', () => {
+ describe('when false', () => {
+ beforeEach(() => {
+ mountComponent({ limitToHours: false, mountFunction: mount });
+ });
+
+ it('renders correct results', async () => {
+ await waitForPromises();
+
+ expect(getByRole(wrapper.element, 'columnheader', { name: /1d 30m/i })).not.toBeNull();
+ });
+ });
+
+ describe('when true', () => {
+ beforeEach(() => {
+ mountComponent({ limitToHours: true, mountFunction: mount });
+ });
+
+ it('renders correct results', async () => {
+ await waitForPromises();
+
+ expect(getByRole(wrapper.element, 'columnheader', { name: /8h 30m/i })).not.toBeNull();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
index 4d03aedf1be..f26cdcb8b20 100644
--- a/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
+++ b/spec/frontend/sidebar/components/time_tracking/time_tracker_spec.js
@@ -10,6 +10,7 @@ describe('Issuable Time Tracker', () => {
const findComparisonMeter = () => findByTestId('compareMeter').attributes('title');
const findCollapsedState = () => findByTestId('collapsedState');
const findTimeRemainingProgress = () => findByTestId('timeRemainingProgress');
+ const findReportLink = () => findByTestId('reportLink');
const defaultProps = {
timeEstimate: 10_000, // 2h 46m
@@ -192,6 +193,33 @@ describe('Issuable Time Tracker', () => {
});
});
+ describe('Time tracking report', () => {
+ describe('When no time spent', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ props: {
+ timeSpent: 0,
+ timeSpentHumanReadable: '',
+ },
+ });
+ });
+
+ it('link should not appear', () => {
+ expect(findReportLink().exists()).toBe(false);
+ });
+ });
+
+ describe('When time spent', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ });
+
+ it('link should appear', () => {
+ expect(findReportLink().exists()).toBe(true);
+ });
+ });
+ });
+
describe('Help pane', () => {
const findHelpButton = () => findByTestId('helpButton');
const findCloseHelpButton = () => findByTestId('closeHelpButton');
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 2a4858a6320..b052038661a 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -233,7 +233,7 @@ export const issueConfidentialityResponse = (confidential = false) => ({
},
});
-export const issueDueDateResponse = (dueDate = null) => ({
+export const issuableDueDateResponse = (dueDate = null) => ({
data: {
workspace: {
__typename: 'Project',
@@ -246,59 +246,82 @@ export const issueDueDateResponse = (dueDate = null) => ({
},
});
-export const issueReferenceResponse = (reference) => ({
+export const issuableStartDateResponse = (startDate = null) => ({
data: {
workspace: {
- __typename: 'Project',
+ __typename: 'Group',
issuable: {
- __typename: 'Issue',
- id: 'gid://gitlab/Issue/4',
- reference,
+ __typename: 'Epic',
+ id: 'gid://gitlab/Epic/4',
+ startDate,
+ startDateIsFixed: true,
+ startDateFixed: startDate,
+ startDateFromMilestones: null,
},
},
},
});
-export const issuableQueryResponse = {
+export const epicParticipantsResponse = () => ({
data: {
workspace: {
- __typename: 'Project',
+ __typename: 'Group',
issuable: {
- __typename: 'Issue',
- id: 'gid://gitlab/Issue/1',
- iid: '1',
+ __typename: 'Epic',
+ id: 'gid://gitlab/Epic/4',
participants: {
nodes: [
{
- id: 'gid://gitlab/User/1',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- name: 'Administrator',
- username: 'root',
- webUrl: '/root',
- status: null,
- },
- {
id: 'gid://gitlab/User/2',
avatarUrl:
'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
name: 'Jacki Kub',
username: 'francina.skiles',
webUrl: '/franc',
- status: {
- availability: 'BUSY',
- },
- },
- {
- id: 'gid://gitlab/User/3',
- avatarUrl: '/avatar',
- name: 'John Doe',
- username: 'johndoe',
- webUrl: '/john',
status: null,
},
],
},
+ },
+ },
+ },
+});
+
+export const issueReferenceResponse = (reference) => ({
+ data: {
+ workspace: {
+ __typename: 'Project',
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/4',
+ reference,
+ },
+ },
+ },
+});
+
+export const issueSubscriptionsResponse = (subscribed = false, emailsDisabled = false) => ({
+ data: {
+ workspace: {
+ __typename: 'Project',
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/4',
+ subscribed,
+ emailsDisabled,
+ },
+ },
+ },
+});
+
+export const issuableQueryResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/1',
+ iid: '1',
assignees: {
nodes: [
{
@@ -370,32 +393,121 @@ export const updateIssueAssigneesMutationResponse = {
],
__typename: 'UserConnection',
},
- participants: {
- nodes: [
- {
- __typename: 'User',
- id: 'gid://gitlab/User/1',
+ __typename: 'Issue',
+ },
+ },
+ },
+};
+
+export const subscriptionNullResponse = {
+ data: {
+ issuableAssigneesUpdated: null,
+ },
+};
+
+const mockUser1 = {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+};
+
+const mockUser2 = {
+ id: 'gid://gitlab/User/4',
+ avatarUrl: '/avatar2',
+ name: 'rookie',
+ username: 'rookie',
+ webUrl: 'rookie',
+ status: null,
+};
+
+export const searchResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ users: {
+ nodes: [
+ {
+ user: mockUser1,
+ },
+ {
+ user: mockUser2,
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const projectMembersResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ users: {
+ nodes: [
+ // Remove nulls https://gitlab.com/gitlab-org/gitlab/-/issues/329750
+ null,
+ null,
+ // Remove duplicated entry https://gitlab.com/gitlab-org/gitlab/-/issues/327822
+ mockUser1,
+ mockUser1,
+ mockUser2,
+ {
+ user: {
+ id: 'gid://gitlab/User/2',
avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- name: 'Administrator',
- username: 'root',
- webUrl: '/root',
- status: null,
+ 'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
+ name: 'Jacki Kub',
+ username: 'francina.skiles',
+ webUrl: '/franc',
+ status: {
+ availability: 'BUSY',
+ },
},
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const participantsQueryResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/1',
+ iid: '1',
+ participants: {
+ nodes: [
+ // Remove duplicated entry https://gitlab.com/gitlab-org/gitlab/-/issues/327822
+ mockUser1,
+ mockUser1,
{
- __typename: 'User',
id: 'gid://gitlab/User/2',
avatarUrl:
'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
name: 'Jacki Kub',
username: 'francina.skiles',
webUrl: '/franc',
+ status: {
+ availability: 'BUSY',
+ },
+ },
+ {
+ id: 'gid://gitlab/User/3',
+ avatarUrl: '/avatar',
+ name: 'John Doe',
+ username: 'rollie',
+ webUrl: '/john',
status: null,
},
],
- __typename: 'UserConnection',
},
- __typename: 'Issue',
},
},
},
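
A note on `projectMembersResponse` above: its nodes list deliberately mixes `null` placeholders, a duplicated `mockUser1`, and both wrapped (`{ user }`) and plain entries, per the linked issues. The consuming code is presumably expected to normalise this; the sketch below shows one plausible cleanup and is an assumption, not code from this commit.

```javascript
// Illustrative sketch (not part of this diff): drop the intentional nulls,
// unwrap `{ user }` entries, and de-duplicate by id — the shape the specs that
// consume projectMembersResponse appear to rely on.
const cleanMemberNodes = (nodes) => {
  const byId = new Map();

  nodes
    .filter(Boolean) // remove the null placeholders
    .map((node) => node.user || node) // entries are either { user } wrappers or plain users
    .forEach((user) => {
      if (!byId.has(user.id)) byId.set(user.id, user);
    });

  return [...byId.values()];
};

// cleanMemberNodes(projectMembersResponse.data.workspace.users.nodes)
// => [mockUser1, mockUser2, Jacki Kub] with no nulls and no duplicates
```
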
diff --git a/spec/frontend/sidebar/sidebar_assignees_spec.js b/spec/frontend/sidebar/sidebar_assignees_spec.js
index e737b57e33d..dc121dcb897 100644
--- a/spec/frontend/sidebar/sidebar_assignees_spec.js
+++ b/spec/frontend/sidebar/sidebar_assignees_spec.js
@@ -17,6 +17,7 @@ describe('sidebar assignees', () => {
wrapper = shallowMount(SidebarAssignees, {
propsData: {
issuableIid: '1',
+ issuableId: 1,
mediator,
field: '',
projectPath: 'projectPath',
diff --git a/spec/frontend/sidebar/sidebar_subscriptions_spec.js b/spec/frontend/sidebar/sidebar_subscriptions_spec.js
deleted file mode 100644
index d900fde7e70..00000000000
--- a/spec/frontend/sidebar/sidebar_subscriptions_spec.js
+++ /dev/null
@@ -1,36 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import SidebarSubscriptions from '~/sidebar/components/subscriptions/sidebar_subscriptions.vue';
-import SidebarService from '~/sidebar/services/sidebar_service';
-import SidebarMediator from '~/sidebar/sidebar_mediator';
-import SidebarStore from '~/sidebar/stores/sidebar_store';
-import Mock from './mock_data';
-
-describe('Sidebar Subscriptions', () => {
- let wrapper;
- let mediator;
-
- beforeEach(() => {
- mediator = new SidebarMediator(Mock.mediator);
- wrapper = shallowMount(SidebarSubscriptions, {
- propsData: {
- mediator,
- },
- });
- });
-
- afterEach(() => {
- wrapper.destroy();
- SidebarService.singleton = null;
- SidebarStore.singleton = null;
- SidebarMediator.singleton = null;
- });
-
- it('calls the mediator toggleSubscription on event', () => {
- const spy = jest.spyOn(mediator, 'toggleSubscription').mockReturnValue(Promise.resolve());
-
- wrapper.vm.onToggleSubscription();
-
- expect(spy).toHaveBeenCalled();
- spy.mockRestore();
- });
-});
diff --git a/spec/frontend/static_site_editor/mock_data.js b/spec/frontend/static_site_editor/mock_data.js
index 8bc65c6ce31..8d64e1799b8 100644
--- a/spec/frontend/static_site_editor/mock_data.js
+++ b/spec/frontend/static_site_editor/mock_data.js
@@ -55,7 +55,7 @@ export const mergeRequestTemplates = [
export const submitChangesError = 'Could not save changes';
export const commitBranchResponse = {
- web_url: '/tree/root-master-patch-88195',
+ web_url: '/tree/root-main-patch-88195',
};
export const commitMultipleResponse = {
short_id: 'ed899a2f4b5',
@@ -84,8 +84,8 @@ export const mounts = [
},
];
-export const branch = 'master';
+export const branch = 'main';
-export const baseUrl = '/user1/project1/-/sse/master%2Ftest.md';
+export const baseUrl = '/user1/project1/-/sse/main%2Ftest.md';
export const imageRoot = 'source/images/';
diff --git a/spec/frontend/static_site_editor/pages/home_spec.js b/spec/frontend/static_site_editor/pages/home_spec.js
index 0936ba3011c..eb056469603 100644
--- a/spec/frontend/static_site_editor/pages/home_spec.js
+++ b/spec/frontend/static_site_editor/pages/home_spec.js
@@ -275,6 +275,7 @@ describe('static_site_editor/pages/home', () => {
formattedMarkdown,
project,
sourcePath,
+ targetBranch: branch,
username,
images,
mergeRequestMeta,
diff --git a/spec/frontend/static_site_editor/services/generate_branch_name_spec.js b/spec/frontend/static_site_editor/services/generate_branch_name_spec.js
index 0624fc3b7b4..7e437506a16 100644
--- a/spec/frontend/static_site_editor/services/generate_branch_name_spec.js
+++ b/spec/frontend/static_site_editor/services/generate_branch_name_spec.js
@@ -1,7 +1,7 @@
-import { DEFAULT_TARGET_BRANCH, BRANCH_SUFFIX_COUNT } from '~/static_site_editor/constants';
+import { BRANCH_SUFFIX_COUNT } from '~/static_site_editor/constants';
import generateBranchName from '~/static_site_editor/services/generate_branch_name';
-import { username } from '../mock_data';
+import { username, branch as targetBranch } from '../mock_data';
describe('generateBranchName', () => {
const timestamp = 12345678901234;
@@ -11,11 +11,11 @@ describe('generateBranchName', () => {
});
it('generates a name that includes the username and target branch', () => {
- expect(generateBranchName(username)).toMatch(`${username}-${DEFAULT_TARGET_BRANCH}`);
+ expect(generateBranchName(username, targetBranch)).toMatch(`${username}-${targetBranch}`);
});
it(`adds the first ${BRANCH_SUFFIX_COUNT} numbers of the current timestamp`, () => {
- expect(generateBranchName(username)).toMatch(
+ expect(generateBranchName(username, targetBranch)).toMatch(
timestamp.toString().substring(BRANCH_SUFFIX_COUNT),
);
});
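
For readers following the `generateBranchName` changes above: the spec now passes the target branch explicitly and matches a timestamp-derived suffix. The sketch below only illustrates the shape those assertions imply; the constant value and exact formatting are assumptions, not the real implementation in `~/static_site_editor/services/generate_branch_name`.

```javascript
// Illustrative sketch (not part of this diff): a generateBranchName consistent
// with the assertions above. BRANCH_SUFFIX_COUNT's value and the separator are
// assumptions.
const BRANCH_SUFFIX_COUNT = 5;

const generateBranchName = (username, targetBranch, now = Date.now()) =>
  `${username}-${targetBranch}-${now.toString().substring(BRANCH_SUFFIX_COUNT)}`;

// With the mocked timestamp from the spec:
// generateBranchName('user1', 'main', 12345678901234) => 'user1-main-678901234'
// which matches both `${username}-${targetBranch}` and
// timestamp.toString().substring(BRANCH_SUFFIX_COUNT).
```
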
diff --git a/spec/frontend/static_site_editor/services/renderers/render_image_spec.js b/spec/frontend/static_site_editor/services/renderers/render_image_spec.js
index e9e40835982..d3298aa0b26 100644
--- a/spec/frontend/static_site_editor/services/renderers/render_image_spec.js
+++ b/spec/frontend/static_site_editor/services/renderers/render_image_spec.js
@@ -47,11 +47,11 @@ describe('rich_content_editor/renderers/render_image', () => {
it.each`
destination | isAbsolute | src
${'http://test.host/absolute/path/to/image.png'} | ${true} | ${'http://test.host/absolute/path/to/image.png'}
- ${'/relative/path/to/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/master/default/source/relative/path/to/image.png'}
- ${'/target/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/master/source/with/target/image.png'}
- ${'relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/master/relative/to/current/image.png'}
- ${'./relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/master/./relative/to/current/image.png'}
- ${'../relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/master/../relative/to/current/image.png'}
+ ${'/relative/path/to/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/default/source/relative/path/to/image.png'}
+ ${'/target/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/source/with/target/image.png'}
+ ${'relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/relative/to/current/image.png'}
+ ${'./relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/./relative/to/current/image.png'}
+ ${'../relative/to/current/image.png'} | ${false} | ${'http://test.host/user1/project1/-/raw/main/../relative/to/current/image.png'}
`('returns an image with the correct attributes', ({ destination, isAbsolute, src }) => {
node.destination = destination;
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
index d4cbc5d235e..d9bceb76a37 100644
--- a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
+++ b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
@@ -3,7 +3,6 @@ import Api from '~/api';
import { convertObjectPropsToSnakeCase } from '~/lib/utils/common_utils';
import {
- DEFAULT_TARGET_BRANCH,
SUBMIT_CHANGES_BRANCH_ERROR,
SUBMIT_CHANGES_COMMIT_ERROR,
SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
@@ -25,6 +24,7 @@ import {
createMergeRequestResponse,
mergeRequestMeta,
sourcePath,
+ branch as targetBranch,
sourceContentYAML as content,
trackingCategory,
images,
@@ -33,7 +33,7 @@ import {
jest.mock('~/static_site_editor/services/generate_branch_name');
describe('submitContentChanges', () => {
- const branch = 'branch-name';
+ const sourceBranch = 'branch-name';
let trackingSpy;
let origPage;
@@ -41,6 +41,7 @@ describe('submitContentChanges', () => {
username,
projectId,
sourcePath,
+ targetBranch,
content,
images,
mergeRequestMeta,
@@ -54,7 +55,7 @@ describe('submitContentChanges', () => {
.spyOn(Api, 'createProjectMergeRequest')
.mockResolvedValue({ data: createMergeRequestResponse });
- generateBranchName.mockReturnValue(branch);
+ generateBranchName.mockReturnValue(sourceBranch);
origPage = document.body.dataset.page;
document.body.dataset.page = trackingCategory;
@@ -69,8 +70,8 @@ describe('submitContentChanges', () => {
it('creates a branch named after the username and target branch', () => {
return submitContentChanges(buildPayload()).then(() => {
expect(Api.createBranch).toHaveBeenCalledWith(projectId, {
- ref: DEFAULT_TARGET_BRANCH,
- branch,
+ ref: targetBranch,
+ branch: sourceBranch,
});
});
});
@@ -86,7 +87,7 @@ describe('submitContentChanges', () => {
describe('committing markdown formatting changes', () => {
const formattedMarkdown = `formatted ${content}`;
const commitPayload = {
- branch,
+ branch: sourceBranch,
commit_message: `${DEFAULT_FORMATTING_CHANGES_COMMIT_MESSAGE}\n\n${DEFAULT_FORMATTING_CHANGES_COMMIT_DESCRIPTION}`,
actions: [
{
@@ -116,7 +117,7 @@ describe('submitContentChanges', () => {
it('commits the content changes to the branch when creating branch succeeds', () => {
return submitContentChanges(buildPayload()).then(() => {
expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
- branch,
+ branch: sourceBranch,
commit_message: mergeRequestMeta.title,
actions: [
{
@@ -140,7 +141,7 @@ describe('submitContentChanges', () => {
const payload = buildPayload({ content: contentWithoutImages });
return submitContentChanges(payload).then(() => {
expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
- branch,
+ branch: sourceBranch,
commit_message: mergeRequestMeta.title,
actions: [
{
@@ -169,8 +170,8 @@ describe('submitContentChanges', () => {
convertObjectPropsToSnakeCase({
title,
description,
- targetBranch: DEFAULT_TARGET_BRANCH,
- sourceBranch: branch,
+ targetBranch,
+ sourceBranch,
}),
);
});
@@ -194,7 +195,7 @@ describe('submitContentChanges', () => {
});
it('returns the branch name', () => {
- expect(result).toMatchObject({ branch: { label: branch } });
+ expect(result).toMatchObject({ branch: { label: sourceBranch } });
});
it('returns commit short id and web url', () => {
diff --git a/spec/frontend/task_list_spec.js b/spec/frontend/task_list_spec.js
index b6ac3167fea..2d7a735bd11 100644
--- a/spec/frontend/task_list_spec.js
+++ b/spec/frontend/task_list_spec.js
@@ -16,7 +16,20 @@ describe('TaskList', () => {
beforeEach(() => {
setFixtures(`
<div class="task-list">
- <div class="js-task-list-container"></div>
+ <div class="js-task-list-container">
+ <ul data-sourcepos="5:1-5:11" class="task-list" dir="auto">
+ <li data-sourcepos="5:1-5:11" class="task-list-item enabled">
+ <input type="checkbox" class="task-list-item-checkbox" checked=""> markdown task
+ </li>
+ </ul>
+
+ <ul class="task-list" dir="auto">
+ <li class="task-list-item enabled">
+ <input type="checkbox" class="task-list-item-checkbox"> hand-coded checkbox
+ </li>
+ </ul>
+ <textarea class="hidden js-task-list-field"></textarea>
+ </div>
</div>
`);
@@ -59,32 +72,47 @@ describe('TaskList', () => {
describe('disableTaskListItems', () => {
it('should call taskList method with disable param', () => {
- jest.spyOn($.prototype, 'taskList').mockImplementation(() => {});
+ taskList.disableTaskListItems();
- taskList.disableTaskListItems({ currentTarget });
-
- expect(currentTarget.taskList).toHaveBeenCalledWith('disable');
+ expect(document.querySelectorAll('.task-list-item input:disabled').length).toEqual(2);
});
});
describe('enableTaskListItems', () => {
- it('should call taskList method with enable param', () => {
- jest.spyOn($.prototype, 'taskList').mockImplementation(() => {});
+    it('should enable markdown tasks and leave non-markdown tasks disabled', () => {
+ taskList.disableTaskListItems();
+ taskList.enableTaskListItems();
+
+ expect(document.querySelectorAll('.task-list-item input:enabled').length).toEqual(1);
+ expect(document.querySelectorAll('.task-list-item input:disabled').length).toEqual(1);
+ });
+ });
+
+ describe('enable', () => {
+ it('should enable task list items and on document event', () => {
+ jest.spyOn($.prototype, 'on').mockImplementation(() => {});
+
+ taskList.enable();
- taskList.enableTaskListItems({ currentTarget });
+ expect(document.querySelectorAll('.task-list-item input:enabled').length).toEqual(1);
+ expect(document.querySelectorAll('.task-list-item input:disabled').length).toEqual(1);
- expect(currentTarget.taskList).toHaveBeenCalledWith('enable');
+ expect($(document).on).toHaveBeenCalledWith(
+ 'tasklist:changed',
+ taskList.taskListContainerSelector,
+ taskList.updateHandler,
+ );
});
});
describe('disable', () => {
it('should disable task list items and off document event', () => {
- jest.spyOn(taskList, 'disableTaskListItems').mockImplementation(() => {});
jest.spyOn($.prototype, 'off').mockImplementation(() => {});
taskList.disable();
- expect(taskList.disableTaskListItems).toHaveBeenCalled();
+ expect(document.querySelectorAll('.task-list-item input:disabled').length).toEqual(2);
+
expect($(document).off).toHaveBeenCalledWith(
'tasklist:changed',
taskList.taskListContainerSelector,
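
The rewritten task list fixture above distinguishes markdown-rendered checkboxes (their `<li>` carries `data-sourcepos`) from a hand-coded one, and the new assertions count disabled and enabled inputs accordingly. The sketch below shows the selection logic those counts imply; the selectors and function names are assumptions, not the actual `~/task_list` implementation.

```javascript
// Illustrative sketch (not part of this diff): disable every checkbox, but only
// re-enable the ones whose list item has data-sourcepos (i.e. came from markdown),
// matching the "2 disabled" / "1 enabled + 1 disabled" counts asserted above.
const disableTaskListItems = () => {
  document
    .querySelectorAll('.task-list-item input.task-list-item-checkbox')
    .forEach((checkbox) => {
      checkbox.disabled = true; // both markdown and hand-coded checkboxes
    });
};

const enableTaskListItems = () => {
  document
    .querySelectorAll('.task-list-item[data-sourcepos] input.task-list-item-checkbox')
    .forEach((checkbox) => {
      checkbox.disabled = false; // only the markdown-sourced checkbox
    });
};
```
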
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
index 2c7bcaa98b0..dd4c8198b72 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking_spec.js
@@ -155,6 +155,32 @@ describe('Tracking', () => {
});
});
+ describe('.enableFormTracking', () => {
+ it('tells snowplow to enable form tracking', () => {
+ const config = { forms: { whitelist: [''] }, fields: { whitelist: [''] } };
+ Tracking.enableFormTracking(config, ['_passed_context_']);
+
+ expect(snowplowSpy).toHaveBeenCalledWith('enableFormTracking', config, [
+ { data: { source: 'gitlab-javascript' }, schema: undefined },
+ '_passed_context_',
+ ]);
+ });
+
+ it('throws an error if no whitelist rules are provided', () => {
+ const expectedError = new Error(
+ 'Unable to enable form event tracking without whitelist rules.',
+ );
+
+ expect(() => Tracking.enableFormTracking()).toThrow(expectedError);
+ expect(() => Tracking.enableFormTracking({ fields: { whitelist: [] } })).toThrow(
+ expectedError,
+ );
+ expect(() => Tracking.enableFormTracking({ fields: { whitelist: [1] } })).not.toThrow(
+ expectedError,
+ );
+ });
+ });
+
describe('.flushPendingEvents', () => {
it('flushes any pending events', () => {
Tracking.initialized = false;
diff --git a/spec/frontend/users_select/index_spec.js b/spec/frontend/users_select/index_spec.js
index 5b07087b76c..99caaf61c54 100644
--- a/spec/frontend/users_select/index_spec.js
+++ b/spec/frontend/users_select/index_spec.js
@@ -1,145 +1,33 @@
-import { waitFor } from '@testing-library/dom';
-import MockAdapter from 'axios-mock-adapter';
-import { cloneDeep } from 'lodash';
-import { getJSONFixture } from 'helpers/fixtures';
-import axios from '~/lib/utils/axios_utils';
-import UsersSelect from '~/users_select';
-
-// TODO: generate this from a fixture that guarantees the same output in CE and EE [(see issue)][1].
-// Hardcoding this HTML temproarily fixes a FOSS ~"master::broken" [(see issue)][2].
-// [1]: https://gitlab.com/gitlab-org/gitlab/-/issues/327809
-// [2]: https://gitlab.com/gitlab-org/gitlab/-/issues/327805
-const getUserSearchHTML = () => `
-<div class="js-sidebar-assignee-data selectbox hide-collapsed">
-<input type="hidden" name="merge_request[assignee_ids][]" value="0">
-<div class="dropdown js-sidebar-assignee-dropdown">
-<button class="dropdown-menu-toggle js-user-search js-author-search js-multiselect js-save-user-data js-invite-members-track" type="button" data-first-user="frontend-fixtures" data-current-user="true" data-iid="1" data-issuable-type="merge_request" data-project-id="1" data-author-id="1" data-field-name="merge_request[assignee_ids][]" data-issue-update="http://test.host/frontend-fixtures/merge-requests-project/-/merge_requests/1.json" data-ability-name="merge_request" data-null-user="true" data-display="static" data-multi-select="true" data-dropdown-title="Select assignee(s)" data-dropdown-header="Assignee(s)" data-track-event="show_invite_members" data-toggle="dropdown"><span class="dropdown-toggle-text ">Select assignee(s)</span><svg class="s16 dropdown-menu-toggle-icon gl-top-3" data-testid="chevron-down-icon"><use xlink:href="http://test.host/assets/icons-16c30bec0d8a57f0a33e6f6215c6aff7a6ec5e4a7e6b7de733a6b648541a336a.svg#chevron-down"></use></svg></button><div class="dropdown-menu dropdown-select dropdown-menu-user dropdown-menu-selectable dropdown-menu-author dropdown-extended-height">
-<div class="dropdown-title gl-display-flex">
-<span class="gl-ml-auto">Assign to</span><button class="dropdown-title-button dropdown-menu-close gl-ml-auto" aria-label="Close" type="button"><svg class="s16 dropdown-menu-close-icon" data-testid="close-icon"><use xlink:href="http://test.host/assets/icons-16c30bec0d8a57f0a33e6f6215c6aff7a6ec5e4a7e6b7de733a6b648541a336a.svg#close"></use></svg></button>
-</div>
-<div class="dropdown-input">
-<input type="search" id="" data-qa-selector="dropdown_input_field" class="dropdown-input-field" placeholder="Search users" autocomplete="off"><svg class="s16 dropdown-input-search" data-testid="search-icon"><use xlink:href="http://test.host/assets/icons-16c30bec0d8a57f0a33e6f6215c6aff7a6ec5e4a7e6b7de733a6b648541a336a.svg#search"></use></svg><svg class="s16 dropdown-input-clear js-dropdown-input-clear" data-testid="close-icon"><use xlink:href="http://test.host/assets/icons-16c30bec0d8a57f0a33e6f6215c6aff7a6ec5e4a7e6b7de733a6b648541a336a.svg#close"></use></svg>
-</div>
-<div data-qa-selector="dropdown_list_content" class="dropdown-content "></div>
-<div class="dropdown-footer">
-<ul class="dropdown-footer-list">
-<li>
-<div class="js-invite-members-trigger" data-display-text="Invite Members" data-event="click_invite_members" data-label="edit_assignee" data-trigger-element="anchor"></div>
-</li>
-</ul>
-</div>
-<div class="dropdown-loading"><div class="gl-spinner-container"><span class="gl-spinner gl-spinner-orange gl-spinner-md gl-mt-7" aria-label="Loading"></span></div></div>
-</div>
-</div>
-</div>
-`;
-
-const USER_SEARCH_HTML = getUserSearchHTML();
-const AUTOCOMPLETE_USERS = getJSONFixture('autocomplete/users.json');
+import {
+ createInputsModelExpectation,
+ createUnassignedExpectation,
+ createAssignedExpectation,
+ createTestContext,
+ findDropdownItemsModel,
+ findDropdownItem,
+ findAssigneesInputsModel,
+ getUsersFixtureAt,
+ setAssignees,
+ toggleDropdown,
+ waitForDropdownItems,
+} from './test_helper';
describe('~/users_select/index', () => {
- let subject;
- let mock;
-
- const createSubject = (currentUser = null) => {
- if (subject) {
- throw new Error('test subject is already created');
- }
-
- subject = new UsersSelect(currentUser);
- };
-
- // finders -------------------------------------------------------------------
- const findAssigneesInputs = () =>
- document.querySelectorAll('input[name="merge_request[assignee_ids][]');
- const findAssigneesInputsModel = () =>
- Array.from(findAssigneesInputs()).map((input) => ({
- value: input.value,
- dataset: { ...input.dataset },
- }));
- const findUserSearchButton = () => document.querySelector('.js-user-search');
- const findDropdownItem = ({ id }) => document.querySelector(`li[data-user-id="${id}"] a`);
- const findDropdownItemsModel = () =>
- Array.from(document.querySelectorAll('.dropdown-content li')).map((el) => {
- if (el.classList.contains('divider')) {
- return {
- type: 'divider',
- };
- } else if (el.classList.contains('dropdown-header')) {
- return {
- type: 'dropdown-header',
- text: el.textContent,
- };
- }
-
- return {
- type: 'user',
- userId: el.dataset.userId,
- };
- });
-
- // arrange/act helpers -------------------------------------------------------
- const setAssignees = (...users) => {
- findAssigneesInputs().forEach((x) => x.remove());
-
- const container = document.querySelector('.js-sidebar-assignee-data');
-
- container.prepend(
- ...users.map((user) => {
- const input = document.createElement('input');
- input.name = 'merge_request[assignee_ids][]';
- input.value = user.id.toString();
- input.setAttribute('data-avatar-url', user.avatar_url);
- input.setAttribute('data-name', user.name);
- input.setAttribute('data-username', user.username);
- input.setAttribute('data-can-merge', user.can_merge);
- return input;
- }),
- );
- };
- const toggleDropdown = () => findUserSearchButton().click();
- const waitForDropdownItems = () =>
- waitFor(() => expect(findDropdownItem(AUTOCOMPLETE_USERS[0])).not.toBeNull());
-
- // assertion helpers ---------------------------------------------------------
- const createUnassignedExpectation = () => {
- return [
- { type: 'user', userId: '0' },
- { type: 'divider' },
- ...AUTOCOMPLETE_USERS.map((x) => ({ type: 'user', userId: x.id.toString() })),
- ];
- };
- const createAssignedExpectation = (...selectedUsers) => {
- const selectedIds = new Set(selectedUsers.map((x) => x.id));
- const unselectedUsers = AUTOCOMPLETE_USERS.filter((x) => !selectedIds.has(x.id));
-
- return [
- { type: 'user', userId: '0' },
- { type: 'divider' },
- { type: 'dropdown-header', text: 'Assignee(s)' },
- ...selectedUsers.map((x) => ({ type: 'user', userId: x.id.toString() })),
- { type: 'divider' },
- ...unselectedUsers.map((x) => ({ type: 'user', userId: x.id.toString() })),
- ];
- };
+ const context = createTestContext({
+ fixturePath: 'merge_requests/merge_request_with_single_assignee_feature.html',
+ });
beforeEach(() => {
- const rootEl = document.createElement('div');
- rootEl.innerHTML = USER_SEARCH_HTML;
- document.body.appendChild(rootEl);
-
- mock = new MockAdapter(axios);
- mock.onGet('/-/autocomplete/users.json').reply(200, cloneDeep(AUTOCOMPLETE_USERS));
+ context.setup();
});
afterEach(() => {
- document.body.innerHTML = '';
- subject = null;
+ context.teardown();
});
describe('when opened', () => {
beforeEach(async () => {
- createSubject();
+ context.createSubject();
toggleDropdown();
await waitForDropdownItems();
@@ -150,8 +38,12 @@ describe('~/users_select/index', () => {
});
describe('when users are selected', () => {
- const selectedUsers = [AUTOCOMPLETE_USERS[2], AUTOCOMPLETE_USERS[4]];
- const expectation = createAssignedExpectation(...selectedUsers);
+ const selectedUsers = [getUsersFixtureAt(2), getUsersFixtureAt(4)];
+ const lastSelected = selectedUsers[selectedUsers.length - 1];
+ const expectation = createAssignedExpectation({
+ header: 'Assignee',
+ assigned: [lastSelected],
+ });
beforeEach(() => {
selectedUsers.forEach((user) => {
@@ -163,42 +55,22 @@ describe('~/users_select/index', () => {
expect(findDropdownItemsModel()).toEqual(expectation);
});
- it('shows assignee even after close and open', () => {
- toggleDropdown();
- toggleDropdown();
-
- expect(findDropdownItemsModel()).toEqual(expectation);
- });
-
it('updates field', () => {
- expect(findAssigneesInputsModel()).toEqual(
- selectedUsers.map((user) => ({
- value: user.id.toString(),
- dataset: {
- approved: user.approved.toString(),
- avatar_url: user.avatar_url,
- can_merge: user.can_merge.toString(),
- can_update_merge_request: user.can_update_merge_request.toString(),
- id: user.id.toString(),
- name: user.name,
- show_status: user.show_status.toString(),
- state: user.state,
- username: user.username,
- web_url: user.web_url,
- },
- })),
- );
+ expect(findAssigneesInputsModel()).toEqual(createInputsModelExpectation([lastSelected]));
});
});
});
describe('with preselected user and opened', () => {
- const expectation = createAssignedExpectation(AUTOCOMPLETE_USERS[0]);
+ const expectation = createAssignedExpectation({
+ header: 'Assignee',
+ assigned: [getUsersFixtureAt(0)],
+ });
beforeEach(async () => {
- setAssignees(AUTOCOMPLETE_USERS[0]);
+ setAssignees(getUsersFixtureAt(0));
- createSubject();
+ context.createSubject();
toggleDropdown();
await waitForDropdownItems();
diff --git a/spec/frontend/users_select/test_helper.js b/spec/frontend/users_select/test_helper.js
new file mode 100644
index 00000000000..c5adbe9bb09
--- /dev/null
+++ b/spec/frontend/users_select/test_helper.js
@@ -0,0 +1,152 @@
+import MockAdapter from 'axios-mock-adapter';
+import { memoize, cloneDeep } from 'lodash';
+import { getFixture, getJSONFixture } from 'helpers/fixtures';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import UsersSelect from '~/users_select';
+
+// fixtures -------------------------------------------------------------------
+const getUserSearchHTML = memoize((fixturePath) => {
+ const html = getFixture(fixturePath);
+ const parser = new DOMParser();
+
+ const el = parser.parseFromString(html, 'text/html').querySelector('.assignee');
+
+ return el.outerHTML;
+});
+
+const getUsersFixture = memoize(() => getJSONFixture('autocomplete/users.json'));
+
+export const getUsersFixtureAt = (idx) => getUsersFixture()[idx];
+
+// test context ---------------------------------------------------------------
+export const createTestContext = ({ fixturePath }) => {
+ let mock = null;
+ let subject = null;
+
+ const setup = () => {
+ const rootEl = document.createElement('div');
+ rootEl.innerHTML = getUserSearchHTML(fixturePath);
+ document.body.appendChild(rootEl);
+
+ mock = new MockAdapter(axios);
+ mock.onGet('/-/autocomplete/users.json').reply(200, cloneDeep(getUsersFixture()));
+ };
+
+ const teardown = () => {
+ mock.restore();
+ document.body.innerHTML = '';
+ subject = null;
+ };
+
+ const createSubject = () => {
+ if (subject) {
+ throw new Error('test subject is already created');
+ }
+
+ subject = new UsersSelect(null);
+ };
+
+ return {
+ setup,
+ teardown,
+ createSubject,
+ };
+};
+
+// finders -------------------------------------------------------------------
+export const findAssigneesInputs = () =>
+  document.querySelectorAll('input[name="merge_request[assignee_ids][]"]');
+export const findAssigneesInputsModel = () =>
+ Array.from(findAssigneesInputs()).map((input) => ({
+ value: input.value,
+ dataset: { ...input.dataset },
+ }));
+export const findUserSearchButton = () => document.querySelector('.js-user-search');
+export const findDropdownItem = ({ id }) => document.querySelector(`li[data-user-id="${id}"] a`);
+export const findDropdownItemsModel = () =>
+ Array.from(document.querySelectorAll('.dropdown-content li')).map((el) => {
+ if (el.classList.contains('divider')) {
+ return {
+ type: 'divider',
+ };
+ } else if (el.classList.contains('dropdown-header')) {
+ return {
+ type: 'dropdown-header',
+ text: el.textContent,
+ };
+ }
+
+ return {
+ type: 'user',
+ userId: el.dataset.userId,
+ };
+ });
+
+// arrange/act helpers -------------------------------------------------------
+export const setAssignees = (...users) => {
+ findAssigneesInputs().forEach((x) => x.remove());
+
+ const container = document.querySelector('.js-sidebar-assignee-data');
+
+ container.prepend(
+ ...users.map((user) => {
+ const input = document.createElement('input');
+ input.name = 'merge_request[assignee_ids][]';
+ input.value = user.id.toString();
+ input.setAttribute('data-avatar-url', user.avatar_url);
+ input.setAttribute('data-name', user.name);
+ input.setAttribute('data-username', user.username);
+ input.setAttribute('data-can-merge', user.can_merge);
+ return input;
+ }),
+ );
+};
+export const toggleDropdown = () => findUserSearchButton().click();
+export const waitForDropdownItems = async () => {
+ await axios.waitForAll();
+ await waitForPromises();
+};
+
+// assertion helpers ---------------------------------------------------------
+export const createUnassignedExpectation = () => {
+ return [
+ { type: 'user', userId: '0' },
+ { type: 'divider' },
+ ...getUsersFixture().map((x) => ({
+ type: 'user',
+ userId: x.id.toString(),
+ })),
+ ];
+};
+
+export const createAssignedExpectation = ({ header, assigned }) => {
+ const assignedIds = new Set(assigned.map((x) => x.id));
+  const unassignedUsers = getUsersFixture().filter((x) => !assignedIds.has(x.id));
+
+ return [
+ { type: 'user', userId: '0' },
+ { type: 'divider' },
+ { type: 'dropdown-header', text: header },
+ ...assigned.map((x) => ({ type: 'user', userId: x.id.toString() })),
+ { type: 'divider' },
+    ...unassignedUsers.map((x) => ({ type: 'user', userId: x.id.toString() })),
+ ];
+};
+
+export const createInputsModelExpectation = (users) =>
+ users.map((user) => ({
+ value: user.id.toString(),
+ dataset: {
+ approved: user.approved.toString(),
+ avatar_url: user.avatar_url,
+ can_merge: user.can_merge.toString(),
+ can_update_merge_request: user.can_update_merge_request.toString(),
+ id: user.id.toString(),
+ name: user.name,
+ show_status: user.show_status.toString(),
+ state: user.state,
+ username: user.username,
+ web_url: user.web_url,
+ },
+ }));
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
index eadf07e54fb..115f21d8b35 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
@@ -34,7 +34,7 @@ describe('MRWidgetHeader', () => {
divergedCommitsCount: 12,
sourceBranch: 'mr-widget-refactor',
sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'master',
+ targetBranch: 'main',
statusPath: 'abc',
},
});
@@ -48,7 +48,7 @@ describe('MRWidgetHeader', () => {
divergedCommitsCount: 0,
sourceBranch: 'mr-widget-refactor',
sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'master',
+ targetBranch: 'main',
statusPath: 'abc',
},
});
@@ -64,14 +64,14 @@ describe('MRWidgetHeader', () => {
divergedCommitsCount: 1,
sourceBranch: 'mr-widget-refactor',
sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'master',
- targetBranchPath: '/foo/bar/master',
+ targetBranch: 'main',
+ targetBranchPath: '/foo/bar/main',
statusPath: 'abc',
},
});
expect(wrapper.vm.commitsBehindText).toBe(
- 'The source branch is <a href="/foo/bar/master">1 commit behind</a> the target branch',
+ 'The source branch is <a href="/foo/bar/main">1 commit behind</a> the target branch',
);
});
@@ -81,14 +81,14 @@ describe('MRWidgetHeader', () => {
divergedCommitsCount: 2,
sourceBranch: 'mr-widget-refactor',
sourceBranchLink: '<a href="/foo/bar/mr-widget-refactor">Link</a>',
- targetBranch: 'master',
- targetBranchPath: '/foo/bar/master',
+ targetBranch: 'main',
+ targetBranchPath: '/foo/bar/main',
statusPath: 'abc',
},
});
expect(wrapper.vm.commitsBehindText).toBe(
- 'The source branch is <a href="/foo/bar/master">2 commits behind</a> the target branch',
+ 'The source branch is <a href="/foo/bar/main">2 commits behind</a> the target branch',
);
});
});
@@ -105,7 +105,7 @@ describe('MRWidgetHeader', () => {
sourceBranchRemoved: false,
targetBranchPath: 'foo/bar/commits-path',
targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'master',
+ targetBranch: 'main',
isOpen: true,
emailPatchesPath: '/mr/email-patches',
plainDiffPath: '/mr/plainDiffPath',
@@ -125,7 +125,7 @@ describe('MRWidgetHeader', () => {
});
it('renders target branch', () => {
- expect(wrapper.find('.js-target-branch').text().trim()).toBe('master');
+ expect(wrapper.find('.js-target-branch').text().trim()).toBe('main');
});
});
@@ -138,7 +138,7 @@ describe('MRWidgetHeader', () => {
sourceBranchRemoved: false,
targetBranchPath: 'foo/bar/commits-path',
targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'master',
+ targetBranch: 'main',
isOpen: true,
canPushToSourceBranch: true,
emailPatchesPath: '/mr/email-patches',
@@ -227,7 +227,7 @@ describe('MRWidgetHeader', () => {
sourceBranchRemoved: false,
targetBranchPath: 'foo/bar/commits-path',
targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'master',
+ targetBranch: 'main',
isOpen: false,
emailPatchesPath: '/mr/email-patches',
plainDiffPath: '/mr/plainDiffPath',
@@ -257,7 +257,7 @@ describe('MRWidgetHeader', () => {
sourceBranchRemoved: false,
targetBranchPath: 'foo/bar/commits-path',
targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'master',
+ targetBranch: 'main',
isOpen: true,
emailPatchesPath: '/mr/email-patches',
plainDiffPath: '/mr/plainDiffPath',
@@ -281,7 +281,7 @@ describe('MRWidgetHeader', () => {
sourceBranchRemoved: false,
targetBranchPath: 'foo/bar/commits-path',
targetBranchTreePath: 'foo/bar/tree/path',
- targetBranch: 'master',
+ targetBranch: 'main',
isOpen: true,
emailPatchesPath: '/mr/email-patches',
plainDiffPath: '/mr/plainDiffPath',
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
index e5862df5dda..ac20487c55f 100644
--- a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
+++ b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
@@ -16,7 +16,6 @@ exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have
>
<span
class="gl-mr-3"
- data-qa-selector="merge_request_status_content"
>
<span
class="js-status-text-before-author"
@@ -40,13 +39,14 @@ exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have
<a
class="btn btn-sm btn-default js-cancel-auto-merge"
+ data-qa-selector="cancel_auto_merge_button"
data-testid="cancelAutomaticMergeButton"
href="#"
role="button"
>
<!---->
- Cancel automatic merge
+ Cancel
</a>
</h4>
@@ -108,7 +108,6 @@ exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have c
>
<span
class="gl-mr-3"
- data-qa-selector="merge_request_status_content"
>
<span
class="js-status-text-before-author"
@@ -132,13 +131,14 @@ exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have c
<a
class="btn btn-sm btn-default js-cancel-auto-merge"
+ data-qa-selector="cancel_auto_merge_button"
data-testid="cancelAutomaticMergeButton"
href="#"
role="button"
>
<!---->
- Cancel automatic merge
+ Cancel
</a>
</h4>
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_ready_to_merge_spec.js.snap b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_ready_to_merge_spec.js.snap
new file mode 100644
index 00000000000..cef1dff3335
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_ready_to_merge_spec.js.snap
@@ -0,0 +1,3 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`ReadyToMerge with a mismatched SHA warns the user to refresh to review 1`] = `"<gl-sprintf-stub message=\\"New changes were added. %{linkStart}Reload the page to review them%{linkEnd}\\"></gl-sprintf-stub>"`;
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
index 1af96717b56..0110a76e722 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
@@ -192,15 +192,13 @@ describe('MRWidgetAutoMergeEnabled', () => {
});
describe('cancelButtonText', () => {
- it('should return "Cancel automatic merge" if MWPS is selected', () => {
+ it('should return "Cancel" if MWPS is selected', () => {
factory({
...defaultMrProps(),
autoMergeStrategy: MWPS_MERGE_STRATEGY,
});
- expect(wrapper.findByTestId('cancelAutomaticMergeButton').text()).toBe(
- 'Cancel automatic merge',
- );
+ expect(wrapper.findByTestId('cancelAutomaticMergeButton').text()).toBe('Cancel');
});
});
});
@@ -329,7 +327,7 @@ describe('MRWidgetAutoMergeEnabled', () => {
expect(statusText).toBe('to be merged automatically when the pipeline succeeds');
});
- it('should render the cancel button as "Cancel automatic merge" if MWPS is selected', () => {
+ it('should render the cancel button as "Cancel" if MWPS is selected', () => {
factory({
...defaultMrProps(),
autoMergeStrategy: MWPS_MERGE_STRATEGY,
@@ -337,7 +335,7 @@ describe('MRWidgetAutoMergeEnabled', () => {
const cancelButtonText = trimText(wrapper.find('.js-cancel-auto-merge').text());
- expect(cancelButtonText).toBe('Cancel automatic merge');
+ expect(cancelButtonText).toBe('Cancel');
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
index e4123b2ca83..b31a75f30d3 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
@@ -8,7 +8,7 @@ describe('Commits header component', () => {
wrapper = shallowMount(CommitsHeader, {
propsData: {
isSquashEnabled: false,
- targetBranch: 'master',
+ targetBranch: 'main',
commitsCount: 5,
isFastForwardEnabled: false,
...props,
@@ -94,7 +94,7 @@ describe('Commits header component', () => {
it('has correct target branch displayed', () => {
createComponent();
- expect(findTargetBranchMessage().text()).toBe('master');
+ expect(findTargetBranchMessage().text()).toBe('main');
});
it('does has merge commit part of the message', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js
index b16fb5171e7..b6c16958993 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js
@@ -4,6 +4,7 @@ import MrWidgetMerging from '~/vue_merge_request_widget/components/states/mr_wid
describe('MRWidgetMerging', () => {
let wrapper;
+ const GlEmoji = { template: '<img />' };
beforeEach(() => {
wrapper = shallowMount(MrWidgetMerging, {
propsData: {
@@ -12,6 +13,9 @@ describe('MRWidgetMerging', () => {
targetBranch: 'branch',
},
},
+ stubs: {
+ GlEmoji,
+ },
});
});
@@ -27,7 +31,7 @@ describe('MRWidgetMerging', () => {
.trim()
.replace(/\s\s+/g, ' ')
.replace(/[\r\n]+/g, ' '),
- ).toContain('This merge request is in the process of being merged');
+ ).toContain('Merging!');
});
it('renders branch information', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 983e4a35078..85a42946325 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -1,13 +1,13 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
-import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
import simplePoll from '~/lib/utils/simple_poll';
import CommitEdit from '~/vue_merge_request_widget/components/states/commit_edit.vue';
import CommitMessageDropdown from '~/vue_merge_request_widget/components/states/commit_message_dropdown.vue';
import CommitsHeader from '~/vue_merge_request_widget/components/states/commits_header.vue';
import ReadyToMerge from '~/vue_merge_request_widget/components/states/ready_to_merge.vue';
import SquashBeforeMerge from '~/vue_merge_request_widget/components/states/squash_before_merge.vue';
-import { MWPS_MERGE_STRATEGY, MTWPS_MERGE_STRATEGY } from '~/vue_merge_request_widget/constants';
+import { MWPS_MERGE_STRATEGY } from '~/vue_merge_request_widget/constants';
import eventHub from '~/vue_merge_request_widget/event_hub';
jest.mock('~/lib/utils/simple_poll', () =>
@@ -42,7 +42,7 @@ const createTestMr = (customConfig) => {
commitMessageWithDescription,
shouldRemoveSourceBranch: true,
canRemoveSourceBranch: false,
- targetBranch: 'master',
+ targetBranch: 'main',
preferredAutoMergeStrategy: MWPS_MERGE_STRATEGY,
availableAutoMergeStrategies: [MWPS_MERGE_STRATEGY],
mergeImmediatelyDocsPath: 'path/to/merge/immediately/docs',
@@ -58,11 +58,9 @@ const createTestService = () => ({
poll: jest.fn().mockResolvedValue(),
});
+let wrapper;
const createComponent = (customConfig = {}) => {
- const Component = Vue.extend(ReadyToMerge);
-
- return new Component({
- el: document.createElement('div'),
+ wrapper = shallowMount(ReadyToMerge, {
propsData: {
mr: createTestMr(customConfig),
service: createTestService(),
@@ -71,277 +69,207 @@ const createComponent = (customConfig = {}) => {
};
describe('ReadyToMerge', () => {
- let vm;
-
- beforeEach(() => {
- vm = createComponent();
- });
-
afterEach(() => {
- vm.$destroy();
- });
-
- describe('props', () => {
- it('should have props', () => {
- const { mr, service } = ReadyToMerge.props;
-
- expect(mr.type instanceof Object).toBeTruthy();
- expect(mr.required).toBeTruthy();
-
- expect(service.type instanceof Object).toBeTruthy();
- expect(service.required).toBeTruthy();
- });
- });
-
- describe('data', () => {
- it('should have default data', () => {
- expect(vm.mergeWhenBuildSucceeds).toBeFalsy();
- expect(vm.useCommitMessageWithDescription).toBeFalsy();
- expect(vm.showCommitMessageEditor).toBeFalsy();
- expect(vm.isMakingRequest).toBeFalsy();
- expect(vm.isMergingImmediately).toBeFalsy();
- expect(vm.commitMessage).toBe(vm.mr.commitMessage);
- });
+ wrapper.destroy();
});
describe('computed', () => {
describe('isAutoMergeAvailable', () => {
it('should return true when at least one merge strategy is available', () => {
- vm.mr.availableAutoMergeStrategies = [MWPS_MERGE_STRATEGY];
+ createComponent();
- expect(vm.isAutoMergeAvailable).toBe(true);
+ expect(wrapper.vm.isAutoMergeAvailable).toBe(true);
});
it('should return false when no merge strategies are available', () => {
- vm.mr.availableAutoMergeStrategies = [];
+ createComponent({ mr: { availableAutoMergeStrategies: [] } });
- expect(vm.isAutoMergeAvailable).toBe(false);
+ expect(wrapper.vm.isAutoMergeAvailable).toBe(false);
});
});
describe('status', () => {
it('defaults to success', () => {
- Vue.set(vm.mr, 'pipeline', true);
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
+ createComponent({ mr: { pipeline: true, availableAutoMergeStrategies: [] } });
- expect(vm.status).toEqual('success');
+ expect(wrapper.vm.status).toEqual('success');
});
it('returns failed when MR has CI but also has an unknown status', () => {
- Vue.set(vm.mr, 'hasCI', true);
+ createComponent({ mr: { hasCI: true } });
- expect(vm.status).toEqual('failed');
+ expect(wrapper.vm.status).toEqual('failed');
});
it('returns default when MR has no pipeline', () => {
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
+ createComponent({ mr: { availableAutoMergeStrategies: [] } });
- expect(vm.status).toEqual('success');
+ expect(wrapper.vm.status).toEqual('success');
});
it('returns pending when pipeline is active', () => {
- Vue.set(vm.mr, 'pipeline', {});
- Vue.set(vm.mr, 'isPipelineActive', true);
+ createComponent({ mr: { pipeline: {}, isPipelineActive: true } });
- expect(vm.status).toEqual('pending');
+ expect(wrapper.vm.status).toEqual('pending');
});
it('returns failed when pipeline is failed', () => {
- Vue.set(vm.mr, 'pipeline', {});
- Vue.set(vm.mr, 'isPipelineFailed', true);
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
+ createComponent({
+ mr: { pipeline: {}, isPipelineFailed: true, availableAutoMergeStrategies: [] },
+ });
- expect(vm.status).toEqual('failed');
+ expect(wrapper.vm.status).toEqual('failed');
});
});
describe('mergeButtonVariant', () => {
it('defaults to success class', () => {
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
+ createComponent({
+ mr: { availableAutoMergeStrategies: [] },
+ });
- expect(vm.mergeButtonVariant).toEqual('success');
+ expect(wrapper.vm.mergeButtonVariant).toEqual('success');
});
it('returns success class for success status', () => {
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
- Vue.set(vm.mr, 'pipeline', true);
+ createComponent({
+ mr: { availableAutoMergeStrategies: [], pipeline: true },
+ });
- expect(vm.mergeButtonVariant).toEqual('success');
+ expect(wrapper.vm.mergeButtonVariant).toEqual('success');
});
it('returns info class for pending status', () => {
- Vue.set(vm.mr, 'availableAutoMergeStrategies', [MTWPS_MERGE_STRATEGY]);
+ createComponent();
- expect(vm.mergeButtonVariant).toEqual('info');
+ expect(wrapper.vm.mergeButtonVariant).toEqual('info');
});
it('returns danger class for failed status', () => {
- vm.mr.hasCI = true;
+ createComponent({ mr: { hasCI: true } });
- expect(vm.mergeButtonVariant).toEqual('danger');
+ expect(wrapper.vm.mergeButtonVariant).toEqual('danger');
});
});
describe('status icon', () => {
it('defaults to tick icon', () => {
- expect(vm.iconClass).toEqual('success');
+ createComponent();
+
+ expect(wrapper.vm.iconClass).toEqual('success');
});
it('shows tick for success status', () => {
- vm.mr.pipeline = true;
+ createComponent({ mr: { pipeline: true } });
- expect(vm.iconClass).toEqual('success');
+ expect(wrapper.vm.iconClass).toEqual('success');
});
it('shows tick for pending status', () => {
- vm.mr.pipeline = {};
- vm.mr.isPipelineActive = true;
+ createComponent({ mr: { pipeline: {}, isPipelineActive: true } });
- expect(vm.iconClass).toEqual('success');
- });
-
- it('shows warning icon for failed status', () => {
- vm.mr.hasCI = true;
-
- expect(vm.iconClass).toEqual('warning');
- });
-
- it('shows warning icon for merge not allowed', () => {
- vm.mr.hasCI = true;
-
- expect(vm.iconClass).toEqual('warning');
+ expect(wrapper.vm.iconClass).toEqual('success');
});
});
describe('mergeButtonText', () => {
it('should return "Merge" when no auto merge strategies are available', () => {
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
+ createComponent({ mr: { availableAutoMergeStrategies: [] } });
- expect(vm.mergeButtonText).toEqual('Merge');
+ expect(wrapper.vm.mergeButtonText).toEqual('Merge');
});
- it('should return "Merge in progress"', () => {
- Vue.set(vm, 'isMergingImmediately', true);
+ it('should return "Merge in progress"', async () => {
+ createComponent();
+
+ wrapper.setData({ isMergingImmediately: true });
+
+ await Vue.nextTick();
- expect(vm.mergeButtonText).toEqual('Merge in progress');
+ expect(wrapper.vm.mergeButtonText).toEqual('Merge in progress');
});
it('should return "Merge when pipeline succeeds" when the MWPS auto merge strategy is available', () => {
- Vue.set(vm, 'isMergingImmediately', false);
- Vue.set(vm.mr, 'preferredAutoMergeStrategy', MWPS_MERGE_STRATEGY);
+ createComponent({
+ mr: { isMergingImmediately: false, preferredAutoMergeStrategy: MWPS_MERGE_STRATEGY },
+ });
- expect(vm.mergeButtonText).toEqual('Merge when pipeline succeeds');
+ expect(wrapper.vm.mergeButtonText).toEqual('Merge when pipeline succeeds');
});
});
describe('autoMergeText', () => {
it('should return Merge when pipeline succeeds', () => {
- Vue.set(vm.mr, 'preferredAutoMergeStrategy', MWPS_MERGE_STRATEGY);
+ createComponent({ mr: { preferredAutoMergeStrategy: MWPS_MERGE_STRATEGY } });
- expect(vm.autoMergeText).toEqual('Merge when pipeline succeeds');
+ expect(wrapper.vm.autoMergeText).toEqual('Merge when pipeline succeeds');
});
});
describe('shouldShowMergeImmediatelyDropdown', () => {
it('should return false if no pipeline is active', () => {
- Vue.set(vm.mr, 'isPipelineActive', false);
- Vue.set(vm.mr, 'onlyAllowMergeIfPipelineSucceeds', false);
+ createComponent({
+ mr: { isPipelineActive: false, onlyAllowMergeIfPipelineSucceeds: false },
+ });
- expect(vm.shouldShowMergeImmediatelyDropdown).toBe(false);
+ expect(wrapper.vm.shouldShowMergeImmediatelyDropdown).toBe(false);
});
it('should return false if "Pipelines must succeed" is enabled for the current project', () => {
- Vue.set(vm.mr, 'isPipelineActive', true);
- Vue.set(vm.mr, 'onlyAllowMergeIfPipelineSucceeds', true);
+ createComponent({ mr: { isPipelineActive: true, onlyAllowMergeIfPipelineSucceeds: true } });
- expect(vm.shouldShowMergeImmediatelyDropdown).toBe(false);
- });
-
- it('should return true if the MR\'s pipeline is active and "Pipelines must succeed" is not enabled for the current project', () => {
- Vue.set(vm.mr, 'isPipelineActive', true);
- Vue.set(vm.mr, 'onlyAllowMergeIfPipelineSucceeds', false);
-
- expect(vm.shouldShowMergeImmediatelyDropdown).toBe(true);
+ expect(wrapper.vm.shouldShowMergeImmediatelyDropdown).toBe(false);
});
});
describe('isMergeButtonDisabled', () => {
it('should return false with initial data', () => {
- Vue.set(vm.mr, 'isMergeAllowed', true);
+ createComponent({ mr: { isMergeAllowed: true } });
- expect(vm.isMergeButtonDisabled).toBe(false);
+ expect(wrapper.vm.isMergeButtonDisabled).toBe(false);
});
it('should return true when there is no commit message', () => {
- Vue.set(vm.mr, 'isMergeAllowed', true);
- Vue.set(vm, 'commitMessage', '');
+ createComponent({ mr: { isMergeAllowed: true, commitMessage: '' } });
- expect(vm.isMergeButtonDisabled).toBe(true);
+ expect(wrapper.vm.isMergeButtonDisabled).toBe(true);
});
it('should return true if merge is not allowed', () => {
- Vue.set(vm.mr, 'isMergeAllowed', false);
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
- Vue.set(vm.mr, 'onlyAllowMergeIfPipelineSucceeds', true);
+ createComponent({
+ mr: {
+ isMergeAllowed: false,
+ availableAutoMergeStrategies: [],
+ onlyAllowMergeIfPipelineSucceeds: true,
+ },
+ });
- expect(vm.isMergeButtonDisabled).toBe(true);
+ expect(wrapper.vm.isMergeButtonDisabled).toBe(true);
});
- it('should return true when the vm instance is making request', () => {
- Vue.set(vm.mr, 'isMergeAllowed', true);
- Vue.set(vm, 'isMakingRequest', true);
+    it('should return true when the vm instance is making a request', async () => {
+ createComponent({ mr: { isMergeAllowed: true } });
- expect(vm.isMergeButtonDisabled).toBe(true);
- });
- });
+ wrapper.setData({ isMakingRequest: true });
- describe('isMergeImmediatelyDangerous', () => {
- it('should always return false in CE', () => {
- expect(vm.isMergeImmediatelyDangerous).toBe(false);
+ await Vue.nextTick();
+
+ expect(wrapper.vm.isMergeButtonDisabled).toBe(true);
});
});
});
describe('methods', () => {
- describe('shouldShowMergeControls', () => {
- it('should return false when an external pipeline is running and required to succeed', () => {
- Vue.set(vm.mr, 'isMergeAllowed', false);
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
-
- expect(vm.shouldShowMergeControls).toBe(false);
- });
-
- it('should return true when the build succeeded or build not required to succeed', () => {
- Vue.set(vm.mr, 'isMergeAllowed', true);
- Vue.set(vm.mr, 'availableAutoMergeStrategies', []);
-
- expect(vm.shouldShowMergeControls).toBe(true);
- });
-
- it('should return true when showing the MWPS button and a pipeline is running that needs to be successful', () => {
- Vue.set(vm.mr, 'isMergeAllowed', false);
- Vue.set(vm.mr, 'availableAutoMergeStrategies', [MWPS_MERGE_STRATEGY]);
-
- expect(vm.shouldShowMergeControls).toBe(true);
- });
-
- it('should return true when showing the MWPS button but not required for the pipeline to succeed', () => {
- Vue.set(vm.mr, 'isMergeAllowed', true);
- Vue.set(vm.mr, 'availableAutoMergeStrategies', [MWPS_MERGE_STRATEGY]);
-
- expect(vm.shouldShowMergeControls).toBe(true);
- });
- });
-
describe('updateMergeCommitMessage', () => {
it('should revert flag and change commitMessage', () => {
- expect(vm.commitMessage).toEqual(commitMessage);
- vm.updateMergeCommitMessage(true);
+ createComponent();
+
+ wrapper.vm.updateMergeCommitMessage(true);
- expect(vm.commitMessage).toEqual(commitMessageWithDescription);
- vm.updateMergeCommitMessage(false);
+ expect(wrapper.vm.commitMessage).toEqual(commitMessageWithDescription);
+ wrapper.vm.updateMergeCommitMessage(false);
- expect(vm.commitMessage).toEqual(commitMessage);
+ expect(wrapper.vm.commitMessage).toEqual(commitMessage);
});
});
@@ -356,23 +284,26 @@ describe('ReadyToMerge', () => {
});
it('should handle merge when pipeline succeeds', (done) => {
+ createComponent();
+
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
jest
- .spyOn(vm.service, 'merge')
+ .spyOn(wrapper.vm.service, 'merge')
.mockReturnValue(returnPromise('merge_when_pipeline_succeeds'));
- vm.removeSourceBranch = false;
- vm.handleMergeButtonClick(true);
+ wrapper.setData({ removeSourceBranch: false });
+
+ wrapper.vm.handleMergeButtonClick(true);
setImmediate(() => {
- expect(vm.isMakingRequest).toBeTruthy();
+ expect(wrapper.vm.isMakingRequest).toBeTruthy();
expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
- const params = vm.service.merge.mock.calls[0][0];
+ const params = wrapper.vm.service.merge.mock.calls[0][0];
expect(params).toEqual(
expect.objectContaining({
- sha: vm.mr.sha,
- commit_message: vm.mr.commitMessage,
+ sha: wrapper.vm.mr.sha,
+ commit_message: wrapper.vm.mr.commitMessage,
should_remove_source_branch: false,
auto_merge_strategy: 'merge_when_pipeline_succeeds',
}),
@@ -382,15 +313,17 @@ describe('ReadyToMerge', () => {
});
it('should handle merge failed', (done) => {
+ createComponent();
+
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- jest.spyOn(vm.service, 'merge').mockReturnValue(returnPromise('failed'));
- vm.handleMergeButtonClick(false, true);
+ jest.spyOn(wrapper.vm.service, 'merge').mockReturnValue(returnPromise('failed'));
+ wrapper.vm.handleMergeButtonClick(false, true);
setImmediate(() => {
- expect(vm.isMakingRequest).toBeTruthy();
+ expect(wrapper.vm.isMakingRequest).toBeTruthy();
expect(eventHub.$emit).toHaveBeenCalledWith('FailedToMerge', undefined);
- const params = vm.service.merge.mock.calls[0][0];
+ const params = wrapper.vm.service.merge.mock.calls[0][0];
expect(params.should_remove_source_branch).toBeTruthy();
expect(params.auto_merge_strategy).toBeUndefined();
@@ -399,15 +332,17 @@ describe('ReadyToMerge', () => {
});
it('should handle merge action accepted case', (done) => {
- jest.spyOn(vm.service, 'merge').mockReturnValue(returnPromise('success'));
- jest.spyOn(vm, 'initiateMergePolling').mockImplementation(() => {});
- vm.handleMergeButtonClick();
+ createComponent();
+
+ jest.spyOn(wrapper.vm.service, 'merge').mockReturnValue(returnPromise('success'));
+ jest.spyOn(wrapper.vm, 'initiateMergePolling').mockImplementation(() => {});
+ wrapper.vm.handleMergeButtonClick();
setImmediate(() => {
- expect(vm.isMakingRequest).toBeTruthy();
- expect(vm.initiateMergePolling).toHaveBeenCalled();
+ expect(wrapper.vm.isMakingRequest).toBeTruthy();
+ expect(wrapper.vm.initiateMergePolling).toHaveBeenCalled();
- const params = vm.service.merge.mock.calls[0][0];
+ const params = wrapper.vm.service.merge.mock.calls[0][0];
expect(params.should_remove_source_branch).toBeTruthy();
expect(params.auto_merge_strategy).toBeUndefined();
@@ -418,128 +353,31 @@ describe('ReadyToMerge', () => {
describe('initiateMergePolling', () => {
it('should call simplePoll', () => {
- vm.initiateMergePolling();
+ createComponent();
+
+ wrapper.vm.initiateMergePolling();
expect(simplePoll).toHaveBeenCalledWith(expect.any(Function), { timeout: 0 });
});
it('should call handleMergePolling', () => {
- jest.spyOn(vm, 'handleMergePolling').mockImplementation(() => {});
-
- vm.initiateMergePolling();
-
- expect(vm.handleMergePolling).toHaveBeenCalled();
- });
- });
-
- describe('handleMergePolling', () => {
- const returnPromise = (state) =>
- new Promise((resolve) => {
- resolve({
- data: {
- state,
- source_branch_exists: true,
- },
- });
- });
-
- beforeEach(() => {
- loadFixtures('merge_requests/merge_request_of_current_user.html');
- });
-
- it('should call start and stop polling when MR merged', (done) => {
- jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
- jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
-
- let cpc = false; // continuePollingCalled
- let spc = false; // stopPollingCalled
-
- vm.handleMergePolling(
- () => {
- cpc = true;
- },
- () => {
- spc = true;
- },
- );
- setImmediate(() => {
- expect(vm.service.poll).toHaveBeenCalled();
- expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
- expect(eventHub.$emit).toHaveBeenCalledWith('FetchActionsContent');
- expect(vm.initiateRemoveSourceBranchPolling).toHaveBeenCalled();
- expect(refreshUserMergeRequestCounts).toHaveBeenCalled();
- expect(cpc).toBeFalsy();
- expect(spc).toBeTruthy();
+ createComponent();
- done();
- });
- });
-
- it('updates status box', (done) => {
- jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
- jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
-
- vm.handleMergePolling(
- () => {},
- () => {},
- );
-
- setImmediate(() => {
- const statusBox = document.querySelector('.status-box');
-
- expect(statusBox.classList.contains('status-box-mr-merged')).toBeTruthy();
- expect(statusBox.textContent).toContain('Merged');
-
- done();
- });
- });
-
- it('updates merge request count badge', (done) => {
- jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
- jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
-
- vm.handleMergePolling(
- () => {},
- () => {},
- );
+ jest.spyOn(wrapper.vm, 'handleMergePolling').mockImplementation(() => {});
- setImmediate(() => {
- expect(document.querySelector('.js-merge-counter').textContent).toBe('0');
-
- done();
- });
- });
-
- it('should continue polling until MR is merged', (done) => {
- jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('some_other_state'));
- jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
-
- let cpc = false; // continuePollingCalled
- let spc = false; // stopPollingCalled
-
- vm.handleMergePolling(
- () => {
- cpc = true;
- },
- () => {
- spc = true;
- },
- );
- setImmediate(() => {
- expect(cpc).toBeTruthy();
- expect(spc).toBeFalsy();
+ wrapper.vm.initiateMergePolling();
- done();
- });
+ expect(wrapper.vm.handleMergePolling).toHaveBeenCalled();
});
});
describe('initiateRemoveSourceBranchPolling', () => {
it('should emit event and call simplePoll', () => {
+ createComponent();
+
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- vm.initiateRemoveSourceBranchPolling();
+ wrapper.vm.initiateRemoveSourceBranchPolling();
expect(eventHub.$emit).toHaveBeenCalledWith('SetBranchRemoveFlag', [true]);
expect(simplePoll).toHaveBeenCalled();
@@ -557,13 +395,15 @@ describe('ReadyToMerge', () => {
});
it('should call start and stop polling when MR merged', (done) => {
+ createComponent();
+
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise(false));
+ jest.spyOn(wrapper.vm.service, 'poll').mockReturnValue(returnPromise(false));
let cpc = false; // continuePollingCalled
let spc = false; // stopPollingCalled
- vm.handleRemoveBranchPolling(
+ wrapper.vm.handleRemoveBranchPolling(
() => {
cpc = true;
},
@@ -572,7 +412,7 @@ describe('ReadyToMerge', () => {
},
);
setImmediate(() => {
- expect(vm.service.poll).toHaveBeenCalled();
+ expect(wrapper.vm.service.poll).toHaveBeenCalled();
const args = eventHub.$emit.mock.calls[0];
@@ -590,12 +430,14 @@ describe('ReadyToMerge', () => {
});
it('should continue polling until MR is merged', (done) => {
- jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise(true));
+ createComponent();
+
+ jest.spyOn(wrapper.vm.service, 'poll').mockReturnValue(returnPromise(true));
let cpc = false; // continuePollingCalled
let spc = false; // stopPollingCalled
- vm.handleRemoveBranchPolling(
+ wrapper.vm.handleRemoveBranchPolling(
() => {
cpc = true;
},
@@ -616,49 +458,26 @@ describe('ReadyToMerge', () => {
describe('Remove source branch checkbox', () => {
describe('when user can merge but cannot delete branch', () => {
it('should be disabled in the rendered output', () => {
- const checkboxElement = vm.$el.querySelector('#remove-source-branch-input');
+ createComponent();
- expect(checkboxElement).toBeNull();
+ expect(wrapper.find('#remove-source-branch-input').exists()).toBe(false);
});
});
describe('when user can merge and can delete branch', () => {
beforeEach(() => {
- vm = createComponent({
+ createComponent({
mr: { canRemoveSourceBranch: true },
});
});
it('isRemoveSourceBranchButtonDisabled should be false', () => {
- expect(vm.isRemoveSourceBranchButtonDisabled).toBe(false);
- });
-
- it('removed source branch should be enabled in rendered output', () => {
- const checkboxElement = vm.$el.querySelector('#remove-source-branch-input');
-
- expect(checkboxElement).not.toBeNull();
+ expect(wrapper.find('#remove-source-branch-input').props('disabled')).toBe(undefined);
});
});
});
describe('render children components', () => {
- let wrapper;
- const localVue = createLocalVue();
-
- const createLocalComponent = (customConfig = {}) => {
- wrapper = shallowMount(localVue.extend(ReadyToMerge), {
- localVue,
- propsData: {
- mr: createTestMr(customConfig),
- service: createTestService(),
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
const findCheckboxElement = () => wrapper.find(SquashBeforeMerge);
const findCommitsHeaderElement = () => wrapper.find(CommitsHeader);
const findCommitEditElements = () => wrapper.findAll(CommitEdit);
@@ -667,7 +486,7 @@ describe('ReadyToMerge', () => {
describe('squash checkbox', () => {
it('should be rendered when squash before merge is enabled and there is more than 1 commit', () => {
- createLocalComponent({
+ createComponent({
mr: { commitsCount: 2, enableSquashBeforeMerge: true },
});
@@ -675,13 +494,13 @@ describe('ReadyToMerge', () => {
});
it('should not be rendered when squash before merge is disabled', () => {
- createLocalComponent({ mr: { commitsCount: 2, enableSquashBeforeMerge: false } });
+ createComponent({ mr: { commitsCount: 2, enableSquashBeforeMerge: false } });
expect(findCheckboxElement().exists()).toBeFalsy();
});
it('should not be rendered when there is only 1 commit', () => {
- createLocalComponent({ mr: { commitsCount: 1, enableSquashBeforeMerge: true } });
+ createComponent({ mr: { commitsCount: 1, enableSquashBeforeMerge: true } });
expect(findCheckboxElement().exists()).toBeFalsy();
});
@@ -695,7 +514,7 @@ describe('ReadyToMerge', () => {
`(
'is $state when squashIsReadonly returns $expectation ',
({ squashState, prop, expectation }) => {
- createLocalComponent({
+ createComponent({
mr: { commitsCount: 2, enableSquashBeforeMerge: true, [squashState]: expectation },
});
@@ -704,7 +523,7 @@ describe('ReadyToMerge', () => {
);
it('is not rendered for "Do not allow" option', () => {
- createLocalComponent({
+ createComponent({
mr: {
commitsCount: 2,
enableSquashBeforeMerge: true,
@@ -720,14 +539,14 @@ describe('ReadyToMerge', () => {
describe('commits count collapsible header', () => {
it('should be rendered when fast-forward is disabled', () => {
- createLocalComponent();
+ createComponent();
expect(findCommitsHeaderElement().exists()).toBeTruthy();
});
describe('when fast-forward is enabled', () => {
it('should be rendered if squash and squash before are enabled and there is more than 1 commit', () => {
- createLocalComponent({
+ createComponent({
mr: {
ffOnlyEnabled: true,
enableSquashBeforeMerge: true,
@@ -740,7 +559,7 @@ describe('ReadyToMerge', () => {
});
it('should not be rendered if squash before merge is disabled', () => {
- createLocalComponent({
+ createComponent({
mr: {
ffOnlyEnabled: true,
enableSquashBeforeMerge: false,
@@ -753,7 +572,7 @@ describe('ReadyToMerge', () => {
});
it('should not be rendered if squash is disabled', () => {
- createLocalComponent({
+ createComponent({
mr: {
ffOnlyEnabled: true,
squash: false,
@@ -766,7 +585,7 @@ describe('ReadyToMerge', () => {
});
it('should not be rendered if commits count is 1', () => {
- createLocalComponent({
+ createComponent({
mr: {
ffOnlyEnabled: true,
squash: true,
@@ -783,7 +602,7 @@ describe('ReadyToMerge', () => {
describe('commits edit components', () => {
describe('when fast-forward merge is enabled', () => {
it('should not be rendered if squash is disabled', () => {
- createLocalComponent({
+ createComponent({
mr: {
ffOnlyEnabled: true,
squash: false,
@@ -796,7 +615,7 @@ describe('ReadyToMerge', () => {
});
it('should not be rendered if squash before merge is disabled', () => {
- createLocalComponent({
+ createComponent({
mr: {
ffOnlyEnabled: true,
squash: true,
@@ -809,7 +628,7 @@ describe('ReadyToMerge', () => {
});
it('should not be rendered if there is only one commit', () => {
- createLocalComponent({
+ createComponent({
mr: {
ffOnlyEnabled: true,
squash: true,
@@ -822,7 +641,7 @@ describe('ReadyToMerge', () => {
});
it('should have one edit component if squash is enabled and there is more than 1 commit', () => {
- createLocalComponent({
+ createComponent({
mr: {
ffOnlyEnabled: true,
squashIsSelected: true,
@@ -837,13 +656,13 @@ describe('ReadyToMerge', () => {
});
it('should have one edit component when squash is disabled', () => {
- createLocalComponent();
+ createComponent();
expect(findCommitEditElements().length).toBe(1);
});
it('should have two edit components when squash is enabled and there is more than 1 commit', () => {
- createLocalComponent({
+ createComponent({
mr: {
commitsCount: 2,
squashIsSelected: true,
@@ -855,7 +674,7 @@ describe('ReadyToMerge', () => {
});
it('should have one edit components when squash is enabled and there is 1 commit only', () => {
- createLocalComponent({
+ createComponent({
mr: {
commitsCount: 1,
squash: true,
@@ -867,13 +686,13 @@ describe('ReadyToMerge', () => {
});
it('should have correct edit merge commit label', () => {
- createLocalComponent();
+ createComponent();
expect(findFirstCommitEditLabel()).toBe('Merge commit message');
});
it('should have correct edit squash commit label', () => {
- createLocalComponent({
+ createComponent({
mr: {
commitsCount: 2,
squashIsSelected: true,
@@ -887,13 +706,13 @@ describe('ReadyToMerge', () => {
describe('commits dropdown', () => {
it('should not be rendered if squash is disabled', () => {
- createLocalComponent();
+ createComponent();
expect(findCommitDropdownElement().exists()).toBeFalsy();
});
it('should be rendered if squash is enabled and there is more than 1 commit', () => {
- createLocalComponent({
+ createComponent({
mr: { enableSquashBeforeMerge: true, squashIsSelected: true, commitsCount: 2 },
});
@@ -902,83 +721,38 @@ describe('ReadyToMerge', () => {
});
});
- describe('Merge controls', () => {
- describe('when allowed to merge', () => {
- beforeEach(() => {
- vm = createComponent({
- mr: { isMergeAllowed: true, canRemoveSourceBranch: true },
- });
- });
-
- it('shows remove source branch checkbox', () => {
- expect(vm.$el.querySelector('.js-remove-source-branch-checkbox')).not.toBeNull();
- });
-
- it('shows modify commit message button', () => {
- expect(vm.$el.querySelector('.js-modify-commit-message-button')).toBeDefined();
- });
-
- it('does not show message about needing to resolve items', () => {
- expect(vm.$el.querySelector('.js-resolve-mr-widget-items-message')).toBeNull();
- });
- });
-
- describe('when not allowed to merge', () => {
- beforeEach(() => {
- vm = createComponent({
- mr: { isMergeAllowed: false },
- });
- });
-
- it('does not show remove source branch checkbox', () => {
- expect(vm.$el.querySelector('.js-remove-source-branch-checkbox')).toBeNull();
- });
-
- it('shows message to resolve all items before being allowed to merge', () => {
- expect(vm.$el.querySelector('.js-resolve-mr-widget-items-message')).toBeDefined();
- });
- });
- });
-
describe('Merge request project settings', () => {
describe('when the merge commit merge method is enabled', () => {
beforeEach(() => {
- vm = createComponent({
+ createComponent({
mr: { ffOnlyEnabled: false },
});
});
it('should not show fast forward message', () => {
- expect(vm.$el.querySelector('.mr-fast-forward-message')).toBeNull();
- });
-
- it('should show "Modify commit message" button', () => {
- expect(vm.$el.querySelector('.js-modify-commit-message-button')).toBeDefined();
+ expect(wrapper.find('.mr-fast-forward-message').exists()).toBe(false);
});
});
describe('when the fast-forward merge method is enabled', () => {
beforeEach(() => {
- vm = createComponent({
+ createComponent({
mr: { ffOnlyEnabled: true },
});
});
it('should show fast forward message', () => {
- expect(vm.$el.querySelector('.mr-fast-forward-message')).toBeDefined();
- });
-
- it('should not show "Modify commit message" button', () => {
- expect(vm.$el.querySelector('.js-modify-commit-message-button')).toBeNull();
+ expect(wrapper.find('.mr-fast-forward-message').exists()).toBe(true);
});
});
});
describe('with a mismatched SHA', () => {
- const findMismatchShaBlock = () => vm.$el.querySelector('.js-sha-mismatch');
+ const findMismatchShaBlock = () => wrapper.find('.js-sha-mismatch');
+ const findMismatchShaTextBlock = () => findMismatchShaBlock().find(GlSprintf);
beforeEach(() => {
- vm = createComponent({
+ createComponent({
mr: {
isSHAMismatch: true,
mergeRequestDiffsPath: '/merge_requests/1/diffs',
@@ -987,17 +761,11 @@ describe('ReadyToMerge', () => {
});
it('displays a warning message', () => {
- expect(findMismatchShaBlock()).toExist();
+ expect(findMismatchShaBlock().exists()).toBe(true);
});
it('warns the user to refresh to review', () => {
- expect(findMismatchShaBlock().textContent.trim()).toBe(
- 'New changes were added. Reload the page to review them',
- );
- });
-
- it('displays link to the diffs tab', () => {
- expect(findMismatchShaBlock().querySelector('a').href).toContain(vm.mr.mergeRequestDiffsPath);
+ expect(findMismatchShaTextBlock().element.outerHTML).toMatchSnapshot();
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
index 6c0d69ea109..c6bfca4516f 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
@@ -42,9 +42,7 @@ describe('UnresolvedDiscussions', () => {
});
it('should have correct elements', () => {
- expect(wrapper.element.innerText).toContain(
- `Before this can be merged, one or more threads must be resolved.`,
- );
+ expect(wrapper.element.innerText).toContain(`Merge blocked: all threads must be resolved.`);
expect(wrapper.element.innerText).toContain('Jump to first unresolved thread');
expect(wrapper.element.innerText).toContain('Resolve all threads in new issue');
@@ -56,9 +54,7 @@ describe('UnresolvedDiscussions', () => {
describe('without threads path', () => {
it('should not show create issue link if user cannot create issue', () => {
- expect(wrapper.element.innerText).toContain(
- `Before this can be merged, one or more threads must be resolved.`,
- );
+ expect(wrapper.element.innerText).toContain(`Merge blocked: all threads must be resolved.`);
expect(wrapper.element.innerText).toContain('Jump to first unresolved thread');
expect(wrapper.element.innerText).not.toContain('Resolve all threads in new issue');
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js b/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js
index ff29022b75d..2083dc88681 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js
@@ -45,15 +45,15 @@ const deploymentMockData = {
changes: [
{
path: 'index.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/index.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/index.html',
},
{
path: 'imgs/gallery.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
},
{
path: 'about/',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/about/',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/about/',
},
],
};
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
index a5d91468ef2..eb6e3711e2e 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
@@ -1,4 +1,5 @@
-import { mount } from '@vue/test-utils';
+import { GlDropdown, GlLink } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import DeploymentViewButton from '~/vue_merge_request_widget/components/deployment/deployment_view_button.vue';
import ReviewAppLink from '~/vue_merge_request_widget/components/review_app_link.vue';
import { deploymentMockData } from './deployment_mock_data';
@@ -11,14 +12,14 @@ const appButtonText = {
describe('Deployment View App button', () => {
let wrapper;
- const factory = (options = {}) => {
- wrapper = mount(DeploymentViewButton, {
+ const createComponent = (options = {}) => {
+ wrapper = mountExtended(DeploymentViewButton, {
...options,
});
};
beforeEach(() => {
- factory({
+ createComponent({
propsData: {
deployment: deploymentMockData,
appButtonText,
@@ -30,15 +31,21 @@ describe('Deployment View App button', () => {
wrapper.destroy();
});
+ const findReviewAppLink = () => wrapper.findComponent(ReviewAppLink);
+ const findMrWigdetDeploymentDropdown = () => wrapper.findComponent(GlDropdown);
+ const findMrWigdetDeploymentDropdownIcon = () =>
+ wrapper.findByTestId('mr-wigdet-deployment-dropdown-icon');
+ const findDeployUrlMenuItems = () => wrapper.findAllComponents(GlLink);
+
describe('text', () => {
it('renders text as passed', () => {
- expect(wrapper.find(ReviewAppLink).text()).toContain(appButtonText.text);
+ expect(findReviewAppLink().props().display.text).toBe(appButtonText.text);
});
});
describe('without changes', () => {
beforeEach(() => {
- factory({
+ createComponent({
propsData: {
deployment: { ...deploymentMockData, changes: null },
appButtonText,
@@ -47,13 +54,13 @@ describe('Deployment View App button', () => {
});
it('renders the link to the review app without dropdown', () => {
- expect(wrapper.find('.js-mr-wigdet-deployment-dropdown').exists()).toBe(false);
+ expect(findMrWigdetDeploymentDropdown().exists()).toBe(false);
});
});
describe('with a single change', () => {
beforeEach(() => {
- factory({
+ createComponent({
propsData: {
deployment: { ...deploymentMockData, changes: [deploymentMockData.changes[0]] },
appButtonText,
@@ -62,21 +69,20 @@ describe('Deployment View App button', () => {
});
it('renders the link to the review app without dropdown', () => {
- expect(wrapper.find('.js-mr-wigdet-deployment-dropdown').exists()).toBe(false);
+ expect(findMrWigdetDeploymentDropdown().exists()).toBe(false);
+ expect(findMrWigdetDeploymentDropdownIcon().exists()).toBe(false);
});
it('renders the link to the review app linked to to the first change', () => {
const expectedUrl = deploymentMockData.changes[0].external_url;
- const deployUrl = wrapper.find('.js-deploy-url');
- expect(deployUrl.attributes().href).not.toBeNull();
- expect(deployUrl.attributes().href).toEqual(expectedUrl);
+ expect(findReviewAppLink().attributes('href')).toBe(expectedUrl);
});
});
describe('with multiple changes', () => {
beforeEach(() => {
- factory({
+ createComponent({
propsData: {
deployment: deploymentMockData,
appButtonText,
@@ -85,18 +91,18 @@ describe('Deployment View App button', () => {
});
it('renders the link to the review app with dropdown', () => {
- expect(wrapper.find('.js-mr-wigdet-deployment-dropdown').exists()).toBe(true);
+ expect(findMrWigdetDeploymentDropdown().exists()).toBe(true);
+ expect(findMrWigdetDeploymentDropdownIcon().exists()).toBe(true);
});
it('renders all the links to the review apps', () => {
- const allUrls = wrapper.findAll('.js-deploy-url-menu-item').wrappers;
+ const allUrls = findDeployUrlMenuItems().wrappers;
const expectedUrls = deploymentMockData.changes.map((change) => change.external_url);
expectedUrls.forEach((expectedUrl, idx) => {
const deployUrl = allUrls[idx];
- expect(deployUrl.attributes().href).not.toBeNull();
- expect(deployUrl.attributes().href).toEqual(expectedUrl);
+ expect(deployUrl.attributes('href')).toBe(expectedUrl);
});
});
});
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js
index aa2345abccf..8e36a9225d6 100644
--- a/spec/frontend/vue_mr_widget/mock_data.js
+++ b/spec/frontend/vue_mr_widget/mock_data.js
@@ -48,7 +48,7 @@ export default {
source_branch_link: 'daaaa',
source_project_id: 19,
source_project_full_path: '/group1/project1',
- target_branch: 'master',
+ target_branch: 'main',
target_project_id: 19,
target_project_full_path: '/group2/project2',
merge_request_add_ci_config_path: '/group2/project2/new/pipeline',
@@ -83,7 +83,7 @@ export default {
diff_head_sha: '104096c51715e12e7ae41f9333e9fa35b73f385d',
diff_head_commit_short_id: '104096c5',
default_merge_commit_message:
- "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
+ "Merge branch 'daaaa' into 'main'\n\nUpdate README.md\n\nSee merge request !22",
pipeline: {
id: 172,
user: {
@@ -173,8 +173,8 @@ export default {
title: 'Update README.md',
source_branch: 'feature-1',
source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1',
- target_branch: 'master',
- target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
+ target_branch: 'main',
+ target_branch_path: '/root/detached-merge-request-pipelines/branches/main',
},
commit: {
id: '104096c51715e12e7ae41f9333e9fa35b73f385d',
@@ -243,7 +243,7 @@ export default {
head_path: 'blob_path',
},
codequality_help_path: 'code_quality.html',
- target_branch_path: '/root/acets-app/branches/master',
+ target_branch_path: '/root/acets-app/branches/main',
source_branch_path: '/root/acets-app/branches/daaaa',
conflict_resolution_ui_path: '/root/acets-app/-/merge_requests/22/conflicts',
remove_wip_path: '/root/acets-app/-/merge_requests/22/remove_wip',
@@ -264,7 +264,7 @@ export default {
ci_environments_status_url: '/root/acets-app/-/merge_requests/22/ci_environments_status',
project_archived: false,
default_merge_commit_message_with_description:
- "Merge branch 'daaaa' into 'master'\n\nUpdate README.md\n\nSee merge request !22",
+ "Merge branch 'daaaa' into 'main'\n\nUpdate README.md\n\nSee merge request !22",
default_squash_commit_message: 'Test squash commit message',
diverged_commits_count: 0,
only_allow_merge_if_pipeline_succeeds: false,
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index c4962b608e1..446cd2a1e2f 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { securityReportDownloadPathsQueryResponse } from 'jest/vue_shared/security_reports/mock_data';
+import { securityReportMergeRequestDownloadPathsQueryResponse } from 'jest/vue_shared/security_reports/mock_data';
import axios from '~/lib/utils/axios_utils';
import { setFaviconOverlay } from '~/lib/utils/favicon';
import notify from '~/lib/utils/notify';
@@ -12,7 +12,7 @@ import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/consta
import eventHub from '~/vue_merge_request_widget/event_hub';
import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
-import securityReportDownloadPathsQuery from '~/vue_shared/security_reports/queries/security_report_download_paths.query.graphql';
+import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/queries/security_report_merge_request_download_paths.query.graphql';
import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
import mockData from './mock_data';
@@ -559,15 +559,15 @@ describe('MrWidgetOptions', () => {
const changes = [
{
path: 'index.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/index.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/index.html',
},
{
path: 'imgs/gallery.html',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
},
{
path: 'about/',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/about/',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/about/',
},
];
const deploymentMockData = {
@@ -688,22 +688,22 @@ describe('MrWidgetOptions', () => {
scheduled_actions: [],
},
ref: {
- name: 'master',
- path: '/root/ci-web-terminal/commits/master',
+ name: 'main',
+ path: '/root/ci-web-terminal/commits/main',
tag: false,
branch: true,
},
commit: {
id: 'aa1939133d373c94879becb79d91828a892ee319',
short_id: 'aa193913',
- title: "Merge branch 'master-test' into 'master'",
+ title: "Merge branch 'main-test' into 'main'",
created_at: '2018-10-22T11:41:33.000Z',
parent_ids: [
'4622f4dd792468993003caf2e3be978798cbe096',
'76598df914cdfe87132d0c3c40f80db9fa9396a4',
],
message:
- "Merge branch 'master-test' into 'master'\n\nUpdate .gitlab-ci.yml\n\nSee merge request root/ci-web-terminal!1",
+ "Merge branch 'main-test' into 'main'\n\nUpdate .gitlab-ci.yml\n\nSee merge request root/ci-web-terminal!1",
author_name: 'Administrator',
author_email: 'admin@example.com',
authored_date: '2018-10-22T11:41:33.000Z',
@@ -751,17 +751,16 @@ describe('MrWidgetOptions', () => {
changes: [
{
path: 'index.html',
- external_url:
- 'http://root-master-patch-91341.volatile-watch.surge.sh/index.html',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/index.html',
},
{
path: 'imgs/gallery.html',
external_url:
- 'http://root-master-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
+ 'http://root-main-patch-91341.volatile-watch.surge.sh/imgs/gallery.html',
},
{
path: 'about/',
- external_url: 'http://root-master-patch-91341.volatile-watch.surge.sh/about/',
+ external_url: 'http://root-main-patch-91341.volatile-watch.surge.sh/about/',
},
],
status: 'success',
@@ -831,8 +830,8 @@ describe('MrWidgetOptions', () => {
return createComponent(mrData, {
apolloProvider: createMockApollo([
[
- securityReportDownloadPathsQuery,
- async () => ({ data: securityReportDownloadPathsQueryResponse }),
+ securityReportMergeRequestDownloadPathsQuery,
+ async () => ({ data: securityReportMergeRequestDownloadPathsQueryResponse }),
],
]),
});
diff --git a/spec/frontend/vue_shared/alert_details/sidebar/alert_managment_sidebar_assignees_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_assignees_spec.js
index 28646994ed1..db9b0930c06 100644
--- a/spec/frontend/vue_shared/alert_details/sidebar/alert_managment_sidebar_assignees_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_assignees_spec.js
@@ -1,7 +1,7 @@
import { GlDropdownItem } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SidebarAssignee from '~/vue_shared/alert_details/components/sidebar/sidebar_assignee.vue';
import SidebarAssignees from '~/vue_shared/alert_details/components/sidebar/sidebar_assignees.vue';
import AlertSetAssignees from '~/vue_shared/alert_details/graphql/mutations/alert_set_assignees.mutation.graphql';
@@ -13,6 +13,29 @@ describe('Alert Details Sidebar Assignees', () => {
let wrapper;
let mock;
+ const mockPath = '/-/autocomplete/users.json';
+ const mockUsers = [
+ {
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 1,
+ name: 'User 1',
+ username: 'root',
+ },
+ {
+ avatar_url:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 2,
+ name: 'User 2',
+ username: 'not-root',
+ },
+ ];
+
+ const findAssigned = () => wrapper.findByTestId('assigned-users');
+ const findDropdown = () => wrapper.findComponent(GlDropdownItem);
+ const findSidebarIcon = () => wrapper.findByTestId('assignees-icon');
+ const findUnassigned = () => wrapper.findByTestId('unassigned-users');
+
function mountComponent({
data,
users = [],
@@ -21,7 +44,7 @@ describe('Alert Details Sidebar Assignees', () => {
loading = false,
stubs = {},
} = {}) {
- wrapper = shallowMount(SidebarAssignees, {
+ wrapper = shallowMountExtended(SidebarAssignees, {
data() {
return {
users,
@@ -56,10 +79,7 @@ describe('Alert Details Sidebar Assignees', () => {
mock.restore();
});
- const findAssigned = () => wrapper.find('[data-testid="assigned-users"]');
- const findUnassigned = () => wrapper.find('[data-testid="unassigned-users"]');
-
- describe('updating the alert status', () => {
+ describe('sidebar expanded', () => {
const mockUpdatedMutationResult = {
data: {
alertSetAssignees: {
@@ -73,30 +93,13 @@ describe('Alert Details Sidebar Assignees', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- const path = '/-/autocomplete/users.json';
- const users = [
- {
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- id: 1,
- name: 'User 1',
- username: 'root',
- },
- {
- avatar_url:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- id: 2,
- name: 'User 2',
- username: 'not-root',
- },
- ];
- mock.onGet(path).replyOnce(200, users);
+ mock.onGet(mockPath).replyOnce(200, mockUsers);
mountComponent({
data: { alert: mockAlert },
sidebarCollapsed: false,
loading: false,
- users,
+ users: mockUsers,
stubs: {
SidebarAssignee,
},
@@ -106,7 +109,11 @@ describe('Alert Details Sidebar Assignees', () => {
it('renders an unassigned option', async () => {
wrapper.setData({ isDropdownSearching: false });
await wrapper.vm.$nextTick();
- expect(wrapper.find(GlDropdownItem).text()).toBe('Unassigned');
+ expect(findDropdown().text()).toBe('Unassigned');
+ });
+
+ it('does not display the collapsed sidebar icon', () => {
+ expect(findSidebarIcon().exists()).toBe(false);
});
it('calls `$apollo.mutate` with `AlertSetAssignees` mutation and variables containing `iid`, `assigneeUsernames`, & `projectPath`', async () => {
@@ -170,4 +177,28 @@ describe('Alert Details Sidebar Assignees', () => {
expect(findAssigned().find('.dropdown-menu-user-username').text()).toBe('@root');
});
});
+
+ describe('sidebar collapsed', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+
+ mock.onGet(mockPath).replyOnce(200, mockUsers);
+
+ mountComponent({
+ data: { alert: mockAlert },
+ loading: false,
+ users: mockUsers,
+ stubs: {
+ SidebarAssignee,
+ },
+ });
+ });
+ it('does not display the status dropdown', () => {
+ expect(findDropdown().exists()).toBe(false);
+ });
+
+ it('does display the collapsed sidebar icon', () => {
+ expect(findSidebarIcon().exists()).toBe(true);
+ });
+ });
});
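The rewrite above swaps shallowMount for the extended mount helper so the spec can query data-testid hooks directly; a minimal sketch of the equivalence, reusing the finders already defined in this file:

    import { shallowMountExtended } from 'helpers/vue_test_utils_helper';

    // wrappers created with shallowMountExtended expose findByTestId/findAllByTestId
    const wrapper = shallowMountExtended(SidebarAssignees, { /* same options as mountComponent */ });
    wrapper.findByTestId('assigned-users');
    // equivalent to the removed form: wrapper.find('[data-testid="assigned-users"]')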
diff --git a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js
index 0014957517f..d5be5b623b8 100644
--- a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js
@@ -1,5 +1,5 @@
import { GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import updateAlertStatusMutation from '~/graphql_shared/mutations/alert_status_update.mutation.graphql';
import AlertStatus from '~/vue_shared/alert_details/components/alert_status.vue';
import AlertSidebarStatus from '~/vue_shared/alert_details/components/sidebar/sidebar_status.vue';
@@ -10,12 +10,13 @@ const mockAlert = mockAlerts[0];
describe('Alert Details Sidebar Status', () => {
let wrapper;
- const findStatusDropdown = () => wrapper.find(GlDropdown);
- const findStatusDropdownItem = () => wrapper.find(GlDropdownItem);
- const findStatusLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const findStatusDropdownHeader = () => wrapper.find('[data-testid="dropdown-header"]');
+ const findStatusDropdown = () => wrapper.findComponent(GlDropdown);
+ const findStatusDropdownItem = () => wrapper.findComponent(GlDropdownItem);
+ const findStatusLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findStatusDropdownHeader = () => wrapper.findByTestId('dropdown-header');
const findAlertStatus = () => wrapper.findComponent(AlertStatus);
- const findStatus = () => wrapper.find('[data-testid="status"]');
+ const findStatus = () => wrapper.findByTestId('status');
+ const findSidebarIcon = () => wrapper.findByTestId('status-icon');
function mountComponent({
data,
@@ -24,7 +25,7 @@ describe('Alert Details Sidebar Status', () => {
stubs = {},
provide = {},
} = {}) {
- wrapper = mount(AlertSidebarStatus, {
+ wrapper = mountExtended(AlertSidebarStatus, {
propsData: {
alert: { ...mockAlert },
...data,
@@ -52,7 +53,7 @@ describe('Alert Details Sidebar Status', () => {
}
});
- describe('Alert Sidebar Dropdown Status', () => {
+ describe('sidebar expanded', () => {
beforeEach(() => {
mountComponent({
data: { alert: mockAlert },
@@ -69,6 +70,10 @@ describe('Alert Details Sidebar Status', () => {
expect(findStatusDropdownHeader().exists()).toBe(true);
});
+ it('does not display the collapsed sidebar icon', () => {
+ expect(findSidebarIcon().exists()).toBe(false);
+ });
+
describe('updating the alert status', () => {
const mockUpdatedMutationResult = {
data: {
@@ -109,22 +114,47 @@ describe('Alert Details Sidebar Status', () => {
expect(findStatusLoadingIcon().exists()).toBe(false);
expect(findStatus().text()).toBe('Triggered');
});
+
+ it('renders default translated statuses', () => {
+ mountComponent({ sidebarCollapsed: false });
+ expect(findAlertStatus().props('statuses')).toBe(PAGE_CONFIG.OPERATIONS.STATUSES);
+ expect(findStatus().text()).toBe('Triggered');
+ });
+
+ it('emits "alert-update" when the status has been updated', () => {
+ mountComponent({ sidebarCollapsed: false });
+ expect(wrapper.emitted('alert-update')).toBeUndefined();
+ findAlertStatus().vm.$emit('handle-updating');
+ expect(wrapper.emitted('alert-update')).toEqual([[]]);
+ });
+
+ it('renders translated statuses', () => {
+ const status = 'TEST';
+ const statuses = { [status]: 'Test' };
+ mountComponent({
+ data: { alert: { ...mockAlert, status } },
+ provide: { statuses },
+ sidebarCollapsed: false,
+ });
+ expect(findAlertStatus().props('statuses')).toBe(statuses);
+ expect(findStatus().text()).toBe(statuses.TEST);
+ });
});
});
- describe('Statuses', () => {
- it('renders default translated statuses', () => {
- mountComponent({});
- expect(findAlertStatus().props('statuses')).toBe(PAGE_CONFIG.OPERATIONS.STATUSES);
- expect(findStatus().text()).toBe('Triggered');
+ describe('sidebar collapsed', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: { alert: mockAlert },
+ loading: false,
+ });
+ });
+ it('does not display the status dropdown', () => {
+ expect(findStatusDropdown().exists()).toBe(false);
});
- it('renders translated statuses', () => {
- const status = 'TEST';
- const statuses = { [status]: 'Test' };
- mountComponent({ data: { alert: { ...mockAlert, status } }, provide: { statuses } });
- expect(findAlertStatus().props('statuses')).toBe(statuses);
- expect(findStatus().text()).toBe(statuses.TEST);
+ it('does display the collapsed sidebar icon', () => {
+ expect(findSidebarIcon().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/components/alerts_deprecation_warning_spec.js b/spec/frontend/vue_shared/components/alerts_deprecation_warning_spec.js
new file mode 100644
index 00000000000..b73f4d6a396
--- /dev/null
+++ b/spec/frontend/vue_shared/components/alerts_deprecation_warning_spec.js
@@ -0,0 +1,48 @@
+import { GlAlert, GlLink } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import AlertDeprecationWarning from '~/vue_shared/components/alerts_deprecation_warning.vue';
+
+describe('AlertDeprecationWarning', () => {
+ let wrapper;
+
+ function mountComponent(hasManagedPrometheus = false) {
+ wrapper = mount(AlertDeprecationWarning, {
+ provide: {
+ hasManagedPrometheus,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findLink = () => wrapper.findComponent(GlLink);
+
+ describe('Alert details', () => {
+ describe('with no managed Prometheus', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('renders nothing', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('with managed Prometheus', () => {
+ beforeEach(() => {
+ mountComponent(true);
+ });
+
+ it('renders a deprecation notice', () => {
+ expect(findAlert().text()).toContain('GitLab-managed Prometheus is deprecated');
+ expect(findLink().attributes('href')).toContain(
+ 'operations/metrics/alerts.html#managed-prometheus-instances',
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/commit_spec.js b/spec/frontend/vue_shared/components/commit_spec.js
index 66ceebed489..6a31742141b 100644
--- a/spec/frontend/vue_shared/components/commit_spec.js
+++ b/spec/frontend/vue_shared/components/commit_spec.js
@@ -32,8 +32,8 @@ describe('Commit component', () => {
createComponent({
tag: false,
commitRef: {
- name: 'master',
- ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ name: 'main',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/main',
},
commitUrl:
'https://gitlab.com/gitlab-org/gitlab-foss/commit/b7836eddf62d663c665769e1b0960197fd215067',
@@ -55,8 +55,8 @@ describe('Commit component', () => {
props = {
tag: true,
commitRef: {
- name: 'master',
- ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ name: 'main',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/main',
},
commitUrl:
'https://gitlab.com/gitlab-org/gitlab-foss/commit/b7836eddf62d663c665769e1b0960197fd215067',
@@ -122,8 +122,8 @@ describe('Commit component', () => {
props = {
tag: false,
commitRef: {
- name: 'master',
- ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ name: 'main',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/main',
},
commitUrl:
'https://gitlab.com/gitlab-org/gitlab-foss/commit/b7836eddf62d663c665769e1b0960197fd215067',
@@ -145,8 +145,8 @@ describe('Commit component', () => {
props = {
tag: false,
commitRef: {
- name: 'master',
- ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ name: 'main',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/main',
},
commitUrl:
'https://gitlab.com/gitlab-org/gitlab-foss/commit/b7836eddf62d663c665769e1b0960197fd215067',
@@ -158,7 +158,7 @@ describe('Commit component', () => {
createComponent(props);
const refEl = wrapper.find('.ref-name');
- expect(refEl.text()).toContain('master');
+ expect(refEl.text()).toContain('main');
expect(refEl.attributes('href')).toBe(props.commitRef.ref_url);
@@ -173,8 +173,8 @@ describe('Commit component', () => {
props = {
tag: false,
commitRef: {
- name: 'master',
- ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ name: 'main',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/main',
},
commitUrl:
'https://gitlab.com/gitlab-org/gitlab-foss/commit/b7836eddf62d663c665769e1b0960197fd215067',
@@ -206,8 +206,8 @@ describe('Commit component', () => {
props = {
tag: false,
commitRef: {
- name: 'master',
- ref_url: 'http://localhost/namespace2/gitlabhq/tree/master',
+ name: 'main',
+ ref_url: 'http://localhost/namespace2/gitlabhq/tree/main',
},
commitUrl:
'https://gitlab.com/gitlab-org/gitlab-foss/commit/b7836eddf62d663c665769e1b0960197fd215067',
@@ -232,8 +232,8 @@ describe('Commit component', () => {
it('should render path as href attribute', () => {
props = {
commitRef: {
- name: 'master',
- path: 'http://localhost/namespace2/gitlabhq/tree/master',
+ name: 'main',
+ path: 'http://localhost/namespace2/gitlabhq/tree/main',
},
};
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
index 9e96c154546..b2ed79cd75a 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
@@ -1,3 +1,6 @@
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+
+import AccessorUtilities from '~/lib/utils/accessor';
import {
stripQuotes,
uniqueTokens,
@@ -5,6 +8,8 @@ import {
processFilters,
filterToQueryObject,
urlQueryToFilter,
+ getRecentlyUsedTokenValues,
+ setTokenValueToRecentlyUsed,
} from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
import {
@@ -14,6 +19,12 @@ import {
tokenValuePlain,
} from './mock_data';
+const mockStorageKey = 'recent-tokens';
+
+function setLocalStorageAvailability(isAvailable) {
+ jest.spyOn(AccessorUtilities, 'isLocalStorageAccessSafe').mockReturnValue(isAvailable);
+}
+
describe('Filtered Search Utils', () => {
describe('stripQuotes', () => {
it.each`
@@ -249,3 +260,79 @@ describe('urlQueryToFilter', () => {
expect(res).toEqual(result);
});
});
+
+describe('getRecentlyUsedTokenValues', () => {
+ useLocalStorageSpy();
+
+ beforeEach(() => {
+ localStorage.removeItem(mockStorageKey);
+ });
+
+ it('returns array containing recently used token values from provided recentTokenValuesStorageKey', () => {
+ setLocalStorageAvailability(true);
+
+ const mockExpectedArray = [{ foo: 'bar' }];
+ localStorage.setItem(mockStorageKey, JSON.stringify(mockExpectedArray));
+
+ expect(getRecentlyUsedTokenValues(mockStorageKey)).toEqual(mockExpectedArray);
+ });
+
+ it('returns empty array when provided recentTokenValuesStorageKey does not have anything in localStorage', () => {
+ setLocalStorageAvailability(true);
+
+ expect(getRecentlyUsedTokenValues(mockStorageKey)).toEqual([]);
+ });
+
+ it('returns empty array when access to localStorage is not available', () => {
+ setLocalStorageAvailability(false);
+
+ expect(getRecentlyUsedTokenValues(mockStorageKey)).toEqual([]);
+ });
+});
+
+describe('setTokenValueToRecentlyUsed', () => {
+ const mockTokenValue1 = { foo: 'bar' };
+ const mockTokenValue2 = { bar: 'baz' };
+ useLocalStorageSpy();
+
+ beforeEach(() => {
+ localStorage.removeItem(mockStorageKey);
+ });
+
+ it('adds provided tokenValue to localStorage for recentTokenValuesStorageKey', () => {
+ setLocalStorageAvailability(true);
+
+ setTokenValueToRecentlyUsed(mockStorageKey, mockTokenValue1);
+
+ expect(JSON.parse(localStorage.getItem(mockStorageKey))).toEqual([mockTokenValue1]);
+ });
+
+ it('adds provided tokenValue to localStorage at the top of existing values (i.e. Stack order)', () => {
+ setLocalStorageAvailability(true);
+
+ setTokenValueToRecentlyUsed(mockStorageKey, mockTokenValue1);
+ setTokenValueToRecentlyUsed(mockStorageKey, mockTokenValue2);
+
+ expect(JSON.parse(localStorage.getItem(mockStorageKey))).toEqual([
+ mockTokenValue2,
+ mockTokenValue1,
+ ]);
+ });
+
+ it('ensures that provided tokenValue is not added twice', () => {
+ setLocalStorageAvailability(true);
+
+ setTokenValueToRecentlyUsed(mockStorageKey, mockTokenValue1);
+ setTokenValueToRecentlyUsed(mockStorageKey, mockTokenValue1);
+
+ expect(JSON.parse(localStorage.getItem(mockStorageKey))).toEqual([mockTokenValue1]);
+ });
+
+ it('does not add any value when access to localStorage is not available', () => {
+ setLocalStorageAvailability(false);
+
+ setTokenValueToRecentlyUsed(mockStorageKey, mockTokenValue1);
+
+ expect(JSON.parse(localStorage.getItem(mockStorageKey))).toBeNull();
+ });
+});
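The new tests above pin down how the two storage helpers must behave (guarded by localStorage availability, JSON-serialized, newest-first, de-duplicated); a minimal sketch of an implementation consistent with those assertions, offered only as an illustration since the module itself is not part of this diff:

    import AccessorUtilities from '~/lib/utils/accessor';

    export function getRecentlyUsedTokenValues(storageKey) {
      if (!AccessorUtilities.isLocalStorageAccessSafe()) return [];
      // JSON.parse(null) yields null, so fall back to an empty array
      return JSON.parse(localStorage.getItem(storageKey)) || [];
    }

    export function setTokenValueToRecentlyUsed(storageKey, tokenValue) {
      if (!AccessorUtilities.isLocalStorageAccessSafe()) return;
      // drop any existing copy, then push the new value on top (stack order)
      const existing = getRecentlyUsedTokenValues(storageKey).filter(
        (value) => JSON.stringify(value) !== JSON.stringify(tokenValue),
      );
      localStorage.setItem(storageKey, JSON.stringify([tokenValue, ...existing]));
    }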
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index c24528ba4d2..23e4deab9c1 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -1,12 +1,15 @@
import { GlFilteredSearchToken } from '@gitlab/ui';
import { mockLabels } from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
import Api from '~/api';
+import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import BranchToken from '~/vue_shared/components/filtered_search_bar/tokens/branch_token.vue';
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
import EpicToken from '~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue';
+import IterationToken from '~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
+import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
export const mockAuthor1 = {
id: 1,
@@ -37,7 +40,7 @@ export const mockAuthor3 = {
export const mockAuthors = [mockAuthor1, mockAuthor2, mockAuthor3];
-export const mockBranches = [{ name: 'Master' }, { name: 'v1.x' }, { name: 'my-Branch' }];
+export const mockBranches = [{ name: 'Main' }, { name: 'v1.x' }, { name: 'my-Branch' }];
export const mockRegularMilestone = {
id: 1,
@@ -82,7 +85,7 @@ export const mockBranchToken = {
title: 'Source Branch',
unique: true,
token: BranchToken,
- operators: [{ value: '=', description: 'is', default: 'true' }],
+ operators: OPERATOR_IS_ONLY,
fetchBranches: Api.branches.bind(Api),
};
@@ -93,11 +96,20 @@ export const mockAuthorToken = {
unique: false,
symbol: '@',
token: AuthorToken,
- operators: [{ value: '=', description: 'is', default: 'true' }],
+ operators: OPERATOR_IS_ONLY,
fetchPath: 'gitlab-org/gitlab-test',
fetchAuthors: Api.projectUsers.bind(Api),
};
+export const mockIterationToken = {
+ type: 'iteration',
+ icon: 'iteration',
+ title: 'Iteration',
+ unique: true,
+ token: IterationToken,
+ fetchIterations: () => Promise.resolve(),
+};
+
export const mockLabelToken = {
type: 'label_name',
icon: 'labels',
@@ -105,7 +117,7 @@ export const mockLabelToken = {
unique: false,
symbol: '~',
token: LabelToken,
- operators: [{ value: '=', description: 'is', default: 'true' }],
+ operators: OPERATOR_IS_ONLY,
fetchLabels: () => Promise.resolve(mockLabels),
};
@@ -116,7 +128,7 @@ export const mockMilestoneToken = {
unique: true,
symbol: '%',
token: MilestoneToken,
- operators: [{ value: '=', description: 'is', default: 'true' }],
+ operators: OPERATOR_IS_ONLY,
fetchMilestones: () => Promise.resolve({ data: mockMilestones }),
};
@@ -127,9 +139,9 @@ export const mockEpicToken = {
unique: true,
symbol: '&',
token: EpicToken,
- operators: [{ value: '=', description: 'is', default: 'true' }],
+ operators: OPERATOR_IS_ONLY,
+ idProperty: 'iid',
fetchEpics: () => Promise.resolve({ data: mockEpics }),
- fetchSingleEpic: () => Promise.resolve({ data: mockEpics[0] }),
};
export const mockReactionEmojiToken = {
@@ -138,7 +150,7 @@ export const mockReactionEmojiToken = {
title: 'My-Reaction',
unique: true,
token: EmojiToken,
- operators: [{ value: '=', description: 'is', default: 'true' }],
+ operators: OPERATOR_IS_ONLY,
fetchEmojis: () => Promise.resolve(mockEmojis),
};
@@ -148,13 +160,21 @@ export const mockMembershipToken = {
title: 'Membership',
token: GlFilteredSearchToken,
unique: true,
- operators: [{ value: '=', description: 'is' }],
+ operators: OPERATOR_IS_ONLY,
options: [
{ value: 'exclude', title: 'Direct' },
{ value: 'only', title: 'Inherited' },
],
};
+export const mockWeightToken = {
+ type: 'weight',
+ icon: 'weight',
+ title: 'Weight',
+ unique: true,
+ token: WeightToken,
+};
+
export const mockMembershipTokenOptionsWithoutTitles = {
...mockMembershipToken,
options: [{ value: 'exclude' }, { value: 'only' }],
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
index 765e576914c..3b50927dcc6 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
@@ -11,8 +11,8 @@ import { deprecatedCreateFlash as createFlash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import {
- DEFAULT_LABEL_NONE,
DEFAULT_LABEL_ANY,
+ DEFAULT_NONE_ANY,
} from '~/vue_shared/components/filtered_search_bar/constants';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
@@ -159,7 +159,7 @@ describe('AuthorToken', () => {
});
it('renders provided defaultAuthors as suggestions', async () => {
- const defaultAuthors = [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY];
+ const defaultAuthors = DEFAULT_NONE_ANY;
wrapper = createComponent({
active: true,
config: { ...mockAuthorToken, defaultAuthors },
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
new file mode 100644
index 00000000000..0db47f1f189
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/base_token_spec.js
@@ -0,0 +1,228 @@
+import { GlFilteredSearchToken } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import {
+ mockRegularLabel,
+ mockLabels,
+} from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
+
+import { DEFAULT_LABELS } from '~/vue_shared/components/filtered_search_bar/constants';
+import {
+ getRecentlyUsedTokenValues,
+ setTokenValueToRecentlyUsed,
+} from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
+import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
+
+import { mockLabelToken } from '../mock_data';
+
+jest.mock('~/vue_shared/components/filtered_search_bar/filtered_search_utils');
+
+const mockStorageKey = 'recent-tokens-label_name';
+
+const defaultStubs = {
+ Portal: true,
+ GlFilteredSearchToken: {
+ template: `
+ <div>
+ <slot name="view-token"></slot>
+ <slot name="view"></slot>
+ </div>
+ `,
+ },
+ GlFilteredSearchSuggestionList: {
+ template: '<div></div>',
+ methods: {
+ getValue: () => '=',
+ },
+ },
+};
+
+const defaultSlots = {
+ 'view-token': `
+ <div class="js-view-token">${mockRegularLabel.title}</div>
+ `,
+ view: `
+ <div class="js-view">${mockRegularLabel.title}</div>
+ `,
+};
+
+const mockProps = {
+ tokenConfig: mockLabelToken,
+ tokenValue: { data: '' },
+ tokenActive: false,
+ tokensListLoading: false,
+ tokenValues: [],
+ fnActiveTokenValue: jest.fn(),
+ defaultTokenValues: DEFAULT_LABELS,
+ recentTokenValuesStorageKey: mockStorageKey,
+ fnCurrentTokenValue: jest.fn(),
+};
+
+function createComponent({
+ props = { ...mockProps },
+ stubs = defaultStubs,
+ slots = defaultSlots,
+} = {}) {
+ return mount(BaseToken, {
+ propsData: {
+ ...props,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: jest.fn(),
+ suggestionsListClass: 'custom-class',
+ },
+ stubs,
+ slots,
+ });
+}
+
+describe('BaseToken', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: {
+ ...mockProps,
+ tokenValue: { data: `"${mockRegularLabel.title}"` },
+ tokenValues: mockLabels,
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('data', () => {
+ it('calls `getRecentlyUsedTokenValues` to populate `recentTokenValues` when `recentTokenValuesStorageKey` is defined', () => {
+ expect(getRecentlyUsedTokenValues).toHaveBeenCalledWith(mockStorageKey);
+ });
+ });
+
+ describe('computed', () => {
+ describe('currentTokenValue', () => {
+ it('calls `fnCurrentTokenValue` when it is provided', () => {
+ // We're disabling lint to trigger computed prop execution for this test.
+ // eslint-disable-next-line no-unused-vars
+ const { currentTokenValue } = wrapper.vm;
+
+ expect(wrapper.vm.fnCurrentTokenValue).toHaveBeenCalledWith(`"${mockRegularLabel.title}"`);
+ });
+ });
+
+ describe('activeTokenValue', () => {
+ it('calls `fnActiveTokenValue` when it is provided', async () => {
+ wrapper.setProps({
+ fnCurrentTokenValue: undefined,
+ });
+
+ await wrapper.vm.$nextTick();
+
+ // We're disabling lint to trigger computed prop execution for this test.
+ // eslint-disable-next-line no-unused-vars
+ const { activeTokenValue } = wrapper.vm;
+
+ expect(wrapper.vm.fnActiveTokenValue).toHaveBeenCalledWith(
+ mockLabels,
+ `"${mockRegularLabel.title.toLowerCase()}"`,
+ );
+ });
+ });
+ });
+
+ describe('watch', () => {
+ describe('tokenActive', () => {
+ let wrapperWithTokenActive;
+
+ beforeEach(() => {
+ wrapperWithTokenActive = createComponent({
+ props: {
+ ...mockProps,
+ tokenActive: true,
+ tokenValue: { data: `"${mockRegularLabel.title}"` },
+ },
+ });
+ });
+
+ afterEach(() => {
+ wrapperWithTokenActive.destroy();
+ });
+
+ it('emits `fetch-token-values` event on the component when value of this prop is changed to false and `tokenValues` array is empty', async () => {
+ wrapperWithTokenActive.setProps({
+ tokenActive: false,
+ });
+
+ await wrapperWithTokenActive.vm.$nextTick();
+
+ expect(wrapperWithTokenActive.emitted('fetch-token-values')).toBeTruthy();
+ expect(wrapperWithTokenActive.emitted('fetch-token-values')).toEqual([
+ [`"${mockRegularLabel.title}"`],
+ ]);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('handleTokenValueSelected', () => {
+ it('calls `setTokenValueToRecentlyUsed` when `recentTokenValuesStorageKey` is defined', () => {
+ const mockTokenValue = {
+ id: 1,
+ title: 'Foo',
+ };
+
+ wrapper.vm.handleTokenValueSelected(mockTokenValue);
+
+ expect(setTokenValueToRecentlyUsed).toHaveBeenCalledWith(mockStorageKey, mockTokenValue);
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('renders gl-filtered-search-token component', () => {
+ const wrapperWithNoStubs = createComponent({
+ stubs: {},
+ });
+ const glFilteredSearchToken = wrapperWithNoStubs.find(GlFilteredSearchToken);
+
+ expect(glFilteredSearchToken.exists()).toBe(true);
+ expect(glFilteredSearchToken.props('config')).toBe(mockLabelToken);
+
+ wrapperWithNoStubs.destroy();
+ });
+
+ it('renders `view-token` slot when present', () => {
+ expect(wrapper.find('.js-view-token').exists()).toBe(true);
+ });
+
+ it('renders `view` slot when present', () => {
+ expect(wrapper.find('.js-view').exists()).toBe(true);
+ });
+
+ describe('events', () => {
+ let wrapperWithNoStubs;
+
+ beforeEach(() => {
+ wrapperWithNoStubs = createComponent({
+ stubs: { Portal: true },
+ });
+ });
+
+ afterEach(() => {
+ wrapperWithNoStubs.destroy();
+ });
+
+ it('emits `fetch-token-values` event on component after a delay when component emits `input` event', async () => {
+ jest.useFakeTimers();
+
+ wrapperWithNoStubs.find(GlFilteredSearchToken).vm.$emit('input', { data: 'foo' });
+ await wrapperWithNoStubs.vm.$nextTick();
+
+ jest.runAllTimers();
+
+ expect(wrapperWithNoStubs.emitted('fetch-token-values')).toBeTruthy();
+ expect(wrapperWithNoStubs.emitted('fetch-token-values')[1]).toEqual(['foo']);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
index a20bc4986fc..331c9c2c14d 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
@@ -10,10 +10,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import {
- DEFAULT_LABEL_NONE,
- DEFAULT_LABEL_ANY,
-} from '~/vue_shared/components/filtered_search_bar/constants';
+import { DEFAULT_NONE_ANY } from '~/vue_shared/components/filtered_search_bar/constants';
import BranchToken from '~/vue_shared/components/filtered_search_bar/tokens/branch_token.vue';
import { mockBranches, mockBranchToken } from '../mock_data';
@@ -77,7 +74,7 @@ describe('BranchToken', () => {
describe('currentValue', () => {
it('returns lowercase string for `value.data`', () => {
- expect(wrapper.vm.currentValue).toBe('master');
+ expect(wrapper.vm.currentValue).toBe('main');
});
});
@@ -137,7 +134,7 @@ describe('BranchToken', () => {
});
describe('template', () => {
- const defaultBranches = [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY];
+ const defaultBranches = DEFAULT_NONE_ANY;
async function showSuggestions() {
const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
index 231f2f01428..fb48aea8e4f 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
@@ -13,6 +13,7 @@ import axios from '~/lib/utils/axios_utils';
import {
DEFAULT_LABEL_NONE,
DEFAULT_LABEL_ANY,
+ DEFAULT_NONE_ANY,
} from '~/vue_shared/components/filtered_search_bar/constants';
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
@@ -137,7 +138,7 @@ describe('EmojiToken', () => {
});
describe('template', () => {
- const defaultEmojis = [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY];
+ const defaultEmojis = DEFAULT_NONE_ANY;
beforeEach(async () => {
wrapper = createComponent({
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
index 0c3f9e1363f..addc058f658 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
@@ -68,21 +68,6 @@ describe('EpicToken', () => {
await wrapper.vm.$nextTick();
});
- describe('currentValue', () => {
- it.each`
- data | id
- ${`${mockEpics[0].title}::&${mockEpics[0].iid}`} | ${mockEpics[0].iid}
- ${mockEpics[0].iid} | ${mockEpics[0].iid}
- ${'foobar'} | ${'foobar'}
- `('$data returns $id', async ({ data, id }) => {
- wrapper.setProps({ value: { data } });
-
- await wrapper.vm.$nextTick();
-
- expect(wrapper.vm.currentValue).toBe(id);
- });
- });
-
describe('activeEpic', () => {
it('returns object for currently present `value.data`', async () => {
wrapper.setProps({
@@ -140,20 +125,6 @@ describe('EpicToken', () => {
expect(wrapper.vm.loading).toBe(false);
});
});
-
- describe('fetchSingleEpic', () => {
- it('calls `config.fetchSingleEpic` with provided iid param', async () => {
- jest.spyOn(wrapper.vm.config, 'fetchSingleEpic');
-
- wrapper.vm.fetchSingleEpic(1);
-
- expect(wrapper.vm.config.fetchSingleEpic).toHaveBeenCalledWith(1);
-
- await waitForPromises();
-
- expect(wrapper.vm.epics).toEqual([mockEpics[0]]);
- });
- });
});
describe('template', () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
new file mode 100644
index 00000000000..ca5dc984ae0
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/iteration_token_spec.js
@@ -0,0 +1,78 @@
+import { GlFilteredSearchToken, GlFilteredSearchTokenSegment } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import createFlash from '~/flash';
+import IterationToken from '~/vue_shared/components/filtered_search_bar/tokens/iteration_token.vue';
+import { mockIterationToken } from '../mock_data';
+
+jest.mock('~/flash');
+
+describe('IterationToken', () => {
+ const title = 'gitlab-org: #1';
+ let wrapper;
+
+ const createComponent = ({ config = mockIterationToken, value = { data: '' } } = {}) =>
+ mount(IterationToken, {
+ propsData: {
+ config,
+ value,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ suggestionsListClass: 'custom-class',
+ },
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders iteration value', async () => {
+ wrapper = createComponent({ value: { data: title } });
+
+ await wrapper.vm.$nextTick();
+
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
+
+ expect(tokenSegments).toHaveLength(3); // `Iteration` `=` `gitlab-org: #1`
+ expect(tokenSegments.at(2).text()).toBe(title);
+ });
+
+ it('fetches initial values', () => {
+ const fetchIterationsSpy = jest.fn().mockResolvedValue();
+
+ wrapper = createComponent({
+ config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy },
+ value: { data: title },
+ });
+
+ expect(fetchIterationsSpy).toHaveBeenCalledWith(title);
+ });
+
+ it('fetches iterations on user input', () => {
+ const search = 'hello';
+ const fetchIterationsSpy = jest.fn().mockResolvedValue();
+
+ wrapper = createComponent({
+ config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy },
+ });
+
+ wrapper.findComponent(GlFilteredSearchToken).vm.$emit('input', { data: search });
+
+ expect(fetchIterationsSpy).toHaveBeenCalledWith(search);
+ });
+
+ it('renders error message when request fails', async () => {
+ const fetchIterationsSpy = jest.fn().mockRejectedValue();
+
+ wrapper = createComponent({
+ config: { ...mockIterationToken, fetchIterations: fetchIterationsSpy },
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'There was a problem fetching iterations.',
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index 8528c062426..57514a0c499 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -16,8 +16,7 @@ import axios from '~/lib/utils/axios_utils';
import {
DEFAULT_LABELS,
- DEFAULT_LABEL_NONE,
- DEFAULT_LABEL_ANY,
+ DEFAULT_NONE_ANY,
} from '~/vue_shared/components/filtered_search_bar/constants';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
@@ -176,7 +175,7 @@ describe('LabelToken', () => {
});
describe('template', () => {
- const defaultLabels = [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY];
+ const defaultLabels = DEFAULT_NONE_ANY;
beforeEach(async () => {
wrapper = createComponent({ value: { data: `"${mockRegularLabel.title}"` } });
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
new file mode 100644
index 00000000000..9a72be636cd
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/weight_token_spec.js
@@ -0,0 +1,37 @@
+import { GlFilteredSearchTokenSegment } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import WeightToken from '~/vue_shared/components/filtered_search_bar/tokens/weight_token.vue';
+import { mockWeightToken } from '../mock_data';
+
+jest.mock('~/flash');
+
+describe('WeightToken', () => {
+ const weight = '3';
+ let wrapper;
+
+ const createComponent = ({ config = mockWeightToken, value = { data: '' } } = {}) =>
+ mount(WeightToken, {
+ propsData: {
+ config,
+ value,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ suggestionsListClass: 'custom-class',
+ },
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders weight value', () => {
+ wrapper = createComponent({ value: { data: weight } });
+
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
+
+ expect(tokenSegments).toHaveLength(3); // `Weight` `=` `3`
+ expect(tokenSegments.at(2).text()).toBe(weight);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
index 99bf0d84d0c..8738924f717 100644
--- a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
+++ b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
@@ -132,6 +132,35 @@ describe('RelatedIssuableItem', () => {
it('renders due date component with correct due date', () => {
expect(wrapper.find(IssueDueDate).props('date')).toBe(props.dueDate);
});
+
+ it('does not render red icon for overdue issue that is closed', async () => {
+ mountComponent({
+ props: {
+ ...props,
+ closedAt: '2018-12-01T00:00:00.00Z',
+ },
+ });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(IssueDueDate).props('closed')).toBe(true);
+ });
+
+ it('should not contain the `.text-danger` css class for overdue issue that is closed', async () => {
+ mountComponent({
+ props: {
+ ...props,
+ closedAt: '2018-12-01T00:00:00.00Z',
+ },
+ });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(IssueDueDate).find('.board-card-info-icon').classes('text-danger')).toBe(
+ false,
+ );
+ expect(wrapper.find(IssueDueDate).find('.board-card-info-text').classes('text-danger')).toBe(
+ false,
+ );
+ });
});
describe('token assignees', () => {
diff --git a/spec/frontend/vue_shared/components/keep_alive_slots_spec.js b/spec/frontend/vue_shared/components/keep_alive_slots_spec.js
new file mode 100644
index 00000000000..10c6cbe6d94
--- /dev/null
+++ b/spec/frontend/vue_shared/components/keep_alive_slots_spec.js
@@ -0,0 +1,122 @@
+import { nextTick } from 'vue';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import KeepAliveSlots from '~/vue_shared/components/keep_alive_slots.vue';
+
+const SLOT_1 = {
+ slotKey: 'slot-1',
+ title: 'Hello 1',
+};
+const SLOT_2 = {
+ slotKey: 'slot-2',
+ title: 'Hello 2',
+};
+
+describe('~/vue_shared/components/keep_alive_slots.vue', () => {
+ let wrapper;
+
+ const createSlotContent = ({ slotKey, title }) => `
+ <div data-testid="slot-child" data-slot-id="${slotKey}">
+ <h1>${title}</h1>
+ <input type="text" />
+ </div>
+ `;
+ const createComponent = (props = {}) => {
+ wrapper = mountExtended(KeepAliveSlots, {
+ propsData: props,
+ slots: {
+ [SLOT_1.slotKey]: createSlotContent(SLOT_1),
+ [SLOT_2.slotKey]: createSlotContent(SLOT_2),
+ },
+ });
+ };
+
+ const findRenderedSlots = () =>
+ wrapper.findAllByTestId('slot-child').wrappers.map((x) => ({
+ title: x.find('h1').text(),
+ inputValue: x.find('input').element.value,
+ isVisible: x.isVisible(),
+ }));
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not show anything', () => {
+ expect(findRenderedSlots()).toEqual([]);
+ });
+
+ describe('when slotKey is changed', () => {
+ beforeEach(async () => {
+ wrapper.setProps({ slotKey: SLOT_1.slotKey });
+ await nextTick();
+ });
+
+ it('shows slot', () => {
+ expect(findRenderedSlots()).toEqual([
+ {
+ title: SLOT_1.title,
+ isVisible: true,
+ inputValue: '',
+ },
+ ]);
+ });
+
+ it('hides everything when slotKey cannot be found', async () => {
+ wrapper.setProps({ slotKey: '' });
+ await nextTick();
+
+ expect(findRenderedSlots()).toEqual([
+ {
+ title: SLOT_1.title,
+ isVisible: false,
+ inputValue: '',
+ },
+ ]);
+ });
+
+ describe('when user interacts then slotKey changes again', () => {
+ beforeEach(async () => {
+ wrapper.find('input').setValue('TEST');
+ wrapper.setProps({ slotKey: SLOT_2.slotKey });
+ await nextTick();
+ });
+
+ it('keeps first slot alive but hidden', () => {
+ expect(findRenderedSlots()).toEqual([
+ {
+ title: SLOT_1.title,
+ isVisible: false,
+ inputValue: 'TEST',
+ },
+ {
+ title: SLOT_2.title,
+ isVisible: true,
+ inputValue: '',
+ },
+ ]);
+ });
+ });
+ });
+ });
+
+ describe('initialized with slotKey', () => {
+ beforeEach(() => {
+ createComponent({ slotKey: SLOT_2.slotKey });
+ });
+
+ it('shows slot', () => {
+ expect(findRenderedSlots()).toEqual([
+ {
+ title: SLOT_2.title,
+ isVisible: true,
+ inputValue: '',
+ },
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
index c454166e30b..3b49536799c 100644
--- a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
+++ b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
@@ -6,7 +6,7 @@ exports[`Suggestion Diff component matches snapshot 1`] = `
>
<suggestion-diff-header-stub
batchsuggestionscount="1"
- class="qa-suggestion-diff-header js-suggestion-diff-header"
+ class="js-suggestion-diff-header"
defaultcommitmessage="Apply suggestion"
helppagepath="path_to_docs"
isapplyingbatch="true"
diff --git a/spec/frontend/vue_shared/components/markdown/header_spec.js b/spec/frontend/vue_shared/components/markdown/header_spec.js
index 077c2174571..fec6abc9639 100644
--- a/spec/frontend/vue_shared/components/markdown/header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/header_spec.js
@@ -48,6 +48,7 @@ describe('Markdown field header component', () => {
'Add a bullet list',
'Add a numbered list',
'Add a task list',
+ 'Add a collapsible section',
'Add a table',
'Go full screen',
];
@@ -133,6 +134,14 @@ describe('Markdown field header component', () => {
);
});
+ it('renders collapsible section template', () => {
+ const detailsBlockButton = findToolbarButtonByProp('icon', 'details-block');
+
+ expect(detailsBlockButton.props('tag')).toEqual(
+ '<details><summary>Click to expand</summary>\n{text}\n</details>',
+ );
+ });
+
it('does not render suggestion button if `canSuggest` is set to false', () => {
createWrapper({
canSuggest: false,
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index 74e9cbcbb53..acf97713885 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -1,6 +1,7 @@
import { GlAlert, GlBadge, GlPagination, GlTabs, GlTab } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import Tracking from '~/tracking';
+import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import PageWrapper from '~/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue';
@@ -291,7 +292,7 @@ describe('AlertManagementEmptyState', () => {
unique: true,
symbol: '@',
token: AuthorToken,
- operators: [{ value: '=', description: 'is', default: 'true' }],
+ operators: OPERATOR_IS_ONLY,
fetchPath: '/link',
fetchAuthors: expect.any(Function),
},
@@ -302,7 +303,7 @@ describe('AlertManagementEmptyState', () => {
unique: true,
symbol: '@',
token: AuthorToken,
- operators: [{ value: '=', description: 'is', default: 'true' }],
+ operators: OPERATOR_IS_ONLY,
fetchPath: '/link',
fetchAuthors: expect.any(Function),
},
diff --git a/spec/frontend/vue_shared/components/registry/list_item_spec.js b/spec/frontend/vue_shared/components/registry/list_item_spec.js
index 33c9c808dc3..ca4bf0b0652 100644
--- a/spec/frontend/vue_shared/components/registry/list_item_spec.js
+++ b/spec/frontend/vue_shared/components/registry/list_item_spec.js
@@ -101,16 +101,16 @@ describe('list item', () => {
});
describe('disabled prop', () => {
- it('when true applies disabled-content class', () => {
+ it('when true applies gl-opacity-5 class', () => {
mountComponent({ disabled: true });
- expect(wrapper.classes('disabled-content')).toBe(true);
+ expect(wrapper.classes('gl-opacity-5')).toBe(true);
});
- it('when false does not apply disabled-content class', () => {
+ it('when false does not apply gl-opacity-5 class', () => {
mountComponent({ disabled: false });
- expect(wrapper.classes('disabled-content')).toBe(false);
+ expect(wrapper.classes('gl-opacity-5')).toBe(false);
});
});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
index 4033c943b82..32ef2d27ba7 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
@@ -1,4 +1,5 @@
-import { GlAlert, GlLoadingIcon, GlSkeletonLoader } from '@gitlab/ui';
+import { GlAlert, GlButton, GlLoadingIcon, GlSkeletonLoader } from '@gitlab/ui';
+import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -18,6 +19,24 @@ import {
const localVue = createLocalVue();
localVue.use(VueApollo);
+let resizeCallback;
+const MockResizeObserver = {
+ bind(el, { value }) {
+ resizeCallback = value;
+ },
+ mockResize(size) {
+ bp.getBreakpointSize.mockReturnValue(size);
+ resizeCallback();
+ },
+ unbind() {
+ resizeCallback = null;
+ },
+};
+
+localVue.directive('gl-resize-observer', MockResizeObserver);
+
+jest.mock('@gitlab/ui/dist/utils');
+
describe('RunnerInstructionsModal component', () => {
let wrapper;
let fakeApollo;
@@ -27,7 +46,8 @@ describe('RunnerInstructionsModal component', () => {
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAlert = () => wrapper.findComponent(GlAlert);
- const findPlatformButtons = () => wrapper.findAllByTestId('platform-button');
+ const findPlatformButtonGroup = () => wrapper.findByTestId('platform-buttons');
+ const findPlatformButtons = () => findPlatformButtonGroup().findAllComponents(GlButton);
const findArchitectureDropdownItems = () => wrapper.findAllByTestId('architecture-dropdown-item');
const findBinaryInstructions = () => wrapper.findByTestId('binary-instructions');
const findRegisterCommand = () => wrapper.findByTestId('register-command');
@@ -141,6 +161,22 @@ describe('RunnerInstructionsModal component', () => {
});
});
+ describe('when the modal resizes', () => {
+ it('to an xs viewport', async () => {
+ MockResizeObserver.mockResize('xs');
+ await nextTick();
+
+ expect(findPlatformButtonGroup().attributes('vertical')).toBeTruthy();
+ });
+
+ it('to a non-xs viewport', async () => {
+ MockResizeObserver.mockResize('sm');
+ await nextTick();
+
+ expect(findPlatformButtonGroup().props('vertical')).toBeFalsy();
+ });
+ });
+
describe('when apollo is loading', () => {
it('should show a skeleton loader', async () => {
createComponent();
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js
index 1175d183c6c..88557917cb5 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js
@@ -1,8 +1,8 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_vue/constants';
import DropdownContents from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue';
-
import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
import { mockConfig } from './mock_data';
@@ -50,13 +50,20 @@ describe('DropdownContent', () => {
describe('template', () => {
it('renders component container element with class `labels-select-dropdown-contents` and no styles', () => {
expect(wrapper.attributes('class')).toContain('labels-select-dropdown-contents');
- expect(wrapper.attributes('style')).toBe(undefined);
+ expect(wrapper.attributes('style')).toBeUndefined();
});
- it('renders component container element with styles when `renderOnTop` is true', () => {
- wrapper = createComponent(mockConfig, { renderOnTop: true });
+ describe('when `renderOnTop` is true', () => {
+ it.each`
+ variant | expected
+ ${DropdownVariant.Sidebar} | ${'bottom: 3rem'}
+ ${DropdownVariant.Standalone} | ${'bottom: 2rem'}
+ ${DropdownVariant.Embedded} | ${'bottom: 2rem'}
+ `('renders upward for $variant variant', ({ variant, expected }) => {
+ wrapper = createComponent({ ...mockConfig, variant }, { renderOnTop: true });
- expect(wrapper.attributes('style')).toContain('bottom: 100%');
+ expect(wrapper.attributes('style')).toContain(expected);
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
index 4cf36df2502..3f00eab17b7 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
@@ -3,6 +3,7 @@ import Vuex from 'vuex';
import { isInViewport } from '~/lib/utils/common_utils';
import DropdownValueCollapsed from '~/vue_shared/components/sidebar/labels_select/dropdown_value_collapsed.vue';
+import { DropdownVariant } from '~/vue_shared/components/sidebar/labels_select_vue/constants';
import DropdownButton from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_button.vue';
import DropdownContents from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue';
import DropdownTitle from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue';
@@ -190,40 +191,33 @@ describe('LabelsSelectRoot', () => {
});
describe('sets content direction based on viewport', () => {
- it('does not set direction when `state.variant` is not "embedded"', async () => {
- createComponent();
-
- wrapper.vm.$store.dispatch('toggleDropdownContents');
- wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
- await wrapper.vm.$nextTick;
-
- expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(false);
- });
-
- describe('when `state.variant` is "embedded"', () => {
- beforeEach(() => {
- createComponent({ ...mockConfig, variant: 'embedded' });
- wrapper.vm.$store.dispatch('toggleDropdownContents');
- });
+ describe.each(Object.values(DropdownVariant))(
+ 'when labels variant is "%s"',
+      (variant) => {
+ beforeEach(() => {
+ createComponent({ ...mockConfig, variant });
+ wrapper.vm.$store.dispatch('toggleDropdownContents');
+ });
- it('set direction when out of viewport', () => {
- isInViewport.mockImplementation(() => false);
- wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
+      it('sets direction when out of viewport', () => {
+ isInViewport.mockImplementation(() => false);
+ wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(true);
+ });
});
- });
- it('does not set direction when inside of viewport', () => {
- isInViewport.mockImplementation(() => true);
- wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
+      it('does not set direction when inside the viewport', () => {
+ isInViewport.mockImplementation(() => true);
+ wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(false);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(false);
+ });
});
- });
- });
+ },
+ );
});
});
diff --git a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
index 691e19473c1..28c5acc8110 100644
--- a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
+++ b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
@@ -1,28 +1,36 @@
import { shallowMount } from '@vue/test-utils';
+import timezoneMock from 'timezone-mock';
import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
describe('Time ago with tooltip component', () => {
let vm;
- const buildVm = (propsData = {}, scopedSlots = {}) => {
+ const timestamp = '2017-05-08T14:57:39.781Z';
+ const timeAgoTimestamp = getTimeago().format(timestamp);
+
+ const defaultProps = {
+ time: timestamp,
+ };
+
+ const buildVm = (props = {}, scopedSlots = {}) => {
vm = shallowMount(TimeAgoTooltip, {
- propsData,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
scopedSlots,
});
};
- const timestamp = '2017-05-08T14:57:39.781Z';
- const timeAgoTimestamp = getTimeago().format(timestamp);
afterEach(() => {
vm.destroy();
+ timezoneMock.unregister();
});
it('should render timeago with a bootstrap tooltip', () => {
- buildVm({
- time: timestamp,
- });
+ buildVm();
expect(vm.attributes('title')).toEqual(formatDate(timestamp));
expect(vm.text()).toEqual(timeAgoTimestamp);
@@ -30,7 +38,6 @@ describe('Time ago with tooltip component', () => {
it('should render provided html class', () => {
buildVm({
- time: timestamp,
cssClass: 'foo',
});
@@ -38,14 +45,58 @@ describe('Time ago with tooltip component', () => {
});
it('should render with the datetime attribute', () => {
- buildVm({ time: timestamp });
+ buildVm();
expect(vm.attributes('datetime')).toEqual(timestamp);
});
it('should render provided scope content with the correct timeAgo string', () => {
- buildVm({ time: timestamp }, { default: `<span>The time is {{ props.timeAgo }}</span>` });
+ buildVm(null, { default: `<span>The time is {{ props.timeAgo }}</span>` });
expect(vm.text()).toEqual(`The time is ${timeAgoTimestamp}`);
});
+
+ describe('number based timestamps', () => {
+ // Store a date object before we mock the TZ
+ const date = new Date();
+
+ describe('with default TZ', () => {
+ beforeEach(() => {
+ buildVm({ time: date.getTime() });
+ });
+
+      it('is handled correctly', () => {
+ expect(vm.text()).toEqual(getTimeago().format(date.getTime()));
+ });
+ });
+
+ describe.each`
+ timezone | offset
+ ${'US/Pacific'} | ${420}
+ ${'US/Eastern'} | ${240}
+ ${'Brazil/East'} | ${180}
+ ${'UTC'} | ${-0}
+ ${'Europe/London'} | ${-60}
+ `('with different client vs server TZ', ({ timezone, offset }) => {
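+      // getTimezoneOffset() reports minutes behind UTC, so e.g. a UTC-7 zone yields 420 and a UTC+1 zone yields -60.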
+ let tzDate;
+
+ beforeEach(() => {
+ timezoneMock.register(timezone);
+ // Date object with mocked TZ
+ tzDate = new Date();
+ buildVm({ time: date.getTime() });
+ });
+
+    it('the date object should have the correct timezone offset', () => {
+ expect(tzDate.getTimezoneOffset()).toBe(offset);
+ });
+
+    it('timeago should handle the date correctly', () => {
+      // getTime() always accounts for the TZ, which lets us validate that the date objects represent
+      // the same date and time regardless of the TZ.
+ expect(vm.text()).toEqual(getTimeago().format(date.getTime()));
+ expect(vm.text()).toEqual(getTimeago().format(tzDate.getTime()));
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js
new file mode 100644
index 00000000000..5a609568220
--- /dev/null
+++ b/spec/frontend/vue_shared/components/user_select_spec.js
@@ -0,0 +1,311 @@
+import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { cloneDeep } from 'lodash';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import searchUsersQuery from '~/graphql_shared/queries/users_search.query.graphql';
+import { ASSIGNEES_DEBOUNCE_DELAY } from '~/sidebar/constants';
+import getIssueParticipantsQuery from '~/vue_shared/components/sidebar/queries/get_issue_participants.query.graphql';
+import UserSelect from '~/vue_shared/components/user_select/user_select.vue';
+import {
+ searchResponse,
+ projectMembersResponse,
+ participantsQueryResponse,
+} from '../../sidebar/mock_data';
+
+const assignee = {
+ id: 'gid://gitlab/User/4',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Developer',
+ username: 'dev',
+ webUrl: '/dev',
+ status: null,
+};
+
+const mockError = jest.fn().mockRejectedValue('Error!');
+
+const waitForSearch = async () => {
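+  // Flush the assignees search debounce timer, then let the mocked Apollo queries resolve.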
+ jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ await nextTick();
+ await waitForPromises();
+};
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('User select dropdown', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const findSearchField = () => wrapper.findComponent(GlSearchBoxByType);
+ const findParticipantsLoading = () => wrapper.find('[data-testid="loading-participants"]');
+ const findSelectedParticipants = () => wrapper.findAll('[data-testid="selected-participant"]');
+ const findUnselectedParticipants = () =>
+ wrapper.findAll('[data-testid="unselected-participant"]');
+ const findCurrentUser = () => wrapper.findAll('[data-testid="current-user"]');
+ const findUnassignLink = () => wrapper.find('[data-testid="unassign"]');
+ const findEmptySearchResults = () => wrapper.find('[data-testid="empty-results"]');
+
+ const createComponent = ({
+ props = {},
+ searchQueryHandler = jest.fn().mockResolvedValue(projectMembersResponse),
+ participantsQueryHandler = jest.fn().mockResolvedValue(participantsQueryResponse),
+ } = {}) => {
+ fakeApollo = createMockApollo([
+ [searchUsersQuery, searchQueryHandler],
+ [getIssueParticipantsQuery, participantsQueryHandler],
+ ]);
+ wrapper = shallowMount(UserSelect, {
+ localVue,
+ apolloProvider: fakeApollo,
+ propsData: {
+ headerText: 'test',
+ text: 'test-text',
+ fullPath: '/project',
+ iid: '1',
+ value: [],
+ currentUser: {
+ username: 'random',
+ name: 'Mr. Random',
+ },
+ allowMultipleAssignees: false,
+ ...props,
+ },
+ stubs: {
+ GlDropdown,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ it('renders a loading spinner if participants are loading', () => {
+ createComponent();
+
+ expect(findParticipantsLoading().exists()).toBe(true);
+ });
+
+ it('emits an `error` event if participants query was rejected', async () => {
+ createComponent({ participantsQueryHandler: mockError });
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[], []]);
+ });
+
+ it('emits an `error` event if search query was rejected', async () => {
+ createComponent({ searchQueryHandler: mockError });
+ await waitForSearch();
+
+ expect(wrapper.emitted('error')).toEqual([[], []]);
+ });
+
+ it('renders current user if they are not in participants or assignees', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findCurrentUser().exists()).toBe(true);
+ });
+
+ it('displays correct amount of selected users', async () => {
+ createComponent({
+ props: {
+ value: [assignee],
+ },
+ });
+ await waitForPromises();
+
+ expect(findSelectedParticipants()).toHaveLength(1);
+ });
+
+ describe('when search is empty', () => {
+ it('renders a merged list of participants and project members', async () => {
+ createComponent();
+ await waitForPromises();
+ expect(findUnselectedParticipants()).toHaveLength(3);
+ });
+
+ it('renders `Unassigned` link with the checkmark when there are no selected users', async () => {
+ createComponent();
+ await waitForPromises();
+ expect(findUnassignLink().props('isChecked')).toBe(true);
+ });
+
+ it('renders `Unassigned` link without the checkmark when there are selected users', async () => {
+ createComponent({
+ props: {
+ value: [assignee],
+ },
+ });
+ await waitForPromises();
+ expect(findUnassignLink().props('isChecked')).toBe(false);
+ });
+
+ it('emits an input event with empty array after clicking on `Unassigned`', async () => {
+ createComponent({
+ props: {
+ value: [assignee],
+ },
+ });
+ await waitForPromises();
+ findUnassignLink().vm.$emit('click');
+
+ expect(wrapper.emitted('input')).toEqual([[[]]]);
+ });
+
+ it('emits an empty array after unselecting the only selected assignee', async () => {
+ createComponent({
+ props: {
+ value: [assignee],
+ },
+ });
+ await waitForPromises();
+
+ findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
+ expect(wrapper.emitted('input')).toEqual([[[]]]);
+ });
+
+ it('allows only one user to be selected if `allowMultipleAssignees` is false', async () => {
+ createComponent({
+ props: {
+ value: [assignee],
+ },
+ });
+ await waitForPromises();
+
+ findUnselectedParticipants().at(0).vm.$emit('click');
+ expect(wrapper.emitted('input')).toEqual([
+ [
+ [
+ {
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ status: null,
+ username: 'root',
+ webUrl: '/root',
+ },
+ ],
+ ],
+ ]);
+ });
+
+ it('adds user to selected if `allowMultipleAssignees` is true', async () => {
+ createComponent({
+ props: {
+ value: [assignee],
+ allowMultipleAssignees: true,
+ },
+ });
+ await waitForPromises();
+
+ findUnselectedParticipants().at(0).vm.$emit('click');
+ expect(wrapper.emitted('input')[0][0]).toHaveLength(2);
+ });
+ });
+
+ describe('when searching', () => {
+ it('does not show loading spinner when debounce timer is still running', async () => {
+ createComponent();
+ await waitForPromises();
+ findSearchField().vm.$emit('input', 'roo');
+
+ expect(findParticipantsLoading().exists()).toBe(false);
+ });
+
+ it('shows loading spinner when searching for users', async () => {
+ createComponent();
+ await waitForPromises();
+ findSearchField().vm.$emit('input', 'roo');
+ jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ await nextTick();
+
+ expect(findParticipantsLoading().exists()).toBe(true);
+ });
+
+ it('renders a list of found users and external participants matching search term', async () => {
+ createComponent({ searchQueryHandler: jest.fn().mockResolvedValue(searchResponse) });
+ await waitForPromises();
+
+ findSearchField().vm.$emit('input', 'ro');
+ await waitForSearch();
+
+ expect(findUnselectedParticipants()).toHaveLength(3);
+ });
+
+ it('renders a list of found users only if no external participants match search term', async () => {
+ createComponent({ searchQueryHandler: jest.fn().mockResolvedValue(searchResponse) });
+ await waitForPromises();
+
+ findSearchField().vm.$emit('input', 'roo');
+ await waitForSearch();
+
+ expect(findUnselectedParticipants()).toHaveLength(2);
+ });
+
+ it('shows a message about no matches if search returned an empty list', async () => {
+ const responseCopy = cloneDeep(searchResponse);
+ responseCopy.data.workspace.users.nodes = [];
+
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue(responseCopy),
+ });
+ await waitForPromises();
+ findSearchField().vm.$emit('input', 'tango');
+ await waitForSearch();
+
+ expect(findUnselectedParticipants()).toHaveLength(0);
+ expect(findEmptySearchResults().exists()).toBe(true);
+ });
+ });
+
+ // TODO Remove this test after the following issue is resolved in the backend
+ // https://gitlab.com/gitlab-org/gitlab/-/issues/329750
+ describe('temporary error suppression', () => {
+ beforeEach(() => {
+ jest.spyOn(console, 'error').mockImplementation();
+ });
+
+ const nullError = { message: 'Cannot return null for non-nullable field GroupMember.user' };
+
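+    // Responses whose errors contain only this null-user message are logged to the console but
+    // not re-emitted, while any other error message still triggers the component's 'error' event.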
+ it.each`
+ mockErrors
+ ${[nullError]}
+ ${[nullError, nullError]}
+ `('does not emit errors', async ({ mockErrors }) => {
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue({
+ errors: mockErrors,
+ }),
+ });
+ await waitForSearch();
+
+ expect(wrapper.emitted()).toEqual({});
+ // eslint-disable-next-line no-console
+ expect(console.error).toHaveBeenCalled();
+ });
+
+ it.each`
+ mockErrors
+ ${[{ message: 'serious error' }]}
+ ${[nullError, { message: 'serious error' }]}
+ `('emits error when non-null related errors are included', async ({ mockErrors }) => {
+ createComponent({
+ searchQueryHandler: jest.fn().mockResolvedValue({
+ errors: mockErrors,
+ }),
+ });
+ await waitForSearch();
+
+ expect(wrapper.emitted('error')).toEqual([[]]);
+ // eslint-disable-next-line no-console
+ expect(console.error).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/vuex_module_provider_spec.js b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js
new file mode 100644
index 00000000000..ebd396bd87c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/vuex_module_provider_spec.js
@@ -0,0 +1,47 @@
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import VuexModuleProvider from '~/vue_shared/components/vuex_module_provider.vue';
+
+const TestComponent = Vue.extend({
+ inject: ['vuexModule'],
+ template: `<div data-testid="vuexModule">{{ vuexModule }}</div> `,
+});
+
+const TEST_VUEX_MODULE = 'testVuexModule';
+
+describe('~/vue_shared/components/vuex_module_provider', () => {
+ let wrapper;
+
+ const findProvidedVuexModule = () => wrapper.find('[data-testid="vuexModule"]').text();
+
+ const createComponent = (extraParams = {}) => {
+ wrapper = mount(VuexModuleProvider, {
+ propsData: {
+ vuexModule: TEST_VUEX_MODULE,
+ },
+ slots: {
+ default: TestComponent,
+ },
+ ...extraParams,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('provides "vuexModule" set from prop', () => {
+ createComponent();
+ expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE);
+ });
+
+ it('does not blow up when used with vue-apollo', () => {
+ // See https://github.com/vuejs/vue-apollo/pull/1153 for details
+ const localVue = createLocalVue();
+ localVue.use(VueApollo);
+
+ createComponent({ localVue });
+ expect(findProvidedVuexModule()).toBe(TEST_VUEX_MODULE);
+ });
+});
diff --git a/spec/frontend/vue_shared/directives/validation_spec.js b/spec/frontend/vue_shared/directives/validation_spec.js
index 2764a71d204..51ee73cabde 100644
--- a/spec/frontend/vue_shared/directives/validation_spec.js
+++ b/spec/frontend/vue_shared/directives/validation_spec.js
@@ -1,15 +1,21 @@
import { shallowMount } from '@vue/test-utils';
-import validation from '~/vue_shared/directives/validation';
+import validation, { initForm } from '~/vue_shared/directives/validation';
describe('validation directive', () => {
let wrapper;
- const createComponent = ({ inputAttributes, showValidation } = {}) => {
+ const createComponentFactory = ({ inputAttributes, template, data }) => {
const defaultInputAttributes = {
type: 'text',
required: true,
};
+ const defaultTemplate = `
+ <form>
+ <input v-validation:[showValidation] name="exampleField" v-bind="attributes" />
+ </form>
+ `;
+
const component = {
directives: {
validation: validation(),
@@ -17,27 +23,52 @@ describe('validation directive', () => {
data() {
return {
attributes: inputAttributes || defaultInputAttributes,
- showValidation,
- form: {
- state: null,
- fields: {
- exampleField: {
- state: null,
- feedback: '',
- },
+ ...data,
+ };
+ },
+ template: template || defaultTemplate,
+ };
+
+ wrapper = shallowMount(component, { attachTo: document.body });
+ };
+
+ const createComponent = ({ inputAttributes, showValidation, template } = {}) =>
+ createComponentFactory({
+ inputAttributes,
+ data: {
+ showValidation,
+ form: {
+ state: null,
+ fields: {
+ exampleField: {
+ state: null,
+ feedback: '',
},
},
- };
+ },
+ },
+ template,
+ });
+
+ const createComponentWithInitForm = ({ inputAttributes } = {}) =>
+ createComponentFactory({
+ inputAttributes,
+ data: {
+ form: initForm({
+ fields: {
+ exampleField: {
+ state: null,
+ value: 'lorem',
+ },
+ },
+ }),
},
template: `
<form>
- <input v-validation:[showValidation] name="exampleField" v-bind="attributes" />
+ <input v-validation:[form.showValidation] name="exampleField" v-bind="attributes" />
</form>
`,
- };
-
- wrapper = shallowMount(component, { attachTo: document.body });
- };
+ });
afterEach(() => {
wrapper.destroy();
@@ -48,6 +79,12 @@ describe('validation directive', () => {
const findForm = () => wrapper.find('form');
const findInput = () => wrapper.find('input');
+ const setValueAndTriggerValidation = (value) => {
+ const input = findInput();
+ input.setValue(value);
+ input.trigger('blur');
+ };
+
describe.each([true, false])(
'with fields untouched and "showValidation" set to "%s"',
(showValidation) => {
@@ -78,12 +115,6 @@ describe('validation directive', () => {
`(
'with input-attributes set to $inputAttributes',
({ inputAttributes, validValue, invalidValue }) => {
- const setValueAndTriggerValidation = (value) => {
- const input = findInput();
- input.setValue(value);
- input.trigger('blur');
- };
-
beforeEach(() => {
createComponent({ inputAttributes });
});
@@ -129,4 +160,130 @@ describe('validation directive', () => {
});
},
);
+
+ describe('with group elements', () => {
+ const template = `
+ <form>
+ <div v-validation:[showValidation]>
+ <input name="exampleField" v-bind="attributes" />
+ </div>
+ </form>
+ `;
+ beforeEach(() => {
+ createComponent({
+ template,
+ inputAttributes: {
+ required: true,
+ },
+ });
+ });
+
+ describe('with invalid value', () => {
+ beforeEach(() => {
+ setValueAndTriggerValidation('');
+ });
+
+ it('should set correct field state', () => {
+ expect(getFormData().fields.exampleField).toEqual({
+ state: false,
+ feedback: expect.any(String),
+ });
+ });
+
+ it('should set correct feedback', () => {
+ expect(getFormData().fields.exampleField.feedback).toBe('Please fill out this field.');
+ });
+ });
+
+ describe('with valid value', () => {
+ beforeEach(() => {
+ setValueAndTriggerValidation('hello');
+ });
+
+      it('sets the correct state', () => {
+ expect(getFormData().fields.exampleField).toEqual({
+ state: true,
+ feedback: '',
+ });
+ });
+ });
+ });
+
+ describe('component using initForm', () => {
+ it('sets the form fields correctly', () => {
+ createComponentWithInitForm();
+
+ expect(getFormData().state).toBe(false);
+ expect(getFormData().showValidation).toBe(false);
+
+ expect(getFormData().fields.exampleField).toMatchObject({
+ value: 'lorem',
+ state: null,
+ required: true,
+ feedback: expect.any(String),
+ });
+ });
+ });
+});
+
+describe('initForm', () => {
+ const MOCK_FORM = {
+ fields: {
+ name: {
+ value: 'lorem',
+ },
+ description: {
+ value: 'ipsum',
+ required: false,
+ skipValidation: true,
+ },
+ },
+ };
+
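+  // Expected initForm output for MOCK_FORM: `required` defaults to true, `state` and `feedback`
+  // default to null, and fields flagged with skipValidation start out valid (state: true).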
+ const EXPECTED_FIELDS = {
+ name: { value: 'lorem', required: true, state: null, feedback: null },
+ description: { value: 'ipsum', required: false, state: true, feedback: null },
+ };
+
+ it('returns form object', () => {
+ expect(initForm(MOCK_FORM)).toMatchObject({
+ state: false,
+ showValidation: false,
+ fields: EXPECTED_FIELDS,
+ });
+ });
+
+ it('returns form object with additional parameters', () => {
+ const customFormObject = {
+ foo: {
+ bar: 'lorem',
+ },
+ };
+
+ const form = {
+ ...MOCK_FORM,
+ ...customFormObject,
+ };
+
+ expect(initForm(form)).toMatchObject({
+ state: false,
+ showValidation: false,
+ fields: EXPECTED_FIELDS,
+ ...customFormObject,
+ });
+ });
+
+ it('can override existing state and showValidation values', () => {
+ const form = {
+ ...MOCK_FORM,
+ state: true,
+ showValidation: true,
+ };
+
+ expect(initForm(form)).toMatchObject({
+ state: true,
+ showValidation: true,
+ fields: EXPECTED_FIELDS,
+ });
+ });
});
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js b/spec/frontend/vue_shared/new_namespace/components/legacy_container_spec.js
index 6fc36d6362c..52f36aa0e77 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js
+++ b/spec/frontend/vue_shared/new_namespace/components/legacy_container_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import LegacyContainer from '~/projects/experiment_new_project_creation/components/legacy_container.vue';
+import LegacyContainer from '~/vue_shared/new_namespace/components/legacy_container.vue';
describe('Legacy container component', () => {
let wrapper;
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js b/spec/frontend/vue_shared/new_namespace/components/welcome_spec.js
index 9fd1230806e..602213fca83 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
+++ b/spec/frontend/vue_shared/new_namespace/components/welcome_spec.js
@@ -3,8 +3,7 @@ import { nextTick } from 'vue';
import { mockTracking } from 'helpers/tracking_helper';
import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
import { getExperimentData } from '~/experimentation/utils';
-import NewProjectPushTipPopover from '~/projects/experiment_new_project_creation/components/new_project_push_tip_popover.vue';
-import WelcomePage from '~/projects/experiment_new_project_creation/components/welcome.vue';
+import WelcomePage from '~/vue_shared/new_namespace/components/welcome.vue';
jest.mock('~/experimentation/utils', () => ({ getExperimentData: jest.fn() }));
@@ -12,8 +11,18 @@ describe('Welcome page', () => {
let wrapper;
let trackingSpy;
- const createComponent = (propsData) => {
- wrapper = shallowMount(WelcomePage, { propsData });
+ const DEFAULT_PROPS = {
+ title: 'Create new something',
+ };
+
+ const createComponent = ({ propsData, slots }) => {
+ wrapper = shallowMount(WelcomePage, {
+ slots,
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...propsData,
+ },
+ });
};
beforeEach(() => {
@@ -29,7 +38,7 @@ describe('Welcome page', () => {
});
it('tracks link clicks', async () => {
- createComponent({ panels: [{ name: 'test', href: '#' }] });
+ createComponent({ propsData: { experiment: 'foo', panels: [{ name: 'test', href: '#' }] } });
const link = wrapper.find('a');
link.trigger('click');
await nextTick();
@@ -38,11 +47,11 @@ describe('Welcome page', () => {
});
});
- it('adds new_repo experiment data if in experiment', async () => {
+ it('adds experiment data if in experiment', async () => {
const mockExperimentData = 'data';
getExperimentData.mockReturnValue(mockExperimentData);
- createComponent({ panels: [{ name: 'test', href: '#' }] });
+ createComponent({ propsData: { experiment: 'foo', panels: [{ name: 'test', href: '#' }] } });
const link = wrapper.find('a');
link.trigger('click');
await nextTick();
@@ -57,12 +66,13 @@ describe('Welcome page', () => {
});
});
- it('renders new project push tip popover', () => {
- createComponent({ panels: [{ name: 'test', href: '#' }] });
-
- const popover = wrapper.findComponent(NewProjectPushTipPopover);
+ it('renders footer slot if provided', () => {
+ const DUMMY = 'Test message';
+ createComponent({
+ slots: { footer: DUMMY },
+ propsData: { panels: [{ name: 'test', href: '#' }] },
+ });
- expect(popover.exists()).toBe(true);
- expect(popover.props().target()).toBe(wrapper.find({ ref: 'clipTip' }).element);
+ expect(wrapper.text()).toContain(DUMMY);
});
});
diff --git a/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
new file mode 100644
index 00000000000..30937921900
--- /dev/null
+++ b/spec/frontend/vue_shared/new_namespace/new_namespace_page_spec.js
@@ -0,0 +1,114 @@
+import { GlBreadcrumb } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import LegacyContainer from '~/vue_shared/new_namespace/components/legacy_container.vue';
+import WelcomePage from '~/vue_shared/new_namespace/components/welcome.vue';
+import NewNamespacePage from '~/vue_shared/new_namespace/new_namespace_page.vue';
+
+describe('New namespace page', () => {
+ let wrapper;
+
+ const findWelcomePage = () => wrapper.findComponent(WelcomePage);
+ const findLegacyContainer = () => wrapper.findComponent(LegacyContainer);
+ const findBreadcrumb = () => wrapper.findComponent(GlBreadcrumb);
+
+ const DEFAULT_PROPS = {
+ title: 'Create something',
+ initialBreadcrumb: 'Something',
+ panels: [
+ { name: 'panel1', selector: '#some-selector1' },
+ { name: 'panel2', selector: '#some-selector2' },
+ ],
+ persistenceKey: 'DEMO-PERSISTENCE-KEY',
+ };
+
+ const createComponent = ({ slots, propsData } = {}) => {
+ wrapper = shallowMount(NewNamespacePage, {
+ slots,
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...propsData,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ window.location.hash = '';
+ });
+
+ it('passes experiment to welcome component if provided', () => {
+ const EXPERIMENT = 'foo';
+ createComponent({ propsData: { experiment: EXPERIMENT } });
+
+ expect(findWelcomePage().props().experiment).toBe(EXPERIMENT);
+ });
+
+ describe('with empty hash', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders welcome page', () => {
+ expect(findWelcomePage().exists()).toBe(true);
+ });
+
+ it('does not render breadcrumbs', () => {
+ expect(findBreadcrumb().exists()).toBe(false);
+ });
+ });
+
+ it('renders first container if jumpToLastPersistedPanel passed', () => {
+ createComponent({ propsData: { jumpToLastPersistedPanel: true } });
+ expect(findWelcomePage().exists()).toBe(false);
+ expect(findLegacyContainer().exists()).toBe(true);
+ });
+
+ describe('when hash is not empty on load', () => {
+ beforeEach(() => {
+ window.location.hash = `#${DEFAULT_PROPS.panels[1].name}`;
+ createComponent();
+ });
+
+ it('renders relevant container', () => {
+ expect(findWelcomePage().exists()).toBe(false);
+
+ const container = findLegacyContainer();
+
+ expect(container.exists()).toBe(true);
+ expect(container.props().selector).toBe(DEFAULT_PROPS.panels[1].selector);
+ });
+
+ it('renders breadcrumbs', () => {
+ const breadcrumb = findBreadcrumb();
+ expect(breadcrumb.exists()).toBe(true);
+ expect(breadcrumb.props().items[0].text).toBe(DEFAULT_PROPS.initialBreadcrumb);
+ });
+ });
+
+ it('renders extra description if provided', () => {
+ window.location.hash = `#${DEFAULT_PROPS.panels[1].name}`;
+ const EXTRA_DESCRIPTION = 'Some extra description';
+ createComponent({
+ slots: {
+ 'extra-description': EXTRA_DESCRIPTION,
+ },
+ });
+
+ expect(wrapper.text()).toContain(EXTRA_DESCRIPTION);
+ });
+
+ it('renders relevant container when hash changes', async () => {
+ createComponent();
+ expect(findWelcomePage().exists()).toBe(true);
+
+ window.location.hash = `#${DEFAULT_PROPS.panels[0].name}`;
+ const ev = document.createEvent('HTMLEvents');
+ ev.initEvent('hashchange', false, false);
+ window.dispatchEvent(ev);
+
+ await nextTick();
+ expect(findWelcomePage().exists()).toBe(false);
+ expect(findLegacyContainer().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/vue_shared/security_reports/components/apollo_mocks.js b/spec/frontend/vue_shared/security_reports/components/apollo_mocks.js
new file mode 100644
index 00000000000..066f9a57bc6
--- /dev/null
+++ b/spec/frontend/vue_shared/security_reports/components/apollo_mocks.js
@@ -0,0 +1,12 @@
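+// Curried mock factory: call with a GraphQL mutation name to get a builder that returns a
+// successful-looking response shaped like { data: { [mutationType]: { successPath, errors, __typename } } }.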
+export const buildConfigureSecurityFeatureMockFactory = (mutationType) => ({
+ successPath = 'testSuccessPath',
+ errors = [],
+} = {}) => ({
+ data: {
+ [mutationType]: {
+ successPath,
+ errors,
+ __typename: `${mutationType}Payload`,
+ },
+ },
+});
diff --git a/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js b/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js
new file mode 100644
index 00000000000..517eee6a729
--- /dev/null
+++ b/spec/frontend/vue_shared/security_reports/components/manage_via_mr_spec.js
@@ -0,0 +1,184 @@
+import { GlButton } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { featureToMutationMap } from 'ee_else_ce/security_configuration/components/constants';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { humanize } from '~/lib/utils/text_utility';
+import { redirectTo } from '~/lib/utils/url_utility';
+import ManageViaMr from '~/vue_shared/security_configuration/components/manage_via_mr.vue';
+import { buildConfigureSecurityFeatureMockFactory } from './apollo_mocks';
+
+jest.mock('~/lib/utils/url_utility');
+
+Vue.use(VueApollo);
+
+const projectPath = 'namespace/project';
+
+describe('ManageViaMr component', () => {
+ let wrapper;
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ function createMockApolloProvider(mutation, handler) {
+ const requestHandlers = [[mutation, handler]];
+
+ return createMockApollo(requestHandlers);
+ }
+
+ function createComponent({
+ featureName = 'SAST',
+ featureType = 'sast',
+ isFeatureConfigured = false,
+ variant = undefined,
+ category = undefined,
+ ...options
+ } = {}) {
+ wrapper = extendedWrapper(
+ mount(ManageViaMr, {
+ provide: {
+ projectPath,
+ },
+ propsData: {
+ feature: {
+ name: featureName,
+ type: featureType,
+ configured: isFeatureConfigured,
+ },
+ variant,
+ category,
+ },
+ ...options,
+ }),
+ );
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ // This component supports different report types/mutations depending on
+ // whether it's in a CE or EE context. This makes sure we are only testing
+ // the ones available in the current test context.
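+  // Each entry: [humanized feature name, feature type, mutation document, mutation id, expected mutation variables].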
+ const supportedReportTypes = Object.entries(featureToMutationMap).map(
+ ([featureType, { getMutationPayload, mutationId }]) => {
+ const { mutation, variables: mutationVariables } = getMutationPayload(projectPath);
+ return [humanize(featureType), featureType, mutation, mutationId, mutationVariables];
+ },
+ );
+
+ describe.each(supportedReportTypes)(
+ '%s',
+ (featureName, featureType, mutation, mutationId, mutationVariables) => {
+ const buildConfigureSecurityFeatureMock = buildConfigureSecurityFeatureMockFactory(
+ mutationId,
+ );
+ const successHandler = jest.fn(async () => buildConfigureSecurityFeatureMock());
+ const noSuccessPathHandler = async () =>
+ buildConfigureSecurityFeatureMock({
+ successPath: '',
+ });
+ const errorHandler = async () =>
+ buildConfigureSecurityFeatureMock({
+ errors: ['foo'],
+ });
+ const pendingHandler = () => new Promise(() => {});
+
+ describe('when feature is configured', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApolloProvider(mutation, successHandler);
+ createComponent({ apolloProvider, featureName, featureType, isFeatureConfigured: true });
+ });
+
+        it('does not render a button', () => {
+ expect(findButton().exists()).toBe(false);
+ });
+ });
+
+ describe('when feature is not configured', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApolloProvider(mutation, successHandler);
+ createComponent({ apolloProvider, featureName, featureType, isFeatureConfigured: false });
+ });
+
+        it('renders a button', () => {
+ expect(findButton().exists()).toBe(true);
+ });
+
+ it('clicking on the button triggers the configure mutation', () => {
+ findButton().trigger('click');
+
+ expect(successHandler).toHaveBeenCalledTimes(1);
+ expect(successHandler).toHaveBeenCalledWith(mutationVariables);
+ });
+ });
+
+ describe('given a pending response', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApolloProvider(mutation, pendingHandler);
+ createComponent({ apolloProvider, featureName, featureType });
+ });
+
+ it('renders spinner correctly', async () => {
+ const button = findButton();
+ expect(button.props('loading')).toBe(false);
+ await button.trigger('click');
+ expect(button.props('loading')).toBe(true);
+ });
+ });
+
+ describe('given a successful response', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApolloProvider(mutation, successHandler);
+ createComponent({ apolloProvider, featureName, featureType });
+ });
+
+ it('should call redirect helper with correct value', async () => {
+ await wrapper.trigger('click');
+ await waitForPromises();
+ expect(redirectTo).toHaveBeenCalledTimes(1);
+ expect(redirectTo).toHaveBeenCalledWith('testSuccessPath');
+ // This is done for UX reasons. If the loading prop is set to false
+ // on success, then there's a period where the button is clickable
+ // again. Instead, we want the button to display a loading indicator
+ // for the remainder of the lifetime of the page (i.e., until the
+ // browser can start painting the new page it's been redirected to).
+ expect(findButton().props().loading).toBe(true);
+ });
+ });
+
+ describe.each`
+ handler | message
+ ${noSuccessPathHandler} | ${`${featureName} merge request creation mutation failed`}
+ ${errorHandler} | ${'foo'}
+ `('given an error response', ({ handler, message }) => {
+ beforeEach(() => {
+ const apolloProvider = createMockApolloProvider(mutation, handler);
+ createComponent({ apolloProvider, featureName, featureType });
+ });
+
+ it('should catch and emit error', async () => {
+ await wrapper.trigger('click');
+ await waitForPromises();
+ expect(wrapper.emitted('error')).toEqual([[message]]);
+ expect(findButton().props('loading')).toBe(false);
+ });
+ });
+ },
+ );
+
+ describe('button props', () => {
+ it('passes the variant and category props to the GlButton', () => {
+ const variant = 'danger';
+ const category = 'tertiary';
+ createComponent({ variant, category });
+
+ expect(wrapper.findComponent(GlButton).props()).toMatchObject({
+ variant,
+ category,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/security_reports/mock_data.js b/spec/frontend/vue_shared/security_reports/mock_data.js
index 7918f70d702..bd9ce3b7314 100644
--- a/spec/frontend/vue_shared/security_reports/mock_data.js
+++ b/spec/frontend/vue_shared/security_reports/mock_data.js
@@ -322,7 +322,7 @@ export const secretScanningDiffSuccessMock = {
head_report_created_at: '2020-01-10T10:00:00.000Z',
};
-export const securityReportDownloadPathsQueryNoArtifactsResponse = {
+export const securityReportMergeRequestDownloadPathsQueryNoArtifactsResponse = {
project: {
mergeRequest: {
headPipeline: {
@@ -339,7 +339,7 @@ export const securityReportDownloadPathsQueryNoArtifactsResponse = {
},
};
-export const securityReportDownloadPathsQueryResponse = {
+export const securityReportMergeRequestDownloadPathsQueryResponse = {
project: {
mergeRequest: {
headPipeline: {
@@ -447,8 +447,114 @@ export const securityReportDownloadPathsQueryResponse = {
},
};
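+// Pipeline-scoped variant of the merge request response above: artifact download paths hang
+// directly off project.pipeline.jobs rather than project.mergeRequest.headPipeline.jobs.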
+export const securityReportPipelineDownloadPathsQueryResponse = {
+ project: {
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/176',
+ jobs: {
+ nodes: [
+ {
+ name: 'secret_detection',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1399/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1399/artifacts/download?file_type=secret_detection',
+ fileType: 'SECRET_DETECTION',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ {
+ name: 'bandit-sast',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1400/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1400/artifacts/download?file_type=sast',
+ fileType: 'SAST',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ {
+ name: 'eslint-sast',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1401/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1401/artifacts/download?file_type=sast',
+ fileType: 'SAST',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ {
+ name: 'all_artifacts',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=archive',
+ fileType: 'ARCHIVE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/gitlab-org/secrets-detection-test/-/jobs/1402/artifacts/download?file_type=metadata',
+ fileType: 'METADATA',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ ],
+ __typename: 'CiJobConnection',
+ },
+ __typename: 'Pipeline',
+ },
+ __typename: 'MergeRequest',
+ },
+ __typename: 'Project',
+};
+
/**
- * These correspond to SAST jobs in the securityReportDownloadPathsQueryResponse above.
+ * These correspond to SAST jobs in the securityReportMergeRequestDownloadPathsQueryResponse above.
*/
export const sastArtifacts = [
{
@@ -464,7 +570,7 @@ export const sastArtifacts = [
];
/**
- * These correspond to Secret Detection jobs in the securityReportDownloadPathsQueryResponse above.
+ * These correspond to Secret Detection jobs in the securityReportMergeRequestDownloadPathsQueryResponse above.
*/
export const secretDetectionArtifacts = [
{
@@ -481,7 +587,7 @@ export const expectedDownloadDropdownProps = {
};
/**
- * These correspond to any jobs with zip archives in the securityReportDownloadPathsQueryResponse above.
+ * These correspond to any jobs with zip archives in the securityReportMergeRequestDownloadPathsQueryResponse above.
*/
export const archiveArtifacts = [
{
@@ -492,7 +598,7 @@ export const archiveArtifacts = [
];
/**
- * These correspond to any jobs with trace data in the securityReportDownloadPathsQueryResponse above.
+ * These correspond to any jobs with trace data in the securityReportMergeRequestDownloadPathsQueryResponse above.
*/
export const traceArtifacts = [
{
@@ -518,7 +624,7 @@ export const traceArtifacts = [
];
/**
- * These correspond to any jobs with metadata data in the securityReportDownloadPathsQueryResponse above.
+ * These correspond to any jobs with metadata data in the securityReportMergeRequestDownloadPathsQueryResponse above.
*/
export const metadataArtifacts = [
{
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
index 0b4816a951e..038d7754776 100644
--- a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -9,8 +9,8 @@ import { trimText } from 'helpers/text_helper';
import waitForPromises from 'helpers/wait_for_promises';
import {
expectedDownloadDropdownProps,
- securityReportDownloadPathsQueryNoArtifactsResponse,
- securityReportDownloadPathsQueryResponse,
+ securityReportMergeRequestDownloadPathsQueryNoArtifactsResponse,
+ securityReportMergeRequestDownloadPathsQueryResponse,
sastDiffSuccessMock,
secretScanningDiffSuccessMock,
} from 'jest/vue_shared/security_reports/mock_data';
@@ -22,7 +22,7 @@ import {
REPORT_TYPE_SAST,
REPORT_TYPE_SECRET_DETECTION,
} from '~/vue_shared/security_reports/constants';
-import securityReportDownloadPathsQuery from '~/vue_shared/security_reports/queries/security_report_download_paths.query.graphql';
+import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/queries/security_report_merge_request_download_paths.query.graphql';
import SecurityReportsApp from '~/vue_shared/security_reports/security_reports_app.vue';
jest.mock('~/flash');
@@ -59,12 +59,13 @@ describe('Security reports app', () => {
};
const pendingHandler = () => new Promise(() => {});
- const successHandler = () => Promise.resolve({ data: securityReportDownloadPathsQueryResponse });
+ const successHandler = () =>
+ Promise.resolve({ data: securityReportMergeRequestDownloadPathsQueryResponse });
const successEmptyHandler = () =>
- Promise.resolve({ data: securityReportDownloadPathsQueryNoArtifactsResponse });
+ Promise.resolve({ data: securityReportMergeRequestDownloadPathsQueryNoArtifactsResponse });
const failureHandler = () => Promise.resolve({ errors: [{ message: 'some error' }] });
const createMockApolloProvider = (handler) => {
- const requestHandlers = [[securityReportDownloadPathsQuery, handler]];
+ const requestHandlers = [[securityReportMergeRequestDownloadPathsQuery, handler]];
return createMockApollo(requestHandlers);
};
diff --git a/spec/frontend/vue_shared/security_reports/utils_spec.js b/spec/frontend/vue_shared/security_reports/utils_spec.js
index aa9e54fa10c..b7129ece698 100644
--- a/spec/frontend/vue_shared/security_reports/utils_spec.js
+++ b/spec/frontend/vue_shared/security_reports/utils_spec.js
@@ -3,9 +3,13 @@ import {
REPORT_TYPE_SECRET_DETECTION,
REPORT_FILE_TYPES,
} from '~/vue_shared/security_reports/constants';
-import { extractSecurityReportArtifacts } from '~/vue_shared/security_reports/utils';
import {
- securityReportDownloadPathsQueryResponse,
+ extractSecurityReportArtifactsFromMergeRequest,
+ extractSecurityReportArtifactsFromPipeline,
+} from '~/vue_shared/security_reports/utils';
+import {
+ securityReportMergeRequestDownloadPathsQueryResponse,
+ securityReportPipelineDownloadPathsQueryResponse,
sastArtifacts,
secretDetectionArtifacts,
archiveArtifacts,
@@ -13,7 +17,18 @@ import {
metadataArtifacts,
} from './mock_data';
-describe('extractSecurityReportArtifacts', () => {
+describe.each([
+ [
+ 'extractSecurityReportArtifactsFromMergeRequest',
+ extractSecurityReportArtifactsFromMergeRequest,
+ securityReportMergeRequestDownloadPathsQueryResponse,
+ ],
+ [
+ 'extractSecurityReportArtifactsFromPipelines',
+ extractSecurityReportArtifactsFromPipeline,
+ securityReportPipelineDownloadPathsQueryResponse,
+ ],
+])('%s', (funcName, extractFunc, response) => {
it.each`
reportTypes | expectedArtifacts
${[]} | ${[]}
@@ -27,9 +42,7 @@ describe('extractSecurityReportArtifacts', () => {
`(
'returns the expected artifacts given report types $reportTypes',
({ reportTypes, expectedArtifacts }) => {
- expect(
- extractSecurityReportArtifacts(reportTypes, securityReportDownloadPathsQueryResponse),
- ).toEqual(expectedArtifacts);
+ expect(extractFunc(reportTypes, response)).toEqual(expectedArtifacts);
},
);
});
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index 45c4682208b..12034346aba 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -82,6 +82,7 @@ describe('App', () => {
});
const getDrawer = () => wrapper.find(GlDrawer);
+ const getBackdrop = () => wrapper.find('.whats-new-modal-backdrop');
it('contains a drawer', () => {
expect(getDrawer().exists()).toBe(true);
@@ -100,6 +101,11 @@ describe('App', () => {
expect(actions.closeDrawer).toHaveBeenCalled();
});
+ it('dispatches closeDrawer when clicking the backdrop', () => {
+ getBackdrop().trigger('click');
+ expect(actions.closeDrawer).toHaveBeenCalled();
+ });
+
it.each([true, false])('passes open property', async (openState) => {
wrapper.vm.$store.state.open = openState;
@@ -149,7 +155,10 @@ describe('App', () => {
wrapper.vm.$store.state.pageInfo = { nextPage: 840 };
emitBottomReached();
- expect(actions.fetchItems).toHaveBeenCalledWith(expect.anything(), { page: 840 });
+ expect(actions.fetchItems).toHaveBeenCalledWith(expect.anything(), {
+ page: 840,
+ versionDigest: 'version-digest',
+ });
});
it('when nextPage does not exist it does not call fetchItems', () => {
diff --git a/spec/frontend/whats_new/components/feature_spec.js b/spec/frontend/whats_new/components/feature_spec.js
new file mode 100644
index 00000000000..9e9cb59c0d6
--- /dev/null
+++ b/spec/frontend/whats_new/components/feature_spec.js
@@ -0,0 +1,46 @@
+import { shallowMount } from '@vue/test-utils';
+import Feature from '~/whats_new/components/feature.vue';
+
+describe("What's new single feature", () => {
+ /** @type {import("@vue/test-utils").Wrapper} */
+ let wrapper;
+
+ const exampleFeature = {
+ title: 'Compliance pipeline configurations',
+ body:
+ '<p>We are thrilled to announce that it is now possible to define enforceable pipelines that will run for any project assigned a corresponding compliance framework.</p>',
+ stage: 'Manage',
+ 'self-managed': true,
+ 'gitlab-com': true,
+ packages: ['Ultimate'],
+ url: 'https://docs.gitlab.com/ee/user/project/settings/#compliance-pipeline-configuration',
+ image_url: 'https://img.youtube.com/vi/upLJ_equomw/hqdefault.jpg',
+ published_at: '2021-04-22T00:00:00.000Z',
+ release: '13.11',
+ };
+
+ const findReleaseDate = () => wrapper.find('[data-testid="release-date"]');
+
+ const createWrapper = ({ feature } = {}) => {
+ wrapper = shallowMount(Feature, {
+ propsData: { feature },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders the date', () => {
+ createWrapper({ feature: exampleFeature });
+ expect(findReleaseDate().text()).toBe('April 22, 2021');
+ });
+
+ describe('when the published_at is null', () => {
+ it("doesn't render the date", () => {
+ createWrapper({ feature: { ...exampleFeature, published_at: null } });
+ expect(findReleaseDate().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/whats_new/store/actions_spec.js b/spec/frontend/whats_new/store/actions_spec.js
index 39ad526cf14..c9614c7330b 100644
--- a/spec/frontend/whats_new/store/actions_spec.js
+++ b/spec/frontend/whats_new/store/actions_spec.js
@@ -44,16 +44,33 @@ describe('whats new actions', () => {
axiosMock.restore();
});
+ it("doesn't require arguments", () => {
+ axiosMock.reset();
+
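+    // fetchItems forwards 'page' and 'versionDigest' (sent as the 'v' query param); both default to undefined.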
+ axiosMock
+ .onGet('/-/whats_new', { params: { page: undefined, v: undefined } })
+ .replyOnce(200, [{ title: 'GitLab Stories' }]);
+
+ testAction(
+ actions.fetchItems,
+ {},
+ {},
+ expect.arrayContaining([
+ { type: types.ADD_FEATURES, payload: [{ title: 'GitLab Stories' }] },
+ ]),
+ );
+ });
+
it('passes arguments', () => {
axiosMock.reset();
axiosMock
- .onGet('/-/whats_new', { params: { page: 8 } })
+ .onGet('/-/whats_new', { params: { page: 8, v: 42 } })
.replyOnce(200, [{ title: 'GitLab Stories' }]);
testAction(
actions.fetchItems,
- { page: 8 },
+ { page: 8, versionDigest: 42 },
{},
expect.arrayContaining([
{ type: types.ADD_FEATURES, payload: [{ title: 'GitLab Stories' }] },
diff --git a/spec/graphql/graphql_triggers_spec.rb b/spec/graphql/graphql_triggers_spec.rb
new file mode 100644
index 00000000000..0b53c633077
--- /dev/null
+++ b/spec/graphql/graphql_triggers_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GraphqlTriggers do
+ describe '.issuable_assignees_updated' do
+ it 'triggers the issuableAssigneesUpdated subscription' do
+ assignees = create_list(:user, 2)
+ issue = create(:issue, assignees: assignees)
+
+ expect(GitlabSchema.subscriptions).to receive(:trigger).with(
+ 'issuableAssigneesUpdated',
+ { issuable_id: issue.to_gid },
+ issue
+ )
+
+ GraphqlTriggers.issuable_assignees_updated(issue)
+ end
+ end
+end
diff --git a/spec/graphql/mutations/alert_management/alerts/todo/create_spec.rb b/spec/graphql/mutations/alert_management/alerts/todo/create_spec.rb
index a10c3725ba2..8ec99070c91 100644
--- a/spec/graphql/mutations/alert_management/alerts/todo/create_spec.rb
+++ b/spec/graphql/mutations/alert_management/alerts/todo/create_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Mutations::AlertManagement::Alerts::Todo::Create do
let_it_be(:alert) { create(:alert_management_alert) }
let_it_be(:project) { alert.project }
+
let(:current_user) { project.owner }
let(:args) { { project_path: project.full_path, iid: alert.iid } }
diff --git a/spec/graphql/mutations/alert_management/create_alert_issue_spec.rb b/spec/graphql/mutations/alert_management/create_alert_issue_spec.rb
index 47ee338ad34..4758ac526a5 100644
--- a/spec/graphql/mutations/alert_management/create_alert_issue_spec.rb
+++ b/spec/graphql/mutations/alert_management/create_alert_issue_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Mutations::AlertManagement::CreateAlertIssue do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:alert) { create(:alert_management_alert, project: project, status: 'triggered') }
+
let(:args) { { project_path: project.full_path, iid: alert.iid } }
specify { expect(described_class).to require_graphql_authorizations(:update_alert_management_alert) }
diff --git a/spec/graphql/mutations/alert_management/http_integration/create_spec.rb b/spec/graphql/mutations/alert_management/http_integration/create_spec.rb
index 9aa89761aaf..be6c627e376 100644
--- a/spec/graphql/mutations/alert_management/http_integration/create_spec.rb
+++ b/spec/graphql/mutations/alert_management/http_integration/create_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Mutations::AlertManagement::HttpIntegration::Create do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:args) { { project_path: project.full_path, active: true, name: 'HTTP Integration' } }
specify { expect(described_class).to require_graphql_authorizations(:admin_operations) }
diff --git a/spec/graphql/mutations/alert_management/http_integration/destroy_spec.rb b/spec/graphql/mutations/alert_management/http_integration/destroy_spec.rb
index acd7070d0d3..1aeeba1009e 100644
--- a/spec/graphql/mutations/alert_management/http_integration/destroy_spec.rb
+++ b/spec/graphql/mutations/alert_management/http_integration/destroy_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Mutations::AlertManagement::HttpIntegration::Destroy do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:integration) { create(:alert_management_http_integration, project: project) }
let(:args) { { id: GitlabSchema.id_from_object(integration) } }
diff --git a/spec/graphql/mutations/alert_management/http_integration/reset_token_spec.rb b/spec/graphql/mutations/alert_management/http_integration/reset_token_spec.rb
index 96974c2aa6f..5a2af9e0be8 100644
--- a/spec/graphql/mutations/alert_management/http_integration/reset_token_spec.rb
+++ b/spec/graphql/mutations/alert_management/http_integration/reset_token_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Mutations::AlertManagement::HttpIntegration::ResetToken do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:integration) { create(:alert_management_http_integration, project: project) }
+
let(:args) { { id: GitlabSchema.id_from_object(integration) } }
specify { expect(described_class).to require_graphql_authorizations(:admin_operations) }
diff --git a/spec/graphql/mutations/alert_management/http_integration/update_spec.rb b/spec/graphql/mutations/alert_management/http_integration/update_spec.rb
index d6318e3161d..805996bf9e9 100644
--- a/spec/graphql/mutations/alert_management/http_integration/update_spec.rb
+++ b/spec/graphql/mutations/alert_management/http_integration/update_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Mutations::AlertManagement::HttpIntegration::Update do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:integration) { create(:alert_management_http_integration, project: project) }
+
let(:args) { { id: GitlabSchema.id_from_object(integration), active: false, name: 'New Name' } }
specify { expect(described_class).to require_graphql_authorizations(:admin_operations) }
diff --git a/spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb b/spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
index 02a5e2e74e2..7ab0f43d674 100644
--- a/spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
+++ b/spec/graphql/mutations/alert_management/prometheus_integration/create_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Mutations::AlertManagement::PrometheusIntegration::Create do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:args) { { project_path: project.full_path, active: true, api_url: 'http://prometheus.com/' } }
specify { expect(described_class).to require_graphql_authorizations(:admin_project) }
diff --git a/spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb b/spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
index ddf23909035..c9e1bf4162c 100644
--- a/spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
+++ b/spec/graphql/mutations/alert_management/prometheus_integration/reset_token_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Mutations::AlertManagement::PrometheusIntegration::ResetToken do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:integration) { create(:prometheus_service, project: project) }
+
let(:args) { { id: GitlabSchema.id_from_object(integration) } }
specify { expect(described_class).to require_graphql_authorizations(:admin_project) }
diff --git a/spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb b/spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
index eab4474d827..19e0d53b75f 100644
--- a/spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
+++ b/spec/graphql/mutations/alert_management/prometheus_integration/update_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Mutations::AlertManagement::PrometheusIntegration::Update do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:integration) { create(:prometheus_service, project: project) }
+
let(:args) { { id: GitlabSchema.id_from_object(integration), active: false, api_url: 'http://new-url.com' } }
specify { expect(described_class).to require_graphql_authorizations(:admin_project) }
diff --git a/spec/graphql/mutations/alert_management/update_alert_status_spec.rb b/spec/graphql/mutations/alert_management/update_alert_status_spec.rb
index 8465393f299..2c2518e046a 100644
--- a/spec/graphql/mutations/alert_management/update_alert_status_spec.rb
+++ b/spec/graphql/mutations/alert_management/update_alert_status_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Mutations::AlertManagement::UpdateAlertStatus do
let_it_be(:current_user) { create(:user) }
let_it_be(:alert) { create(:alert_management_alert, :triggered) }
let_it_be(:project) { alert.project }
+
let(:new_status) { Types::AlertManagement::StatusEnum.values['ACKNOWLEDGED'].value }
let(:args) { { status: new_status, project_path: project.full_path, iid: alert.iid } }
diff --git a/spec/graphql/mutations/boards/lists/update_spec.rb b/spec/graphql/mutations/boards/lists/update_spec.rb
index d5d8a2af6bf..c82cbbfdd83 100644
--- a/spec/graphql/mutations/boards/lists/update_spec.rb
+++ b/spec/graphql/mutations/boards/lists/update_spec.rb
@@ -3,54 +3,14 @@
require 'spec_helper'
RSpec.describe Mutations::Boards::Lists::Update do
- let_it_be(:group) { create(:group, :private) }
- let_it_be(:board) { create(:board, group: group) }
- let_it_be(:reporter) { create(:user) }
- let_it_be(:guest) { create(:user) }
- let_it_be(:list) { create(:list, board: board, position: 0) }
- let_it_be(:list2) { create(:list, board: board) }
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
- let(:list_update_params) { { position: 1, collapsed: true } }
-
- before_all do
- group.add_reporter(reporter)
- group.add_guest(guest)
- list.update_preferences_for(reporter, collapsed: false)
- end
-
- subject { mutation.resolve(list: list, **list_update_params) }
-
- describe '#resolve' do
- context 'with permission to admin board lists' do
- let(:current_user) { reporter }
-
- it 'updates the list position and collapsed state as expected' do
- subject
-
- reloaded_list = list.reload
- expect(reloaded_list.position).to eq(1)
- expect(reloaded_list.collapsed?(current_user)).to eq(true)
- end
- end
-
- context 'with permission to read board lists' do
- let(:current_user) { guest }
-
- it 'updates the list collapsed state but not the list position' do
- subject
-
- reloaded_list = list.reload
- expect(reloaded_list.position).to eq(0)
- expect(reloaded_list.collapsed?(current_user)).to eq(true)
- end
- end
-
- context 'without permission to read board lists' do
- let(:current_user) { create(:user) }
-
- it 'raises Resource Not Found error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
- end
+ context 'on group issue boards' do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:list) { create(:list, board: board, position: 0) }
+ let_it_be(:list2) { create(:list, board: board) }
+
+ it_behaves_like 'update board list mutation'
end
end
diff --git a/spec/graphql/mutations/commits/create_spec.rb b/spec/graphql/mutations/commits/create_spec.rb
index 82a5e3a62f5..152b5d87da0 100644
--- a/spec/graphql/mutations/commits/create_spec.rb
+++ b/spec/graphql/mutations/commits/create_spec.rb
@@ -24,11 +24,12 @@ RSpec.describe Mutations::Commits::Create do
let(:branch) { 'master' }
let(:start_branch) { nil }
let(:message) { 'Commit message' }
+ let(:file_path) { "#{SecureRandom.uuid}.md" }
let(:actions) do
[
{
action: 'create',
- file_path: 'NEW_FILE.md',
+ file_path: file_path,
content: 'Hello'
}
]
@@ -68,12 +69,17 @@ RSpec.describe Mutations::Commits::Create do
end
context 'when service successfully creates a new commit' do
+ it "returns the ETag path for the commit's pipeline" do
+ commit_pipeline_path = subject[:commit_pipeline_path]
+ expect(commit_pipeline_path).to match(%r(pipelines/sha/\w+))
+ end
+
it 'returns a new commit' do
expect(mutated_commit).to have_attributes(message: message, project: project)
expect(subject[:errors]).to be_empty
expect_to_contain_deltas([
- a_hash_including(a_mode: '0', b_mode: '100644', new_file: true, new_path: 'NEW_FILE.md')
+ a_hash_including(a_mode: '0', b_mode: '100644', new_file: true, new_path: file_path)
])
end
end
diff --git a/spec/graphql/mutations/issues/create_spec.rb b/spec/graphql/mutations/issues/create_spec.rb
index 422ad40a9cb..b32f0991959 100644
--- a/spec/graphql/mutations/issues/create_spec.rb
+++ b/spec/graphql/mutations/issues/create_spec.rb
@@ -19,7 +19,8 @@ RSpec.describe Mutations::Issues::Create do
description: 'new description',
confidential: true,
due_date: Date.tomorrow,
- discussion_locked: true
+ discussion_locked: true,
+ issue_type: 'issue'
}
end
@@ -93,6 +94,16 @@ RSpec.describe Mutations::Issues::Create do
expect(mutated_issue.iid).not_to eq(special_params[:iid])
end
end
+
+ context 'when creating a non-default issue type' do
+ before do
+ mutation_params[:issue_type] = 'incident'
+ end
+
+ it 'creates issue with correct values' do
+ expect(mutated_issue.issue_type).to eq('incident')
+ end
+ end
end
context 'when creating an issue as owner' do
diff --git a/spec/graphql/mutations/issues/set_due_date_spec.rb b/spec/graphql/mutations/issues/set_due_date_spec.rb
index 9f8d0d6c405..263122e5d5f 100644
--- a/spec/graphql/mutations/issues/set_due_date_spec.rb
+++ b/spec/graphql/mutations/issues/set_due_date_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Mutations::Issues::SetDueDate do
- let(:issue) { create(:issue) }
- let(:user) { create(:user) }
+ let(:issue) { create(:issue, due_date: '2021-05-01') }
+
+ let_it_be(:user) { create(:user) }
subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
@@ -23,17 +24,25 @@ RSpec.describe Mutations::Issues::SetDueDate do
issue.project.add_developer(user)
end
- it 'returns the issue with updated due date' do
+ it 'returns the issue with updated due date', :aggregate_failures do
expect(mutated_issue).to eq(issue)
expect(mutated_issue.due_date).to eq(Date.today + 2.days)
expect(subject[:errors]).to be_empty
end
+ context 'when due date is nil' do
+ let(:due_date) { nil }
+
+ it 'updates due date to be nil' do
+ expect(mutated_issue.due_date).to be nil
+ end
+ end
+
context 'when passing incorrect due date value' do
let(:due_date) { 'test' }
- it 'does not update due date' do
- expect(mutated_issue.due_date).to eq(issue.due_date)
+ it 'updates due date to be nil' do
+ expect(mutated_issue.due_date).to be nil
end
end
end
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
index f10e257e153..6d6a5b94219 100644
--- a/spec/graphql/mutations/issues/update_spec.rb
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -69,17 +69,33 @@ RSpec.describe Mutations::Issues::Update do
context 'when changing state' do
let_it_be_with_refind(:issue) { create(:issue, project: project, state: :opened) }
- it 'closes issue' do
- mutation_params[:state_event] = 'close'
+ before do
+ mutation_params[:state_event] = state_event
+ end
+
+ context 'when state_event is close' do
+ let_it_be(:removable_label) { create(:label, project: project, remove_on_close: true, issues: [issue]) }
- expect { subject }.to change { issue.reload.state }.from('opened').to('closed')
+ let(:state_event) { 'close' }
+
+ it 'closes issue' do
+ expect do
+ subject
+ issue.reload
+ end.to change(issue, :state).from('opened').to('closed').and(
+ change { issue.label_ids }.from([removable_label.id]).to([])
+ )
+ end
end
- it 'reopens issue' do
- issue.close
- mutation_params[:state_event] = 'reopen'
+ context 'when state_event is reopen' do
+ let(:state_event) { 'reopen' }
+
+ it 'reopens issue' do
+ issue.close
- expect { subject }.to change { issue.reload.state }.from('closed').to('opened')
+ expect { subject }.to change { issue.reload.state }.from('closed').to('opened')
+ end
end
end
@@ -128,6 +144,14 @@ RSpec.describe Mutations::Issues::Update do
expect(issue.reload.labels).to match_array([project_label, label_2])
end
end
+
+ context 'when changing type' do
+ it 'changes the type of the issue' do
+ mutation_params[:issue_type] = 'incident'
+
+ expect { subject }.to change { issue.reload.issue_type }.from('issue').to('incident')
+ end
+ end
end
end
end
diff --git a/spec/graphql/mutations/merge_requests/set_draft_spec.rb b/spec/graphql/mutations/merge_requests/set_draft_spec.rb
new file mode 100644
index 00000000000..697b2e5b007
--- /dev/null
+++ b/spec/graphql/mutations/merge_requests/set_draft_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::MergeRequests::SetDraft do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ specify { expect(described_class).to require_graphql_authorizations(:update_merge_request) }
+
+ describe '#resolve' do
+ let(:draft) { true }
+ let(:mutated_merge_request) { subject[:merge_request] }
+
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, draft: draft) }
+
+ it_behaves_like 'permission level for merge request mutation is correctly verified'
+
+ context 'when the user can update the merge request' do
+ before do
+ merge_request.project.add_developer(user)
+ end
+
+ it 'returns the merge request as a draft' do
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request).to be_draft
+ expect(subject[:errors]).to be_empty
+ end
+
+    it 'returns errors when the merge request cannot be updated' do
+ # Make the merge request invalid
+ merge_request.allow_broken = true
+ merge_request.update!(source_project: nil)
+
+ expect(subject[:errors]).not_to be_empty
+ end
+
+ context 'when passing draft as false' do
+ let(:draft) { false }
+
+ it 'removes `Draft` from the title' do
+ merge_request.update!(title: "Draft: working on it")
+
+ expect(mutated_merge_request).not_to be_draft
+ end
+
+      it 'does nothing if the title does not start with draft' do
+ expect(mutated_merge_request).not_to be_draft
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/merge_requests/set_locked_spec.rb b/spec/graphql/mutations/merge_requests/set_locked_spec.rb
index 03c709e9bb3..68bb7aa0aa4 100644
--- a/spec/graphql/mutations/merge_requests/set_locked_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_locked_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Mutations::MergeRequests::SetLocked do
let(:locked) { false }
it 'unlocks the discussion' do
- merge_request.update(discussion_locked: true)
+ merge_request.update!(discussion_locked: true)
expect(mutated_merge_request).not_to be_discussion_locked
end
diff --git a/spec/graphql/mutations/merge_requests/set_wip_spec.rb b/spec/graphql/mutations/merge_requests/set_wip_spec.rb
index 69f6a4328b8..fae9c4f7fe0 100644
--- a/spec/graphql/mutations/merge_requests/set_wip_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_wip_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Mutations::MergeRequests::SetWip do
let(:wip) { false }
it 'removes `wip` from the title' do
- merge_request.update(title: "WIP: working on it")
+ merge_request.update!(title: "WIP: working on it")
expect(mutated_merge_request).not_to be_work_in_progress
end
diff --git a/spec/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
index bd0d38cb49f..978c81fadfa 100644
--- a/spec/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -25,7 +25,9 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
end
RSpec.shared_examples 'updating the namespace package setting' do
- it_behaves_like 'updating the namespace package setting attributes', from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT' }, to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE' }
+ it_behaves_like 'updating the namespace package setting attributes',
+ from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT', generic_duplicates_allowed: true, generic_duplicate_exception_regex: 'foo' },
+ to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE', generic_duplicates_allowed: false, generic_duplicate_exception_regex: 'bar' }
it_behaves_like 'returning a success'
@@ -56,7 +58,13 @@ RSpec.describe Mutations::Namespace::PackageSettings::Update do
context 'with existing namespace package setting' do
let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
- let_it_be(:params) { { namespace_path: namespace.full_path, maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE' } }
+ let_it_be(:params) do
+ { namespace_path: namespace.full_path,
+ maven_duplicates_allowed: false,
+ maven_duplicate_exception_regex: 'RELEASE',
+ generic_duplicates_allowed: false,
+ generic_duplicate_exception_regex: 'bar' }
+ end
where(:user_role, :shared_examples_name) do
:maintainer | 'updating the namespace package setting'
diff --git a/spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb b/spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb
index ed03a1cb906..7c3b552480f 100644
--- a/spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb
+++ b/spec/graphql/mutations/security/ci_configuration/configure_sast_spec.rb
@@ -3,118 +3,11 @@
require 'spec_helper'
RSpec.describe Mutations::Security::CiConfiguration::ConfigureSast do
- subject(:mutation) { described_class.new(object: nil, context: context, field: nil) }
+ include GraphqlHelpers
- let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:user) { create(:user) }
+ let(:service) { ::Security::CiConfiguration::SastCreateService }
- let_it_be(:service_result_json) do
- {
- status: "success",
- success_path: "http://127.0.0.1:3000/root/demo-historic-secrets/-/merge_requests/new?",
- errors: nil
- }
- end
+ subject { resolve(described_class, args: { project_path: project.full_path, configuration: {} }, ctx: { current_user: user }) }
- let_it_be(:service_error_result_json) do
- {
- status: "error",
- success_path: nil,
- errors: %w(error1 error2)
- }
- end
-
- let(:context) do
- GraphQL::Query::Context.new(
- query: OpenStruct.new(schema: nil),
- values: { current_user: user },
- object: nil
- )
- end
-
- specify { expect(described_class).to require_graphql_authorizations(:push_code) }
-
- describe '#resolve' do
- subject { mutation.resolve(project_path: project.full_path, configuration: {}) }
-
- let(:result) { subject }
-
- it 'raises an error if the resource is not accessible to the user' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
-
- context 'when user does not have enough permissions' do
- before do
- project.add_guest(user)
- end
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
- end
-
- context 'when user is a maintainer of a different project' do
- before do
- create(:project_empty_repo).add_maintainer(user)
- end
-
- it 'raises an error' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
- end
-
- context 'when the user does not have permission to create a new branch' do
- before_all do
- project.add_developer(user)
- end
-
- let(:error_message) { 'You are not allowed to create protected branches on this project.' }
-
- it 'returns an array of errors' do
- allow_next_instance_of(::Files::MultiService) do |multi_service|
- allow(multi_service).to receive(:execute).and_raise(Gitlab::Git::PreReceiveError.new("GitLab: #{error_message}"))
- end
-
- expect(result).to match(
- status: :error,
- success_path: nil,
- errors: match_array([error_message])
- )
- end
- end
-
- context 'when the user can create a merge request' do
- before_all do
- project.add_developer(user)
- end
-
- context 'when service successfully generates a path to create a new merge request' do
- it 'returns a success path' do
- allow_next_instance_of(::Security::CiConfiguration::SastCreateService) do |service|
- allow(service).to receive(:execute).and_return(service_result_json)
- end
-
- expect(result).to match(
- status: 'success',
- success_path: service_result_json[:success_path],
- errors: []
- )
- end
- end
-
- context 'when service can not generate any path to create a new merge request' do
- it 'returns an array of errors' do
- allow_next_instance_of(::Security::CiConfiguration::SastCreateService) do |service|
- allow(service).to receive(:execute).and_return(service_error_result_json)
- end
-
- expect(result).to match(
- status: 'error',
- success_path: be_nil,
- errors: match_array(service_error_result_json[:errors])
- )
- end
- end
- end
- end
+ include_examples 'graphql mutations security ci configuration'
end
diff --git a/spec/graphql/mutations/security/ci_configuration/configure_secret_detection_spec.rb b/spec/graphql/mutations/security/ci_configuration/configure_secret_detection_spec.rb
new file mode 100644
index 00000000000..5b4a7d5918c
--- /dev/null
+++ b/spec/graphql/mutations/security/ci_configuration/configure_secret_detection_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Security::CiConfiguration::ConfigureSecretDetection do
+ include GraphqlHelpers
+
+ let(:service) { ::Security::CiConfiguration::SecretDetectionCreateService }
+
+ subject { resolve(described_class, args: { project_path: project.full_path }, ctx: { current_user: user }) }
+
+ include_examples 'graphql mutations security ci configuration'
+end
diff --git a/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb b/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
index 269a1fb1758..b63eca4359d 100644
--- a/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
+++ b/spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Resolvers::Admin::Analytics::UsageTrends::MeasurementsResolver do
include GraphqlHelpers
let_it_be(:admin_user) { create(:user, :admin) }
+
let(:current_user) { admin_user }
describe '#resolve' do
diff --git a/spec/graphql/resolvers/alert_management/alert_status_counts_resolver_spec.rb b/spec/graphql/resolvers/alert_management/alert_status_counts_resolver_spec.rb
index b72e692f2e8..3bc6043a849 100644
--- a/spec/graphql/resolvers/alert_management/alert_status_counts_resolver_spec.rb
+++ b/spec/graphql/resolvers/alert_management/alert_status_counts_resolver_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Resolvers::AlertManagement::AlertStatusCountsResolver do
describe '#resolve' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:args) { {} }
subject { resolve_alert_status_counts(args) }
diff --git a/spec/graphql/resolvers/boards_resolver_spec.rb b/spec/graphql/resolvers/boards_resolver_spec.rb
index cb3bcb002ec..221e905f441 100644
--- a/spec/graphql/resolvers/boards_resolver_spec.rb
+++ b/spec/graphql/resolvers/boards_resolver_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Resolvers::BoardsResolver do
end
it 'returns nil if board not found' do
- outside_parent = create(board_parent.class.underscore.to_sym)
+ outside_parent = create(board_parent.class.underscore.to_sym) # rubocop:disable Rails/SaveBang
outside_board = create(:board, name: 'outside board', resource_parent: outside_parent)
expect(resolve_boards(args: { id: global_id_of(outside_board) })).to eq Board.none
diff --git a/spec/graphql/resolvers/branch_commit_resolver_spec.rb b/spec/graphql/resolvers/branch_commit_resolver_spec.rb
index 346c9e01088..3d5702539fa 100644
--- a/spec/graphql/resolvers/branch_commit_resolver_spec.rb
+++ b/spec/graphql/resolvers/branch_commit_resolver_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Resolvers::BranchCommitResolver do
subject(:commit) { resolve(described_class, obj: branch) }
let_it_be(:repository) { create(:project, :repository).repository }
+
let(:branch) { repository.find_branch('master') }
describe '#resolve' do
diff --git a/spec/graphql/resolvers/ci/runners_resolver_spec.rb b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
new file mode 100644
index 00000000000..006d6785506
--- /dev/null
+++ b/spec/graphql/resolvers/ci/runners_resolver_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::RunnersResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create_default(:user, :admin) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :repository, :public) }
+
+ let_it_be(:inactive_project_runner) do
+ create(:ci_runner, :project, projects: [project], active: false, contacted_at: 1.minute.ago, tag_list: %w(project_runner))
+ end
+
+ let_it_be(:offline_project_runner) do
+ create(:ci_runner, :project, projects: [project], contacted_at: 1.day.ago, tag_list: %w(project_runner active_runner))
+ end
+
+ let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], contacted_at: 1.second.ago) }
+ let_it_be(:instance_runner) { create(:ci_runner, :instance, contacted_at: 2.minutes.ago, tag_list: %w(instance_runner active_runner)) }
+
+ describe '#resolve' do
+ subject { resolve(described_class, ctx: { current_user: user }, args: args).items.to_a }
+
+ let(:args) do
+ {}
+ end
+
+ context 'without sort' do
+ it 'returns all the runners' do
+ is_expected.to contain_exactly(inactive_project_runner, offline_project_runner, group_runner, instance_runner)
+ end
+ end
+
+ context 'with a sort argument' do
+ context "set to :contacted_asc" do
+ let(:args) do
+ { sort: :contacted_asc }
+ end
+
+ it { is_expected.to eq([offline_project_runner, instance_runner, inactive_project_runner, group_runner]) }
+ end
+
+ context "set to :created_date" do
+ let(:args) do
+ { sort: :created_date }
+ end
+
+ it { is_expected.to eq([instance_runner, group_runner, offline_project_runner, inactive_project_runner]) }
+ end
+ end
+
+ context 'when type is filtered' do
+ let(:args) do
+ { type: runner_type.to_s }
+ end
+
+ context 'to instance runners' do
+ let(:runner_type) { :instance_type }
+
+ it 'returns the instance runner' do
+ is_expected.to contain_exactly(instance_runner)
+ end
+ end
+
+ context 'to group runners' do
+ let(:runner_type) { :group_type }
+
+ it 'returns the group runner' do
+ is_expected.to contain_exactly(group_runner)
+ end
+ end
+
+ context 'to project runners' do
+ let(:runner_type) { :project_type }
+
+ it 'returns the project runner' do
+ is_expected.to contain_exactly(inactive_project_runner, offline_project_runner)
+ end
+ end
+ end
+
+ context 'when status is filtered' do
+ let(:args) do
+ { status: runner_status.to_s }
+ end
+
+ context 'to active runners' do
+ let(:runner_status) { :active }
+
+ it 'returns the instance and group runners' do
+ is_expected.to contain_exactly(offline_project_runner, group_runner, instance_runner)
+ end
+ end
+
+ context 'to offline runners' do
+ let(:runner_status) { :offline }
+
+ it 'returns the offline project runner' do
+ is_expected.to contain_exactly(offline_project_runner)
+ end
+ end
+ end
+
+ context 'when tag list is filtered' do
+ let(:args) do
+ { tag_list: tag_list }
+ end
+
+ context 'with "project_runner" tag' do
+ let(:tag_list) { ['project_runner'] }
+
+ it 'returns the project_runner runners' do
+ is_expected.to contain_exactly(offline_project_runner, inactive_project_runner)
+ end
+ end
+
+ context 'with "project_runner" and "active_runner" tags as comma-separated string' do
+ let(:tag_list) { ['project_runner,active_runner'] }
+
+ it 'returns the offline_project_runner runner' do
+ is_expected.to contain_exactly(offline_project_runner)
+ end
+ end
+
+ context 'with "active_runner" and "instance_runner" tags as array' do
+ let(:tag_list) { %w[instance_runner active_runner] }
+
+      it 'returns the instance_runner runner' do
+ is_expected.to contain_exactly(instance_runner)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/ci/template_resolver_spec.rb b/spec/graphql/resolvers/ci/template_resolver_spec.rb
new file mode 100644
index 00000000000..bec25640c7f
--- /dev/null
+++ b/spec/graphql/resolvers/ci/template_resolver_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::TemplateResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+
+ subject(:resolve_subject) { resolve(described_class, obj: project, ctx: { current_user: user }, args: { name: template_name }) }
+
+ context 'when template exists' do
+ let(:template_name) { 'Android' }
+
+ it 'returns the found template' do
+ found_template = resolve_subject
+
+ expect(found_template).to be_an_instance_of(Gitlab::Template::GitlabCiYmlTemplate)
+ expect(found_template.name).to eq('Android')
+ end
+ end
+
+ context 'when template does not exist' do
+ let(:template_name) { 'invalidname' }
+
+ it 'returns nil' do
+ expect(resolve_subject).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/design_management/designs_resolver_spec.rb b/spec/graphql/resolvers/design_management/designs_resolver_spec.rb
index 28e963c88a9..b091e58b06f 100644
--- a/spec/graphql/resolvers/design_management/designs_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/designs_resolver_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Resolvers::DesignManagement::DesignsResolver do
let_it_be(:first_version) { create(:design_version) }
let_it_be(:first_design) { create(:design, issue: issue, versions: [first_version]) }
let_it_be(:current_user) { create(:user) }
+
let(:gql_context) { { current_user: current_user } }
let(:args) { {} }
diff --git a/spec/graphql/resolvers/design_management/version/designs_at_version_resolver_spec.rb b/spec/graphql/resolvers/design_management/version/designs_at_version_resolver_spec.rb
index c038216ce0b..4e8f5e5fc1d 100644
--- a/spec/graphql/resolvers/design_management/version/designs_at_version_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/version/designs_at_version_resolver_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Resolvers::DesignManagement::Version::DesignsAtVersionResolver do
include_context 'four designs in three versions'
let_it_be(:current_user) { authorized_user }
+
let(:gql_context) { { current_user: current_user } }
let(:version) { third_version }
diff --git a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
index 23d4d86c79a..2c9c3a47650 100644
--- a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
+++ b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb
@@ -41,6 +41,20 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
it 'returns the ordered versions' do
expect(result.to_a).to eq(all_versions)
end
+
+ context 'loading associations' do
+ it 'prevents N+1 queries when loading author' do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ resolve_versions(object).items.map(&:author)
+ end.count
+
+ create_list(:design_version, 3, issue: issue)
+
+ expect do
+ resolve_versions(object).items.map(&:author)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+ end
end
context 'when constrained' do
diff --git a/spec/graphql/resolvers/group_milestones_resolver_spec.rb b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
index dd3f1676538..78d89054efd 100644
--- a/spec/graphql/resolvers/group_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
@@ -119,6 +119,7 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
context 'when including descendant milestones in a public group' do
let_it_be(:group) { create(:group, :public) }
+
let(:args) { { include_descendants: true } }
it 'finds milestones only in accessible projects and groups' do
diff --git a/spec/graphql/resolvers/group_packages_resolver_spec.rb b/spec/graphql/resolvers/group_packages_resolver_spec.rb
index 59438b8d5ad..48f4c8ec4ca 100644
--- a/spec/graphql/resolvers/group_packages_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_packages_resolver_spec.rb
@@ -8,11 +8,14 @@ RSpec.describe Resolvers::GroupPackagesResolver do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, group: group) }
- let_it_be(:package) { create(:package, project: project) }
+
+ let(:args) do
+ { sort: :created_desc }
+ end
describe '#resolve' do
- subject(:packages) { resolve(described_class, ctx: { current_user: user }, obj: group) }
+ subject { resolve(described_class, ctx: { current_user: user }, obj: group, args: args).to_a }
- it { is_expected.to contain_exactly(package) }
+ it_behaves_like 'group and projects packages resolver'
end
end
diff --git a/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb b/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
index 84ef906b72f..3aadbc76be8 100644
--- a/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Resolvers::MergeRequestPipelinesResolver do
let_it_be(:other_project_pipeline) { create(:ci_pipeline, project: merge_request.source_project, ref: 'other-ref') }
let_it_be(:other_pipeline) { create(:ci_pipeline) }
+
let(:current_user) { create(:user) }
before do
diff --git a/spec/graphql/resolvers/metadata_resolver_spec.rb b/spec/graphql/resolvers/metadata_resolver_spec.rb
index f8c01f9d531..56875e185e7 100644
--- a/spec/graphql/resolvers/metadata_resolver_spec.rb
+++ b/spec/graphql/resolvers/metadata_resolver_spec.rb
@@ -7,7 +7,10 @@ RSpec.describe Resolvers::MetadataResolver do
describe '#resolve' do
it 'returns version and revision' do
- expect(resolve(described_class)).to have_attributes(version: Gitlab::VERSION, revision: Gitlab.revision)
+ expect(resolve(described_class)).to have_attributes(
+ version: Gitlab::VERSION,
+ revision: Gitlab.revision,
+ kas: kind_of(InstanceMetadata::Kas))
end
end
end
diff --git a/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
index f90869c52bc..a83cef40bdf 100644
--- a/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
+++ b/spec/graphql/resolvers/metrics/dashboards/annotation_resolver_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Resolvers::Metrics::Dashboards::AnnotationResolver do
let_it_be(:current_user) { create(:user) }
let_it_be(:environment) { create(:environment) }
let_it_be(:path) { 'config/prometheus/common_metrics.yml' }
+
let(:dashboard) { PerformanceMonitoring::PrometheusDashboard.new(path: path, environment: environment) }
let(:args) do
{
diff --git a/spec/graphql/resolvers/packages_base_resolver_spec.rb b/spec/graphql/resolvers/packages_base_resolver_spec.rb
new file mode 100644
index 00000000000..8f9865c3785
--- /dev/null
+++ b/spec/graphql/resolvers/packages_base_resolver_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::PackagesBaseResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ subject { resolve(described_class) }
+
+    it 'raises an error' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/project_packages_resolver_spec.rb b/spec/graphql/resolvers/project_packages_resolver_spec.rb
index c8105ed2a38..66a94bd42dd 100644
--- a/spec/graphql/resolvers/project_packages_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_packages_resolver_spec.rb
@@ -6,12 +6,15 @@ RSpec.describe Resolvers::ProjectPackagesResolver do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :public) }
- let_it_be(:package) { create(:package, project: project) }
+ let_it_be_with_reload(:project) { create(:project, :public) }
+
+ let(:args) do
+ { sort: :created_desc }
+ end
describe '#resolve' do
- subject(:packages) { resolve(described_class, ctx: { current_user: user }, obj: project) }
+ subject { resolve(described_class, ctx: { current_user: user }, obj: project, args: args).to_a }
- it { is_expected.to contain_exactly(package) }
+ it_behaves_like 'group and projects packages resolver'
end
end
diff --git a/spec/graphql/resolvers/project_pipeline_resolver_spec.rb b/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
index 3d33e0b500d..6a8aa39f3b2 100644
--- a/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Resolvers::ProjectPipelineResolver do
let_it_be(:pipeline) { create(:ci_pipeline, project: project, iid: '1234', sha: 'sha') }
let_it_be(:other_project_pipeline) { create(:ci_pipeline, project: project, iid: '1235', sha: 'sha2') }
let_it_be(:other_pipeline) { create(:ci_pipeline) }
+
let(:current_user) { create(:user) }
specify do
diff --git a/spec/graphql/resolvers/project_pipelines_resolver_spec.rb b/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
index b2e8fed2441..c7c00f54c0c 100644
--- a/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_pipelines_resolver_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Resolvers::ProjectPipelinesResolver do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:other_pipeline) { create(:ci_pipeline) }
+
let(:current_user) { create(:user) }
before do
diff --git a/spec/graphql/resolvers/repository_branch_names_resolver_spec.rb b/spec/graphql/resolvers/repository_branch_names_resolver_spec.rb
index 398dd7a2e2e..004e0411e51 100644
--- a/spec/graphql/resolvers/repository_branch_names_resolver_spec.rb
+++ b/spec/graphql/resolvers/repository_branch_names_resolver_spec.rb
@@ -8,29 +8,50 @@ RSpec.describe Resolvers::RepositoryBranchNamesResolver do
let(:project) { create(:project, :repository) }
describe '#resolve' do
- subject(:resolve_branch_names) do
- resolve(
- described_class,
- obj: project.repository,
- args: { search_pattern: pattern },
- ctx: { current_user: project.creator }
- )
- end
-
context 'with empty search pattern' do
let(:pattern) { '' }
it 'returns nil' do
- expect(resolve_branch_names).to eq(nil)
+ expect(resolve_branch_names(pattern, 0, 100)).to eq(nil)
end
end
context 'with a valid search pattern' do
- let(:pattern) { 'mas*' }
+ let(:pattern) { 'snippet/*' }
it 'returns matching branches' do
- expect(resolve_branch_names).to match_array(['master'])
+ expect(resolve_branch_names(pattern, 0, 100)).to contain_exactly(
+ 'snippet/edit-file',
+ 'snippet/multiple-files',
+ 'snippet/no-files',
+ 'snippet/rename-and-edit-file',
+ 'snippet/single-file'
+ )
+ end
+
+ it 'properly offsets and limits branch name results' do
+ starting_names = resolve_branch_names(pattern, 0, 3)
+ offset_names = resolve_branch_names(pattern, 3, 2)
+
+ expect(starting_names.count).to eq(3)
+ expect(offset_names.count).to eq(2)
+
+ expect(offset_names).not_to include(*starting_names)
+
+ all_names = resolve_branch_names(pattern, 0, 100)
+ expect(all_names).to contain_exactly(*starting_names, *offset_names)
end
end
end
+
+ private
+
+ def resolve_branch_names(pattern, offset, limit)
+ resolve(
+ described_class,
+ obj: project.repository,
+ args: { search_pattern: pattern, offset: offset, limit: limit },
+ ctx: { current_user: project.creator }
+ )
+ end
end
diff --git a/spec/graphql/subscriptions/issuable_updated_spec.rb b/spec/graphql/subscriptions/issuable_updated_spec.rb
new file mode 100644
index 00000000000..cc88b37627d
--- /dev/null
+++ b/spec/graphql/subscriptions/issuable_updated_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Subscriptions::IssuableUpdated do
+ include GraphqlHelpers
+
+ it { expect(described_class).to have_graphql_arguments(:issuable_id) }
+ it { expect(described_class.payload_type).to eq(Types::IssuableType) }
+
+ describe '#resolve' do
+ let_it_be(:unauthorized_user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+
+ let(:current_user) { issue.author }
+ let(:issuable_id) { issue.to_gid }
+
+ subject { resolver.resolve_with_support(issuable_id: issuable_id) }
+
+ context 'initial subscription' do
+ let(:resolver) { resolver_instance(described_class, ctx: { current_user: current_user }, subscription_update: false) }
+
+ it 'returns nil' do
+ expect(subject).to eq(nil)
+ end
+
+ context 'when user is unauthorized' do
+ let(:current_user) { unauthorized_user }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(GraphQL::ExecutionError)
+ end
+ end
+
+ context 'when issue does not exist' do
+ let(:issuable_id) { GlobalID.parse("gid://gitlab/Issue/#{non_existing_record_id}") }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(GraphQL::ExecutionError)
+ end
+ end
+
+ context 'when a GraphQL::ID_TYPE is provided' do
+ let(:issuable_id) { issue.to_gid.to_s }
+
+ it 'raises an exception' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
+ end
+
+ context 'subscription updates' do
+ let(:resolver) { resolver_instance(described_class, obj: issue, ctx: { current_user: current_user }, subscription_update: true) }
+
+ it 'returns the resolved object' do
+ expect(subject).to eq(issue)
+ end
+
+ context 'when user is unauthorized' do
+ let(:current_user) { unauthorized_user }
+
+ it 'unsubscribes the user' do
+ expect { subject }.to throw_symbol(:graphql_subscription_unsubscribed)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/types/blob_viewer_type_spec.rb b/spec/graphql/types/blob_viewer_type_spec.rb
new file mode 100644
index 00000000000..1c020c63535
--- /dev/null
+++ b/spec/graphql/types/blob_viewer_type_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['BlobViewer'] do
+ it 'has the correct fields' do
+ expected_fields = [:type, :load_async, :too_large, :collapsed,
+ :render_error, :file_type, :loading_partial_name]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
index 787e2174070..54fe0c4b707 100644
--- a/spec/graphql/types/ci/job_type_spec.rb
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Types::Ci::JobType do
specify { expect(described_class.graphql_name).to eq('CiJob') }
specify { expect(described_class).to require_graphql_authorizations(:read_commit_status) }
+ specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Ci::Job) }
it 'exposes the expected fields' do
expected_fields = %i[
@@ -15,15 +16,18 @@ RSpec.describe Types::Ci::JobType do
commitPath
coverage
created_at
+ created_by_tag
detailedStatus
duration
finished_at
id
+ manual_job
name
needs
pipeline
playable
queued_at
+ queued_duration
refName
refPath
retryable
@@ -33,7 +37,10 @@ RSpec.describe Types::Ci::JobType do
stage
started_at
status
+ stuck
tags
+ triggered
+ userPermissions
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/pipeline_type_spec.rb b/spec/graphql/types/ci/pipeline_type_spec.rb
index c7d2cbdb765..35d48229fa4 100644
--- a/spec/graphql/types/ci/pipeline_type_spec.rb
+++ b/spec/graphql/types/ci/pipeline_type_spec.rb
@@ -9,7 +9,8 @@ RSpec.describe Types::Ci::PipelineType do
it 'contains attributes related to a pipeline' do
expected_fields = %w[
- id iid sha before_sha status detailed_status config_source duration
+ id iid sha before_sha complete status detailed_status config_source
+ duration queued_duration
coverage created_at updated_at started_at finished_at committed_at
stages user retryable cancelable jobs source_job job downstream
upstream path project active user_permissions warnings commit_path uses_needs
@@ -17,7 +18,7 @@ RSpec.describe Types::Ci::PipelineType do
]
if Gitlab.ee?
- expected_fields += %w[security_report_summary security_report_findings]
+ expected_fields += %w[security_report_summary security_report_findings code_quality_reports]
end
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/runner_type_spec.rb b/spec/graphql/types/ci/runner_type_spec.rb
new file mode 100644
index 00000000000..dfe4a30c5b7
--- /dev/null
+++ b/spec/graphql/types/ci/runner_type_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::RunnerType do
+ specify { expect(described_class.graphql_name).to eq('CiRunner') }
+
+ it 'contains attributes related to a runner' do
+ expected_fields = %w[
+ id description contacted_at maximum_timeout access_level active status
+ version short_sha revision locked run_untagged ip_address runner_type tag_list
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/template_type_spec.rb b/spec/graphql/types/ci/template_type_spec.rb
new file mode 100644
index 00000000000..95ac9f97e31
--- /dev/null
+++ b/spec/graphql/types/ci/template_type_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::TemplateType do
+ specify { expect(described_class.graphql_name).to eq('CiTemplate') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ name
+ content
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/design_management/version_type_spec.rb b/spec/graphql/types/design_management/version_type_spec.rb
index 017cc1775a1..62335a65fdf 100644
--- a/spec/graphql/types/design_management/version_type_spec.rb
+++ b/spec/graphql/types/design_management/version_type_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['DesignVersion'] do
it { expect(described_class).to require_graphql_authorizations(:read_design) }
it 'has the expected fields' do
- expected_fields = %i[id sha designs design_at_version designs_at_version]
+ expected_fields = %i[id sha designs design_at_version designs_at_version author created_at]
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/duration_type_spec.rb b/spec/graphql/types/duration_type_spec.rb
new file mode 100644
index 00000000000..5b88819f157
--- /dev/null
+++ b/spec/graphql/types/duration_type_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Duration'] do
+ let(:duration) { 17.minutes }
+
+ it 'presents information as a floating point number' do
+ expect(described_class.coerce_isolated_result(duration)).to eq(duration.to_f)
+ end
+
+ it 'accepts integers as input' do
+ expect(described_class.coerce_isolated_input(100)).to eq(100.0)
+ end
+
+ it 'accepts floats as input' do
+ expect(described_class.coerce_isolated_input(0.5)).to eq(0.5)
+ end
+
+ it 'rejects invalid input' do
+ expect { described_class.coerce_isolated_input('not valid') }
+ .to raise_error(GraphQL::CoercionError)
+ end
+
+ it 'rejects nil' do
+ expect { described_class.coerce_isolated_input(nil) }
+ .to raise_error(GraphQL::CoercionError)
+ end
+end
diff --git a/spec/graphql/types/issuable_type_spec.rb b/spec/graphql/types/issuable_type_spec.rb
new file mode 100644
index 00000000000..992a58f524b
--- /dev/null
+++ b/spec/graphql/types/issuable_type_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Issuable'] do
+ it 'returns possible types' do
+ expect(described_class.possible_types).to include(Types::IssueType, Types::MergeRequestType)
+ end
+
+ describe '.resolve_type' do
+ it 'resolves issues' do
+ expect(described_class.resolve_type(build(:issue), {})).to eq(Types::IssueType)
+ end
+
+ it 'resolves merge requests' do
+ expect(described_class.resolve_type(build(:merge_request), {})).to eq(Types::MergeRequestType)
+ end
+
+ it 'raises an error for invalid types' do
+ expect { described_class.resolve_type(build(:user), {}) }.to raise_error 'Unsupported issuable type'
+ end
+ end
+end
diff --git a/spec/graphql/types/label_type_spec.rb b/spec/graphql/types/label_type_spec.rb
index 427b5d2dcef..475b2a2ad34 100644
--- a/spec/graphql/types/label_type_spec.rb
+++ b/spec/graphql/types/label_type_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe GitlabSchema.types['Label'] do
:color,
:text_color,
:created_at,
+ :remove_on_close,
:updated_at
]
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index 3314ea62324..fa33b32c6c8 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do
notes discussions user_permissions id iid title title_html description
description_html state created_at updated_at source_project target_project
project project_id source_project_id target_project_id source_branch
- target_branch work_in_progress merge_when_pipeline_succeeds diff_head_sha
+ target_branch work_in_progress draft merge_when_pipeline_succeeds diff_head_sha
merge_commit_sha user_notes_count user_discussions_count should_remove_source_branch
diff_refs diff_stats diff_stats_summary
force_remove_source_branch merge_status in_progress_merge_commit_sha
diff --git a/spec/graphql/types/metadata/kas_type_spec.rb b/spec/graphql/types/metadata/kas_type_spec.rb
new file mode 100644
index 00000000000..f90c64f0068
--- /dev/null
+++ b/spec/graphql/types/metadata/kas_type_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Kas'] do
+ specify { expect(described_class.graphql_name).to eq('Kas') }
+ specify { expect(described_class).to require_graphql_authorizations(:read_instance_metadata) }
+end
diff --git a/spec/graphql/types/mutation_type_spec.rb b/spec/graphql/types/mutation_type_spec.rb
index 41993327577..e4144e4fa97 100644
--- a/spec/graphql/types/mutation_type_spec.rb
+++ b/spec/graphql/types/mutation_type_spec.rb
@@ -3,8 +3,16 @@
require 'spec_helper'
RSpec.describe Types::MutationType do
- it 'is expected to have the MergeRequestSetWip' do
- expect(described_class).to have_graphql_mutation(Mutations::MergeRequests::SetWip)
+ it 'is expected to have the deprecated MergeRequestSetWip' do
+ field = get_field('MergeRequestSetWip')
+
+ expect(field).to be_present
+ expect(field.deprecation_reason).to be_present
+ expect(field.resolver).to eq(Mutations::MergeRequests::SetWip)
+ end
+
+ it 'is expected to have the MergeRequestSetDraft' do
+ expect(described_class).to have_graphql_mutation(Mutations::MergeRequests::SetDraft)
end
describe 'deprecated and aliased mutations' do
@@ -27,9 +35,9 @@ RSpec.describe Types::MutationType do
it { expect(alias_field.resolver.fields).to eq(canonical_field.resolver.fields) }
it { expect(alias_field.resolver.arguments).to eq(canonical_field.resolver.arguments) }
end
+ end
- def get_field(name)
- described_class.fields[GraphqlHelpers.fieldnamerize(name)]
- end
+ def get_field(name)
+ described_class.fields[GraphqlHelpers.fieldnamerize(name)]
end
end
diff --git a/spec/graphql/types/packages/maven/metadatum_type_spec.rb b/spec/graphql/types/packages/maven/metadatum_type_spec.rb
new file mode 100644
index 00000000000..cf24af1456e
--- /dev/null
+++ b/spec/graphql/types/packages/maven/metadatum_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MavenMetadata'] do
+ it 'includes maven metadatum fields' do
+ expected_fields = %w[
+ id created_at updated_at path app_group app_version app_name
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/nuget/metadatum_type_spec.rb b/spec/graphql/types/packages/nuget/metadatum_type_spec.rb
new file mode 100644
index 00000000000..e5baa7522e4
--- /dev/null
+++ b/spec/graphql/types/packages/nuget/metadatum_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['NugetMetadata'] do
+ it 'includes nuget metadatum fields' do
+ expected_fields = %w[
+ id license_url project_url icon_url
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/package_status_enum_spec.rb b/spec/graphql/types/packages/package_status_enum_spec.rb
new file mode 100644
index 00000000000..71d05da35ea
--- /dev/null
+++ b/spec/graphql/types/packages/package_status_enum_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageStatus'] do
+ it 'exposes all package statuses' do
+ expect(described_class.values.keys).to contain_exactly(*%w[DEFAULT HIDDEN PROCESSING ERROR])
+ end
+end
diff --git a/spec/graphql/types/packages/package_type_enum_spec.rb b/spec/graphql/types/packages/package_type_enum_spec.rb
index ccd91485e4b..9d5a7716a61 100644
--- a/spec/graphql/types/packages/package_type_enum_spec.rb
+++ b/spec/graphql/types/packages/package_type_enum_spec.rb
@@ -4,6 +4,6 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['PackageTypeEnum'] do
it 'exposes all package types' do
- expect(described_class.values.keys).to contain_exactly(*%w[MAVEN NPM CONAN NUGET PYPI COMPOSER GENERIC GOLANG DEBIAN RUBYGEMS])
+ expect(described_class.values.keys).to contain_exactly(*%w[MAVEN NPM CONAN NUGET PYPI COMPOSER GENERIC GOLANG DEBIAN RUBYGEMS HELM TERRAFORM_MODULE])
end
end
diff --git a/spec/graphql/types/packages/package_type_spec.rb b/spec/graphql/types/packages/package_type_spec.rb
index 544d6ddc3af..07573044abb 100644
--- a/spec/graphql/types/packages/package_type_spec.rb
+++ b/spec/graphql/types/packages/package_type_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe GitlabSchema.types['Package'] do
created_at updated_at
project
tags pipelines metadata versions
+ status
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/permission_types/ci/job_spec.rb b/spec/graphql/types/permission_types/ci/job_spec.rb
new file mode 100644
index 00000000000..e4bc5419070
--- /dev/null
+++ b/spec/graphql/types/permission_types/ci/job_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::PermissionTypes::Ci::Job do
+ it 'has expected permission fields' do
+ expected_permissions = [
+ :read_job_artifacts, :read_build, :update_build
+ ]
+
+ expect(described_class).to have_graphql_fields(expected_permissions).only
+ end
+end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index f2c4068f048..0f7cadbd4a7 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe GitlabSchema.types['Project'] do
it 'has the expected fields' do
expected_fields = %w[
user_permissions id full_path path name_with_namespace
- name description description_html tag_list ssh_url_to_repo
+ name description description_html tag_list topics ssh_url_to_repo
http_url_to_repo web_url star_count forks_count
created_at last_activity_at archived visibility
container_registry_enabled shared_runners_enabled
@@ -32,6 +32,7 @@ RSpec.describe GitlabSchema.types['Project'] do
issue_status_counts terraform_states alert_management_integrations
container_repositories container_repositories_count
pipeline_analytics squash_read_only sast_ci_configuration
+ ci_template
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -379,4 +380,11 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::Ci::JobType.connection_type) }
it { is_expected.to have_graphql_arguments(:statuses) }
end
+
+ describe 'ci_template field' do
+ subject { described_class.fields['ciTemplate'] }
+
+ it { is_expected.to have_graphql_type(Types::Ci::TemplateType) }
+ it { is_expected.to have_graphql_arguments(:name) }
+ end
end
diff --git a/spec/graphql/types/projects/services_enum_spec.rb b/spec/graphql/types/projects/services_enum_spec.rb
index b8da9305de4..c23c652a378 100644
--- a/spec/graphql/types/projects/services_enum_spec.rb
+++ b/spec/graphql/types/projects/services_enum_spec.rb
@@ -11,5 +11,5 @@ RSpec.describe GitlabSchema.types['ServiceType'] do
end
def available_services_enum
- ::Service.available_services_types(include_dev: false).map(&:underscore).map(&:upcase)
+ ::Integration.available_services_types(include_dev: false).map(&:underscore).map(&:upcase)
end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index d3dcdd260b0..9a8f2090cc1 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -21,8 +21,11 @@ RSpec.describe GitlabSchema.types['Query'] do
user
users
issue
+ merge_request
usage_trends_measurements
runner_platforms
+ runner
+ runners
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
@@ -60,11 +63,21 @@ RSpec.describe GitlabSchema.types['Query'] do
describe 'issue field' do
subject { described_class.fields['issue'] }
- it 'returns issue' do
+ it "finds an issue by it's gid" do
+ is_expected.to have_graphql_arguments(:id)
is_expected.to have_graphql_type(Types::IssueType)
end
end
+ describe 'merge_request field' do
+ subject { described_class.fields['mergeRequest'] }
+
+ it "finds a merge_request by it's gid" do
+ is_expected.to have_graphql_arguments(:id)
+ is_expected.to have_graphql_type(Types::MergeRequestType)
+ end
+ end
+
describe 'usage_trends_measurements field' do
subject { described_class.fields['usageTrendsMeasurements'] }
@@ -73,6 +86,18 @@ RSpec.describe GitlabSchema.types['Query'] do
end
end
+ describe 'runner field' do
+ subject { described_class.fields['runner'] }
+
+ it { is_expected.to have_graphql_type(Types::Ci::RunnerType) }
+ end
+
+ describe 'runners field' do
+ subject { described_class.fields['runners'] }
+
+ it { is_expected.to have_graphql_type(Types::Ci::RunnerType.connection_type) }
+ end
+
describe 'runner_platforms field' do
subject { described_class.fields['runnerPlatforms'] }
diff --git a/spec/graphql/types/repository/blob_type_spec.rb b/spec/graphql/types/repository/blob_type_spec.rb
index f8647e4e964..beab4dcebc2 100644
--- a/spec/graphql/types/repository/blob_type_spec.rb
+++ b/spec/graphql/types/repository/blob_type_spec.rb
@@ -5,5 +5,32 @@ require 'spec_helper'
RSpec.describe Types::Repository::BlobType do
specify { expect(described_class.graphql_name).to eq('RepositoryBlob') }
- specify { expect(described_class).to have_graphql_fields(:id, :oid, :name, :path, :web_path, :lfs_oid, :mode) }
+ specify do
+ expect(described_class).to have_graphql_fields(
+ :id,
+ :oid,
+ :name,
+ :path,
+ :web_path,
+ :lfs_oid,
+ :mode,
+ :size,
+ :raw_size,
+ :raw_blob,
+ :raw_text_blob,
+ :file_type,
+ :edit_blob_path,
+ :stored_externally,
+ :raw_path,
+ :replace_path,
+ :simple_viewer,
+ :rich_viewer,
+ :plain_data,
+ :can_modify_blob,
+ :ide_edit_path,
+ :external_storage_url,
+ :fork_and_edit_path,
+ :ide_fork_and_edit_path
+ )
+ end
end
diff --git a/spec/graphql/types/repository_type_spec.rb b/spec/graphql/types/repository_type_spec.rb
index fa1e54dfcfa..ee0cc4361da 100644
--- a/spec/graphql/types/repository_type_spec.rb
+++ b/spec/graphql/types/repository_type_spec.rb
@@ -16,4 +16,6 @@ RSpec.describe GitlabSchema.types['Repository'] do
specify { expect(described_class).to have_graphql_field(:blobs) }
specify { expect(described_class).to have_graphql_field(:branch_names, calls_gitaly?: true, complexity: 170) }
+
+ specify { expect(described_class).to have_graphql_field(:disk_path) }
end
diff --git a/spec/graphql/types/subscription_type_spec.rb b/spec/graphql/types/subscription_type_spec.rb
new file mode 100644
index 00000000000..b99df374bb3
--- /dev/null
+++ b/spec/graphql/types/subscription_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Subscription'] do
+ it 'has the expected fields' do
+ expected_fields = %i[
+ issuable_assignees_updated
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields).only
+ end
+end
diff --git a/spec/graphql/types/timelog_type_spec.rb b/spec/graphql/types/timelog_type_spec.rb
index 38bd70d5097..791c2fdb046 100644
--- a/spec/graphql/types/timelog_type_spec.rb
+++ b/spec/graphql/types/timelog_type_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['Timelog'] do
- let(:fields) { %i[spent_at time_spent user issue note] }
+ let(:fields) { %i[spent_at time_spent user issue merge_request note] }
it { expect(described_class.graphql_name).to eq('Timelog') }
it { expect(described_class).to have_graphql_fields(fields) }
@@ -25,6 +25,14 @@ RSpec.describe GitlabSchema.types['Timelog'] do
end
end
+ describe 'merge_request field' do
+ subject { described_class.fields['mergeRequest'] }
+
+ it 'returns merge_request' do
+ is_expected.to have_graphql_type(Types::MergeRequestType)
+ end
+ end
+
describe 'note field' do
subject { described_class.fields['note'] }
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index d9e67ff348b..7d73727b041 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -5,7 +5,11 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['User'] do
specify { expect(described_class.graphql_name).to eq('User') }
- specify { expect(described_class).to require_graphql_authorizations(:read_user) }
+ specify do
+ runtime_type = described_class.resolve_type(build(:user), {})
+
+ expect(runtime_type).to require_graphql_authorizations(:read_user)
+ end
it 'has the expected fields' do
expected_fields = %w[
diff --git a/spec/haml_lint/linter/documentation_links_spec.rb b/spec/haml_lint/linter/documentation_links_spec.rb
index 5de455b6e8c..22c406de57a 100644
--- a/spec/haml_lint/linter/documentation_links_spec.rb
+++ b/spec/haml_lint/linter/documentation_links_spec.rb
@@ -80,6 +80,12 @@ RSpec.describe HamlLint::Linter::DocumentationLinks do
it { is_expected.to report_lint }
end
+
+ context 'when the second link is invalid' do
+ let(:haml) { ".data-form{ data: { url: #{link_pattern}('README.md'), wrong_url: #{link_pattern}('wrong.md') } }" }
+
+ it { is_expected.to report_lint }
+ end
end
context 'help_page_path' do
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index ae039c1a8b1..bf533ca7034 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -284,14 +284,29 @@ RSpec.describe ApplicationHelper do
end
describe '#autocomplete_data_sources' do
- let(:project) { create(:project) }
- let(:noteable_type) { Issue }
+ context 'group' do
+ let(:group) { create(:group) }
+ let(:noteable_type) { Issue }
+
+ it 'returns paths for autocomplete_sources_controller' do
+ sources = helper.autocomplete_data_sources(group, noteable_type)
+ expect(sources.keys).to include(:members, :issues, :mergeRequests, :labels, :milestones, :commands)
+ sources.keys.each do |key|
+ expect(sources[key]).not_to be_nil
+ end
+ end
+ end
+
+ context 'project' do
+ let(:project) { create(:project) }
+ let(:noteable_type) { Issue }
- it 'returns paths for autocomplete_sources_controller' do
- sources = helper.autocomplete_data_sources(project, noteable_type)
- expect(sources.keys).to match_array([:members, :issues, :mergeRequests, :labels, :milestones, :commands, :snippets])
- sources.keys.each do |key|
- expect(sources[key]).not_to be_nil
+ it 'returns paths for autocomplete_sources_controller' do
+ sources = helper.autocomplete_data_sources(project, noteable_type)
+ expect(sources.keys).to match_array([:members, :issues, :mergeRequests, :labels, :milestones, :commands, :snippets])
+ sources.keys.each do |key|
+ expect(sources[key]).not_to be_nil
+ end
end
end
end
diff --git a/spec/helpers/auth_helper_spec.rb b/spec/helpers/auth_helper_spec.rb
index beffa4cf60e..c1c961c5cbb 100644
--- a/spec/helpers/auth_helper_spec.rb
+++ b/spec/helpers/auth_helper_spec.rb
@@ -77,8 +77,8 @@ RSpec.describe AuthHelper do
end
context 'all providers are enabled to sign in' do
- it 'returns all the enabled providers from settings' do
- expect(helper.enabled_button_based_providers).to include('twitter', 'github', 'google_oauth2', 'openid_connect')
+ it 'returns all the enabled providers from settings in expected order' do
+ expect(helper.enabled_button_based_providers).to match(%w[google_oauth2 github twitter openid_connect])
end
it 'puts google and github in the beginning' do
@@ -99,19 +99,19 @@ RSpec.describe AuthHelper do
end
end
- describe 'trial_enabled_button_based_providers' do
- it 'returns the intersection set of github & google_oauth2 with enabled providers' do
+ describe 'popular_enabled_button_based_providers' do
+ it 'returns the intersection set of popular & enabled providers', :aggregate_failures do
allow(helper).to receive(:enabled_button_based_providers) { %w(twitter github google_oauth2) }
- expect(helper.trial_enabled_button_based_providers).to eq(%w(github google_oauth2))
+ expect(helper.popular_enabled_button_based_providers).to eq(%w(github google_oauth2))
allow(helper).to receive(:enabled_button_based_providers) { %w(google_oauth2 bitbucket) }
- expect(helper.trial_enabled_button_based_providers).to eq(%w(google_oauth2))
+ expect(helper.popular_enabled_button_based_providers).to eq(%w(google_oauth2))
allow(helper).to receive(:enabled_button_based_providers) { %w(bitbucket) }
- expect(helper.trial_enabled_button_based_providers).to be_empty
+ expect(helper.popular_enabled_button_based_providers).to be_empty
end
end
@@ -313,4 +313,37 @@ RSpec.describe AuthHelper do
it { is_expected.to be_falsey }
end
end
+
+ describe '#auth_app_owner_text' do
+ shared_examples 'generates text with the correct info' do
+ it 'includes the name of the application owner' do
+ auth_app_owner_text = helper.auth_app_owner_text(owner)
+
+ expect(auth_app_owner_text).to include(owner.name)
+ expect(auth_app_owner_text).to include(path_to_owner)
+ end
+ end
+
+ context 'when owner is a user' do
+ let_it_be(:owner) { create(:user) }
+
+ let(:path_to_owner) { user_path(owner) }
+
+ it_behaves_like 'generates text with the correct info'
+ end
+
+ context 'when owner is a group' do
+ let_it_be(:owner) { create(:group) }
+
+ let(:path_to_owner) { user_path(owner) }
+
+ it_behaves_like 'generates text with the correct info'
+ end
+
+ context 'when the owner is missing' do
+ it 'returns nil' do
+ expect(helper.auth_app_owner_text(nil)).to be(nil)
+ end
+ end
+ end
end
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 120dbe7cb49..047a6ca0b7d 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe AvatarsHelper do
shared_examples 'resource with a default avatar' do |source_type|
it 'returns a default avatar div' do
expect(public_send("#{source_type}_icon", *helper_args))
- .to match(%r{<div class="identicon bg\d+">F</div>})
+ .to match(%r{<span class="identicon bg\d+">F</span>})
end
end
@@ -409,4 +409,33 @@ RSpec.describe AvatarsHelper do
end
end
end
+
+ describe '#avatar_without_link' do
+ let(:options) { { size: 32 } }
+
+ subject { helper.avatar_without_link(resource, options) }
+
+ context 'with users' do
+ let(:resource) { user }
+
+ it 'displays user avatar' do
+ is_expected.to eq tag(
+ :img,
+ alt: "#{user.name}'s avatar",
+ src: avatar_icon_for_user(user, 32),
+ data: { container: 'body' },
+ class: 'avatar s32 has-tooltip',
+ title: user.name
+ )
+ end
+ end
+
+ context 'with groups' do
+ let(:resource) { build_stubbed(:group, name: 'foo') }
+
+ it 'displays group avatar' do
+ is_expected.to match(%r{<span class="avatar identicon bg\d+ s32">F</span>})
+ end
+ end
+ end
end
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
index 00cd44809c7..cb4b6915b20 100644
--- a/spec/helpers/boards_helper_spec.rb
+++ b/spec/helpers/boards_helper_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe BoardsHelper do
end
describe '#board_base_url' do
- context 'when project board' do
+ context 'when group board' do
it 'generates the correct url' do
assign(:board, group_board)
assign(:group, base_group)
@@ -55,6 +55,43 @@ RSpec.describe BoardsHelper do
end
end
+ describe '#current_board_namespace' do
+ context 'when group board' do
+ it 'returns the correct namespace' do
+ assign(:board, group_board)
+ assign(:group, base_group)
+
+ expect(helper.current_board_namespace).to be(base_group)
+ end
+ end
+
+ context 'project under group' do
+ context 'when project board' do
+ it 'returns the correct namespace' do
+ assign(:project, project)
+ assign(:board, project_board)
+
+ expect(helper.current_board_namespace).to be(project.parent)
+ end
+ end
+ end
+
+ context 'project under user namespace' do
+ let_it_be(:project_under_user) { create(:project, namespace: user.namespace) }
+
+ context 'when project board' do
+ let_it_be(:project_board) { create(:board, project: project_under_user) }
+
+ it 'returns the correct namespace' do
+ assign(:project, project_under_user)
+ assign(:board, project_board)
+
+ expect(helper.current_board_namespace).to be(user.namespace)
+ end
+ end
+ end
+ end
+
describe '#board_data' do
context 'project_board' do
before do
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index a08517d0c57..2287718db5a 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -36,20 +36,56 @@ RSpec.describe Ci::PipelineEditorHelper do
subject(:pipeline_editor_data) { helper.js_pipeline_editor_data(project) }
- it 'returns pipeline editor data' do
- expect(pipeline_editor_data).to eq({
- "ci-config-path": project.ci_config_path_or_default,
- "commit-sha" => project.commit.sha,
- "default-branch" => project.default_branch,
- "empty-state-illustration-path" => 'foo',
- "initial-branch-name": nil,
- "lint-help-page-path" => help_page_path('ci/lint', anchor: 'validate-basic-logic-and-syntax'),
- "new-merge-request-path" => '/mock/project/-/merge_requests/new',
- "project-path" => project.path,
- "project-full-path" => project.full_path,
- "project-namespace" => project.namespace.full_path,
- "yml-help-page-path" => help_page_path('ci/yaml/README')
- })
+ context 'with a project with commits' do
+ it 'returns pipeline editor data' do
+ expect(pipeline_editor_data).to eq({
+ "ci-config-path": project.ci_config_path_or_default,
+ "ci-examples-help-page-path" => help_page_path('ci/examples/README'),
+ "ci-help-page-path" => help_page_path('ci/README'),
+ "commit-sha" => project.commit.sha,
+ "default-branch" => project.default_branch,
+ "empty-state-illustration-path" => 'foo',
+ "initial-branch-name": nil,
+ "lint-help-page-path" => help_page_path('ci/lint', anchor: 'validate-basic-logic-and-syntax'),
+ "needs-help-page-path" => help_page_path('ci/yaml/README', anchor: 'needs'),
+ "new-merge-request-path" => '/mock/project/-/merge_requests/new',
+ "pipeline_etag" => graphql_etag_pipeline_sha_path(project.commit.sha),
+ "pipeline-page-path" => project_pipelines_path(project),
+ "project-path" => project.path,
+ "project-full-path" => project.full_path,
+ "project-namespace" => project.namespace.full_path,
+ "runner-help-page-path" => help_page_path('ci/runners/README'),
+ "total-branches" => project.repository.branches.length,
+ "yml-help-page-path" => help_page_path('ci/yaml/README')
+ })
+ end
+ end
+
+ context 'with an empty project' do
+ let(:project) { create(:project, :empty_repo) }
+
+ it 'returns pipeline editor data' do
+ expect(pipeline_editor_data).to eq({
+ "ci-config-path": project.ci_config_path_or_default,
+ "ci-examples-help-page-path" => help_page_path('ci/examples/README'),
+ "ci-help-page-path" => help_page_path('ci/README'),
+ "commit-sha" => '',
+ "default-branch" => project.default_branch,
+ "empty-state-illustration-path" => 'foo',
+ "initial-branch-name": nil,
+ "lint-help-page-path" => help_page_path('ci/lint', anchor: 'validate-basic-logic-and-syntax'),
+ "needs-help-page-path" => help_page_path('ci/yaml/README', anchor: 'needs'),
+ "new-merge-request-path" => '/mock/project/-/merge_requests/new',
+ "pipeline_etag" => '',
+ "pipeline-page-path" => project_pipelines_path(project),
+ "project-path" => project.path,
+ "project-full-path" => project.full_path,
+ "project-namespace" => project.namespace.full_path,
+ "runner-help-page-path" => help_page_path('ci/runners/README'),
+ "total-branches" => 0,
+ "yml-help-page-path" => help_page_path('ci/yaml/README')
+ })
+ end
end
end
end
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 86ed133e599..9a1ecb22edb 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -144,7 +144,7 @@ RSpec.describe CommitsHelper do
}
end
- subject { helper.conditionally_paginate_diff_files(diffs_collection, paginate: paginate) }
+ subject { helper.conditionally_paginate_diff_files(diffs_collection, paginate: paginate, per: Projects::CommitController::COMMIT_DIFFS_PER_PAGE) }
before do
allow(helper).to receive(:params).and_return(params)
@@ -168,15 +168,15 @@ RSpec.describe CommitsHelper do
let(:page) { 1 }
it "has 20 diffs" do
- expect(subject.size).to eq(75)
+ expect(subject.size).to eq(20)
end
end
- context "page 2" do
- let(:page) { 2 }
+ context "page 5" do
+ let(:page) { 5 }
- it "has the remaining 10 diffs" do
- expect(subject.size).to eq(10)
+ it "has the remaining 5 out of 85 diffs" do
+ expect(subject.size).to eq(5)
end
end
end
@@ -289,4 +289,46 @@ RSpec.describe CommitsHelper do
}
end
end
+
+ describe "#commit_partial_cache_key" do
+ subject(:cache_key) { helper.commit_partial_cache_key(commit, ref: ref, merge_request: merge_request, request: request) }
+
+ let(:commit) { create(:commit).present(current_user: user) }
+ let(:commit_status) { Gitlab::Ci::Status::Running.new(pipeline, user) }
+ let(:pipeline) { create(:ci_pipeline, :running) }
+ let(:user) { create(:user) }
+ let(:ref) { "master" }
+ let(:merge_request) { nil }
+ let(:request) { double(xhr?: true) }
+ let(:current_path) { "test" }
+
+ before do
+ expect(commit).to receive(:status_for).with(ref).and_return(commit_status)
+ assign(:path, current_path)
+ end
+
+ it { is_expected.to be_an(Array) }
+ it { is_expected.to include(commit) }
+ it { is_expected.to include(commit.author) }
+ it { is_expected.to include(ref) }
+
+ it do
+ is_expected.to include(
+ {
+ merge_request: merge_request,
+ pipeline_status: commit_status,
+ xhr: true,
+ controller: "commits",
+ path: current_path
+ }
+ )
+ end
+
+ describe "final cache key output" do
+ subject { ActiveSupport::Cache.expand_cache_key(cache_key) }
+
+ it { is_expected.to include(commit.cache_key) }
+ it { is_expected.to include(pipeline.cache_key) }
+ end
+ end
end
diff --git a/spec/helpers/dev_ops_report_helper_spec.rb b/spec/helpers/dev_ops_report_helper_spec.rb
new file mode 100644
index 00000000000..7e7a89a3039
--- /dev/null
+++ b/spec/helpers/dev_ops_report_helper_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DevOpsReportHelper do
+ subject { DevOpsReport::MetricPresenter.new(metric) }
+
+ let(:metric) { build(:dev_ops_report_metric, created_at: DateTime.new(2021, 4, 3, 2, 1, 0) ) }
+
+ describe '#devops_score_metrics' do
+ let(:devops_score_metrics) { helper.devops_score_metrics(subject) }
+
+ it { expect(devops_score_metrics[:averageScore]).to eq({ scoreLevel: { icon: "status-alert", label: "Moderate", variant: "warning" }, value: "55.9" } ) }
+
+ it { expect(devops_score_metrics[:cards].first).to eq({ leadInstance: "9.3", score: "13.3", scoreLevel: { label: "Low", variant: "muted" }, title: "Issues created per active user", usage: "1.2" } ) }
+ it { expect(devops_score_metrics[:cards].second).to eq({ leadInstance: "30.3", score: "92.7", scoreLevel: { label: "High", variant: "success" }, title: "Comments created per active user", usage: "28.1" } ) }
+ it { expect(devops_score_metrics[:cards].fourth).to eq({ leadInstance: "5.2", score: "62.4", scoreLevel: { label: "Moderate", variant: "neutral" }, title: "Boards created per active user", usage: "3.3" } ) }
+
+ it { expect(devops_score_metrics[:createdAt]).to eq("2021-04-03 02:01") }
+
+ describe 'with low average score' do
+ let(:low_metric) { double(average_percentage_score: 2, cards: subject.cards, created_at: subject.created_at) }
+ let(:devops_score_metrics) { helper.devops_score_metrics(low_metric) }
+
+ it { expect(devops_score_metrics[:averageScore]).to eq({ scoreLevel: { icon: "status-failed", label: "Low", variant: "danger" }, value: "2.0" } ) }
+ end
+
+ describe 'with high average score' do
+ let(:high_metric) { double(average_percentage_score: 82, cards: subject.cards, created_at: subject.created_at) }
+ let(:devops_score_metrics) { helper.devops_score_metrics(high_metric) }
+
+ it { expect(devops_score_metrics[:averageScore]).to eq({ scoreLevel: { icon: "status_success_solid", label: "High", variant: "success" }, value: "82.0" } ) }
+ end
+
+ describe 'with blank metrics' do
+ let(:devops_score_metrics) { helper.devops_score_metrics({}) }
+
+ it { expect(devops_score_metrics).to eq({}) }
+ end
+ end
+end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index d316f2b0a0a..89cb0f72277 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -45,7 +45,8 @@ RSpec.describe EnvironmentsHelper do
'custom_dashboard_base_path' => Gitlab::Metrics::Dashboard::RepoDashboardFinder::DASHBOARD_ROOT,
'operations_settings_path' => project_settings_operations_path(project),
'can_access_operations_settings' => 'true',
- 'panel_preview_endpoint' => project_metrics_dashboards_builder_path(project, format: :json)
+ 'panel_preview_endpoint' => project_metrics_dashboards_builder_path(project, format: :json),
+ 'has_managed_prometheus' => 'false'
)
end
@@ -120,6 +121,52 @@ RSpec.describe EnvironmentsHelper do
end
end
end
+
+ context 'has_managed_prometheus' do
+ context 'without prometheus service' do
+ it "doesn't have managed prometheus" do
+ expect(metrics_data).to include(
+ 'has_managed_prometheus' => 'false'
+ )
+ end
+ end
+
+ context 'with prometheus service' do
+ let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
+
+ context 'when manual prometheus service is active' do
+ it "doesn't have managed prometheus" do
+ prometheus_service.update!(manual_configuration: true)
+
+ expect(metrics_data).to include(
+ 'has_managed_prometheus' => 'false'
+ )
+ end
+ end
+
+ context 'when prometheus service is inactive' do
+ it "doesn't have managed prometheus" do
+ prometheus_service.update!(manual_configuration: false)
+
+ expect(metrics_data).to include(
+ 'has_managed_prometheus' => 'false'
+ )
+ end
+ end
+
+ context 'when a cluster prometheus is available' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+
+ it 'has managed prometheus' do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+
+ expect(metrics_data).to include(
+ 'has_managed_prometheus' => 'true'
+ )
+ end
+ end
+ end
+ end
end
describe '#custom_metrics_available?' do
@@ -144,7 +191,7 @@ RSpec.describe EnvironmentsHelper do
it 'returns logs data' do
expected_data = {
"environment_name": environment.name,
- "environments_path": project_environments_path(project, format: :json),
+ "environments_path": api_v4_projects_environments_path(id: project.id),
"environment_id": environment.id,
"cluster_applications_documentation_path" => help_page_path('user/clusters/applications.md', anchor: 'elastic-stack'),
"clusters_path": project_clusters_path(project, format: :json)
diff --git a/spec/helpers/gitlab_routing_helper_spec.rb b/spec/helpers/gitlab_routing_helper_spec.rb
index 0df04d2a8a7..40faf994ad2 100644
--- a/spec/helpers/gitlab_routing_helper_spec.rb
+++ b/spec/helpers/gitlab_routing_helper_spec.rb
@@ -113,6 +113,24 @@ RSpec.describe GitlabRoutingHelper do
end
end
+ describe 'members helpers' do
+ describe '#source_members_url' do
+ it 'returns a url to the memberships page for a group membership' do
+ membership = build_stubbed(:group_member)
+ group_members_url = "http://test.host/groups/#{membership.source.full_path}/-/group_members"
+
+ expect(source_members_url(membership)).to eq(group_members_url)
+ end
+
+ it 'returns a url to the memberships page for a project membership' do
+ membership = build_stubbed(:project_member)
+ project_members_url = "http://test.host/#{membership.source.full_path}/-/project_members"
+
+ expect(source_members_url(membership)).to eq(project_members_url)
+ end
+ end
+ end
+
context 'artifacts' do
let_it_be(:project) { create(:project) }
let_it_be(:job) { create(:ci_build, project: project, name: 'test:job', artifacts_expire_at: 1.hour.from_now) }
@@ -335,7 +353,12 @@ RSpec.describe GitlabRoutingHelper do
context 'GraphQL ETag paths' do
context 'with pipelines' do
- let(:pipeline) { double(id: 5) }
+ let(:sha) { 'b08774cb1a11ecdc27a82c5f444a69ea7e038ede' }
+ let(:pipeline) { double(id: 5 ) }
+
+ it 'returns an ETag path for a pipeline sha' do
+ expect(graphql_etag_pipeline_sha_path(sha)).to eq('/api/graphql:pipelines/sha/b08774cb1a11ecdc27a82c5f444a69ea7e038ede')
+ end
it 'returns an ETag path for pipelines' do
expect(graphql_etag_pipeline_path(pipeline)).to eq('/api/graphql:pipelines/id/5')
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index 99efc7963e6..c3f1509fbc8 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -23,83 +23,119 @@ RSpec.describe Groups::GroupMembersHelper do
end
end
- describe '#group_group_links_data_json' do
- include_context 'group_group_link'
+ describe '#group_members_list_data_json' do
+ let(:group_members) { create_list(:group_member, 2, group: group, created_by: current_user) }
- it 'matches json schema' do
- json = helper.group_group_links_data_json(shared_group.shared_with_group_links)
+ let(:pagination) { {} }
+ let(:collection) { group_members }
+ let(:presented_members) { present_members(collection) }
- expect(json).to match_schema('group_link/group_group_links')
- end
- end
+ subject { Gitlab::Json.parse(helper.group_members_list_data_json(group, presented_members, pagination)) }
- describe '#members_data_json' do
shared_examples 'members.json' do
- it 'matches json schema' do
- json = helper.members_data_json(group, present_members([group_member]))
-
- expect(json).to match_schema('members')
+ it 'returns `members` property that matches json schema' do
+ expect(subject['members'].to_json).to match_schema('members')
end
end
- context 'for a group member' do
- let(:group_member) { create(:group_member, group: group, created_by: current_user) }
+ before do
+ allow(helper).to receive(:group_group_member_path).with(group, ':id').and_return('/groups/foo-bar/-/group_members/:id')
+ allow(helper).to receive(:can?).with(current_user, :admin_group_member, group).and_return(true)
+ end
+
+ it 'returns expected json' do
+ expected = {
+ member_path: '/groups/foo-bar/-/group_members/:id',
+ source_id: group.id,
+ can_manage_members: true
+ }.as_json
+ expect(subject).to include(expected)
+ end
+
+ context 'for a group member' do
it_behaves_like 'members.json'
context 'with user status set' do
let(:user) { create(:user) }
let!(:status) { create(:user_status, user: user) }
- let(:group_member) { create(:group_member, group: group, user: user, created_by: current_user) }
+ let(:group_members) { [create(:group_member, group: group, user: user, created_by: current_user)] }
it_behaves_like 'members.json'
end
end
context 'for an invited group member' do
- let(:group_member) { create(:group_member, :invited, group: group, created_by: current_user) }
+ let(:group_members) { create_list(:group_member, 2, :invited, group: group, created_by: current_user) }
it_behaves_like 'members.json'
end
context 'for an access request' do
- let(:group_member) { create(:group_member, :access_request, group: group, created_by: current_user) }
+ let(:group_members) { create_list(:group_member, 2, :access_request, group: group, created_by: current_user) }
it_behaves_like 'members.json'
end
- end
-
- describe '#group_members_list_data_attributes' do
- let(:group_member) { create(:group_member, group: group, created_by: current_user) }
- before do
- allow(helper).to receive(:group_group_member_path).with(group, ':id').and_return('/groups/foo-bar/-/group_members/:id')
- allow(helper).to receive(:can?).with(current_user, :admin_group_member, group).and_return(true)
+ context 'when pagination is not available' do
+ it 'sets `pagination` attribute to expected json' do
+ expected = {
+ current_page: nil,
+ per_page: nil,
+ total_items: 2,
+ param_name: nil,
+ params: {}
+ }.as_json
+
+ expect(subject['pagination']).to include(expected)
+ end
end
- it 'returns expected hash' do
- expect(helper.group_members_list_data_attributes(group, present_members([group_member]))).to include({
- members: helper.members_data_json(group, present_members([group_member])),
- member_path: '/groups/foo-bar/-/group_members/:id',
- source_id: group.id,
- can_manage_members: 'true'
- })
+ context 'when pagination is available' do
+ let(:collection) { Kaminari.paginate_array(group_members).page(1).per(1) }
+ let(:pagination) { { param_name: :page, params: { search_groups: nil } } }
+
+ it 'sets `pagination` attribute to expected json' do
+ expected = {
+ current_page: 1,
+ per_page: 1,
+ total_items: 2,
+ param_name: :page,
+ params: { search_groups: nil }
+ }.as_json
+
+ expect(subject['pagination']).to include(expected)
+ end
end
end
- describe '#group_group_links_list_data_attributes' do
+ describe '#group_group_links_list_data_json' do
include_context 'group_group_link'
+ subject { Gitlab::Json.parse(helper.group_group_links_list_data_json(shared_group)) }
+
before do
allow(helper).to receive(:group_group_link_path).with(shared_group, ':id').and_return('/groups/foo-bar/-/group_links/:id')
end
- it 'returns expected hash' do
- expect(helper.group_group_links_list_data_attributes(shared_group)).to include({
- members: helper.group_group_links_data_json(shared_group.shared_with_group_links),
+ it 'returns expected json' do
+ expected = {
+ pagination: {
+ current_page: nil,
+ per_page: nil,
+ total_items: 1,
+ param_name: nil,
+ params: {}
+ },
member_path: '/groups/foo-bar/-/group_links/:id',
source_id: shared_group.id
- })
+ }.as_json
+
+ expect(subject).to include(expected)
+ end
+
+ it 'returns `members` property that matches json schema' do
+ expect(subject['members'].to_json).to match_schema('group_link/group_group_links')
end
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index d588120bb98..ad6852f63df 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -96,9 +96,31 @@ RSpec.describe GroupsHelper do
subject { helper.group_title(very_deep_nested_group) }
- it 'outputs the groups in the correct order' do
- expect(subject)
- .to match(%r{<li style="text-indent: 16px;"><a.*>#{deep_nested_group.name}.*</li>.*<a.*>#{very_deep_nested_group.name}</a>}m)
+ context 'traversal queries' do
+ shared_examples 'correct ancestor order' do
+ it 'outputs the groups in the correct order' do
+ expect(subject)
+ .to match(%r{<li style="text-indent: 16px;"><a.*>#{deep_nested_group.name}.*</li>.*<a.*>#{very_deep_nested_group.name}</a>}m)
+ end
+ end
+
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ include_examples 'correct ancestor order'
+ end
+
+ context 'linear' do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+
+ very_deep_nested_group.reload # make sure traversal_ids are reloaded
+ end
+
+ include_examples 'correct ancestor order'
+ end
end
it 'enqueues the elements in the breadcrumb schema list' do
@@ -122,101 +144,121 @@ RSpec.describe GroupsHelper do
end
describe '#share_with_group_lock_help_text' do
- let_it_be_with_reload(:root_group) { create(:group) }
- let_it_be_with_reload(:subgroup) { create(:group, parent: root_group) }
- let_it_be_with_reload(:sub_subgroup) { create(:group, parent: subgroup) }
- let_it_be(:root_owner) { create(:user) }
- let_it_be(:sub_owner) { create(:user) }
- let_it_be(:sub_sub_owner) { create(:user) }
-
- let(:possible_help_texts) do
- {
- default_help: "This setting will be applied to all subgroups unless overridden by a group owner",
- ancestor_locked_but_you_can_override: %r{This setting is applied on <a .+>.+</a>\. You can override the setting or .+},
- ancestor_locked_so_ask_the_owner: /This setting is applied on .+\. To share projects in this group with another group, ask the owner to override the setting or remove the share with group lock from .+/,
- ancestor_locked_and_has_been_overridden: /This setting is applied on .+ and has been overridden on this subgroup/
- }
- end
-
- let(:possible_linked_ancestors) do
- {
- root_group: root_group,
- subgroup: subgroup
- }
- end
-
- let(:users) do
- {
- root_owner: root_owner,
- sub_owner: sub_owner,
- sub_sub_owner: sub_sub_owner
- }
- end
-
- subject { helper.share_with_group_lock_help_text(sub_subgroup) }
-
- before_all do
- root_group.add_owner(root_owner)
- subgroup.add_owner(sub_owner)
- sub_subgroup.add_owner(sub_sub_owner)
- end
-
- # rubocop:disable Layout/SpaceBeforeComma
- where(:root_share_with_group_locked, :subgroup_share_with_group_locked, :sub_subgroup_share_with_group_locked, :current_user, :help_text, :linked_ancestor) do
- [
- [false , false , false , :root_owner , :default_help , nil],
- [false , false , false , :sub_owner , :default_help , nil],
- [false , false , false , :sub_sub_owner , :default_help , nil],
- [false , false , true , :root_owner , :default_help , nil],
- [false , false , true , :sub_owner , :default_help , nil],
- [false , false , true , :sub_sub_owner , :default_help , nil],
- [false , true , false , :root_owner , :ancestor_locked_and_has_been_overridden , :subgroup],
- [false , true , false , :sub_owner , :ancestor_locked_and_has_been_overridden , :subgroup],
- [false , true , false , :sub_sub_owner , :ancestor_locked_and_has_been_overridden , :subgroup],
- [false , true , true , :root_owner , :ancestor_locked_but_you_can_override , :subgroup],
- [false , true , true , :sub_owner , :ancestor_locked_but_you_can_override , :subgroup],
- [false , true , true , :sub_sub_owner , :ancestor_locked_so_ask_the_owner , :subgroup],
- [true , false , false , :root_owner , :default_help , nil],
- [true , false , false , :sub_owner , :default_help , nil],
- [true , false , false , :sub_sub_owner , :default_help , nil],
- [true , false , true , :root_owner , :default_help , nil],
- [true , false , true , :sub_owner , :default_help , nil],
- [true , false , true , :sub_sub_owner , :default_help , nil],
- [true , true , false , :root_owner , :ancestor_locked_and_has_been_overridden , :root_group],
- [true , true , false , :sub_owner , :ancestor_locked_and_has_been_overridden , :root_group],
- [true , true , false , :sub_sub_owner , :ancestor_locked_and_has_been_overridden , :root_group],
- [true , true , true , :root_owner , :ancestor_locked_but_you_can_override , :root_group],
- [true , true , true , :sub_owner , :ancestor_locked_so_ask_the_owner , :root_group],
- [true , true , true , :sub_sub_owner , :ancestor_locked_so_ask_the_owner , :root_group]
- ]
- end
- # rubocop:enable Layout/SpaceBeforeComma
+ context 'traversal queries' do
+ let_it_be_with_reload(:root_group) { create(:group) }
+ let_it_be_with_reload(:subgroup) { create(:group, parent: root_group) }
+ let_it_be_with_reload(:sub_subgroup) { create(:group, parent: subgroup) }
+ let_it_be(:root_owner) { create(:user) }
+ let_it_be(:sub_owner) { create(:user) }
+ let_it_be(:sub_sub_owner) { create(:user) }
+
+ let(:possible_help_texts) do
+ {
+ default_help: "This setting will be applied to all subgroups unless overridden by a group owner",
+ ancestor_locked_but_you_can_override: %r{This setting is applied on <a .+>.+</a>\. You can override the setting or .+},
+ ancestor_locked_so_ask_the_owner: /This setting is applied on .+\. To share projects in this group with another group, ask the owner to override the setting or remove the share with group lock from .+/,
+ ancestor_locked_and_has_been_overridden: /This setting is applied on .+ and has been overridden on this subgroup/
+ }
+ end
- with_them do
- before do
- root_group.update_column(:share_with_group_lock, true) if root_share_with_group_locked
- subgroup.update_column(:share_with_group_lock, true) if subgroup_share_with_group_locked
- sub_subgroup.update_column(:share_with_group_lock, true) if sub_subgroup_share_with_group_locked
-
- allow(helper).to receive(:current_user).and_return(users[current_user])
- allow(helper).to receive(:can?)
- .with(users[current_user], :change_share_with_group_lock, subgroup)
- .and_return(Ability.allowed?(users[current_user], :change_share_with_group_lock, subgroup))
-
- ancestor = possible_linked_ancestors[linked_ancestor]
- if ancestor
- allow(helper).to receive(:can?)
- .with(users[current_user], :read_group, ancestor)
- .and_return(Ability.allowed?(users[current_user], :read_group, ancestor))
- allow(helper).to receive(:can?)
- .with(users[current_user], :admin_group, ancestor)
- .and_return(Ability.allowed?(users[current_user], :admin_group, ancestor))
+ let(:possible_linked_ancestors) do
+ {
+ root_group: root_group,
+ subgroup: subgroup
+ }
+ end
+
+ let(:users) do
+ {
+ root_owner: root_owner,
+ sub_owner: sub_owner,
+ sub_sub_owner: sub_sub_owner
+ }
+ end
+
+ subject { helper.share_with_group_lock_help_text(sub_subgroup) }
+
+ before_all do
+ root_group.add_owner(root_owner)
+ subgroup.add_owner(sub_owner)
+ sub_subgroup.add_owner(sub_sub_owner)
+ end
+
+ shared_examples 'correct ancestor order' do
+ # rubocop:disable Layout/SpaceBeforeComma
+ where(:root_share_with_group_locked, :subgroup_share_with_group_locked, :sub_subgroup_share_with_group_locked, :current_user, :help_text, :linked_ancestor) do
+ [
+ [false , false , false , :root_owner , :default_help , nil],
+ [false , false , false , :sub_owner , :default_help , nil],
+ [false , false , false , :sub_sub_owner , :default_help , nil],
+ [false , false , true , :root_owner , :default_help , nil],
+ [false , false , true , :sub_owner , :default_help , nil],
+ [false , false , true , :sub_sub_owner , :default_help , nil],
+ [false , true , false , :root_owner , :ancestor_locked_and_has_been_overridden , :subgroup],
+ [false , true , false , :sub_owner , :ancestor_locked_and_has_been_overridden , :subgroup],
+ [false , true , false , :sub_sub_owner , :ancestor_locked_and_has_been_overridden , :subgroup],
+ [false , true , true , :root_owner , :ancestor_locked_but_you_can_override , :subgroup],
+ [false , true , true , :sub_owner , :ancestor_locked_but_you_can_override , :subgroup],
+ [false , true , true , :sub_sub_owner , :ancestor_locked_so_ask_the_owner , :subgroup],
+ [true , false , false , :root_owner , :default_help , nil],
+ [true , false , false , :sub_owner , :default_help , nil],
+ [true , false , false , :sub_sub_owner , :default_help , nil],
+ [true , false , true , :root_owner , :default_help , nil],
+ [true , false , true , :sub_owner , :default_help , nil],
+ [true , false , true , :sub_sub_owner , :default_help , nil],
+ [true , true , false , :root_owner , :ancestor_locked_and_has_been_overridden , :root_group],
+ [true , true , false , :sub_owner , :ancestor_locked_and_has_been_overridden , :root_group],
+ [true , true , false , :sub_sub_owner , :ancestor_locked_and_has_been_overridden , :root_group],
+ [true , true , true , :root_owner , :ancestor_locked_but_you_can_override , :root_group],
+ [true , true , true , :sub_owner , :ancestor_locked_so_ask_the_owner , :root_group],
+ [true , true , true , :sub_sub_owner , :ancestor_locked_so_ask_the_owner , :root_group]
+ ]
+ end
+ # rubocop:enable Layout/SpaceBeforeComma
+
+ with_them do
+ before do
+ root_group.update_column(:share_with_group_lock, true) if root_share_with_group_locked
+ subgroup.update_column(:share_with_group_lock, true) if subgroup_share_with_group_locked
+ sub_subgroup.update_column(:share_with_group_lock, true) if sub_subgroup_share_with_group_locked
+
+ allow(helper).to receive(:current_user).and_return(users[current_user])
+ allow(helper).to receive(:can?)
+ .with(users[current_user], :change_share_with_group_lock, subgroup)
+ .and_return(Ability.allowed?(users[current_user], :change_share_with_group_lock, subgroup))
+
+ ancestor = possible_linked_ancestors[linked_ancestor]
+ if ancestor
+ allow(helper).to receive(:can?)
+ .with(users[current_user], :read_group, ancestor)
+ .and_return(Ability.allowed?(users[current_user], :read_group, ancestor))
+ allow(helper).to receive(:can?)
+ .with(users[current_user], :admin_group, ancestor)
+ .and_return(Ability.allowed?(users[current_user], :admin_group, ancestor))
+ end
+ end
+
+ it 'has the correct help text with correct ancestor links' do
+ expect(subject).to match(possible_help_texts[help_text])
+ expect(subject).to match(possible_linked_ancestors[linked_ancestor].name) unless help_text == :default_help
+ end
end
end
- it 'has the correct help text with correct ancestor links' do
- expect(subject).to match(possible_help_texts[help_text])
- expect(subject).to match(possible_linked_ancestors[linked_ancestor].name) unless help_text == :default_help
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ include_examples 'correct ancestor order'
+ end
+
+ context 'linear' do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+ end
+
+ include_examples 'correct ancestor order'
end
end
end
@@ -420,42 +462,59 @@ RSpec.describe GroupsHelper do
describe '#show_invite_banner?' do
let_it_be(:current_user) { create(:user) }
let_it_be_with_refind(:group) { create(:group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:users) { [current_user, create(:user)] }
- subject { helper.show_invite_banner?(group) }
-
before do
allow(helper).to receive(:current_user) { current_user }
allow(helper).to receive(:can?).with(current_user, :admin_group, group).and_return(can_admin_group)
- stub_feature_flags(invite_your_teammates_banner_a: feature_enabled_flag)
+ allow(helper).to receive(:can?).with(current_user, :admin_group, subgroup).and_return(can_admin_group)
users.take(group_members_count).each { |user| group.add_guest(user) }
end
using RSpec::Parameterized::TableSyntax
- where(:feature_enabled_flag, :can_admin_group, :group_members_count, :expected_result) do
- true | true | 1 | true
- true | false | 1 | false
- false | true | 1 | false
- false | false | 1 | false
- true | true | 2 | false
- true | false | 2 | false
- false | true | 2 | false
- false | false | 2 | false
+ where(:can_admin_group, :group_members_count, :expected_result) do
+ true | 1 | true
+ false | 1 | false
+ true | 2 | false
+ false | 2 | false
end
with_them do
- context 'when the group was just created' do
- before do
- flash[:notice] = "Group #{group.name} was successfully created"
+ context 'for a parent group' do
+ subject { helper.show_invite_banner?(group) }
+
+ context 'when the group was just created' do
+ before do
+ flash[:notice] = "Group #{group.name} was successfully created"
+ end
+
+ it { is_expected.to be_falsey }
end
- it { is_expected.to be_falsey }
+ context 'when no flash message' do
+ it 'returns the expected result' do
+ expect(subject).to eq(expected_result)
+ end
+ end
end
- context 'when no flash message' do
- it 'returns the expected result' do
- expect(subject).to eq(expected_result)
+ context 'for a subgroup' do
+ subject { helper.show_invite_banner?(subgroup) }
+
+ context 'when the subgroup was just created' do
+ before do
+ flash[:notice] = "Group #{subgroup.name} was successfully created"
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when no flash message' do
+ it 'returns the expected result' do
+ expect(subject).to eq(expected_result)
+ end
end
end
end
diff --git a/spec/helpers/ide_helper_spec.rb b/spec/helpers/ide_helper_spec.rb
index 963d5953d4c..d34358e49c0 100644
--- a/spec/helpers/ide_helper_spec.rb
+++ b/spec/helpers/ide_helper_spec.rb
@@ -45,5 +45,35 @@ RSpec.describe IdeHelper do
)
end
end
+
+ context 'environments guidance experiment', :experiment do
+ before do
+ stub_experiments(in_product_guidance_environments_webide: :candidate)
+ self.instance_variable_set(:@project, project)
+ end
+
+ context 'when project has no environments' do
+ it 'enables environment guidance' do
+ expect(helper.ide_data).to include('enable-environments-guidance' => 'true')
+ end
+
+ context 'and the callout has been dismissed' do
+ it 'disables environment guidance' do
+ callout = create(:user_callout, feature_name: :web_ide_ci_environments_guidance, user: project.creator)
+ callout.update!(dismissed_at: Time.now - 1.week)
+ allow(helper).to receive(:current_user).and_return(User.find(project.creator.id))
+ expect(helper.ide_data).to include('enable-environments-guidance' => 'false')
+ end
+ end
+ end
+
+ context 'when the project has environments' do
+ it 'disables environment guidance' do
+ create(:environment, project: project)
+
+ expect(helper.ide_data).to include('enable-environments-guidance' => 'false')
+ end
+ end
+ end
end
end
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
index 109b1fc4441..122f2339b28 100644
--- a/spec/helpers/invite_members_helper_spec.rb
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -3,6 +3,8 @@
require "spec_helper"
RSpec.describe InviteMembersHelper do
+ include Devise::Test::ControllerHelpers
+
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user, developer_projects: [project]) }
@@ -12,35 +14,21 @@ RSpec.describe InviteMembersHelper do
helper.extend(Gitlab::Experimentation::ControllerConcern)
end
- describe '#show_invite_members_track_event' do
- it 'shows values when can directly invite members' do
- allow(helper).to receive(:directly_invite_members?).and_return(true)
-
- expect(helper.show_invite_members_track_event).to eq 'show_invite_members'
- end
-
- it 'shows values when can indirectly invite members' do
- allow(helper).to receive(:directly_invite_members?).and_return(false)
- allow(helper).to receive(:indirectly_invite_members?).and_return(true)
-
- expect(helper.show_invite_members_track_event).to eq 'show_invite_members_version_b'
- end
- end
-
context 'with project' do
before do
+ allow(helper).to receive(:current_user) { owner }
assign(:project, project)
end
describe "#can_invite_members_for_project?" do
- context 'when the user can_import_members' do
+ context 'when the user can_manage_project_members' do
before do
- allow(helper).to receive(:can_import_members?).and_return(true)
+ allow(helper).to receive(:can_manage_project_members?).and_return(true)
end
it 'returns true' do
expect(helper.can_invite_members_for_project?(project)).to eq true
- expect(helper).to have_received(:can_import_members?)
+ expect(helper).to have_received(:can_manage_project_members?)
end
context 'when feature flag is disabled' do
@@ -50,14 +38,14 @@ RSpec.describe InviteMembersHelper do
it 'returns false' do
expect(helper.can_invite_members_for_project?(project)).to eq false
- expect(helper).not_to have_received(:can_import_members?)
+ expect(helper).not_to have_received(:can_manage_project_members?)
end
end
end
- context 'when the user can not invite members' do
+ context 'when the user can not manage project members' do
before do
- expect(helper).to receive(:can_import_members?).and_return(false)
+ expect(helper).to receive(:can_manage_project_members?).and_return(false)
end
it 'returns false' do
@@ -87,88 +75,11 @@ RSpec.describe InviteMembersHelper do
end
end
end
-
- describe "#indirectly_invite_members?" do
- context 'when a user is a developer' do
- before do
- allow(helper).to receive(:current_user) { developer }
- end
-
- it 'returns false' do
- allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_b) { false }
-
- expect(helper.indirectly_invite_members?).to eq false
- end
-
- it 'returns true' do
- allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_b) { true }
-
- expect(helper.indirectly_invite_members?).to eq true
- end
- end
-
- context 'when a user is an owner' do
- before do
- allow(helper).to receive(:current_user) { owner }
- end
-
- it 'returns false' do
- allow(helper).to receive(:experiment_enabled?).with(:invite_members_version_b) { true }
-
- expect(helper.indirectly_invite_members?).to eq false
- end
- end
- end
end
context 'with group' do
let_it_be(:group) { create(:group) }
- describe "#can_invite_members_for_group?" do
- include Devise::Test::ControllerHelpers
-
- let_it_be(:user) { create(:user) }
-
- before do
- sign_in(user)
- allow(helper).to receive(:current_user) { user }
- end
-
- context 'when the user can_import_members' do
- before do
- allow(helper).to receive(:can?).with(user, :admin_group_member, group).and_return(true)
- end
-
- it 'returns true' do
- expect(helper.can_invite_members_for_group?(group)).to eq true
- expect(helper).to have_received(:can?).with(user, :admin_group_member, group)
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(invite_members_group_modal: false)
- end
-
- it 'returns false' do
- stub_feature_flags(invite_members_group_modal: false)
-
- expect(helper.can_invite_members_for_group?(group)).to eq false
- expect(helper).not_to have_received(:can?)
- end
- end
- end
-
- context 'when the user can not invite members' do
- before do
- expect(helper).to receive(:can?).with(user, :admin_group_member, group).and_return(false)
- end
-
- it 'returns false' do
- expect(helper.can_invite_members_for_group?(group)).to eq false
- end
- end
- end
-
describe "#invite_group_members?" do
context 'when the user is an owner' do
before do
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 54524858962..b0338d80ee7 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -133,13 +133,13 @@ RSpec.describe IssuablesHelper do
it 'returns navigation with badges' do
expect(helper.issuables_state_counter_text(:issues, :opened, true))
- .to eq('<span>Open</span> <span class="badge badge-pill">42</span>')
+ .to eq('<span>Open</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">42</span>')
expect(helper.issuables_state_counter_text(:issues, :closed, true))
- .to eq('<span>Closed</span> <span class="badge badge-pill">42</span>')
+ .to eq('<span>Closed</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">42</span>')
expect(helper.issuables_state_counter_text(:merge_requests, :merged, true))
- .to eq('<span>Merged</span> <span class="badge badge-pill">42</span>')
+ .to eq('<span>Merged</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">42</span>')
expect(helper.issuables_state_counter_text(:merge_requests, :all, true))
- .to eq('<span>All</span> <span class="badge badge-pill">42</span>')
+ .to eq('<span>All</span> <span class="badge badge-muted badge-pill gl-badge gl-tab-counter-badge sm">42</span>')
end
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 21a01f349b5..17e6c75ca27 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -293,23 +293,32 @@ RSpec.describe IssuesHelper do
allow(helper).to receive(:url_for).and_return('#')
expected = {
+ autocomplete_award_emojis_path: autocomplete_award_emojis_path,
+ autocomplete_users_path: autocomplete_users_path(active: true, current_user: true, project_id: project.id, format: :json),
calendar_path: '#',
can_bulk_update: 'true',
can_edit: 'true',
can_import_issues: 'true',
email: current_user&.notification_email,
+ emails_help_page_path: help_page_path('development/emails', anchor: 'email-namespace'),
empty_state_svg_path: '#',
endpoint: expose_path(api_v4_projects_issues_path(id: project.id)),
export_csv_path: export_csv_project_issues_path(project),
- full_path: project.full_path,
has_issues: project_issues(project).exists?.to_s,
import_csv_issues_path: '#',
+ initial_email: project.new_issuable_address(current_user, 'issue'),
is_signed_in: current_user.present?.to_s,
issues_path: project_issues_path(project),
jira_integration_path: help_page_url('user/project/integrations/jira', anchor: 'view-jira-issues'),
+ markdown_help_path: help_page_path('user/markdown'),
max_attachment_size: number_to_human_size(Gitlab::CurrentSettings.max_attachment_size.megabytes),
new_issue_path: new_project_issue_path(project, issue: { assignee_id: finder.assignee.id, milestone_id: finder.milestones.first.id }),
project_import_jira_path: project_import_jira_path(project),
+ project_labels_path: project_labels_path(project, include_ancestor_groups: true, format: :json),
+ project_milestones_path: project_milestones_path(project, format: :json),
+ project_path: project.full_path,
+ quick_actions_help_path: help_page_path('user/project/quick_actions'),
+ reset_path: new_issuable_address_project_path(project, issuable_type: 'issue'),
rss_path: '#',
show_new_issue_link: 'true',
sign_in_path: new_user_session_path
@@ -332,4 +341,65 @@ RSpec.describe IssuesHelper do
end
end
end
+
+ describe '#issue_manual_ordering_class' do
+ context 'when sorting by relative position' do
+ before do
+ assign(:sort, 'relative_position')
+ end
+
+ it 'returns manual ordering class' do
+ expect(helper.issue_manual_ordering_class).to eq("manual-ordering")
+ end
+
+ context 'when manual sorting is disabled' do
+ before do
+ allow(helper).to receive(:issue_repositioning_disabled?).and_return(true)
+ end
+
+ it 'returns nil' do
+ expect(helper.issue_manual_ordering_class).to eq(nil)
+ end
+ end
+ end
+ end
+
+ describe '#issue_repositioning_disabled?' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ subject { helper.issue_repositioning_disabled? }
+
+ context 'for project' do
+ before do
+ assign(:project, project)
+ end
+
+ it { is_expected.to eq(false) }
+
+ context 'when block_issue_repositioning feature flag is enabled' do
+ before do
+ stub_feature_flags(block_issue_repositioning: group)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ context 'for group' do
+ before do
+ assign(:group, group)
+ end
+
+ it { is_expected.to eq(false) }
+
+ context 'when block_issue_repositioning feature flag is enabled' do
+ before do
+ stub_feature_flags(block_issue_repositioning: group)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
end
diff --git a/spec/helpers/learn_gitlab_helper_spec.rb b/spec/helpers/learn_gitlab_helper_spec.rb
index 82c8e4ba596..cf0d329c36f 100644
--- a/spec/helpers/learn_gitlab_helper_spec.rb
+++ b/spec/helpers/learn_gitlab_helper_spec.rb
@@ -7,14 +7,14 @@ RSpec.describe LearnGitlabHelper do
include Devise::Test::ControllerHelpers
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, name: LearnGitlab::PROJECT_NAME, namespace: user.namespace) }
+ let_it_be(:project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME, namespace: user.namespace) }
let_it_be(:namespace) { project.namespace }
before do
project.add_developer(user)
allow(helper).to receive(:user).and_return(user)
- allow_next_instance_of(LearnGitlab) do |learn_gitlab|
+ allow_next_instance_of(LearnGitlab::Project) do |learn_gitlab|
allow(learn_gitlab).to receive(:project).and_return(project)
end
@@ -41,12 +41,12 @@ RSpec.describe LearnGitlabHelper do
it 'sets correct path and completion status' do
expect(onboarding_actions_data[:git_write]).to eq({
- url: project_issue_url(project, LearnGitlabHelper::ACTION_ISSUE_IDS[:git_write]),
+ url: project_issue_url(project, LearnGitlab::Onboarding::ACTION_ISSUE_IDS[:git_write]),
completed: true,
svg: helper.image_path("learn_gitlab/git_write.svg")
})
expect(onboarding_actions_data[:pipeline_created]).to eq({
- url: project_issue_url(project, LearnGitlabHelper::ACTION_ISSUE_IDS[:pipeline_created]),
+ url: project_issue_url(project, LearnGitlab::Onboarding::ACTION_ISSUE_IDS[:pipeline_created]),
completed: false,
svg: helper.image_path("learn_gitlab/pipeline_created.svg")
})
@@ -75,7 +75,7 @@ RSpec.describe LearnGitlabHelper do
before do
stub_experiment_for_subject(learn_gitlab_a: experiment_a, learn_gitlab_b: experiment_b)
allow(OnboardingProgress).to receive(:onboarding?).with(project.namespace).and_return(onboarding)
- allow_next(LearnGitlab, user).to receive(:available?).and_return(learn_gitlab_available)
+ allow_next(LearnGitlab::Project, user).to receive(:available?).and_return(learn_gitlab_available)
end
context 'when signed in' do
@@ -85,10 +85,62 @@ RSpec.describe LearnGitlabHelper do
it { is_expected.to eq(result) }
end
+ end
- context 'when not signed in' do
- it { is_expected.to eq(false) }
+ context 'when not signed in' do
+ before do
+ stub_experiment_for_subject(learn_gitlab_a: true, learn_gitlab_b: true)
end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '.onboarding_sections_data' do
+ subject(:sections) { helper.onboarding_sections_data }
+
+ it 'has the right keys' do
+ expect(sections.keys).to contain_exactly(:deploy, :plan, :workspace)
+ end
+ it 'has the svg' do
+ expect(sections.values.map { |section| section.keys }).to eq([[:svg]] * 3)
+ end
+ end
+
+ describe '.learn_gitlab_experiment_tracking_category' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create(:user) }
+
+ subject { helper.learn_gitlab_experiment_tracking_category }
+
+ where(:experiment_a, :experiment_b, :result) do
+ false | false | nil
+ false | true | 'Growth::Activation::Experiment::LearnGitLabB'
+ true | false | 'Growth::Conversion::Experiment::LearnGitLabA'
+ true | true | 'Growth::Conversion::Experiment::LearnGitLabA'
+ end
+
+ with_them do
+ before do
+ stub_experiment_for_subject(learn_gitlab_a: experiment_a, learn_gitlab_b: experiment_b)
+ end
+
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ context 'when not signed in' do
+ before do
+ stub_experiment_for_subject(learn_gitlab_a: true, learn_gitlab_b: true)
+ end
+
+ it { is_expected.to eq(nil) }
end
end
end
diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb
index 8c08b06d8a8..a8a918cbc74 100644
--- a/spec/helpers/namespaces_helper_spec.rb
+++ b/spec/helpers/namespaces_helper_spec.rb
@@ -265,4 +265,32 @@ RSpec.describe NamespacesHelper do
end
end
end
+
+ describe '#cascading_namespace_setting_locked?' do
+ let(:attribute) { :delayed_project_removal }
+
+ context 'when `group` argument is `nil`' do
+ it 'returns `false`' do
+ expect(helper.cascading_namespace_setting_locked?(attribute, nil)).to eq(false)
+ end
+ end
+
+ context 'when `*_locked?` method does not exist' do
+ it 'returns `false`' do
+ expect(helper.cascading_namespace_setting_locked?(:attribute_that_does_not_exist, admin_group)).to eq(false)
+ end
+ end
+
+ context 'when `*_locked?` method does exist' do
+ before do
+ allow(admin_group.namespace_settings).to receive(:delayed_project_removal_locked?).and_return(true)
+ end
+
+ it 'calls corresponding `*_locked?` method' do
+ helper.cascading_namespace_setting_locked?(attribute, admin_group, include_self: true)
+
+ expect(admin_group.namespace_settings).to have_received(:delayed_project_removal_locked?).with(include_self: true)
+ end
+ end
+ end
end
diff --git a/spec/helpers/nav/top_nav_helper_spec.rb b/spec/helpers/nav/top_nav_helper_spec.rb
new file mode 100644
index 00000000000..5c9e1e82b01
--- /dev/null
+++ b/spec/helpers/nav/top_nav_helper_spec.rb
@@ -0,0 +1,376 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Nav::TopNavHelper do
+ include ActionView::Helpers::UrlHelper
+
+ describe '#top_nav_view_model' do
+ let_it_be(:user) { build_stubbed(:user) }
+ let_it_be(:admin) { build_stubbed(:user, :admin) }
+
+ let(:current_user) { nil }
+ let(:current_project) { nil }
+ let(:current_group) { nil }
+ let(:with_current_settings_admin_mode) { false }
+ let(:with_header_link_admin_mode) { false }
+ let(:with_sherlock_enabled) { false }
+ let(:with_projects) { false }
+ let(:with_groups) { false }
+ let(:with_milestones) { false }
+ let(:with_snippets) { false }
+ let(:with_activity) { false }
+
+ let(:subject) { helper.top_nav_view_model(project: current_project, group: current_group) }
+
+ let(:active_title) { 'Menu' }
+
+ before do
+ allow(helper).to receive(:current_user) { current_user }
+ allow(Gitlab::CurrentSettings).to receive(:admin_mode) { with_current_settings_admin_mode }
+ allow(helper).to receive(:header_link?).with(:admin_mode) { with_header_link_admin_mode }
+ allow(Gitlab::Sherlock).to receive(:enabled?) { with_sherlock_enabled }
+
+ # Defaulting all `dashboard_nav_link?` calls to false ensures the EE-specific behavior
+ # is not enabled in this CE spec
+ allow(helper).to receive(:dashboard_nav_link?).with(anything) { false }
+
+ allow(helper).to receive(:dashboard_nav_link?).with(:projects) { with_projects }
+ allow(helper).to receive(:dashboard_nav_link?).with(:groups) { with_groups }
+ allow(helper).to receive(:dashboard_nav_link?).with(:milestones) { with_milestones }
+ allow(helper).to receive(:dashboard_nav_link?).with(:snippets) { with_snippets }
+ allow(helper).to receive(:dashboard_nav_link?).with(:activity) { with_activity }
+ end
+
+ it 'has :activeTitle' do
+ expect(subject[:activeTitle]).to eq(active_title)
+ end
+
+ context 'when current_user is nil (anonymous)' do
+ it 'has expected :primary' do
+ expected_projects_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/explore',
+ icon: 'project',
+ id: 'project',
+ title: 'Projects'
+ )
+ expected_groups_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/explore/groups',
+ icon: 'group',
+ id: 'groups',
+ title: 'Groups'
+ )
+ expected_snippets_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/explore/snippets',
+ icon: 'snippet',
+ id: 'snippets',
+ title: 'Snippets'
+ )
+ expect(subject[:primary])
+ .to eq([
+ expected_projects_item,
+ expected_groups_item,
+ expected_snippets_item
+ ])
+ end
+ end
+
+ context 'when current_user is non-admin' do
+ let(:current_user) { user }
+
+ it 'has no menu items or views by default' do
+ expect(subject).to eq({ activeTitle: active_title,
+ primary: [],
+ secondary: [],
+ views: {} })
+ end
+
+ context 'with projects' do
+ let(:with_projects) { true }
+ let(:projects_view) { subject[:views][:projects] }
+
+ it 'has expected :primary' do
+ expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
+ css_class: 'qa-projects-dropdown',
+ data: {
+ track_event: 'click_dropdown',
+ track_experiment: 'new_repo',
+ track_label: 'projects_dropdown'
+ },
+ icon: 'project',
+ id: 'project',
+ title: 'Projects',
+ view: 'projects'
+ )
+ expect(subject[:primary]).to eq([expected_primary])
+ end
+
+ context 'projects' do
+ it 'has expected :currentUserName' do
+ expect(projects_view[:currentUserName]).to eq(current_user.username)
+ end
+
+ it 'has expected :namespace' do
+ expect(projects_view[:namespace]).to eq('projects')
+ end
+
+ it 'has expected :linksPrimary' do
+ expected_links_primary = [
+ ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/dashboard/projects',
+ id: 'your',
+ title: 'Your projects'
+ ),
+ ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/dashboard/projects/starred',
+ id: 'starred',
+ title: 'Starred projects'
+ ),
+ ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/explore',
+ id: 'explore',
+ title: 'Explore projects'
+ )
+ ]
+ expect(projects_view[:linksPrimary]).to eq(expected_links_primary)
+ end
+
+ it 'has expected :linksSecondary' do
+ expected_links_secondary = [
+ ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/projects/new',
+ id: 'create',
+ title: 'Create new project'
+ )
+ ]
+ expect(projects_view[:linksSecondary]).to eq(expected_links_secondary)
+ end
+
+ context 'with persisted project' do
+ let_it_be(:project) { build_stubbed(:project) }
+
+ let(:current_project) { project }
+ let(:avatar_url) { 'project_avatar_url' }
+
+ before do
+ allow(project).to receive(:persisted?) { true }
+ allow(project).to receive(:avatar_url) { avatar_url }
+ end
+
+ it 'has project as :currentItem' do
+ expected_container = {
+ avatarUrl: avatar_url,
+ id: project.id,
+ name: project.name,
+ namespace: project.full_name,
+ webUrl: project_path(project)
+ }
+
+ expect(projects_view[:currentItem]).to eq(expected_container)
+ end
+ end
+ end
+ end
+
+ context 'with groups' do
+ let(:with_groups) { true }
+ let(:groups_view) { subject[:views][:groups] }
+
+ it 'has expected :primary' do
+ expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
+ css_class: 'qa-groups-dropdown',
+ data: {
+ track_event: 'click_dropdown',
+ track_label: 'groups_dropdown'
+ },
+ icon: 'group',
+ id: 'groups',
+ title: 'Groups',
+ view: 'groups'
+ )
+ expect(subject[:primary]).to eq([expected_primary])
+ end
+
+ context 'groups' do
+ it 'has expected :currentUserName' do
+ expect(groups_view[:currentUserName]).to eq(current_user.username)
+ end
+
+ it 'has expected :namespace' do
+ expect(groups_view[:namespace]).to eq('groups')
+ end
+
+ it 'has expected :linksPrimary' do
+ expected_links_primary = [
+ ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/dashboard/groups',
+ id: 'your',
+ title: 'Your groups'
+ ),
+ ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/explore/groups',
+ id: 'explore',
+ title: 'Explore groups'
+ )
+ ]
+ expect(groups_view[:linksPrimary]).to eq(expected_links_primary)
+ end
+
+ it 'has expected :linksSecondary' do
+ expected_links_secondary = [
+ ::Gitlab::Nav::TopNavMenuItem.build(
+ href: '/groups/new#create-group-pane',
+ id: 'create',
+ title: 'Create group'
+ )
+ ]
+ expect(groups_view[:linksSecondary]).to eq(expected_links_secondary)
+ end
+
+ context 'with persisted group' do
+ let_it_be(:group) { build_stubbed(:group) }
+
+ let(:current_group) { group }
+ let(:avatar_url) { 'group_avatar_url' }
+
+ before do
+ allow(group).to receive(:persisted?) { true }
+ allow(group).to receive(:avatar_url) { avatar_url }
+ end
+
+ it 'has expected :currentItem' do
+ expected_container = {
+ avatarUrl: avatar_url,
+ id: group.id,
+ name: group.name,
+ namespace: group.full_name,
+ webUrl: group_path(group)
+ }
+
+ expect(groups_view[:currentItem]).to eq(expected_container)
+ end
+ end
+ end
+ end
+
+ context 'with milestones' do
+ let(:with_milestones) { true }
+
+ it 'has expected :primary' do
+ expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'milestones_link'
+ },
+ href: '/dashboard/milestones',
+ icon: 'clock',
+ id: 'milestones',
+ title: 'Milestones'
+ )
+ expect(subject[:primary]).to eq([expected_primary])
+ end
+ end
+
+ context 'with snippets' do
+ let(:with_snippets) { true }
+
+ it 'has expected :primary' do
+ expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'snippets_link'
+ },
+ href: '/dashboard/snippets',
+ icon: 'snippet',
+ id: 'snippets',
+ title: 'Snippets'
+ )
+ expect(subject[:primary]).to eq([expected_primary])
+ end
+ end
+
+ context 'with activity' do
+ let(:with_activity) { true }
+
+ it 'has expected :primary' do
+ expected_primary = ::Gitlab::Nav::TopNavMenuItem.build(
+ data: {
+ qa_selector: 'activity_link'
+ },
+ href: '/dashboard/activity',
+ icon: 'history',
+ id: 'activity',
+ title: 'Activity'
+ )
+ expect(subject[:primary]).to eq([expected_primary])
+ end
+ end
+
+ context 'when sherlock is enabled' do
+ let(:with_sherlock_enabled) { true }
+
+ before do
+ # Note: We have to mock the sherlock route because the route is conditional on
+ # sherlock being enabled, but it is parsed at Rails load time and can't be
+ # overridden in a spec.
+ allow(helper).to receive(:sherlock_transactions_path) { '/fake_sherlock_path' }
+ end
+
+ it 'has sherlock as last :secondary item' do
+ expected_sherlock_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ id: 'sherlock',
+ title: 'Sherlock Transactions',
+ icon: 'admin',
+ href: '/fake_sherlock_path'
+ )
+ expect(subject[:secondary].last).to eq(expected_sherlock_item)
+ end
+ end
+ end
+
+ context 'when current_user is admin' do
+ let_it_be(:current_user) { admin }
+
+ let(:with_current_settings_admin_mode) { true }
+
+ it 'has admin as first :secondary item' do
+ expected_admin_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ id: 'admin',
+ title: 'Admin',
+ icon: 'admin',
+ href: '/admin',
+ css_class: 'qa-admin-area-link'
+ )
+
+ expect(subject[:secondary].first).to eq(expected_admin_item)
+ end
+
+ context 'with header link admin_mode true' do
+ let(:with_header_link_admin_mode) { true }
+
+ it 'has leave_admin_mode as last :secondary item' do
+ expected_leave_admin_mode_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ id: 'leave_admin_mode',
+ title: 'Leave Admin Mode',
+ icon: 'lock-open',
+ href: '/admin/session/destroy',
+ method: :post
+ )
+ expect(subject[:secondary].last).to eq(expected_leave_admin_mode_item)
+ end
+ end
+
+ context 'with header link admin_mode false' do
+ let(:with_header_link_admin_mode) { false }
+
+ it 'has enter_admin_mode as last :secondary item' do
+ expected_enter_admin_mode_item = ::Gitlab::Nav::TopNavMenuItem.build(
+ id: 'enter_admin_mode',
+ title: 'Enter Admin Mode',
+ icon: 'lock',
+ href: '/admin/session/new'
+ )
+ expect(subject[:secondary].last).to eq(expected_enter_admin_mode_item)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 2efff3402c5..4c5f440b8a3 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -115,6 +115,10 @@ RSpec.describe NavHelper do
describe '.group_issues_sub_menu_items' do
subject { helper.group_issues_sub_menu_items }
+ before do
+ allow(helper).to receive(:current_user).and_return(nil)
+ end
+
it { is_expected.to all(be_a(String)) }
end
diff --git a/spec/helpers/page_layout_helper_spec.rb b/spec/helpers/page_layout_helper_spec.rb
index d03e39f2051..d261fb43bb6 100644
--- a/spec/helpers/page_layout_helper_spec.rb
+++ b/spec/helpers/page_layout_helper_spec.rb
@@ -232,7 +232,6 @@ RSpec.describe PageLayoutHelper do
is_expected.to eq({
current_emoji: '',
current_message: '',
- can_set_user_availability: true,
default_emoji: UserStatus::DEFAULT_EMOJI
})
end
@@ -251,7 +250,6 @@ RSpec.describe PageLayoutHelper do
current_availability: 'busy',
current_emoji: 'basketball',
current_message: 'Some message',
- can_set_user_availability: true,
default_emoji: UserStatus::DEFAULT_EMOJI
})
end
diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb
index 4d7083c4ca7..6be6d3670d4 100644
--- a/spec/helpers/preferences_helper_spec.rb
+++ b/spec/helpers/preferences_helper_spec.rb
@@ -121,6 +121,20 @@ RSpec.describe PreferencesHelper do
end
end
+ describe '#language_choices' do
+ include StubLanguagesTranslationPercentage
+
+ it 'lists all the selectable language options with their translation percent' do
+ stub_languages_translation_percentage(en: 100, es: 65)
+ stub_user(preferred_language: :en)
+
+ expect(helper.language_choices).to eq([
+ '<option selected="selected" value="en">English (100% translated)</option>',
+ '<option value="es">Spanish - español (65% translated)</option>'
+ ].join("\n"))
+ end
+ end
+
def stub_user(messages = {})
if messages.empty?
allow(helper).to receive(:current_user).and_return(nil)
diff --git a/spec/helpers/projects/alert_management_helper_spec.rb b/spec/helpers/projects/alert_management_helper_spec.rb
index e836461b099..6f66a93b9ec 100644
--- a/spec/helpers/projects/alert_management_helper_spec.rb
+++ b/spec/helpers/projects/alert_management_helper_spec.rb
@@ -34,6 +34,7 @@ RSpec.describe Projects::AlertManagementHelper do
'empty-alert-svg-path' => match_asset_path('/assets/illustrations/alert-management-empty-state.svg'),
'user-can-enable-alert-management' => 'true',
'alert-management-enabled' => 'false',
+ 'has-managed-prometheus' => 'false',
'text-query': nil,
'assignee-username-query': nil
)
@@ -43,25 +44,53 @@ RSpec.describe Projects::AlertManagementHelper do
context 'with prometheus service' do
let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
- context 'when prometheus service is active' do
- it 'enables alert management' do
+ context 'when manual prometheus service is active' do
+ it "enables alert management and doesn't show managed prometheus" do
+ prometheus_service.update!(manual_configuration: true)
+
expect(data).to include(
'alert-management-enabled' => 'true'
)
+ expect(data).to include(
+ 'has-managed-prometheus' => 'false'
+ )
+ end
+ end
+
+ context 'when a cluster prometheus is available' do
+ let(:cluster) { create(:cluster, projects: [project]) }
+
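+ # A cluster-installed Prometheus app (rather than the manual integration) is what
+ # should flip the `has-managed-prometheus` data attribute to 'true'.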
+ it 'has managed prometheus' do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+
+ expect(data).to include(
+ 'has-managed-prometheus' => 'true'
+ )
end
end
context 'when prometheus service is inactive' do
- it 'disables alert management' do
+ it 'disables alert management and hides managed prometheus' do
prometheus_service.update!(manual_configuration: false)
expect(data).to include(
'alert-management-enabled' => 'false'
)
+ expect(data).to include(
+ 'has-managed-prometheus' => 'false'
+ )
end
end
end
+ context 'without prometheus service' do
+ it "doesn't have managed prometheus" do
+ expect(data).to include(
+ 'has-managed-prometheus' => 'false'
+ )
+ end
+ end
+
context 'with http integration' do
let_it_be(:integration) { create(:alert_management_http_integration, project: project) }
diff --git a/spec/helpers/projects/project_members_helper_spec.rb b/spec/helpers/projects/project_members_helper_spec.rb
index 0e08a18f912..90035f3e1c5 100644
--- a/spec/helpers/projects/project_members_helper_spec.rb
+++ b/spec/helpers/projects/project_members_helper_spec.rb
@@ -147,28 +147,64 @@ RSpec.describe Projects::ProjectMembersHelper do
end
describe 'project members' do
- let_it_be(:project_members) { create_list(:project_member, 1, project: project) }
+ let_it_be(:project_members) { create_list(:project_member, 2, project: project) }
- describe '#project_members_data_json' do
- it 'matches json schema' do
- expect(helper.project_members_data_json(project, present_members(project_members))).to match_schema('members')
- end
- end
+ let(:collection) { project_members }
+ let(:presented_members) { present_members(collection) }
- describe '#project_members_list_data_attributes' do
+ describe '#project_members_list_data_json' do
let(:allow_admin_project) { true }
+ let(:pagination) { {} }
+
+ subject { Gitlab::Json.parse(helper.project_members_list_data_json(project, presented_members, pagination)) }
before do
allow(helper).to receive(:project_project_member_path).with(project, ':id').and_return('/foo-bar/-/project_members/:id')
end
- it 'returns expected hash' do
- expect(helper.project_members_list_data_attributes(project, present_members(project_members))).to include({
- members: helper.project_members_data_json(project, present_members(project_members)),
+ it 'returns expected json' do
+ expected = {
member_path: '/foo-bar/-/project_members/:id',
source_id: project.id,
- can_manage_members: 'true'
- })
+ can_manage_members: true
+ }.as_json
+
+ expect(subject).to include(expected)
+ end
+
+ it 'returns `members` property that matches json schema' do
+ expect(subject['members'].to_json).to match_schema('members')
+ end
+
+ context 'when pagination is not available' do
+ it 'sets `pagination` attribute to expected json' do
+ expected = {
+ current_page: nil,
+ per_page: nil,
+ total_items: 2,
+ param_name: nil,
+ params: {}
+ }.as_json
+
+ expect(subject['pagination']).to include(expected)
+ end
+ end
+
+ context 'when pagination is available' do
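+ # Kaminari.paginate_array wraps the plain member list in a paginatable collection,
+ # which is presumably where the helper reads current_page/per_page/total_items from.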
+ let(:collection) { Kaminari.paginate_array(project_members).page(1).per(1) }
+ let(:pagination) { { param_name: :page, params: { search_groups: nil } } }
+
+ it 'sets `pagination` attribute to expected json' do
+ expected = {
+ current_page: 1,
+ per_page: 1,
+ total_items: 2,
+ param_name: :page,
+ params: { search_groups: nil }
+ }.as_json
+
+ expect(subject['pagination']).to match(expected)
+ end
end
end
end
@@ -178,25 +214,33 @@ RSpec.describe Projects::ProjectMembersHelper do
let(:allow_admin_project) { true }
- describe '#project_group_links_data_json' do
- it 'matches json schema' do
- expect(helper.project_group_links_data_json(project_group_links)).to match_schema('group_link/project_group_links')
- end
- end
+ describe '#project_group_links_list_data_json' do
+ subject { Gitlab::Json.parse(helper.project_group_links_list_data_json(project, project_group_links)) }
- describe '#project_group_links_list_data_attributes' do
before do
allow(helper).to receive(:project_group_link_path).with(project, ':id').and_return('/foo-bar/-/group_links/:id')
allow(helper).to receive(:can?).with(current_user, :admin_project_member, project).and_return(true)
end
- it 'returns expected hash' do
- expect(helper.project_group_links_list_data_attributes(project, project_group_links)).to include({
- members: helper.project_group_links_data_json(project_group_links),
+ it 'returns expected json' do
+ expected = {
+ pagination: {
+ current_page: nil,
+ per_page: nil,
+ total_items: 1,
+ param_name: nil,
+ params: {}
+ },
member_path: '/foo-bar/-/group_links/:id',
source_id: project.id,
- can_manage_members: 'true'
- })
+ can_manage_members: true
+ }.as_json
+
+ expect(subject).to include(expected)
+ end
+
+ it 'returns `members` property that matches json schema' do
+ expect(subject['members'].to_json).to match_schema('group_link/project_group_links')
end
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 124cdcec05d..1804a9a99cf 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -390,149 +390,6 @@ RSpec.describe ProjectsHelper do
end
end
- describe '#get_project_nav_tabs' do
- before do
- allow(helper).to receive(:current_user).and_return(user)
- allow(helper).to receive(:can?) { true }
- end
-
- subject do
- helper.send(:get_project_nav_tabs, project, user)
- end
-
- context 'Security & Compliance tabs' do
- before do
- allow(helper).to receive(:can?).with(user, :read_security_configuration, project).and_return(can_read_security_configuration)
- end
-
- context 'when user cannot read security configuration' do
- let(:can_read_security_configuration) { false }
-
- it { is_expected.not_to include(:security_configuration) }
- end
-
- context 'when user can read security configuration' do
- let(:can_read_security_configuration) { true }
- let(:feature_flag_enabled) { true }
-
- it { is_expected.to include(:security_configuration) }
- end
- end
-
- context 'when builds feature is enabled' do
- before do
- allow(project).to receive(:builds_enabled?).and_return(true)
- end
-
- it "does include pipelines tab" do
- is_expected.to include(:pipelines)
- end
- end
-
- context 'when builds feature is disabled' do
- before do
- allow(project).to receive(:builds_enabled?).and_return(false)
- end
-
- context 'when user has access to builds' do
- it "does include pipelines tab" do
- is_expected.to include(:pipelines)
- end
- end
-
- context 'when user does not have access to builds' do
- before do
- allow(helper).to receive(:can?) { false }
- end
-
- it "does not include pipelines tab" do
- is_expected.not_to include(:pipelines)
- end
- end
- end
-
- context 'when project has external wiki' do
- it 'includes external wiki tab' do
- project.create_external_wiki_service(active: true, properties: { 'external_wiki_url' => 'https://gitlab.com' })
- project.reload
-
- is_expected.to include(:external_wiki)
- end
- end
-
- context 'when project does not have external wiki' do
- it 'does not include external wiki tab' do
- expect(project.external_wiki).to be_nil
- is_expected.not_to include(:external_wiki)
- end
- end
-
- context 'when project has confluence enabled' do
- before do
- allow(project).to receive(:has_confluence?).and_return(true)
- end
-
- it { is_expected.to include(:confluence) }
- it { is_expected.not_to include(:wiki) }
- end
-
- context 'when project does not have confluence enabled' do
- it { is_expected.not_to include(:confluence) }
- it { is_expected.to include(:wiki) }
- end
-
- context 'learn gitlab experiment' do
- context 'when it is enabled' do
- before do
- expect(helper).to receive(:learn_gitlab_experiment_enabled?).with(project).and_return(true)
- end
-
- it { is_expected.to include(:learn_gitlab) }
- end
-
- context 'when it is not enabled' do
- it { is_expected.not_to include(:learn_gitlab) }
- end
- end
- end
-
- describe '#can_view_operations_tab?' do
- before do
- allow(helper).to receive(:current_user).and_return(user)
- allow(helper).to receive(:can?).and_return(false)
- end
-
- subject { helper.send(:can_view_operations_tab?, user, project) }
-
- where(:ability) do
- [
- :metrics_dashboard,
- :read_alert_management_alert,
- :read_environment,
- :read_issue,
- :read_sentry_issue,
- :read_cluster
- ]
- end
-
- with_them do
- it 'includes operations tab' do
- allow(helper).to receive(:can?).with(user, ability, project).and_return(true)
-
- is_expected.to be(true)
- end
-
- context 'when operations feature is disabled' do
- it 'does not include operations tab' do
- allow(helper).to receive(:can?).with(user, ability, project).and_return(true)
- project.project_feature.update_attribute(:operations_access_level, ProjectFeature::DISABLED)
-
- is_expected.to be(false)
- end
- end
- end
- end
-
describe '#show_projects' do
let(:projects) do
Project.all
diff --git a/spec/helpers/registrations_helper_spec.rb b/spec/helpers/registrations_helper_spec.rb
new file mode 100644
index 00000000000..00d0a0850cd
--- /dev/null
+++ b/spec/helpers/registrations_helper_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RegistrationsHelper do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '#social_signin_enabled?' do
+ before do
+ allow(::Gitlab).to receive(:dev_env_or_com?).and_return(com)
+ allow(view).to receive(:omniauth_enabled?).and_return(omniauth_enabled)
+ allow(view).to receive(:button_based_providers_enabled?).and_return(button_based_providers_enabled)
+ allow(view).to receive(:devise_mapping).and_return(double(omniauthable?: omniauthable))
+ end
+
+ subject { helper.social_signin_enabled? }
+
+ where com: [true, false],
+ omniauth_enabled: [true, false],
+ omniauthable: [true, false],
+ button_based_providers_enabled: [true, false]
+
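+ # The array-valued `where` above expands into the full 16-row cartesian product of the
+ # four flags; social sign-in should only count as enabled when every one of them is true.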
+ with_them do
+ let(:result) { com && omniauth_enabled && button_based_providers_enabled && omniauthable }
+
+ it { is_expected.to eq(result) }
+ end
+ end
+end
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index f0f09408249..862fd58df04 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -136,6 +136,16 @@ RSpec.describe UsersHelper do
end
end
+ context 'with a banned user' do
+ it 'returns the banned badge' do
+ banned_user = create(:user, :banned)
+
+ badges = helper.user_badges_in_admin_section(banned_user)
+
+ expect(filter_ee_badges(badges)).to eq([text: 'Banned', variant: 'danger'])
+ end
+ end
+
context 'with an admin user' do
it "returns the admin badge" do
admin_user = create(:admin)
@@ -160,7 +170,7 @@ RSpec.describe UsersHelper do
it 'returns the "It\'s You" badge' do
badges = helper.user_badges_in_admin_section(user)
- expect(filter_ee_badges(badges)).to eq([text: "It's you!", variant: nil])
+ expect(filter_ee_badges(badges)).to eq([text: "It's you!", variant: "muted"])
end
end
diff --git a/spec/helpers/webpack_helper_spec.rb b/spec/helpers/webpack_helper_spec.rb
new file mode 100644
index 00000000000..f9386c99dc3
--- /dev/null
+++ b/spec/helpers/webpack_helper_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebpackHelper do
+ let(:source) { 'foo.js' }
+ let(:asset_path) { "/assets/webpack/#{source}" }
+
+ describe '#prefetch_link_tag' do
+ it 'returns prefetch link tag' do
+ expect(helper.prefetch_link_tag(source)).to eq("<link rel=\"prefetch\" href=\"/#{source}\">")
+ end
+ end
+
+ describe '#webpack_preload_asset_tag' do
+ before do
+ allow(Gitlab::Webpack::Manifest).to receive(:asset_paths).and_return([asset_path])
+ end
+
+ it 'preloads the resource by default' do
+ expect(helper).to receive(:preload_link_tag).with(asset_path, {}).and_call_original
+
+ output = helper.webpack_preload_asset_tag(source)
+
+ expect(output).to eq("<link rel=\"preload\" href=\"#{asset_path}\" as=\"script\" type=\"text/javascript\">")
+ end
+
+ it 'prefetches the resource if explicitly asked' do
+ expect(helper).to receive(:prefetch_link_tag).with(asset_path).and_call_original
+
+ output = helper.webpack_preload_asset_tag(source, prefetch: true)
+
+ expect(output).to eq("<link rel=\"prefetch\" href=\"#{asset_path}\">")
+ end
+ end
+end
diff --git a/spec/helpers/whats_new_helper_spec.rb b/spec/helpers/whats_new_helper_spec.rb
index 0e4b4621560..9ae7ef38736 100644
--- a/spec/helpers/whats_new_helper_spec.rb
+++ b/spec/helpers/whats_new_helper_spec.rb
@@ -59,5 +59,62 @@ RSpec.describe WhatsNewHelper do
expect(subject).to be false
end
end
+
+ context 'depending on whats_new_variant' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:variant, :result) do
+ :all_tiers | true
+ :current_tier | true
+ :disabled | false
+ end
+
+ with_them do
+ it 'returns correct result depending on variant' do
+ allow(Gitlab).to receive(:dev_env_org_or_com?).and_return(true)
+ Gitlab::CurrentSettings.update!(whats_new_variant: ApplicationSetting.whats_new_variants[variant])
+
+ expect(subject).to eq(result)
+ end
+ end
+ end
+ end
+
+ describe '#whats_new_variants' do
+ it 'returns ApplicationSetting.whats_new_variants' do
+ expect(helper.whats_new_variants).to eq(ApplicationSetting.whats_new_variants)
+ end
+ end
+
+ describe '#whats_new_variants_label' do
+ let(:labels) do
+ [
+ helper.whats_new_variants_label('all_tiers'),
+ helper.whats_new_variants_label('current_tier'),
+ helper.whats_new_variants_label('disabled'),
+ helper.whats_new_variants_label(nil)
+ ]
+ end
+
+ it 'returns different labels depending on variant' do
+ expect(labels.uniq.size).to eq(labels.size)
+ expect(labels[3]).to be_nil
+ end
+ end
+
+ describe '#whats_new_variants_description' do
+ let(:descriptions) do
+ [
+ helper.whats_new_variants_description('all_tiers'),
+ helper.whats_new_variants_description('current_tier'),
+ helper.whats_new_variants_description('disabled'),
+ helper.whats_new_variants_description(nil)
+ ]
+ end
+
+ it 'returns different descriptions depending on variant' do
+ expect(descriptions.uniq.size).to eq(descriptions.size)
+ expect(descriptions[3]).to be_nil
+ end
end
end
diff --git a/spec/initializers/6_validations_spec.rb b/spec/initializers/6_validations_spec.rb
index b909fc9db0a..cdd96640933 100644
--- a/spec/initializers/6_validations_spec.rb
+++ b/spec/initializers/6_validations_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative '../../config/initializers/6_validations.rb'
+require_relative '../../config/initializers/6_validations'
RSpec.describe '6_validations' do
describe 'validate_storages_config' do
diff --git a/spec/initializers/mail_encoding_patch_spec.rb b/spec/initializers/mail_encoding_patch_spec.rb
index efacaf6b1b4..52a0d041f48 100644
--- a/spec/initializers/mail_encoding_patch_spec.rb
+++ b/spec/initializers/mail_encoding_patch_spec.rb
@@ -3,7 +3,7 @@
require 'fast_spec_helper'
require 'mail'
-require_relative '../../config/initializers/mail_encoding_patch.rb'
+require_relative '../../config/initializers/mail_encoding_patch'
RSpec.describe 'Mail quoted-printable transfer encoding patch and Unicode characters' do
shared_examples 'email encoding' do |email|
diff --git a/spec/initializers/pages_storage_check_spec.rb b/spec/initializers/pages_storage_check_spec.rb
index a76002dbdcf..e81802ae669 100644
--- a/spec/initializers/pages_storage_check_spec.rb
+++ b/spec/initializers/pages_storage_check_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'pages storage check' do
context 'when local store does not exist yet' do
before do
- Settings.pages['local_store'] = nil
+ stub_config(pages: { enabled: true, local_store: nil })
end
it { is_expected.to be_truthy }
@@ -17,78 +17,42 @@ RSpec.describe 'pages storage check' do
context 'when pages is not enabled' do
before do
- Settings.pages['enabled'] = false
+ stub_config(pages: { enabled: false })
end
it { is_expected.to be_truthy }
end
context 'when pages is enabled' do
- before do
- Settings.pages['enabled'] = true
- Settings.pages['local_store'] = Settingslogic.new({})
+ using RSpec::Parameterized::TableSyntax
+
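+ # Only the rows where neither local storage nor object storage is enabled (both values
+ # falsey) should raise; truthy integers are treated the same as booleans.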
+ where(:local_storage_enabled, :object_storage_enabled, :raises_exception) do
+ false | false | true
+ false | true | false
+ true | false | false
+ true | true | false
+ 1 | 0 | false
+ nil | nil | true
end
- context 'when pages object storage is not enabled' do
+ with_them do
before do
- Settings.pages['object_store']['enabled'] = false
+ stub_config(
+ pages: {
+ enabled: true,
+ local_store: { enabled: local_storage_enabled },
+ object_store: { enabled: object_storage_enabled }
+ }
+ )
end
- context 'when pages local storage is not enabled' do
- it 'raises an exception' do
- Settings.pages['local_store']['enabled'] = false
-
+ it 'validates pages storage configuration' do
+ if raises_exception
expect { subject }.to raise_error(main_error_message)
- end
- end
-
- context 'when pages local storage is enabled' do
- it 'is true' do
- Settings.pages['local_store']['enabled'] = true
-
- expect(subject).to be_truthy
- end
- end
- end
-
- context 'when pages object storage is enabled' do
- before do
- Settings.pages['object_store']['enabled'] = true
- end
-
- context 'when pages local storage is not enabled' do
- it 'is true' do
- Settings.pages['local_store']['enabled'] = false
-
+ else
expect(subject).to be_truthy
end
end
-
- context 'when pages local storage is enabled' do
- it 'is true' do
- Settings.pages['local_store']['enabled'] = true
-
- expect(subject).to be_truthy
- end
- end
- end
-
- context 'when using integers instead of booleans' do
- it 'is true' do
- Settings.pages['local_store']['enabled'] = 1
- Settings.pages['object_store']['enabled'] = 0
-
- expect(subject).to be_truthy
- end
- end
-
- context 'when both enabled attributes are not set' do
- it 'raises an exception' do
- Settings.pages['local_store']['enabled'] = nil
- Settings.pages['object_store']['enabled'] = nil
-
- expect { subject }.to raise_error(main_error_message)
- end
end
end
end
diff --git a/spec/initializers/secret_token_spec.rb b/spec/initializers/secret_token_spec.rb
index ab16dbad3fc..2c396a18361 100644
--- a/spec/initializers/secret_token_spec.rb
+++ b/spec/initializers/secret_token_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe 'create_tokens' do
it 'writes the secrets to secrets.yml' do
expect(File).to receive(:write).with('config/secrets.yml', any_args) do |filename, contents, options|
- new_secrets = YAML.load(contents)[Rails.env]
+ new_secrets = YAML.safe_load(contents)[Rails.env]
expect(new_secrets['secret_key_base']).to eq(secrets.secret_key_base)
expect(new_secrets['otp_key_base']).to eq(secrets.otp_key_base)
@@ -179,7 +179,7 @@ RSpec.describe 'create_tokens' do
it 'uses the file secret' do
expect(File).to receive(:write) do |filename, contents, options|
- new_secrets = YAML.load(contents)[Rails.env]
+ new_secrets = YAML.safe_load(contents)[Rails.env]
expect(new_secrets['secret_key_base']).to eq('file_key')
expect(new_secrets['otp_key_base']).to eq('file_key')
diff --git a/spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js b/spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js
index ec8d2778c1f..b85f50ec998 100644
--- a/spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js
+++ b/spec/javascripts/monitoring/components/dashboard_resize_browser_spec.js
@@ -61,6 +61,7 @@ describe('Dashboard', () => {
showPanels: true,
},
store,
+ provide: { hasManagedPrometheus: false },
});
setupStoreWithData(component.$store);
diff --git a/spec/lib/api/entities/bulk_imports/export_status_spec.rb b/spec/lib/api/entities/bulk_imports/export_status_spec.rb
new file mode 100644
index 00000000000..7d79e372027
--- /dev/null
+++ b/spec/lib/api/entities/bulk_imports/export_status_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::BulkImports::ExportStatus do
+ let_it_be(:export) { create(:bulk_import_export) }
+
+ let(:entity) { described_class.new(export, request: double) }
+
+ subject { entity.as_json }
+
+ it 'has the correct attributes' do
+ expect(subject).to eq({
+ relation: export.relation,
+ status: export.status,
+ error: export.error,
+ updated_at: export.updated_at
+ })
+ end
+end
diff --git a/spec/lib/api/entities/release_spec.rb b/spec/lib/api/entities/release_spec.rb
index 06062634015..4f40830a15c 100644
--- a/spec/lib/api/entities/release_spec.rb
+++ b/spec/lib/api/entities/release_spec.rb
@@ -54,18 +54,41 @@ RSpec.describe API::Entities::Release do
subject(:description_html) { entity.as_json['description_html'] }
- it 'renders special references if current user has access' do
- project.add_reporter(user)
+ it 'is nil' do
+ expect(description_html).to be_nil
+ end
+
+ context 'when remove_description_html_in_release_api feature flag is disabled' do
+ before do
+ stub_feature_flags(remove_description_html_in_release_api: false)
+ end
+
+ it 'renders special references if current user has access' do
+ project.add_reporter(user)
+
+ expect(description_html).to include(issue_path)
+ expect(description_html).to include(issue_title)
+ end
- expect(description_html).to include(issue_path)
- expect(description_html).to include(issue_title)
+ it 'does not render special references if current user has no access' do
+ project.add_guest(user)
+
+ expect(description_html).not_to include(issue_path)
+ expect(description_html).not_to include(issue_title)
+ end
end
- it 'does not render special references if current user has no access' do
- project.add_guest(user)
+ context 'when remove_description_html_in_release_api_override feature flag is enabled' do
+ before do
+ stub_feature_flags(remove_description_html_in_release_api_override: project)
+ end
- expect(description_html).not_to include(issue_path)
- expect(description_html).not_to include(issue_title)
+ it 'renders special references if current user has access' do
+ project.add_reporter(user)
+
+ expect(description_html).to include(issue_path)
+ expect(description_html).to include(issue_title)
+ end
end
end
end
diff --git a/spec/lib/api/helpers/caching_spec.rb b/spec/lib/api/helpers/caching_spec.rb
index a8cd061e123..f94c44c7382 100644
--- a/spec/lib/api/helpers/caching_spec.rb
+++ b/spec/lib/api/helpers/caching_spec.rb
@@ -2,34 +2,46 @@
require "spec_helper"
-RSpec.describe API::Helpers::Caching do
+RSpec.describe API::Helpers::Caching, :use_clean_rails_redis_caching do
subject(:instance) { Class.new.include(described_class).new }
- describe "#present_cached" do
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
- let(:presenter) { API::Entities::Todo }
+ let(:presenter) { API::Entities::Todo }
- let(:kwargs) do
- {
- with: presenter,
- project: project
- }
+ let(:return_value) do
+ {
+ foo: "bar"
+ }
+ end
+
+ let(:kwargs) do
+ {
+ expires_in: 1.minute
+ }
+ end
+
+ before do
+ # We have to stub #body as it's a Grape method
+ # unavailable in the module by itself
+ allow(instance).to receive(:body) do |data|
+ data
end
+ allow(instance).to receive(:current_user) { user }
+ end
+
+ describe "#present_cached" do
subject do
instance.present_cached(presentable, **kwargs)
end
- before do
- # We have to stub #body as it's a Grape method
- # unavailable in the module by itself
- expect(instance).to receive(:body) do |data|
- data
- end
-
- allow(instance).to receive(:current_user) { user }
+ let(:kwargs) do
+ {
+ with: presenter,
+ project: project
+ }
end
context "single object" do
@@ -136,4 +148,116 @@ RSpec.describe API::Helpers::Caching do
end
end
end
+
+ describe "#cache_action" do
+ def perform
+ instance.cache_action(cache_key, **kwargs) do
+ expensive_thing.do_very_expensive_action
+ end
+ end
+
+ subject { perform }
+
+ let(:expensive_thing) { double(do_very_expensive_action: return_value) }
+ let(:cache_key) do
+ [user, :foo]
+ end
+
+ it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
+
+ it "represents the correct data" do
+ expect(subject.to_s).to eq(Gitlab::Json.dump(return_value).to_s)
+ end
+
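+ # Every repeat goes through `cache.fetch` with the same key, so only the first (miss)
+ # should execute the expensive block; the remaining four are expected to be cache hits.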
+ it "only calls the expensive action once" do
+ expected_kwargs = described_class::DEFAULT_CACHE_OPTIONS.merge(kwargs)
+
+ expect(expensive_thing).to receive(:do_very_expensive_action).once
+ expect(instance.cache).to receive(:fetch).with(cache_key, **expected_kwargs).exactly(5).times.and_call_original
+
+ 5.times { perform }
+ end
+
+ it "handles nested cache calls" do
+ nested_call = instance.cache_action(cache_key, **kwargs) do
+ instance.cache_action([:nested], **kwargs) do
+ expensive_thing.do_very_expensive_action
+ end
+ end
+
+ expect(nested_call.to_s).to eq(subject.to_s)
+ end
+ end
+
+ describe "#cache_action_if" do
+ subject do
+ instance.cache_action_if(conditional, cache_key, **kwargs) do
+ return_value
+ end
+ end
+
+ let(:cache_key) do
+ [user, :conditional_if]
+ end
+
+ context "conditional is truthy" do
+ let(:conditional) { "truthy thing" }
+
+ it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
+
+ it "caches the block" do
+ expect(instance).to receive(:cache_action).with(cache_key, **kwargs)
+
+ subject
+ end
+ end
+
+ context "conditional is falsey" do
+ let(:conditional) { false }
+
+ it { is_expected.to eq(return_value) }
+
+ it "doesn't cache the block" do
+ expect(instance).not_to receive(:cache_action).with(cache_key, **kwargs)
+
+ subject
+ end
+ end
+ end
+
+ describe "#cache_action_unless" do
+ subject do
+ instance.cache_action_unless(conditional, cache_key, **kwargs) do
+ return_value
+ end
+ end
+
+ let(:cache_key) do
+ [user, :conditional_unless]
+ end
+
+ context "conditional is truthy" do
+ let(:conditional) { "truthy thing" }
+
+ it { is_expected.to eq(return_value) }
+
+ it "doesn't cache the block" do
+ expect(instance).not_to receive(:cache_action).with(cache_key, **kwargs)
+
+ subject
+ end
+ end
+
+ context "conditional is falsey" do
+ let(:conditional) { false }
+
+ it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
+
+ it "caches the block" do
+ expect(instance).to receive(:cache_action).with(cache_key, **kwargs)
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/lib/api/helpers/related_resources_helpers_spec.rb b/spec/lib/api/helpers/related_resources_helpers_spec.rb
index a0dc69536b4..8b2e95c0434 100644
--- a/spec/lib/api/helpers/related_resources_helpers_spec.rb
+++ b/spec/lib/api/helpers/related_resources_helpers_spec.rb
@@ -63,6 +63,12 @@ RSpec.describe API::Helpers::RelatedResourcesHelpers do
is_expected.to start_with('https://')
end
+ it 'accepts the host if it contains an underscore' do
+ stub_default_url_options(host: 'w_ww.example.com')
+
+ is_expected.to start_with('http://w_ww.example.com/')
+ end
+
it 'accepts port to be nil' do
stub_default_url_options(port: nil)
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 15b22fcf25e..87cd0d4388c 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe API::Helpers do
+ using RSpec::Parameterized::TableSyntax
+
subject { Class.new.include(described_class).new }
describe '#find_project' do
@@ -99,6 +101,59 @@ RSpec.describe API::Helpers do
end
end
+ describe '#find_project!' do
+ let_it_be(:project) { create(:project) }
+
+ let(:user) { project.owner }
+
+ before do
+ allow(subject).to receive(:current_user).and_return(user)
+ allow(subject).to receive(:authorized_project_scope?).and_return(true)
+ allow(subject).to receive(:job_token_authentication?).and_return(false)
+ allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ end
+
+ shared_examples 'project finder' do
+ context 'when project exists' do
+ it 'returns requested project' do
+ expect(subject.find_project!(existing_id)).to eq(project)
+ end
+
+ it 'returns nil for a non-existing project' do
+ expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
+ expect(subject.find_project!(non_existing_id)).to be_nil
+ end
+ end
+ end
+
+ context 'when ID is used as an argument' do
+ let(:existing_id) { project.id }
+ let(:non_existing_id) { non_existing_record_id }
+
+ it_behaves_like 'project finder'
+ end
+
+ context 'when PATH is used as an argument' do
+ let(:existing_id) { project.full_path }
+ let(:non_existing_id) { 'something/else' }
+
+ it_behaves_like 'project finder'
+
+ context 'with an invalid PATH' do
+ let(:non_existing_id) { 'undefined' } # path without slash
+
+ it_behaves_like 'project finder'
+
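+ # A ref without a slash can never be a full project path, so the lookup is expected to
+ # 404 without ever reaching Project.find_by_full_path.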
+ it 'does not hit the database' do
+ expect(Project).not_to receive(:find_by_full_path)
+ expect(subject).to receive(:render_api_error!).with('404 Project Not Found', 404)
+
+ subject.find_project!(non_existing_id)
+ end
+ end
+ end
+ end
+
describe '#find_namespace' do
let(:namespace) { create(:namespace) }
@@ -191,6 +246,49 @@ RSpec.describe API::Helpers do
it_behaves_like 'user namespace finder'
end
+ describe '#authorized_project_scope?' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:job) { create(:ci_build) }
+
+ let(:send_authorized_project_scope) { subject.authorized_project_scope?(project) }
+
+ where(:job_token_authentication, :route_setting, :feature_flag, :same_job_project, :expected_result) do
+ false | false | false | false | true
+ false | false | false | true | true
+ false | false | true | false | true
+ false | false | true | true | true
+ false | true | false | false | true
+ false | true | false | true | true
+ false | true | true | false | true
+ false | true | true | true | true
+ true | false | false | false | true
+ true | false | false | true | true
+ true | false | true | false | true
+ true | false | true | true | true
+ true | true | false | false | false
+ true | true | false | true | false
+ true | true | true | false | false
+ true | true | true | true | true
+ end
+
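+ # Per the table above, the scope check only fails for job-token requests on routes that
+ # set `job_token_scope: :project`, and then only when the job's project differs or the
+ # `ci_job_token_scope` flag is not enabled for it.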
+ with_them do
+ before do
+ allow(subject).to receive(:job_token_authentication?).and_return(job_token_authentication)
+ allow(subject).to receive(:route_authentication_setting).and_return(job_token_scope: route_setting ? :project : nil)
+ allow(subject).to receive(:current_authenticated_job).and_return(job)
+ allow(job).to receive(:project).and_return(same_job_project ? project : other_project)
+
+ stub_feature_flags(ci_job_token_scope: false)
+ stub_feature_flags(ci_job_token_scope: project) if feature_flag
+ end
+
+ it 'returns the expected result' do
+ expect(send_authorized_project_scope).to eq(expected_result)
+ end
+ end
+ end
+
describe '#send_git_blob' do
let(:repository) { double }
let(:blob) { double(name: 'foobar') }
diff --git a/spec/lib/banzai/cross_project_reference_spec.rb b/spec/lib/banzai/cross_project_reference_spec.rb
index 95b78ceb5d5..60ff15a88e0 100644
--- a/spec/lib/banzai/cross_project_reference_spec.rb
+++ b/spec/lib/banzai/cross_project_reference_spec.rb
@@ -4,10 +4,12 @@ require 'spec_helper'
RSpec.describe Banzai::CrossProjectReference do
let(:including_class) { Class.new.include(described_class).new }
+ let(:reference_cache) { Banzai::Filter::References::ReferenceCache.new(including_class, {}) }
before do
allow(including_class).to receive(:context).and_return({})
allow(including_class).to receive(:parent_from_ref).and_call_original
+ allow(including_class).to receive(:reference_cache).and_return(reference_cache)
end
describe '#parent_from_ref' do
@@ -47,5 +49,18 @@ RSpec.describe Banzai::CrossProjectReference do
expect(including_class.parent_from_ref('cross/reference')).to eq project2
end
end
+
+ context 'when reference cache is loaded' do
+ let(:project2) { double('referenced project') }
+
+ before do
+ allow(reference_cache).to receive(:cache_loaded?).and_return(true)
+ allow(reference_cache).to receive(:parent_per_reference).and_return({ 'cross/reference' => project2 })
+ end
+
+ it 'pulls from the reference cache' do
+ expect(including_class.parent_from_ref('cross/reference')).to eq project2
+ end
+ end
end
end
diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
index 5e76e8164dd..94e77663d0f 100644
--- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
+++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter do
end
expect do
- filter('<p>:tanuki: :party-parrot:</p>')
+ filter('<p>:tanuki:</p> <p>:party-parrot:</p>')
end.not_to exceed_all_query_limit(control_count.count)
end
end
diff --git a/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb b/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb
index 076c112ac87..3cb3ebc42a6 100644
--- a/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb
@@ -8,18 +8,6 @@ RSpec.describe Banzai::Filter::References::AbstractReferenceFilter do
let(:doc) { Nokogiri::HTML.fragment('') }
let(:filter) { described_class.new(doc, project: project) }
- describe '#references_per_parent' do
- let(:doc) { Nokogiri::HTML.fragment("#1 #{project.full_path}#2 #2") }
-
- it 'returns a Hash containing references grouped per parent paths' do
- expect(described_class).to receive(:object_class).exactly(6).times.and_return(Issue)
-
- refs = filter.references_per_parent
-
- expect(refs).to match(a_hash_including(project.full_path => contain_exactly(1, 2)))
- end
- end
-
describe '#data_attributes_for' do
let_it_be(:issue) { create(:issue, project: project) }
@@ -32,71 +20,17 @@ RSpec.describe Banzai::Filter::References::AbstractReferenceFilter do
end
end
- describe '#parent_per_reference' do
- it 'returns a Hash containing projects grouped per parent paths' do
- expect(filter).to receive(:references_per_parent)
- .and_return({ project.full_path => Set.new([1]) })
-
- expect(filter.parent_per_reference)
- .to eq({ project.full_path => project })
- end
- end
-
- describe '#find_for_paths' do
- context 'with RequestStore disabled' do
- it 'returns a list of Projects for a list of paths' do
- expect(filter.find_for_paths([project.full_path]))
- .to eq([project])
- end
-
- it "return an empty array for paths that don't exist" do
- expect(filter.find_for_paths(['nonexistent/project']))
- .to eq([])
+ context 'abstract methods' do
+ describe '#find_object' do
+ it 'raises NotImplementedError' do
+ expect { filter.find_object(nil, nil) }.to raise_error(NotImplementedError)
end
end
- context 'with RequestStore enabled', :request_store do
- it 'returns a list of Projects for a list of paths' do
- expect(filter.find_for_paths([project.full_path]))
- .to eq([project])
+ describe '#url_for_object' do
+ it 'raises NotImplementedError' do
+ expect { filter.url_for_object(nil, nil) }.to raise_error(NotImplementedError)
end
-
- context "when no project with that path exists" do
- it "returns no value" do
- expect(filter.find_for_paths(['nonexistent/project']))
- .to eq([])
- end
-
- it "adds the ref to the project refs cache" do
- project_refs_cache = {}
- allow(filter).to receive(:refs_cache).and_return(project_refs_cache)
-
- filter.find_for_paths(['nonexistent/project'])
-
- expect(project_refs_cache).to eq({ 'nonexistent/project' => nil })
- end
-
- context 'when the project refs cache includes nil values' do
- before do
- # adds { 'nonexistent/project' => nil } to cache
- filter.from_ref_cached('nonexistent/project')
- end
-
- it "return an empty array for paths that don't exist" do
- expect(filter.find_for_paths(['nonexistent/project']))
- .to eq([])
- end
- end
- end
- end
- end
-
- describe '#current_parent_path' do
- it 'returns the path of the current parent' do
- doc = Nokogiri::HTML.fragment('')
- filter = described_class.new(doc, project: project)
-
- expect(filter.current_parent_path).to eq(project.full_path)
end
end
end
diff --git a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
index 52514ad17fc..cb1f3d520a4 100644
--- a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
@@ -104,7 +104,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter do
let(:pattern) { described_class.object_class.link_reference_pattern }
let(:parsed) do
m = pattern.match(url)
- described_class.identifier(m) if m
+ described_class.new('', project: nil).identifier(m) if m
end
it 'can parse the reference' do
@@ -119,9 +119,11 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter do
describe 'static properties' do
specify do
expect(described_class).to have_attributes(
- object_sym: :design,
+ reference_type: :design,
object_class: ::DesignManagement::Design
)
+
+ expect(described_class.new('', project: nil).object_sym).to eq :design
end
end
diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index b849355f6db..88c2494b243 100644
--- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -470,42 +470,24 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
end
end
- describe '#records_per_parent' do
- context 'using an internal issue tracker' do
- it 'returns a Hash containing the issues per project' do
- doc = Nokogiri::HTML.fragment('')
- filter = described_class.new(doc, project: project)
-
- expect(filter).to receive(:parent_per_reference)
- .and_return({ project.full_path => project })
-
- expect(filter).to receive(:references_per_parent)
- .and_return({ project.full_path => Set.new([issue.iid]) })
-
- expect(filter.records_per_parent)
- .to eq({ project => { issue.iid => issue } })
- end
- end
- end
-
describe '.references_in' do
let(:merge_request) { create(:merge_request) }
it 'yields valid references' do
expect do |b|
- described_class.references_in(issue.to_reference, &b)
+ described_class.new('', project: nil).references_in(issue.to_reference, &b)
end.to yield_with_args(issue.to_reference, issue.iid, nil, nil, MatchData)
end
it "doesn't yield invalid references" do
expect do |b|
- described_class.references_in('#0', &b)
+ described_class.new('', project: nil).references_in('#0', &b)
end.not_to yield_control
end
it "doesn't yield unsupported references" do
expect do |b|
- described_class.references_in(merge_request.to_reference, &b)
+ described_class.new('', project: nil).references_in(merge_request.to_reference, &b)
end.not_to yield_control
end
end
diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index 7a634b0b513..ee2ce967a47 100644
--- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -142,6 +142,17 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
expect(doc.text).to eq("Merge (#{reference}.)")
end
+ it 'has correct data attributes' do
+ doc = reference_filter("Merge (#{reference}.)")
+
+ link = doc.css('a').first
+
+ expect(link.attr('data-project')).to eq project2.id.to_s
+ expect(link.attr('data-project-path')).to eq project2.full_path
+ expect(link.attr('data-iid')).to eq merge.iid.to_s
+ expect(link.attr('data-mr-title')).to eq merge.title
+ end
+
it 'ignores invalid merge IDs on the referenced project' do
exp = act = "Merge #{invalidate_reference(reference)}"
diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index 7a77d57cd42..63a5a9184c1 100644
--- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter do
document = Nokogiri::HTML.fragment("<p>#{get_reference(project)}</p>")
filter = described_class.new(document, project: project)
- expect(filter.projects_hash).to eq({ project.full_path => project })
+ expect(filter.send(:projects_hash)).to eq({ project.full_path => project })
end
end
@@ -94,7 +94,7 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter do
document = Nokogiri::HTML.fragment("<p>#{get_reference(project)}</p>")
filter = described_class.new(document, project: project)
- expect(filter.projects).to eq([project.full_path])
+ expect(filter.send(:projects)).to eq([project.full_path])
end
end
end
diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb
new file mode 100644
index 00000000000..9e2a6f35910
--- /dev/null
+++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::References::ReferenceCache do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:issue1) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project) }
+ let_it_be(:issue3) { create(:issue, project: project2) }
+ let_it_be(:doc) { Nokogiri::HTML.fragment("#{issue1.to_reference} #{issue2.to_reference} #{issue3.to_reference(full: true)}") }
+
+ let(:filter_class) { Banzai::Filter::References::IssueReferenceFilter }
+ let(:filter) { filter_class.new(doc, project: project) }
+ let(:cache) { described_class.new(filter, { project: project }) }
+
+ describe '#load_references_per_parent' do
+ it 'loads references grouped per parent paths' do
+ cache.load_references_per_parent(filter.nodes)
+
+ expect(cache.references_per_parent).to eq({ project.full_path => [issue1.iid, issue2.iid].to_set,
+ project2.full_path => [issue3.iid].to_set })
+ end
+ end
+
+ describe '#load_parent_per_reference' do
+ it 'returns a Hash containing projects grouped per parent paths' do
+ cache.load_references_per_parent(filter.nodes)
+ cache.load_parent_per_reference
+
+ expect(cache.parent_per_reference).to match({ project.full_path => project, project2.full_path => project2 })
+ end
+ end
+
+ describe '#load_records_per_parent' do
+ it 'returns a Hash containing projects grouped per parent paths' do
+ cache.load_references_per_parent(filter.nodes)
+ cache.load_parent_per_reference
+ cache.load_records_per_parent
+
+ expect(cache.records_per_parent).to match({ project => { issue1.iid => issue1, issue2.iid => issue2 },
+ project2 => { issue3.iid => issue3 } })
+ end
+ end
+
+ describe '#initialize_reference_cache' do
+ it 'does not have an N+1 query problem with cross projects' do
+ doc_single = Nokogiri::HTML.fragment("#1")
+ filter_single = filter_class.new(doc_single, project: project)
+ cache_single = described_class.new(filter_single, { project: project })
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ cache_single.load_references_per_parent(filter_single.nodes)
+ cache_single.load_parent_per_reference
+ cache_single.load_records_per_parent
+ end.count
+
+ # Since this is an issue filter that is not batching issue queries
+ # across projects, we have to account for that.
+ # 1 for both projects, 1 for issues in each project == 3
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
+ max_count = control_count + 1
+
+ expect do
+ cache.load_references_per_parent(filter.nodes)
+ cache.load_parent_per_reference
+ cache.load_records_per_parent
+ end.not_to exceed_query_limit(max_count)
+ end
+ end
+
+ describe '#find_for_paths' do
+ context 'with RequestStore disabled' do
+ it 'returns a list of Projects for a list of paths' do
+ expect(cache.find_for_paths([project.full_path]))
+ .to eq([project])
+ end
+
+ it 'returns an empty array for paths that do not exist' do
+ expect(cache.find_for_paths(['nonexistent/project']))
+ .to eq([])
+ end
+ end
+
+ context 'with RequestStore enabled', :request_store do
+ it 'returns a list of Projects for a list of paths' do
+ expect(cache.find_for_paths([project.full_path]))
+ .to eq([project])
+ end
+
+ context 'when no project with that path exists' do
+ it 'returns no value' do
+ expect(cache.find_for_paths(['nonexistent/project']))
+ .to eq([])
+ end
+
+ it 'adds the ref to the project refs cache' do
+ project_refs_cache = {}
+ allow(cache).to receive(:refs_cache).and_return(project_refs_cache)
+
+ cache.find_for_paths(['nonexistent/project'])
+
+ expect(project_refs_cache).to eq({ 'nonexistent/project' => nil })
+ end
+ end
+ end
+ end
+
+ describe '#current_parent_path' do
+ it 'returns the path of the current parent' do
+ expect(cache.current_parent_path).to eq project.full_path
+ end
+ end
+
+ describe '#current_project_namespace_path' do
+ it 'returns the path of the current project namespace' do
+ expect(cache.current_project_namespace_path).to eq project.namespace.full_path
+ end
+ end
+
+ describe '#full_project_path' do
+ it 'returns current parent path when no ref specified' do
+ expect(cache.full_project_path('something', nil)).to eq cache.current_parent_path
+ end
+
+ it 'returns combined namespace and project ref' do
+ expect(cache.full_project_path('something', 'cool')).to eq 'something/cool'
+ end
+
+ it 'uses the default namespace with the given project ref when the namespace is nil' do
+ expect(cache.full_project_path(nil, 'cool')).to eq "#{project.namespace.full_path}/cool"
+ end
+ end
+
+ describe '#full_group_path' do
+ it 'returns current parent path when no group ref specified' do
+ expect(cache.full_group_path(nil)).to eq cache.current_parent_path
+ end
+
+ it 'returns group ref' do
+ expect(cache.full_group_path('cool_group')).to eq 'cool_group'
+ end
+ end
+end
diff --git a/spec/lib/banzai/filter/references/reference_filter_spec.rb b/spec/lib/banzai/filter/references/reference_filter_spec.rb
index 4bcb41ef2a9..b14b9374364 100644
--- a/spec/lib/banzai/filter/references/reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_filter_spec.rb
@@ -155,7 +155,7 @@ RSpec.describe Banzai::Filter::References::ReferenceFilter do
let(:nodes) { [node] }
it 'skips node' do
- expect { |b| filter.replace_text_when_pattern_matches(filter.nodes[0], 0, ref_pattern, &b) }.not_to yield_control
+ expect { |b| filter.send(:replace_text_when_pattern_matches, filter.nodes[0], 0, ref_pattern, &b) }.not_to yield_control
end
end
@@ -183,12 +183,12 @@ RSpec.describe Banzai::Filter::References::ReferenceFilter do
end
end
- describe "#call_and_update_nodes" do
+ describe '#call_and_update_nodes' do
include_context 'new nodes'
let(:document) { Nokogiri::HTML.fragment('<a href="foo">foo</a>') }
let(:filter) { described_class.new(document, project: project) }
- it "updates all new nodes", :aggregate_failures do
+ it 'updates all new nodes', :aggregate_failures do
filter.instance_variable_set('@nodes', nodes)
expect(filter).to receive(:call) { filter.instance_variable_set('@new_nodes', new_nodes) }
@@ -201,14 +201,14 @@ RSpec.describe Banzai::Filter::References::ReferenceFilter do
end
end
- describe ".call" do
+ describe '.call' do
include_context 'new nodes'
let(:document) { Nokogiri::HTML.fragment('<a href="foo">foo</a>') }
let(:result) { { reference_filter_nodes: nodes } }
- it "updates all nodes", :aggregate_failures do
+ it 'updates all nodes', :aggregate_failures do
expect_next_instance_of(described_class) do |filter|
expect(filter).to receive(:call_and_update_nodes).and_call_original
expect(filter).to receive(:with_update_nodes).and_call_original
@@ -221,4 +221,21 @@ RSpec.describe Banzai::Filter::References::ReferenceFilter do
expect(result[:reference_filter_nodes]).to eq(expected_nodes)
end
end
+
+ context 'abstract methods' do
+ let(:document) { Nokogiri::HTML.fragment('<a href="foo">foo</a>') }
+ let(:filter) { described_class.new(document, project: project) }
+
+ describe '#references_in' do
+ it 'raises NotImplementedError' do
+ expect { filter.references_in('foo', %r{(?<!\w)}) }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#object_link_filter' do
+ it 'raises NotImplementedError' do
+ expect { filter.send(:object_link_filter, 'foo', %r{(?<!\w)}) }.to raise_error(NotImplementedError)
+ end
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
index 32a706925ba..7ab3b24b1c2 100644
--- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
@@ -219,4 +219,31 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter do
expect(reference_filter(act, project: nil, group: create(:group)).to_html).to eq exp
end
end
+
+ context 'checking N+1' do
+ let(:namespace2) { create(:namespace) }
+ let(:project2) { create(:project, :public, namespace: namespace2) }
+ let(:snippet2) { create(:project_snippet, project: project2) }
+ let(:reference2) { "#{project2.full_path}$#{snippet2.id}" }
+
+ it 'does not have N+1 queries with multiple references per project', :use_sql_query_cache do
+ markdown = "#{reference} $9999990"
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ reference_filter(markdown)
+ end.count
+
+ markdown = "#{reference} $9999990 $9999991 $9999992 $9999993 #{reference2} something/cool$12"
+
+ # Since we're not batching snippet queries across projects,
+ # we have to account for that.
+ # 1 for both projects, 1 for snippets in each project == 3
+ # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
+ max_count = control_count + 1
+
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(max_count)
+ end
+ end
end
diff --git a/spec/lib/banzai/filter/references/user_reference_filter_spec.rb b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
index e4703606b47..70cbdb080a4 100644
--- a/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
@@ -189,7 +189,7 @@ RSpec.describe Banzai::Filter::References::UserReferenceFilter do
filter = described_class.new(document, project: project)
ns = user.namespace
- expect(filter.namespaces).to eq({ ns.path => ns })
+ expect(filter.send(:namespaces)).to eq({ ns.path => ns })
end
end
@@ -198,7 +198,28 @@ RSpec.describe Banzai::Filter::References::UserReferenceFilter do
document = Nokogiri::HTML.fragment("<p>#{get_reference(user)}</p>")
filter = described_class.new(document, project: project)
- expect(filter.usernames).to eq([user.username])
+ expect(filter.send(:usernames)).to eq([user.username])
+ end
+ end
+
+ context 'checking N+1' do
+ let(:user2) { create(:user) }
+ let(:group) { create(:group) }
+ let(:reference2) { user2.to_reference }
+ let(:reference3) { group.to_reference }
+
+ it 'does not have N+1 queries with multiple user references', :use_sql_query_cache do
+ markdown = "#{reference}"
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ reference_filter(markdown)
+ end.count
+
+ markdown = "#{reference} @qwertyuiopzx @wertyuio @ertyu @rtyui #{reference2} #{reference3}"
+
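+ # User and namespace lookups are batched, so the extra user and group references
+ # above should not add any queries beyond the control count.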
+ expect do
+ reference_filter(markdown)
+ end.not_to exceed_all_query_limit(control_count)
end
end
end
diff --git a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb
index d9f45769550..ebe1ca4d403 100644
--- a/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/post_process_pipeline_spec.rb
@@ -3,24 +3,56 @@
require 'spec_helper'
RSpec.describe Banzai::Pipeline::PostProcessPipeline do
- context 'when a document only has upload links' do
- it 'does not make any Gitaly calls', :request_store do
- markdown = <<-MARKDOWN.strip_heredoc
- [Relative Upload Link](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
+ subject { described_class.call(doc, context) }
+
+ let_it_be(:project) { create(:project, :public, :repository) }
- ![Relative Upload Image](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
- MARKDOWN
+ let(:context) { { project: project, ref: 'master' } }
- context = {
- project: create(:project, :public, :repository),
- ref: 'master'
- }
+ context 'when a document only has upload links' do
+ let(:doc) do
+ <<-HTML.strip_heredoc
+ <a href="/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg">Relative Upload Link</a>
+ <img src="/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg">
+ HTML
+ end
+ it 'does not make any Gitaly calls', :request_store do
Gitlab::GitalyClient.reset_counts
- described_class.call(markdown, context)
+ subject
expect(Gitlab::GitalyClient.get_request_count).to eq(0)
end
end
+
+ context 'when both upload and repository links are present' do
+ let(:html) do
+ <<-HTML.strip_heredoc
+ <a href="/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg">Relative Upload Link</a>
+ <img src="/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg">
+ <a href="/test.jpg">Just a link</a>
+ HTML
+ end
+
+ let(:doc) { HTML::Pipeline.parse(html) }
+
+ it 'searches for attributes only once' do
+ expect(doc).to receive(:search).once.and_call_original
+
+ subject
+ end
+
+ context 'when "optimize_linkable_attributes" is disabled' do
+ before do
+ stub_feature_flags(optimize_linkable_attributes: false)
+ end
+
+ it 'searches for attributes twice' do
+ expect(doc).to receive(:search).twice.and_call_original
+
+ subject
+ end
+ end
+ end
end
diff --git a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
index 32a9f09c3f6..1820141c898 100644
--- a/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/merge_request_parser_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Banzai::ReferenceParser::MergeRequestParser do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let(:merge_request) { create(:merge_request, source_project: project) }
- subject { described_class.new(Banzai::RenderContext.new(merge_request.target_project, user)) }
+ subject(:parser) { described_class.new(Banzai::RenderContext.new(merge_request.target_project, user)) }
let(:link) { empty_html_link }
@@ -65,4 +65,49 @@ RSpec.describe Banzai::ReferenceParser::MergeRequestParser do
it_behaves_like 'no N+1 queries'
end
+
+ describe '#can_read_reference?' do
+ subject { parser.can_read_reference?(user, merge_request) }
+
+ it { is_expected.to be_truthy }
+
+ context 'when merge request belongs to the private project' do
+ let(:project) { create(:project, :private) }
+
+ it 'prevents user from reading merge request references' do
+ is_expected.to be_falsey
+ end
+
+ context 'when user has access to the project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+
+ context 'with memoization' do
+ context 'when project is the same' do
+ it 'calls #can? only once' do
+ expect(parser).to receive(:can?).once
+
+ 2.times { parser.can_read_reference?(user, merge_request) }
+ end
+ end
+
+ context 'when merge requests belong to different projects' do
+ it 'calls #can? for each project' do
+ expect(parser).to receive(:can?).twice
+
+ another_merge_request = create(:merge_request)
+
+ 2.times do
+ parser.can_read_reference?(user, merge_request)
+ parser.can_read_reference?(user, another_merge_request)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 2d841b7fac2..213fa23675e 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -8,66 +8,23 @@ RSpec.describe BulkImports::Clients::Http do
let(:uri) { 'http://gitlab.example' }
let(:token) { 'token' }
let(:resource) { 'resource' }
+ let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
subject { described_class.new(uri: uri, token: token) }
- describe '#get' do
- let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
-
- shared_examples 'performs network request' do
- it 'performs network request' do
- expect(Gitlab::HTTP).to receive(:get).with(*expected_args).and_return(response_double)
-
- subject.get(resource)
- end
- end
-
- describe 'request query' do
- include_examples 'performs network request' do
- let(:expected_args) do
- [
- anything,
- hash_including(
- query: {
- page: described_class::DEFAULT_PAGE,
- per_page: described_class::DEFAULT_PER_PAGE
- }
- )
- ]
- end
- end
- end
-
- describe 'request headers' do
- include_examples 'performs network request' do
- let(:expected_args) do
- [
- anything,
- hash_including(
- headers: {
- 'Content-Type' => 'application/json',
- 'Authorization' => "Bearer #{token}"
- }
- )
- ]
- end
- end
- end
+ shared_examples 'performs network request' do
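+ # `method` and `expected_args` are provided via `let` by the including `#get` and `#post` describes below.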
+ it 'performs network request' do
+ expect(Gitlab::HTTP).to receive(method).with(*expected_args).and_return(response_double)
- describe 'request uri' do
- include_examples 'performs network request' do
- let(:expected_args) do
- ['http://gitlab.example:80/api/v4/resource', anything]
- end
- end
+ subject.public_send(method, resource)
end
context 'error handling' do
context 'when error occurred' do
it 'raises ConnectionError' do
- allow(Gitlab::HTTP).to receive(:get).and_raise(Errno::ECONNREFUSED)
+ allow(Gitlab::HTTP).to receive(method).and_raise(Errno::ECONNREFUSED)
- expect { subject.get(resource) }.to raise_exception(described_class::ConnectionError)
+ expect { subject.public_send(method, resource) }.to raise_exception(described_class::ConnectionError)
end
end
@@ -75,12 +32,34 @@ RSpec.describe BulkImports::Clients::Http do
it 'raises ConnectionError' do
response_double = double(code: 503, success?: false)
- allow(Gitlab::HTTP).to receive(:get).and_return(response_double)
+ allow(Gitlab::HTTP).to receive(method).and_return(response_double)
- expect { subject.get(resource) }.to raise_exception(described_class::ConnectionError)
+ expect { subject.public_send(method, resource) }.to raise_exception(described_class::ConnectionError)
end
end
end
+ end
+
+ describe '#get' do
+ let(:method) { :get }
+
+ include_examples 'performs network request' do
+ let(:expected_args) do
+ [
+ 'http://gitlab.example:80/api/v4/resource',
+ hash_including(
+ query: {
+ page: described_class::DEFAULT_PAGE,
+ per_page: described_class::DEFAULT_PER_PAGE
+ },
+ headers: {
+ 'Content-Type' => 'application/json',
+ 'Authorization' => "Bearer #{token}"
+ }
+ )
+ ]
+ end
+ end
describe '#each_page' do
let(:objects1) { [{ object: 1 }, { object: 2 }] }
@@ -129,4 +108,23 @@ RSpec.describe BulkImports::Clients::Http do
end
end
end
+
+ describe '#post' do
+ let(:method) { :post }
+
+ include_examples 'performs network request' do
+ let(:expected_args) do
+ [
+ 'http://gitlab.example:80/api/v4/resource',
+ hash_including(
+ body: {},
+ headers: {
+ 'Content-Type' => 'application/json',
+ 'Authorization' => "Bearer #{token}"
+ }
+ )
+ ]
+ end
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/stage_spec.rb b/spec/lib/bulk_imports/stage_spec.rb
new file mode 100644
index 00000000000..713cd3f22ab
--- /dev/null
+++ b/spec/lib/bulk_imports/stage_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe BulkImports::Stage do
+ let(:pipelines) do
+ [
+ [0, BulkImports::Groups::Pipelines::GroupPipeline],
+ [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
+ [1, BulkImports::Groups::Pipelines::MembersPipeline],
+ [1, BulkImports::Groups::Pipelines::LabelsPipeline],
+ [1, BulkImports::Groups::Pipelines::MilestonesPipeline],
+ [1, BulkImports::Groups::Pipelines::BadgesPipeline]
+ ]
+ end
+
+ describe '.pipelines' do
+ it 'lists all the pipelines with their stage number, ordered by stage' do
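+ # Array#& keeps the receiver's ordering, so this also asserts the expected relative stage order.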
+ expect(described_class.pipelines & pipelines).to eq(pipelines)
+ expect(described_class.pipelines.last.last).to eq(BulkImports::Groups::Pipelines::EntityFinisher)
+ end
+ end
+
+ describe '.pipeline_exists?' do
+ it 'returns true when the given pipeline name exists in the pipelines list' do
+ expect(described_class.pipeline_exists?(BulkImports::Groups::Pipelines::GroupPipeline)).to eq(true)
+ expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::GroupPipeline')).to eq(true)
+ end
+
+ it 'returns false when the given pipeline name does not exist in the pipelines list' do
+ expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::InexistentPipeline')).to eq(false)
+ end
+ end
+end
diff --git a/spec/lib/declarative_policy/overrides_spec.rb b/spec/lib/declarative_policy/overrides_spec.rb
deleted file mode 100644
index 84dc8f7ac71..00000000000
--- a/spec/lib/declarative_policy/overrides_spec.rb
+++ /dev/null
@@ -1,82 +0,0 @@
-# frozen_string_literal: true
-
-require 'fast_spec_helper'
-require_dependency 'rspec-parameterized'
-
-RSpec.describe 'DeclarativePolicy overrides' do
- let(:foo_policy) do
- Class.new(DeclarativePolicy::Base) do
- condition(:foo_prop_cond) { @subject.foo_prop }
-
- rule { foo_prop_cond }.policy do
- enable :common_ability
- enable :foo_prop_ability
- end
- end
- end
-
- let(:bar_policy) do
- Class.new(DeclarativePolicy::Base) do
- delegate { @subject.foo }
-
- overrides :common_ability
-
- condition(:bar_prop_cond) { @subject.bar_prop }
-
- rule { bar_prop_cond }.policy do
- enable :common_ability
- enable :bar_prop_ability
- end
-
- rule { bar_prop_cond & can?(:foo_prop_ability) }.policy do
- enable :combined_ability
- end
- end
- end
-
- before do
- stub_const('Foo', Struct.new(:foo_prop))
- stub_const('FooPolicy', foo_policy)
- stub_const('Bar', Struct.new(:foo, :bar_prop))
- stub_const('BarPolicy', bar_policy)
- end
-
- where(:foo_prop, :bar_prop) do
- [
- [true, true],
- [true, false],
- [false, true],
- [false, false]
- ]
- end
-
- with_them do
- let(:foo) { Foo.new(foo_prop) }
- let(:bar) { Bar.new(foo, bar_prop) }
-
- it 'determines the correct bar_prop_ability (non-delegated) permissions for bar' do
- policy = DeclarativePolicy.policy_for(nil, bar)
- expect(policy.allowed?(:bar_prop_ability)).to eq(bar_prop)
- end
-
- it 'determines the correct foo_prop (non-overridden) permissions for bar' do
- policy = DeclarativePolicy.policy_for(nil, bar)
- expect(policy.allowed?(:foo_prop_ability)).to eq(foo_prop)
- end
-
- it 'determines the correct common_ability (overridden) permissions for bar' do
- policy = DeclarativePolicy.policy_for(nil, bar)
- expect(policy.allowed?(:common_ability)).to eq(bar_prop)
- end
-
- it 'determines the correct common_ability permissions for foo' do
- policy = DeclarativePolicy.policy_for(nil, foo)
- expect(policy.allowed?(:common_ability)).to eq(foo_prop)
- end
-
- it 'allows combinations of overridden and inherited values' do
- policy = DeclarativePolicy.policy_for(nil, bar)
- expect(policy.allowed?(:combined_ability)).to eq(foo_prop && bar_prop)
- end
- end
-end
diff --git a/spec/lib/declarative_policy_spec.rb b/spec/lib/declarative_policy_spec.rb
deleted file mode 100644
index fc21bd43f48..00000000000
--- a/spec/lib/declarative_policy_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe DeclarativePolicy do
- describe '.class_for' do
- it 'uses declarative_policy_class if present' do
- instance = Gitlab::ErrorTracking::ErrorEvent.new
-
- expect(described_class.class_for(instance)).to eq(ErrorTracking::BasePolicy)
- end
-
- it 'infers policy class from name' do
- instance = PersonalSnippet.new
-
- expect(described_class.class_for(instance)).to eq(PersonalSnippetPolicy)
- end
-
- it 'raises error if not found' do
- instance = Object.new
-
- expect { described_class.class_for(instance) }.to raise_error('no policy for Object')
- end
-
- context 'when found policy class does not inherit base' do
- before do
- stub_const('Foo', Class.new)
- stub_const('FooPolicy', Class.new)
- end
-
- it 'raises error if inferred class does not inherit Base' do
- instance = Foo.new
-
- expect { described_class.class_for(instance) }.to raise_error('no policy for Foo')
- end
- end
- end
-end
diff --git a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
new file mode 100644
index 00000000000..25c4001a192
--- /dev/null
+++ b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'generator_helper'
+
+RSpec.describe Gitlab::SnowplowEventDefinitionGenerator do
+ let(:ce_temp_dir) { Dir.mktmpdir }
+ let(:ee_temp_dir) { Dir.mktmpdir }
+ let(:generator_options) { { 'category' => 'Groups::EmailCampaignsController', 'action' => 'click' } }
+
+ before do
+ stub_const("#{described_class}::CE_DIR", ce_temp_dir)
+ stub_const("#{described_class}::EE_DIR", ee_temp_dir)
+ end
+
+ after do
+ FileUtils.rm_rf([ce_temp_dir, ee_temp_dir])
+ end
+
+ describe 'Creating event definition file' do
+ before do
+ stub_const('Gitlab::VERSION', '13.11.0-pre')
+ end
+
+ let(:sample_event_dir) { 'lib/generators/gitlab/snowplow_event_definition_generator' }
+
+ it 'creates CE event definition file using the template' do
+ sample_event = ::Gitlab::Config::Loader::Yaml.new(fixture_file(File.join(sample_event_dir, 'sample_event.yml'))).load_raw!
+
+ described_class.new([], generator_options).invoke_all
+
+ event_definition_path = File.join(ce_temp_dir, 'groups__email_campaigns_controller_click.yml')
+ expect(::Gitlab::Config::Loader::Yaml.new(File.read(event_definition_path)).load_raw!).to eq(sample_event)
+ end
+
+ context 'when the event definition already exists' do
+ before do
+ stub_const('Gitlab::VERSION', '12.11.0-pre')
+ described_class.new([], generator_options).invoke_all
+ end
+
+ it 'overwrites the event definition when the --force flag is set to true' do
+ sample_event = ::Gitlab::Config::Loader::Yaml.new(fixture_file(File.join(sample_event_dir, 'sample_event.yml'))).load_raw!
+
+ stub_const('Gitlab::VERSION', '13.11.0-pre')
+ described_class.new([], generator_options.merge('force' => true)).invoke_all
+
+ event_definition_path = File.join(ce_temp_dir, 'groups__email_campaigns_controller_click.yml')
+ event_data = ::Gitlab::Config::Loader::Yaml.new(File.read(event_definition_path)).load_raw!
+
+ expect(event_data).to eq(sample_event)
+ end
+
+ it 'raises an error when the --force flag is set to false' do
+ expect { described_class.new([], generator_options.merge('force' => false)).invoke_all }
+ .to raise_error(StandardError, /Event definition already exists at/)
+ end
+ end
+
+ it 'creates EE event definition file using the template' do
+ sample_event = ::Gitlab::Config::Loader::Yaml.new(fixture_file(File.join(sample_event_dir, 'sample_event_ee.yml'))).load_raw!
+
+ described_class.new([], generator_options.merge('ee' => true)).invoke_all
+
+ event_definition_path = File.join(ee_temp_dir, 'groups__email_campaigns_controller_click.yml')
+ expect(::Gitlab::Config::Loader::Yaml.new(File.read(event_definition_path)).load_raw!).to eq(sample_event)
+ end
+ end
+end
diff --git a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
index 021fb8f5f58..95a577e6334 100644
--- a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
@@ -18,6 +18,10 @@ RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator do
stub_prometheus_queries
end
+ after do
+ FileUtils.rm_rf(temp_dir)
+ end
+
it 'creates metric definition files' do
described_class.new(args).invoke_all
@@ -27,4 +31,27 @@ RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator do
expect(YAML.safe_load(File.read(weekly_metric_definition_path))).to include("key_path" => "redis_hll_counters.test_category.i_test_event_weekly")
expect(YAML.safe_load(File.read(monthly_metric_definition_path))).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly")
end
+
+ context 'with ee option' do
+ let(:weekly_metric_definition_path) { Dir.glob(File.join(temp_dir, 'ee/config/metrics/counts_7d/*i_test_event_weekly.yml')).first }
+ let(:monthly_metric_definition_path) { Dir.glob(File.join(temp_dir, 'ee/config/metrics/counts_28d/*i_test_event_monthly.yml')).first }
+
+ let(:weekly_metric_definition) { YAML.safe_load(File.read(weekly_metric_definition_path)) }
+ let(:monthly_metric_definition) { YAML.safe_load(File.read(monthly_metric_definition_path)) }
+
+ before do
+ stub_const("#{Gitlab::UsageMetricDefinitionGenerator}::TOP_LEVEL_DIR", 'config')
+ stub_const("#{Gitlab::UsageMetricDefinitionGenerator}::TOP_LEVEL_DIR_EE", File.join(temp_dir, 'ee'))
+ end
+
+ it 'creates metric definition files' do
+ described_class.new(args, { 'ee': true }).invoke_all
+
+ expect(weekly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_weekly")
+ expect(weekly_metric_definition["distribution"]).to include('ee')
+
+ expect(monthly_metric_definition).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly")
+ expect(monthly_metric_definition["distribution"]).to include('ee')
+ end
+ end
end
diff --git a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
index f8c055ae111..74aaf34e82c 100644
--- a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
@@ -20,20 +20,37 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator do
end
describe 'Creating metric definition file' do
+ let(:sample_metric) { load_sample_metric_definition(filename: sample_filename) }
+
# Stub version so that `milestone` key remains constant between releases to prevent flakiness.
before do
stub_const('Gitlab::VERSION', '13.9.0')
allow(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('test metric name')
end
- let(:sample_metric) { load_sample_metric_definition(filename: 'sample_metric_with_name_suggestions.yml') }
+ context 'without ee option' do
+ let(:sample_filename) { 'sample_metric_with_name_suggestions.yml' }
+ let(:metric_definition_path) { Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first }
- it 'creates a metric definition file using the template' do
- described_class.new([key_path], { 'dir' => dir }).invoke_all
+ it 'creates a metric definition file using the template' do
+ described_class.new([key_path], { 'dir' => dir }).invoke_all
+ expect(YAML.safe_load(File.read(metric_definition_path))).to eq(sample_metric)
+ end
+ end
- metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
+ context 'with ee option' do
+ let(:sample_filename) { 'sample_metric_with_ee.yml' }
+ let(:metric_definition_path) { Dir.glob(File.join(temp_dir, 'ee/config/metrics/counts_7d/*_test_metric.yml')).first }
- expect(YAML.safe_load(File.read(metric_definition_path))).to eq(sample_metric)
+ before do
+ stub_const("#{described_class}::TOP_LEVEL_DIR", 'config')
+ stub_const("#{described_class}::TOP_LEVEL_DIR_EE", File.join(temp_dir, 'ee'))
+ end
+
+ it 'creates a metric definition file using the template' do
+ described_class.new([key_path], { 'dir' => dir, 'ee': true }).invoke_all
+ expect(YAML.safe_load(File.read(metric_definition_path))).to eq(sample_metric)
+ end
end
end
diff --git a/spec/lib/gitlab/alert_management/payload/base_spec.rb b/spec/lib/gitlab/alert_management/payload/base_spec.rb
index e093b3587c2..d3c1a96253c 100644
--- a/spec/lib/gitlab/alert_management/payload/base_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/base_spec.rb
@@ -89,6 +89,12 @@ RSpec.describe Gitlab::AlertManagement::Payload::Base do
it { is_expected.to be_nil }
end
+
+ context 'with time in seconds' do
+ let(:raw_payload) { { 'test' => 1618877936 } }
+
+ it { is_expected.to be_nil }
+ end
end
context 'with an integer type provided' do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
index 0a333965f68..24f8fb40445 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
@@ -62,4 +62,29 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
expect(records.size).to eq(2)
end
+
+ describe 'in-progress filter' do
+ let_it_be(:mr3) { create(:merge_request, :opened, target_project: project, source_project: project, allow_broken: true, created_at: 3.months.ago) }
+ let_it_be(:mr4) { create(:merge_request, :closed, target_project: project, source_project: project, allow_broken: true, created_at: 1.month.ago) }
+
+ before do
+ params[:from] = 5.months.ago
+ end
+
+ context 'when the filter is present' do
+ before do
+ params[:end_event_filter] = :in_progress
+ end
+
+ it 'returns only open items' do
+ expect(records).to eq([mr3])
+ end
+ end
+
+ context 'when the filter is absent' do
+ it 'returns finished items' do
+ expect(records).to match_array([mr1, mr2])
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb
index 8f5be709a11..daf85ea379a 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/sorting_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::Sorting do
let(:stage) { build(:cycle_analytics_project_stage, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) }
- subject(:order_values) { described_class.apply(MergeRequest.joins(:metrics), stage, sort, direction).order_values }
+ subject(:order_values) { described_class.new(query: MergeRequest.joins(:metrics), stage: stage).apply(sort, direction).order_values }
context 'when invalid sorting params are given' do
let(:sort) { :unknown_sort }
diff --git a/spec/lib/gitlab/api_authentication/token_locator_spec.rb b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
index e933fd8352e..4b19a3d5846 100644
--- a/spec/lib/gitlab/api_authentication/token_locator_spec.rb
+++ b/spec/lib/gitlab/api_authentication/token_locator_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::APIAuthentication::TokenLocator do
let(:request) { double(authorization: nil) }
it 'returns nil' do
- expect(subject).to be(nil)
+ expect(subject).to be_nil
end
end
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::APIAuthentication::TokenLocator do
let(:request) { double(headers: {}) }
it 'returns nil' do
- expect(subject).to be(nil)
+ expect(subject).to be_nil
end
end
@@ -72,5 +72,110 @@ RSpec.describe Gitlab::APIAuthentication::TokenLocator do
end
end
end
+
+ context 'with :http_bearer_token' do
+ let(:type) { :http_bearer_token }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { "Authorization" => "Bearer #{password}" }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
+ context 'with :http_deploy_token_header' do
+ let(:type) { :http_deploy_token_header }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { 'Deploy-Token' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
+ context 'with :http_job_token_header' do
+ let(:type) { :http_job_token_header }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { 'Job-Token' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
+ context 'with :http_private_token_header' do
+ let(:type) { :http_private_token_header }
+
+ context 'without credentials' do
+ let(:request) { double(headers: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(headers: { 'Private-Token' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
+
+ context 'with :token_param' do
+ let(:type) { :token_param }
+
+ context 'without credentials' do
+ let(:request) { double(query_parameters: {}) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with credentials' do
+ let(:password) { 'bar' }
+ let(:request) { double(query_parameters: { 'token' => password }) }
+
+ it 'returns the credentials' do
+ expect(subject.password).to eq(password)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/api_authentication/token_resolver_spec.rb b/spec/lib/gitlab/api_authentication/token_resolver_spec.rb
index 97a7c8ba7cf..bbc6bf0d481 100644
--- a/spec/lib/gitlab/api_authentication/token_resolver_spec.rb
+++ b/spec/lib/gitlab/api_authentication/token_resolver_spec.rb
@@ -160,9 +160,58 @@ RSpec.describe Gitlab::APIAuthentication::TokenResolver do
it_behaves_like 'an authorized request'
end
end
+
+ context 'with :personal_access_token_from_jwt' do
+ let(:type) { :personal_access_token_from_jwt }
+ let(:token) { personal_access_token }
+
+ context 'with valid credentials' do
+ let(:raw) { username_and_password_from_jwt(token.id) }
+
+ it_behaves_like 'an authorized request'
+ end
+ end
+
+ context 'with :deploy_token_from_jwt' do
+ let(:type) { :deploy_token_from_jwt }
+ let(:token) { deploy_token }
+
+ context 'with valid credentials' do
+ let(:raw) { username_and_password_from_jwt(token.token) }
+
+ it_behaves_like 'an authorized request'
+ end
+ end
+
+ context 'with :job_token_from_jwt' do
+ let(:type) { :job_token_from_jwt }
+ let(:token) { ci_job }
+
+ context 'with valid credentials' do
+ let(:raw) { username_and_password_from_jwt(token.token) }
+
+ it_behaves_like 'an authorized request'
+ end
+
+ context 'when the job is not running' do
+ let(:raw) { username_and_password_from_jwt(ci_job_done.token) }
+
+ it_behaves_like 'an unauthorized request'
+ end
+
+ context 'with an invalid job token' do
+ let(:raw) { username_and_password_from_jwt('not a valid CI job token') }
+
+ it_behaves_like 'an unauthorized request'
+ end
+ end
end
def username_and_password(username, password)
::Gitlab::APIAuthentication::TokenLocator::UsernameAndPassword.new(username, password)
end
+
+ def username_and_password_from_jwt(token)
+ username_and_password(nil, ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = token }.encoded)
+ end
end
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 7a578ad3c90..7f06e66ad50 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -779,7 +779,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end.not_to change(user, :failed_attempts)
end
- context 'when the database is read only' do
+ context 'when the database is read-only' do
before do
allow(Gitlab::Database).to receive(:read_only?).and_return(true)
end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
new file mode 100644
index 00000000000..35928deff82
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210506065000 do
+ let(:namespaces_table) { table(:namespaces) }
+
+ let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) }
+ let!(:root_group) { namespaces_table.create!(id: 2, name: 'group', path: 'group', type: 'Group', parent_id: nil) }
+ let!(:sub_group) { namespaces_table.create!(id: 3, name: 'subgroup', path: 'subgroup', type: 'Group', parent_id: 2) }
+
+ describe '#perform' do
+ it 'backfills traversal_ids for child namespaces' do
+ described_class.new.perform(1, 3, 5)
+
+ expect(user_namespace.reload.traversal_ids).to eq([])
+ expect(root_group.reload.traversal_ids).to eq([])
+ expect(sub_group.reload.traversal_ids).to eq([root_group.id, sub_group.id])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
new file mode 100644
index 00000000000..96e43275972
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210506065000 do
+ let(:namespaces_table) { table(:namespaces) }
+
+ let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) }
+ let!(:root_group) { namespaces_table.create!(id: 2, name: 'group', path: 'group', type: 'Group', parent_id: nil) }
+ let!(:sub_group) { namespaces_table.create!(id: 3, name: 'subgroup', path: 'subgroup', type: 'Group', parent_id: 2) }
+
+ describe '#perform' do
+ it 'backfills traversal_ids for root namespaces' do
+ described_class.new.perform(1, 3, 5)
+
+ expect(user_namespace.reload.traversal_ids).to eq([user_namespace.id])
+ expect(root_group.reload.traversal_ids).to eq([root_group.id])
+ expect(sub_group.reload.traversal_ids).to eq([])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
index c4c0247ad3e..3e378db04d4 100644
--- a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
@@ -6,6 +6,11 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
let(:table_name) { :copy_primary_key_test }
let(:test_table) { table(table_name) }
let(:sub_batch_size) { 1000 }
+ let(:pause_ms) { 0 }
+
+ let(:helpers) do
+ ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers)
+ end
before do
ActiveRecord::Base.connection.execute(<<~SQL)
@@ -14,8 +19,8 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
id integer NOT NULL,
name character varying,
fk integer NOT NULL,
- id_convert_to_bigint bigint DEFAULT 0 NOT NULL,
- fk_convert_to_bigint bigint DEFAULT 0 NOT NULL,
+ #{helpers.convert_to_bigint_column(:id)} bigint DEFAULT 0 NOT NULL,
+ #{helpers.convert_to_bigint_column(:fk)} bigint DEFAULT 0 NOT NULL,
name_convert_to_text text DEFAULT 'no name'
);
SQL
@@ -34,43 +39,85 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
SQL
end
- subject { described_class.new }
+ subject(:copy_columns) { described_class.new }
describe '#perform' do
let(:migration_class) { described_class.name }
it 'copies all primary keys in range' do
- subject.perform(12, 15, table_name, 'id', sub_batch_size, 'id', 'id_convert_to_bigint')
+ temporary_column = helpers.convert_to_bigint_column(:id)
+ copy_columns.perform(12, 15, table_name, 'id', sub_batch_size, pause_ms, 'id', temporary_column)
- expect(test_table.where('id = id_convert_to_bigint').pluck(:id)).to contain_exactly(12, 15)
- expect(test_table.where(id_convert_to_bigint: 0).pluck(:id)).to contain_exactly(11, 19)
+ expect(test_table.where("id = #{temporary_column}").pluck(:id)).to contain_exactly(12, 15)
+ expect(test_table.where(temporary_column => 0).pluck(:id)).to contain_exactly(11, 19)
expect(test_table.all.count).to eq(4)
end
it 'copies all foreign keys in range' do
- subject.perform(10, 14, table_name, 'id', sub_batch_size, 'fk', 'fk_convert_to_bigint')
+ temporary_column = helpers.convert_to_bigint_column(:fk)
+ copy_columns.perform(10, 14, table_name, 'id', sub_batch_size, pause_ms, 'fk', temporary_column)
- expect(test_table.where('fk = fk_convert_to_bigint').pluck(:id)).to contain_exactly(11, 12)
- expect(test_table.where(fk_convert_to_bigint: 0).pluck(:id)).to contain_exactly(15, 19)
+ expect(test_table.where("fk = #{temporary_column}").pluck(:id)).to contain_exactly(11, 12)
+ expect(test_table.where(temporary_column => 0).pluck(:id)).to contain_exactly(15, 19)
expect(test_table.all.count).to eq(4)
end
it 'copies columns with NULLs' do
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(4)
- subject.perform(10, 20, table_name, 'id', sub_batch_size, 'name', 'name_convert_to_text')
+ copy_columns.perform(10, 20, table_name, 'id', sub_batch_size, pause_ms, 'name', 'name_convert_to_text')
expect(test_table.where('name = name_convert_to_text').pluck(:id)).to contain_exactly(11, 12, 19)
expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(0)
end
+ it 'copies multiple columns when given' do
+ columns_to_copy_from = %w[id fk]
+ id_tmp_column = helpers.convert_to_bigint_column('id')
+ fk_tmp_column = helpers.convert_to_bigint_column('fk')
+ columns_to_copy_to = [id_tmp_column, fk_tmp_column]
+
+ subject.perform(10, 15, table_name, 'id', sub_batch_size, pause_ms, columns_to_copy_from, columns_to_copy_to)
+
+ expect(test_table.where("id = #{id_tmp_column} AND fk = #{fk_tmp_column}").pluck(:id)).to contain_exactly(11, 12, 15)
+ expect(test_table.where(id_tmp_column => 0).where(fk_tmp_column => 0).pluck(:id)).to contain_exactly(19)
+ expect(test_table.all.count).to eq(4)
+ end
+
+ it 'raises error when number of source and target columns does not match' do
+ columns_to_copy_from = %w[id fk]
+ columns_to_copy_to = [helpers.convert_to_bigint_column(:id)]
+
+ expect do
+ subject.perform(10, 15, table_name, 'id', sub_batch_size, pause_ms, columns_to_copy_from, columns_to_copy_to)
+ end.to raise_error(ArgumentError, 'number of source and destination columns must match')
+ end
+
it 'tracks timings of queries' do
- expect(subject.batch_metrics.timings).to be_empty
+ expect(copy_columns.batch_metrics.timings).to be_empty
+
+ copy_columns.perform(10, 20, table_name, 'id', sub_batch_size, pause_ms, 'name', 'name_convert_to_text')
+
+ expect(copy_columns.batch_metrics.timings[:update_all]).not_to be_empty
+ end
+
+ context 'pause interval between sub-batches' do
+ it 'sleeps for the specified time between sub-batches' do
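+ # pause_ms is passed in milliseconds; the job is expected to sleep the equivalent in seconds (5 ms == 0.005 s).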
+ sub_batch_size = 2
+
+ expect(copy_columns).to receive(:sleep).with(0.005)
+
+ copy_columns.perform(10, 12, table_name, 'id', sub_batch_size, 5, 'name', 'name_convert_to_text')
+ end
+
+ it 'treats negative values as 0' do
+ sub_batch_size = 2
- subject.perform(10, 20, table_name, 'id', sub_batch_size, 'name', 'name_convert_to_text')
+ expect(copy_columns).to receive(:sleep).with(0)
- expect(subject.batch_metrics.timings[:update_all]).not_to be_empty
+ copy_columns.perform(10, 12, table_name, 'id', sub_batch_size, -5, 'name', 'name_convert_to_text')
+ end
end
end
end
diff --git a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..c4beb719e1e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20201110110454 do
+ let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let_it_be(:users) { table(:users) }
+ let_it_be(:user) { create_user! }
+ let_it_be(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
+
+ let_it_be(:scanners) { table(:vulnerability_scanners) }
+ let_it_be(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let_it_be(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+
+ let_it_be(:vulnerabilities) { table(:vulnerabilities) }
+ let_it_be(:vulnerability_with_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_without_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let_it_be(:primary_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v5',
+ external_id: 'uuid-v5',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'Identifier for UUIDv5')
+ end
+
+ let_it_be(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+ let_it_be(:finding) do
+ create_finding!(
+ vulnerability_id: vulnerability_with_finding.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: primary_identifier.id
+ )
+ end
+
+ let(:succeeded_status) { 1 }
+ let(:pending_status) { 0 }
+
+ it 'drops Vulnerabilities without any Findings' do
+ expect(vulnerabilities.pluck(:id)).to eq([vulnerability_with_finding.id, vulnerability_without_finding.id])
+
+ expect { subject.perform(vulnerability_with_finding.id, vulnerability_without_finding.id) }.to change(vulnerabilities, :count).by(-1)
+
+ expect(vulnerabilities.pluck(:id)).to eq([vulnerability_with_finding.id])
+ end
+
+ it 'marks jobs as done' do
+ background_migration_jobs.create!(
+ class_name: 'DropInvalidVulnerabilities',
+ arguments: [vulnerability_with_finding.id, vulnerability_with_finding.id]
+ )
+
+ background_migration_jobs.create!(
+ class_name: 'DropInvalidVulnerabilities',
+ arguments: [vulnerability_without_finding.id, vulnerability_without_finding.id]
+ )
+
+ subject.perform(vulnerability_with_finding.id, vulnerability_with_finding.id)
+
+ expect(background_migration_jobs.first.status).to eq(succeeded_status)
+ expect(background_migration_jobs.second.status).to eq(pending_status)
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ vulnerabilities_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: Time.current
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
new file mode 100644
index 00000000000..5e2f32c54be
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics, schema: 20210511095658 do
+ it 'correctly migrates project taggings context from tags to topics' do
+ taggings = table(:taggings)
+
+ project_old_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'tags')
+ project_new_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'topics')
+ project_other_context_tagging_1 = taggings.create!(taggable_type: 'Project', context: 'other')
+ project_old_tagging_2 = taggings.create!(taggable_type: 'Project', context: 'tags')
+ project_old_tagging_3 = taggings.create!(taggable_type: 'Project', context: 'tags')
+
+ subject.perform(project_old_tagging_1.id, project_old_tagging_2.id)
+
+ project_old_tagging_1.reload
+ project_new_tagging_1.reload
+ project_other_context_tagging_1.reload
+ project_old_tagging_2.reload
+ project_old_tagging_3.reload
+
+ expect(project_old_tagging_1.context).to eq('topics')
+ expect(project_new_tagging_1.context).to eq('topics')
+ expect(project_other_context_tagging_1.context).to eq('other')
+ expect(project_old_tagging_2.context).to eq('topics')
+ expect(project_old_tagging_3.context).to eq('tags')
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
index 1c62d703a34..b34a57f51f1 100644
--- a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
+++ b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
@@ -31,6 +31,15 @@ RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjec
end
it 'copies values to project_features' do
+ table(:background_migration_jobs).create!(
+ class_name: 'MoveContainerRegistryEnabledToProjectFeature',
+ arguments: [project1.id, project4.id]
+ )
+ table(:background_migration_jobs).create!(
+ class_name: 'MoveContainerRegistryEnabledToProjectFeature',
+ arguments: [-1, -3]
+ )
+
expect(project1.container_registry_enabled).to eq(true)
expect(project2.container_registry_enabled).to eq(false)
expect(project3.container_registry_enabled).to eq(nil)
@@ -57,6 +66,9 @@ RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjec
expect(project_feature1.reload.container_registry_access_level).to eq(enabled)
expect(project_feature2.reload.container_registry_access_level).to eq(disabled)
expect(project_feature3.reload.container_registry_access_level).to eq(disabled)
+
+ expect(table(:background_migration_jobs).first.status).to eq(1) # succeeded
+ expect(table(:background_migration_jobs).second.status).to eq(0) # pending
end
context 'when no projects exist in range' do
diff --git a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb
deleted file mode 100644
index 1c55b50ea3f..00000000000
--- a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_spec.rb
+++ /dev/null
@@ -1,241 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizations, schema: 20200204113223 do
- let(:users_table) { table(:users) }
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:project_authorizations_table) { table(:project_authorizations) }
- let(:members_table) { table(:members) }
- let(:group_group_links) { table(:group_group_links) }
- let(:project_group_links) { table(:project_group_links) }
-
- let(:user) { users_table.create!(id: 1, email: 'user@example.com', projects_limit: 10) }
- let(:group) { namespaces_table.create!(type: 'Group', name: 'group', path: 'group') }
-
- subject { described_class.new.perform([user.id]) }
-
- context 'missing authorization' do
- context 'personal project' do
- before do
- user_namespace = namespaces_table.create!(owner_id: user.id, name: 'User', path: 'user')
- projects_table.create!(id: 1,
- name: 'personal-project',
- path: 'personal-project',
- visibility_level: 0,
- namespace_id: user_namespace.id)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 40)]))
- end
- end
-
- context 'group membership' do
- before do
- projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 20, notification_level: 3)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
-
- context 'inherited group membership' do
- before do
- sub_group = namespaces_table.create!(type: 'Group', name: 'subgroup',
- path: 'subgroup', parent_id: group.id)
- projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: sub_group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 20, notification_level: 3)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
-
- context 'project membership' do
- before do
- project = projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: project.id, source_type: 'Project',
- type: 'ProjectMember', access_level: 20, notification_level: 3)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
-
- context 'shared group' do
- before do
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, notification_level: 3)
-
- shared_group = namespaces_table.create!(type: 'Group', name: 'shared group',
- path: 'shared-group')
- projects_table.create!(id: 1, name: 'project', path: 'project', visibility_level: 0,
- namespace_id: shared_group.id)
-
- group_group_links.create!(shared_group_id: shared_group.id, shared_with_group_id: group.id,
- group_access: 20)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
-
- context 'shared project' do
- before do
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, notification_level: 3)
-
- another_group = namespaces_table.create!(type: 'Group', name: 'another group', path: 'another-group')
- shared_project = projects_table.create!(id: 1, name: 'shared project', path: 'shared-project',
- visibility_level: 0, namespace_id: another_group.id)
-
- project_group_links.create!(project_id: shared_project.id, group_id: group.id, group_access: 20)
- end
-
- it 'creates correct authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(0).to(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 20)]))
- end
- end
- end
-
- context 'unapproved access requests' do
- context 'group membership' do
- before do
- projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 20, requested_at: Time.now, notification_level: 3)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
-
- context 'inherited group membership' do
- before do
- sub_group = namespaces_table.create!(type: 'Group', name: 'subgroup', path: 'subgroup',
- parent_id: group.id)
- projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: sub_group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 20, requested_at: Time.now, notification_level: 3)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
-
- context 'project membership' do
- before do
- project = projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: project.id, source_type: 'Project',
- type: 'ProjectMember', access_level: 20, requested_at: Time.now, notification_level: 3)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
-
- context 'shared group' do
- before do
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, requested_at: Time.now, notification_level: 3)
-
- shared_group = namespaces_table.create!(type: 'Group', name: 'shared group',
- path: 'shared-group')
- projects_table.create!(id: 1, name: 'project', path: 'project', visibility_level: 0,
- namespace_id: shared_group.id)
-
- group_group_links.create!(shared_group_id: shared_group.id, shared_with_group_id: group.id,
- group_access: 20)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
-
- context 'shared project' do
- before do
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, requested_at: Time.now, notification_level: 3)
-
- another_group = namespaces_table.create!(type: 'Group', name: 'another group', path: 'another-group')
- shared_project = projects_table.create!(id: 1, name: 'shared project', path: 'shared-project',
- visibility_level: 0, namespace_id: another_group.id)
-
- project_group_links.create!(project_id: shared_project.id, group_id: group.id, group_access: 20)
- end
-
- it 'does not create authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(0)
- end
- end
- end
-
- context 'incorrect authorization' do
- before do
- project = projects_table.create!(id: 1, name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
- members_table.create!(user_id: user.id, source_id: group.id, source_type: 'Namespace',
- type: 'GroupMember', access_level: 30, notification_level: 3)
-
- project_authorizations_table.create!(user_id: user.id, project_id: project.id,
- access_level: 10)
- end
-
- it 'fixes authorization' do
- expect { subject }.not_to change { project_authorizations_table.count }.from(1)
- expect(project_authorizations_table.all).to(
- match_array([have_attributes(user_id: 1, project_id: 1, access_level: 30)]))
- end
- end
-
- context 'unwanted authorization' do
- before do
- project = projects_table.create!(name: 'group-project', path: 'group-project',
- visibility_level: 0, namespace_id: group.id)
-
- project_authorizations_table.create!(user_id: user.id, project_id: project.id,
- access_level: 10)
- end
-
- it 'deletes authorization' do
- expect { subject }.to change { project_authorizations_table.count }.from(1).to(0)
- end
- end
-
- context 'deleted user' do
- it 'does not fail' do
- expect { described_class.new.perform([non_existing_record_id]) }.not_to raise_error
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
new file mode 100644
index 00000000000..fc4d776b8be
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210427212034 do
+ let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
+ let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:issue1) { table(:issues).create!(project_id: project1.id) }
+ let!(:issue2) { table(:issues).create!(project_id: project2.id) }
+ let!(:merge_request1) { table(:merge_requests).create!(target_project_id: project1.id, source_branch: 'master', target_branch: 'feature') }
+ let!(:merge_request2) { table(:merge_requests).create!(target_project_id: project2.id, source_branch: 'master', target_branch: 'feature') }
+ let!(:timelog1) { table(:timelogs).create!(issue_id: issue1.id, time_spent: 60) }
+ let!(:timelog2) { table(:timelogs).create!(issue_id: issue1.id, time_spent: 60) }
+ let!(:timelog3) { table(:timelogs).create!(issue_id: issue2.id, time_spent: 60) }
+ let!(:timelog4) { table(:timelogs).create!(merge_request_id: merge_request1.id, time_spent: 600) }
+ let!(:timelog5) { table(:timelogs).create!(merge_request_id: merge_request1.id, time_spent: 600) }
+ let!(:timelog6) { table(:timelogs).create!(merge_request_id: merge_request2.id, time_spent: 600) }
+ let!(:timelog7) { table(:timelogs).create!(issue_id: issue2.id, time_spent: 60, project_id: project1.id) }
+ let!(:timelog8) { table(:timelogs).create!(merge_request_id: merge_request2.id, time_spent: 600, project_id: project1.id) }
+
+ describe '#perform' do
+ context 'when timelogs belong to issues' do
+ it 'sets correct project_id' do
+ subject.perform(timelog1.id, timelog3.id)
+
+ expect(timelog1.reload.project_id).to eq(issue1.project_id)
+ expect(timelog2.reload.project_id).to eq(issue1.project_id)
+ expect(timelog3.reload.project_id).to eq(issue2.project_id)
+ end
+ end
+
+ context 'when timelogs belong to merge requests' do
+ it 'sets correct project ids' do
+ subject.perform(timelog4.id, timelog6.id)
+
+ expect(timelog4.reload.project_id).to eq(merge_request1.target_project_id)
+ expect(timelog5.reload.project_id).to eq(merge_request1.target_project_id)
+ expect(timelog6.reload.project_id).to eq(merge_request2.target_project_id)
+ end
+ end
+
+ context 'when timelogs already belong to projects' do
+ it 'does not update the project id' do
+ subject.perform(timelog7.id, timelog8.id)
+
+ expect(timelog7.reload.project_id).to eq(project1.id)
+ expect(timelog8.reload.project_id).to eq(project1.id)
+ end
+ end
+ end
+end
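
Reviewer note: the migration exercised above backfills timelogs.project_id from the parent issue or merge request and leaves rows that already have a project untouched. A minimal sketch consistent with these expectations (inferred, not the shipped class) is:

    # Inferred sketch only, not the actual Gitlab::BackgroundMigration::UpdateTimelogsProjectId.
    # Rows that already carry a project_id are skipped, matching the last context above.
    def perform(start_id, stop_id)
      connection = ActiveRecord::Base.connection

      connection.execute(<<~SQL)
        UPDATE timelogs
        SET project_id = issues.project_id
        FROM issues
        WHERE issues.id = timelogs.issue_id
          AND timelogs.project_id IS NULL
          AND timelogs.id BETWEEN #{start_id} AND #{stop_id}
      SQL

      connection.execute(<<~SQL)
        UPDATE timelogs
        SET project_id = merge_requests.target_project_id
        FROM merge_requests
        WHERE merge_requests.id = timelogs.merge_request_id
          AND timelogs.project_id IS NULL
          AND timelogs.id BETWEEN #{start_id} AND #{stop_id}
      SQL
    end
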
diff --git a/spec/lib/gitlab/cache_spec.rb b/spec/lib/gitlab/cache_spec.rb
new file mode 100644
index 00000000000..5b1034a77a3
--- /dev/null
+++ b/spec/lib/gitlab/cache_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Cache, :request_store do
+ describe "#fetch_once" do
+ subject do
+ proc do
+ described_class.fetch_once([:test, "key"], expires_in: 10.minutes) do
+ "return value"
+ end
+ end
+ end
+
+ it "fetches from the cache once" do
+ expect(Rails.cache).to receive(:fetch).once.with([:test, "key"], expires_in: 10.minutes).and_call_original
+
+ expect(subject.call).to eq("return value")
+ expect(subject.call).to eq("return value")
+ end
+
+ it "always returns from the request store" do
+ expect(Gitlab::SafeRequestStore).to receive(:fetch).twice.with([:test, "key"]).and_call_original
+
+ expect(subject.call).to eq("return value")
+ expect(subject.call).to eq("return value")
+ end
+ end
+end
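
Reviewer note: the two examples above pin down a per-request memoization layered over Rails.cache. A hedged sketch of a fetch_once helper consistent with them (assumed shape, not necessarily the shipped implementation):

    # Sketch only; relies on Gitlab::SafeRequestStore and Rails.cache exactly as the spec stubs them.
    module Gitlab
      module Cache
        # Reads through the request store first, so Rails.cache is hit at most
        # once per request for a given key; the block supplies the value on a miss.
        def self.fetch_once(key, **cache_options, &block)
          Gitlab::SafeRequestStore.fetch(key) do
            Rails.cache.fetch(key, **cache_options, &block)
          end
        end
      end
    end
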
diff --git a/spec/lib/gitlab/chat/responder_spec.rb b/spec/lib/gitlab/chat/responder_spec.rb
index 6603dbe8d52..803f30da9e7 100644
--- a/spec/lib/gitlab/chat/responder_spec.rb
+++ b/spec/lib/gitlab/chat/responder_spec.rb
@@ -16,8 +16,8 @@ RSpec.describe Gitlab::Chat::Responder do
it 'returns the responder for the build' do
pipeline = create(:ci_pipeline)
build = create(:ci_build, pipeline: pipeline)
- service = double(:service, chat_responder: Gitlab::Chat::Responder::Slack)
- chat_name = double(:chat_name, service: service)
+ integration = double(:integration, chat_responder: Gitlab::Chat::Responder::Slack)
+ chat_name = double(:chat_name, integration: integration)
chat_data = double(:chat_data, chat_name: chat_name)
allow(pipeline)
diff --git a/spec/lib/gitlab/ci/build/cache_spec.rb b/spec/lib/gitlab/ci/build/cache_spec.rb
index 9188045988b..7477aedb994 100644
--- a/spec/lib/gitlab/ci/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/build/cache_spec.rb
@@ -4,11 +4,23 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Build::Cache do
describe '.initialize' do
- context 'when the multiple cache feature flag is disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
+ context 'when the cache is an array' do
+ it 'instantiates an array of cache seeds' do
+ cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
+ pipeline = double(::Ci::Pipeline)
+ cache_seed_a = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ cache_seed_b = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
+ allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b)
+
+ cache = described_class.new(cache_config, pipeline)
+
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-a' })
+ expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-b' })
+ expect(cache.instance_variable_get(:@cache)).to eq([cache_seed_a, cache_seed_b])
end
+ end
+ context 'when the cache is a hash' do
it 'instantiates a cache seed' do
cache_config = { key: 'key-a' }
pipeline = double(::Ci::Pipeline)
@@ -18,87 +30,35 @@ RSpec.describe Gitlab::Ci::Build::Cache do
cache = described_class.new(cache_config, pipeline)
expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config)
- expect(cache.instance_variable_get(:@cache)).to eq(cache_seed)
- end
- end
-
- context 'when the multiple cache feature flag is enabled' do
- context 'when the cache is an array' do
- it 'instantiates an array of cache seeds' do
- cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
- pipeline = double(::Ci::Pipeline)
- cache_seed_a = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- cache_seed_b = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed_a, cache_seed_b)
-
- cache = described_class.new(cache_config, pipeline)
-
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-a' })
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, { key: 'key-b' })
- expect(cache.instance_variable_get(:@cache)).to eq([cache_seed_a, cache_seed_b])
- end
- end
-
- context 'when the cache is a hash' do
- it 'instantiates a cache seed' do
- cache_config = { key: 'key-a' }
- pipeline = double(::Ci::Pipeline)
- cache_seed = double(Gitlab::Ci::Pipeline::Seed::Build::Cache)
- allow(Gitlab::Ci::Pipeline::Seed::Build::Cache).to receive(:new).and_return(cache_seed)
-
- cache = described_class.new(cache_config, pipeline)
-
- expect(Gitlab::Ci::Pipeline::Seed::Build::Cache).to have_received(:new).with(pipeline, cache_config)
- expect(cache.instance_variable_get(:@cache)).to eq([cache_seed])
- end
+ expect(cache.instance_variable_get(:@cache)).to eq([cache_seed])
end
end
end
describe '#cache_attributes' do
- context 'when the multiple cache feature flag is disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- it "returns the cache seed's build attributes" do
- cache_config = { key: 'key-a' }
+ context 'when there are no caches' do
+ it 'returns an empty hash' do
+ cache_config = []
pipeline = double(::Ci::Pipeline)
cache = described_class.new(cache_config, pipeline)
attributes = cache.cache_attributes
- expect(attributes).to eq({
- options: { cache: { key: 'key-a' } }
- })
+ expect(attributes).to eq({})
end
end
- context 'when the multiple cache feature flag is enabled' do
- context 'when there are no caches' do
- it 'returns an empty hash' do
- cache_config = []
- pipeline = double(::Ci::Pipeline)
- cache = described_class.new(cache_config, pipeline)
-
- attributes = cache.cache_attributes
-
- expect(attributes).to eq({})
- end
- end
-
- context 'when there are caches' do
- it 'returns the structured attributes for the caches' do
- cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
- pipeline = double(::Ci::Pipeline)
- cache = described_class.new(cache_config, pipeline)
+ context 'when there are caches' do
+ it 'returns the structured attributes for the caches' do
+ cache_config = [{ key: 'key-a' }, { key: 'key-b' }]
+ pipeline = double(::Ci::Pipeline)
+ cache = described_class.new(cache_config, pipeline)
- attributes = cache.cache_attributes
+ attributes = cache.cache_attributes
- expect(attributes).to eq({
- options: { cache: cache_config }
- })
- end
+ expect(attributes).to eq({
+ options: { cache: cache_config }
+ })
end
end
end
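
Reviewer note: with the multiple_cache_per_job flag branches removed, the class always normalizes its input into an array of cache seeds. A rough sketch of the shape these examples imply (names outside the spec are assumptions, not the real Gitlab::Ci::Build::Cache):

    # Illustrative only; mirrors the expectations above rather than the production class.
    class CacheSketch
      def initialize(cache_config, pipeline)
        cache_config = Array.wrap(cache_config) # ActiveSupport; a single hash becomes a one-element array

        @cache = cache_config.map do |cache|
          Gitlab::Ci::Pipeline::Seed::Build::Cache.new(pipeline, cache)
        end
      end

      def cache_attributes
        return {} if @cache.empty?

        { options: { cache: @cache.map(&:attributes) } }
      end
    end
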
diff --git a/spec/lib/gitlab/ci/build/policy/changes_spec.rb b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
index 016730e01cd..5d5a212b9a5 100644
--- a/spec/lib/gitlab/ci/build/policy/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/policy/changes_spec.rb
@@ -120,6 +120,7 @@ RSpec.describe Gitlab::Ci::Build::Policy::Changes do
context 'when branch is created' do
let_it_be(:project) { create(:project, :repository) }
+
let(:pipeline) do
create(:ci_empty_pipeline, project: project,
ref: 'feature',
diff --git a/spec/lib/gitlab/ci/build/releaser_spec.rb b/spec/lib/gitlab/ci/build/releaser_spec.rb
index fa5e90674a6..435f70e9ac5 100644
--- a/spec/lib/gitlab/ci/build/releaser_spec.rb
+++ b/spec/lib/gitlab/ci/build/releaser_spec.rb
@@ -15,18 +15,25 @@ RSpec.describe Gitlab::Ci::Build::Releaser do
tag_name: 'release-$CI_COMMIT_SHA',
ref: '$CI_COMMIT_SHA',
milestones: %w[m1 m2 m3],
- released_at: '2020-07-15T08:00:00Z'
+ released_at: '2020-07-15T08:00:00Z',
+ assets: {
+ links: [
+ { name: 'asset1', url: 'https://example.com/assets/1', link_type: 'other', filepath: '/pretty/asset/1' },
+ { name: 'asset2', url: 'https://example.com/assets/2' }
+ ]
+ }
}
}
end
it 'generates the script' do
- expect(subject).to eq(['release-cli create --name "Release $CI_COMMIT_SHA" --description "Created using the release-cli $EXTRA_DESCRIPTION" --tag-name "release-$CI_COMMIT_SHA" --ref "$CI_COMMIT_SHA" --released-at "2020-07-15T08:00:00Z" --milestone "m1" --milestone "m2" --milestone "m3"'])
+ expect(subject).to eq(['release-cli create --name "Release $CI_COMMIT_SHA" --description "Created using the release-cli $EXTRA_DESCRIPTION" --tag-name "release-$CI_COMMIT_SHA" --ref "$CI_COMMIT_SHA" --released-at "2020-07-15T08:00:00Z" --milestone "m1" --milestone "m2" --milestone "m3" --assets-link "{\"name\":\"asset1\",\"url\":\"https://example.com/assets/1\",\"link_type\":\"other\",\"filepath\":\"/pretty/asset/1\"}" --assets-link "{\"name\":\"asset2\",\"url\":\"https://example.com/assets/2\"}"'])
end
end
context 'individual nodes' do
using RSpec::Parameterized::TableSyntax
+ links = { links: [{ name: 'asset1', url: 'https://example.com/assets/1', link_type: 'other', filepath: '/pretty/asset/1' }] }
where(:node_name, :node_value, :result) do
:name | 'Release $CI_COMMIT_SHA' | 'release-cli create --name "Release $CI_COMMIT_SHA"'
@@ -35,6 +42,7 @@ RSpec.describe Gitlab::Ci::Build::Releaser do
:ref | '$CI_COMMIT_SHA' | 'release-cli create --ref "$CI_COMMIT_SHA"'
:milestones | %w[m1 m2 m3] | 'release-cli create --milestone "m1" --milestone "m2" --milestone "m3"'
:released_at | '2020-07-15T08:00:00Z' | 'release-cli create --released-at "2020-07-15T08:00:00Z"'
+ :assets | links | "release-cli create --assets-link #{links[:links][0].to_json.to_json}"
end
with_them do
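
Reviewer note: the doubled JSON encoding in the expected command line above is what yields a shell-quoted --assets-link argument; in isolation:

    require 'json'

    link = { name: 'asset1', url: 'https://example.com/assets/1' }
    flag = "--assets-link #{link.to_json.to_json}"
    puts flag
    # --assets-link "{\"name\":\"asset1\",\"url\":\"https://example.com/assets/1\"}"
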
diff --git a/spec/lib/gitlab/ci/build/step_spec.rb b/spec/lib/gitlab/ci/build/step_spec.rb
index 4b8f68b9fa8..938b52c496c 100644
--- a/spec/lib/gitlab/ci/build/step_spec.rb
+++ b/spec/lib/gitlab/ci/build/step_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Gitlab::Ci::Build::Step do
let(:job) { create(:ci_build, :release_options) }
it 'returns the release-cli command line' do
- expect(subject.script).to eq(["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\""])
+ expect(subject.script).to eq(["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\" --assets-link \"{\\\"name\\\":\\\"asset1\\\",\\\"url\\\":\\\"https://example.com/assets/1\\\"}\""])
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index cec1c97085b..247f4b63910 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -7,295 +7,227 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
subject(:entry) { described_class.new(config) }
- context 'with multiple caches' do
+ describe 'validations' do
before do
entry.compose!
end
- describe '#valid?' do
- context 'with an empty hash as cache' do
- let(:config) { {} }
-
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
-
- context 'when configuration is valid with a single cache' do
- let(:config) { { key: 'key', paths: ["logs/"], untracked: true } }
-
- it 'is valid' do
- expect(entry).to be_valid
+ context 'when entry config value is correct' do
+ let(:policy) { nil }
+ let(:key) { 'some key' }
+ let(:when_config) { nil }
+
+ let(:config) do
+ {
+ key: key,
+ untracked: true,
+ paths: ['some/path/']
+ }.tap do |config|
+ config[:policy] = policy if policy
+ config[:when] = when_config if when_config
end
end
- context 'when configuration is valid with multiple caches' do
- let(:config) do
- [
- { key: 'key', paths: ["logs/"], untracked: true },
- { key: 'key2', paths: ["logs/"], untracked: true },
- { key: 'key3', paths: ["logs/"], untracked: true }
- ]
+ describe '#value' do
+ shared_examples 'hash key value' do
+ it 'returns hash value' do
+ expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
+ end
end
- it 'is valid' do
- expect(entry).to be_valid
- end
- end
+ it_behaves_like 'hash key value'
- context 'when configuration is not a Hash or Array' do
- let(:config) { 'invalid' }
+ context 'with files' do
+ let(:key) { { files: %w[a-file other-file] } }
- it 'is invalid' do
- expect(entry).not_to be_valid
+ it_behaves_like 'hash key value'
end
- end
- context 'when entry values contain more than four caches' do
- let(:config) do
- [
- { key: 'key', paths: ["logs/"], untracked: true },
- { key: 'key2', paths: ["logs/"], untracked: true },
- { key: 'key3', paths: ["logs/"], untracked: true },
- { key: 'key4', paths: ["logs/"], untracked: true },
- { key: 'key5', paths: ["logs/"], untracked: true }
- ]
- end
+ context 'with files and prefix' do
+ let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
- it 'is invalid' do
- expect(entry.errors).to eq(["caches config no more than 4 caches can be created"])
- expect(entry).not_to be_valid
+ it_behaves_like 'hash key value'
end
- end
- end
- end
- context 'with a single cache' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
- describe 'validations' do
- before do
- entry.compose!
- end
-
- context 'when entry config value is correct' do
- let(:policy) { nil }
- let(:key) { 'some key' }
- let(:when_config) { nil }
+ context 'with prefix' do
+ let(:key) { { prefix: 'prefix-value' } }
- let(:config) do
- {
- key: key,
- untracked: true,
- paths: ['some/path/']
- }.tap do |config|
- config[:policy] = policy if policy
- config[:when] = when_config if when_config
+ it 'key is nil' do
+ expect(entry.value).to match(a_hash_including(key: nil))
end
end
- describe '#value' do
- shared_examples 'hash key value' do
- it 'returns hash value' do
- expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
- end
- end
-
- it_behaves_like 'hash key value'
-
- context 'with files' do
- let(:key) { { files: %w[a-file other-file] } }
-
- it_behaves_like 'hash key value'
- end
-
- context 'with files and prefix' do
- let(:key) { { files: %w[a-file other-file], prefix: 'prefix-value' } }
-
- it_behaves_like 'hash key value'
+ context 'with `policy`' do
+ where(:policy, :result) do
+ 'pull-push' | 'pull-push'
+ 'push' | 'push'
+ 'pull' | 'pull'
+ 'unknown' | 'unknown' # invalid
end
- context 'with prefix' do
- let(:key) { { prefix: 'prefix-value' } }
-
- it 'key is nil' do
- expect(entry.value).to match(a_hash_including(key: nil))
- end
+ with_them do
+ it { expect(entry.value).to include(policy: result) }
end
+ end
- context 'with `policy`' do
- where(:policy, :result) do
- 'pull-push' | 'pull-push'
- 'push' | 'push'
- 'pull' | 'pull'
- 'unknown' | 'unknown' # invalid
- end
-
- with_them do
- it { expect(entry.value).to include(policy: result) }
- end
+ context 'without `policy`' do
+ it 'assigns policy to default' do
+ expect(entry.value).to include(policy: 'pull-push')
end
+ end
- context 'without `policy`' do
- it 'assigns policy to default' do
- expect(entry.value).to include(policy: 'pull-push')
- end
+ context 'with `when`' do
+ where(:when_config, :result) do
+ 'on_success' | 'on_success'
+ 'on_failure' | 'on_failure'
+ 'always' | 'always'
+ 'unknown' | 'unknown' # invalid
end
- context 'with `when`' do
- where(:when_config, :result) do
- 'on_success' | 'on_success'
- 'on_failure' | 'on_failure'
- 'always' | 'always'
- 'unknown' | 'unknown' # invalid
- end
-
- with_them do
- it { expect(entry.value).to include(when: result) }
- end
+ with_them do
+ it { expect(entry.value).to include(when: result) }
end
+ end
- context 'without `when`' do
- it 'assigns when to default' do
- expect(entry.value).to include(when: 'on_success')
- end
+ context 'without `when`' do
+ it 'assigns when to default' do
+ expect(entry.value).to include(when: 'on_success')
end
end
+ end
- describe '#valid?' do
- it { is_expected.to be_valid }
+ describe '#valid?' do
+ it { is_expected.to be_valid }
- context 'with files' do
- let(:key) { { files: %w[a-file other-file] } }
+ context 'with files' do
+ let(:key) { { files: %w[a-file other-file] } }
- it { is_expected.to be_valid }
- end
+ it { is_expected.to be_valid }
end
+ end
- context 'with `policy`' do
- where(:policy, :valid) do
- 'pull-push' | true
- 'push' | true
- 'pull' | true
- 'unknown' | false
- end
+ context 'with `policy`' do
+ where(:policy, :valid) do
+ 'pull-push' | true
+ 'push' | true
+ 'pull' | true
+ 'unknown' | false
+ end
- with_them do
- it 'returns expected validity' do
- expect(entry.valid?).to eq(valid)
- end
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
end
end
+ end
- context 'with `when`' do
- where(:when_config, :valid) do
- 'on_success' | true
- 'on_failure' | true
- 'always' | true
- 'unknown' | false
- end
+ context 'with `when`' do
+ where(:when_config, :valid) do
+ 'on_success' | true
+ 'on_failure' | true
+ 'always' | true
+ 'unknown' | false
+ end
- with_them do
- it 'returns expected validity' do
- expect(entry.valid?).to eq(valid)
- end
+ with_them do
+ it 'returns expected validity' do
+ expect(entry.valid?).to eq(valid)
end
end
+ end
- context 'with key missing' do
- let(:config) do
- { untracked: true,
- paths: ['some/path/'] }
- end
+ context 'with key missing' do
+ let(:config) do
+ { untracked: true,
+ paths: ['some/path/'] }
+ end
- describe '#value' do
- it 'sets key with the default' do
- expect(entry.value[:key])
- .to eq(Gitlab::Ci::Config::Entry::Key.default)
- end
+ describe '#value' do
+ it 'sets key with the default' do
+ expect(entry.value[:key])
+ .to eq(Gitlab::Ci::Config::Entry::Key.default)
end
end
end
+ end
- context 'when entry value is not correct' do
- describe '#errors' do
- subject { entry.errors }
+ context 'when entry value is not correct' do
+ describe '#errors' do
+ subject { entry.errors }
- context 'when is not a hash' do
- let(:config) { 'ls' }
+ context 'when is not a hash' do
+ let(:config) { 'ls' }
- it 'reports errors with config value' do
- is_expected.to include 'cache config should be a hash'
- end
+ it 'reports errors with config value' do
+ is_expected.to include 'cache config should be a hash'
end
+ end
- context 'when policy is unknown' do
- let(:config) { { policy: 'unknown' } }
+ context 'when policy is unknown' do
+ let(:config) { { policy: 'unknown' } }
- it 'reports error' do
- is_expected.to include('cache policy should be pull-push, push, or pull')
- end
+ it 'reports error' do
+ is_expected.to include('cache policy should be pull-push, push, or pull')
end
+ end
- context 'when `when` is unknown' do
- let(:config) { { when: 'unknown' } }
+ context 'when `when` is unknown' do
+ let(:config) { { when: 'unknown' } }
- it 'reports error' do
- is_expected.to include('cache when should be on_success, on_failure or always')
- end
+ it 'reports error' do
+ is_expected.to include('cache when should be on_success, on_failure or always')
end
+ end
- context 'when descendants are invalid' do
- context 'with invalid keys' do
- let(:config) { { key: 1 } }
-
- it 'reports error with descendants' do
- is_expected.to include 'key should be a hash, a string or a symbol'
- end
- end
-
- context 'with empty key' do
- let(:config) { { key: {} } }
+ context 'when descendants are invalid' do
+ context 'with invalid keys' do
+ let(:config) { { key: 1 } }
- it 'reports error with descendants' do
- is_expected.to include 'key config missing required keys: files'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key should be a hash, a string or a symbol'
end
+ end
- context 'with invalid files' do
- let(:config) { { key: { files: 'a-file' } } }
+ context 'with empty key' do
+ let(:config) { { key: {} } }
- it 'reports error with descendants' do
- is_expected.to include 'key:files config should be an array of strings'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
end
+ end
- context 'with prefix without files' do
- let(:config) { { key: { prefix: 'a-prefix' } } }
+ context 'with invalid files' do
+ let(:config) { { key: { files: 'a-file' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key config missing required keys: files'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key:files config should be an array of strings'
end
+ end
- context 'when there is an unknown key present' do
- let(:config) { { key: { unknown: 'a-file' } } }
+ context 'with prefix without files' do
+ let(:config) { { key: { prefix: 'a-prefix' } } }
- it 'reports error with descendants' do
- is_expected.to include 'key config contains unknown keys: unknown'
- end
+ it 'reports error with descendants' do
+ is_expected.to include 'key config missing required keys: files'
end
end
context 'when there is an unknown key present' do
- let(:config) { { invalid: true } }
+ let(:config) { { key: { unknown: 'a-file' } } }
it 'reports error with descendants' do
- is_expected.to include 'cache config contains unknown keys: invalid'
+ is_expected.to include 'key config contains unknown keys: unknown'
end
end
end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { invalid: true } }
+
+ it 'reports error with descendants' do
+ is_expected.to include 'cache config contains unknown keys: invalid'
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/caches_spec.rb b/spec/lib/gitlab/ci/config/entry/caches_spec.rb
new file mode 100644
index 00000000000..047cef53b96
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/caches_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Caches do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:entry) { described_class.new(config) }
+
+ before do
+ entry.compose!
+ end
+
+ describe '#valid?' do
+ context 'with an empty hash as cache' do
+ let(:config) { {} }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is valid with a single cache' do
+ let(:config) { { key: 'key', paths: ["logs/"], untracked: true } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is valid with multiple caches' do
+ let(:config) do
+ [
+ { key: 'key', paths: ["logs/"], untracked: true },
+ { key: 'key2', paths: ["logs/"], untracked: true },
+ { key: 'key3', paths: ["logs/"], untracked: true }
+ ]
+ end
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when configuration is not a Hash or Array' do
+ let(:config) { 'invalid' }
+
+ it 'is invalid' do
+ expect(entry).not_to be_valid
+ end
+ end
+
+ context 'when entry values contain more than four caches' do
+ let(:config) do
+ [
+ { key: 'key', paths: ["logs/"], untracked: true },
+ { key: 'key2', paths: ["logs/"], untracked: true },
+ { key: 'key3', paths: ["logs/"], untracked: true },
+ { key: 'key4', paths: ["logs/"], untracked: true },
+ { key: 'key5', paths: ["logs/"], untracked: true }
+ ]
+ end
+
+ it 'is invalid' do
+ expect(entry.errors).to eq(["caches config no more than 4 caches can be created"])
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+end
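
Reviewer note: these examples cap a job at four cache definitions and accept either a single hash or an array of hashes. Composing the entry directly (usage inferred from the spec above, not from documentation):

    # Usage sketch based on the examples above.
    entry = Gitlab::Ci::Config::Entry::Caches.new(
      [
        { key: 'gems', paths: ['vendor/ruby'] },
        { key: 'yarn', paths: ['node_modules/'] }
      ]
    )
    entry.compose!
    entry.valid? # => true for up to four caches; a fifth renders the entry invalid
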
diff --git a/spec/lib/gitlab/ci/config/entry/default_spec.rb b/spec/lib/gitlab/ci/config/entry/default_spec.rb
index 6e46d02a96e..5613b0f09d1 100644
--- a/spec/lib/gitlab/ci/config/entry/default_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/default_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Default do
+ let(:config) { {} }
let(:entry) { described_class.new(config) }
it_behaves_like 'with inheritable CI config' do
diff --git a/spec/lib/gitlab/ci/config/entry/hidden_spec.rb b/spec/lib/gitlab/ci/config/entry/hidden_spec.rb
index 090ef67f39d..7a2ecee0dae 100644
--- a/spec/lib/gitlab/ci/config/entry/hidden_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/hidden_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Hidden do
end
describe '.new' do
+ let(:config) { {} }
let(:entry) { described_class.new(config) }
describe 'validations' do
@@ -41,8 +42,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Hidden do
context 'when entry value is not correct' do
context 'when config is empty' do
- let(:config) { {} }
-
describe '#valid' do
it 'is invalid' do
expect(entry).not_to be_valid
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index ffcd029172a..1d23ab0c2c7 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -556,42 +556,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- context 'when job config overrides default config' do
- before do
- entry.compose!(deps)
- end
-
- let(:config) do
- { script: 'rspec', image: 'some_image', cache: { key: 'test' } }
- end
-
- it 'overrides default config' do
- expect(entry[:image].value).to eq(name: 'some_image')
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
- end
- end
-
- context 'when job config does not override default config' do
- before do
- allow(default).to receive('[]').with(:image).and_return(specified)
-
- entry.compose!(deps)
- end
-
- let(:config) { { script: 'ls', cache: { key: 'test' } } }
-
- it 'uses config from default entry' do
- expect(entry[:image].value).to eq 'specified'
- expect(entry[:cache].value).to eq(key: 'test', policy: 'pull-push', when: 'on_success')
- end
- end
- end
-
context 'with workflow rules' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb b/spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb
index 53809d2d549..0ac8d01b8e4 100644
--- a/spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/kubernetes_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Kubernetes do
+ let(:config) { Hash(namespace: 'namespace') }
+
subject { described_class.new(config) }
describe 'attributes' do
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 041eb748fc9..31e3545e8d8 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -175,68 +175,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
)
end
end
-
- context 'with multuple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#jobs_value' do
- it 'returns jobs configuration' do
- expect(root.jobs_value.keys).to eq([:rspec, :spinach, :release])
- expect(root.jobs_value[:rspec]).to eq(
- { name: :rspec,
- script: %w[rspec ls],
- before_script: %w(ls pwd),
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- expect(root.jobs_value[:spinach]).to eq(
- { name: :spinach,
- before_script: [],
- script: %w[spinach],
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- expect(root.jobs_value[:release]).to eq(
- { name: :release,
- stage: 'release',
- before_script: [],
- script: ["make changelog | tee release_changelog.txt"],
- release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
- image: { name: "ruby:2.7" },
- services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
- cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' },
- only: { refs: %w(branches tags) },
- variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
- job_variables: { 'VAR' => 'job' },
- root_variables_inheritance: true,
- after_script: [],
- ignore: false,
- scheduling_type: :stage }
- )
- end
- end
- end
end
end
@@ -255,56 +193,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
spinach: { before_script: [], variables: { VAR: 'job' }, script: 'spinach' } }
end
- context 'with multiple_cache_per_job FF disabled' do
- context 'when composed' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#errors' do
- it 'has no errors' do
- expect(root.errors).to be_empty
- end
- end
-
- describe '#jobs_value' do
- it 'returns jobs configuration' do
- expect(root.jobs_value).to eq(
- rspec: { name: :rspec,
- script: %w[rspec ls],
- before_script: %w(ls pwd),
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'root' },
- job_variables: {},
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage },
- spinach: { name: :spinach,
- before_script: [],
- script: %w[spinach],
- image: { name: 'ruby:2.7' },
- services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
- stage: 'test',
- cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
- variables: { 'VAR' => 'job' },
- job_variables: { 'VAR' => 'job' },
- root_variables_inheritance: true,
- ignore: false,
- after_script: ['make clean'],
- only: { refs: %w[branches tags] },
- scheduling_type: :stage }
- )
- end
- end
- end
- end
-
context 'when composed' do
before do
root.compose!
@@ -390,19 +278,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
expect(root.cache_value).to eq([key: 'a', policy: 'pull-push', when: 'on_success'])
end
end
-
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- root.compose!
- end
-
- describe '#cache_value' do
- it 'returns correct cache definition' do
- expect(root.cache_value).to eq(key: 'a', policy: 'pull-push', when: 'on_success')
- end
- end
- end
end
context 'when variables resembles script-type job' do
@@ -525,7 +400,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
context 'when entry exists' do
it 'returns correct entry' do
expect(root[:cache])
- .to be_an_instance_of Gitlab::Ci::Config::Entry::Cache
+ .to be_an_instance_of Gitlab::Ci::Config::Entry::Caches
expect(root[:jobs][:rspec][:script].value).to eq ['ls']
end
end
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 7e39fae7b9b..3d1fc32a62d 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::File::Local do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:sha) { '12345' }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:params) { { local: location } }
diff --git a/spec/lib/gitlab/ci/config/external/file/project_spec.rb b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
index 0e8851ba915..c53914c5772 100644
--- a/spec/lib/gitlab/ci/config/external/file/project_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/project_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Project do
let_it_be(:context_project) { create(:project) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:context_user) { user }
let(:parent_pipeline) { double(:parent_pipeline) }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
diff --git a/spec/lib/gitlab/ci/config/external/file/template_spec.rb b/spec/lib/gitlab/ci/config/external/file/template_spec.rb
index ad1d93a64a1..75b22c1516c 100644
--- a/spec/lib/gitlab/ci/config/external/file/template_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/template_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::File::Template do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:context_params) { { project: project, sha: '12345', user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:template) { 'Auto-DevOps.gitlab-ci.yml' }
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index e5b008a482e..88097f3f56a 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:local_file) { '/lib/gitlab/ci/templates/non-existent-file.yml' }
let(:remote_url) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:template_file) { 'Auto-DevOps.gitlab-ci.yml' }
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index d657c3e943f..e032d372ecb 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:another_project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:sha) { '12345' }
let(:context_params) { { project: project, sha: sha, user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
index 53dea1d0d19..6019318a401 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/build_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
+
let(:pipeline) { Ci::Pipeline.new }
let(:variables_attributes) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
index 1d17244e519..2727f2603cd 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:prev_pipeline) { create(:ci_pipeline, project: project) }
let(:new_commit) { create(:commit, project: project) }
let(:pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
index 23cdec61bb3..499dc3554a3 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::Deployments do
it 'adds an informative error to the pipeline' do
perform
- expect(pipeline.errors.messages).to include(base: ['Pipeline has too many deployments! Requested 2, but the limit is 1.'])
+ expect(pipeline.errors.added?(:base, 'Pipeline has too many deployments! Requested 2, but the limit is 1.')).to be true
end
it 'increments the error metric' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index 264076859cb..2e537f40692 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -218,15 +218,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
context 'N+1 queries' do
- it 'avoids N+1 queries when calculating variables of jobs' do
+ it 'avoids N+1 queries when calculating variables of jobs', :use_sql_query_cache do
+ warm_up_pipeline, warm_up_command = prepare_pipeline1
+ perform_seed(warm_up_pipeline, warm_up_command)
+
pipeline1, command1 = prepare_pipeline1
pipeline2, command2 = prepare_pipeline2
- control = ActiveRecord::QueryRecorder.new do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
perform_seed(pipeline1, command1)
end
- expect { perform_seed(pipeline2, command2) }.not_to exceed_query_limit(
+ expect { perform_seed(pipeline2, command2) }.not_to exceed_all_query_limit(
control.count + expected_extra_queries
)
end
@@ -259,15 +262,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
def expected_extra_queries
extra_jobs = 2
- non_handled_sql_queries = 3
-
- # 1. Ci::Build Load () SELECT "ci_builds".* FROM "ci_builds"
- # WHERE "ci_builds"."type" = 'Ci::Build'
- # AND "ci_builds"."commit_id" IS NULL
- # AND ("ci_builds"."retried" = FALSE OR "ci_builds"."retried" IS NULL)
- # AND (stage_idx < 1)
- # 2. Ci::InstanceVariable Load => `Ci::InstanceVariable#cached_data` => already cached with `fetch_memory_cache`
- # 3. Ci::Variable Load => `Project#ci_variables_for` => already cached with `Gitlab::SafeRequestStore`
+ non_handled_sql_queries = 2
+
+ # 1. Ci::InstanceVariable Load => `Ci::InstanceVariable#cached_data` => already cached with `fetch_memory_cache`
+ # 2. Ci::Variable Load => `Project#ci_variables_for` => already cached with `Gitlab::SafeRequestStore`
extra_jobs * non_handled_sql_queries
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
index e4768f2ef0d..27af8d379ef 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/skip_spec.rb
@@ -21,17 +21,25 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Skip do
before do
allow(pipeline).to receive(:git_commit_message)
.and_return('commit message [ci skip]')
-
- step.perform!
end
it 'breaks the chain' do
+ step.perform!
+
expect(step.break?).to be true
end
it 'skips the pipeline' do
+ step.perform!
+
expect(pipeline.reload).to be_skipped
end
+
+ it 'calls ensure_project_iid explicitly' do
+ expect(pipeline).to receive(:ensure_project_iid!)
+
+ step.perform!
+ end
end
context 'when pipeline has not been skipped' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb
index cd868a57bbc..8e0b032e68c 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/template_usage_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::TemplateUsage do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:command) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index caf3a053c4e..e3061f8095b 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:pipeline) { build(:ci_empty_pipeline, user: user, project: project) }
let!(:step) { described_class.new(pipeline, command) }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
index 7eefb4d7876..feedef18dcd 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:pipeline) { build_stubbed(:ci_pipeline) }
let!(:step) { described_class.new(pipeline, command) }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
index 773cb61b946..910c12389c3 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
@@ -9,253 +9,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
let(:processor) { described_class.new(pipeline, config) }
- context 'with multiple_cache_per_job ff disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- describe '#build_attributes' do
- subject { processor.build_attributes }
-
- context 'with cache:key' do
- let(:config) do
- {
- key: 'a-key',
- paths: ['vendor/ruby']
- }
- end
-
- it { is_expected.to include(options: { cache: config }) }
- end
-
- context 'with cache:key as a symbol' do
- let(:config) do
- {
- key: :a_key,
- paths: ['vendor/ruby']
- }
- end
-
- it { is_expected.to include(options: { cache: config.merge(key: "a_key") }) }
- end
-
- context 'with cache:key:files' do
- shared_examples 'default key' do
- let(:config) do
- { key: { files: files } }
- end
-
- it 'uses default key' do
- expected = { options: { cache: { key: 'default' } } }
-
- is_expected.to include(expected)
- end
- end
-
- shared_examples 'version and gemfile files' do
- let(:config) do
- {
- key: {
- files: files
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'builds a string key' do
- expected = {
- options: {
- cache: {
- key: '703ecc8fef1635427a1f86a8a1a308831c122392',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with existing files' do
- let(:files) { ['VERSION', 'Gemfile.zip'] }
-
- it_behaves_like 'version and gemfile files'
- end
-
- context 'with files starting with ./' do
- let(:files) { ['Gemfile.zip', './VERSION'] }
-
- it_behaves_like 'version and gemfile files'
- end
-
- context 'with files ending with /' do
- let(:files) { ['Gemfile.zip/'] }
-
- it_behaves_like 'default key'
- end
-
- context 'with new line in filenames' do
- let(:files) { ["Gemfile.zip\nVERSION"] }
-
- it_behaves_like 'default key'
- end
-
- context 'with missing files' do
- let(:files) { ['project-gemfile.lock', ''] }
-
- it_behaves_like 'default key'
- end
-
- context 'with directories' do
- shared_examples 'foo/bar directory key' do
- let(:config) do
- {
- key: {
- files: files
- }
- }
- end
-
- it 'builds a string key' do
- expected = {
- options: {
- cache: { key: '74bf43fb1090f161bdd4e265802775dbda2f03d1' }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with directory' do
- let(:files) { ['foo/bar'] }
-
- it_behaves_like 'foo/bar directory key'
- end
-
- context 'with directory ending in slash' do
- let(:files) { ['foo/bar/'] }
-
- it_behaves_like 'foo/bar directory key'
- end
-
- context 'with directories ending in slash star' do
- let(:files) { ['foo/bar/*'] }
-
- it_behaves_like 'foo/bar directory key'
- end
- end
- end
-
- context 'with cache:key:prefix' do
- context 'without files' do
- let(:config) do
- {
- key: {
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix to default key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-default',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with existing files' do
- let(:config) do
- {
- key: {
- files: ['VERSION', 'Gemfile.zip'],
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-703ecc8fef1635427a1f86a8a1a308831c122392',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
-
- context 'with missing files' do
- let(:config) do
- {
- key: {
- files: ['project-gemfile.lock', ''],
- prefix: 'a-prefix'
- },
- paths: ['vendor/ruby']
- }
- end
-
- it 'adds prefix to default key' do
- expected = {
- options: {
- cache: {
- key: 'a-prefix-default',
- paths: ['vendor/ruby']
- }
- }
- }
-
- is_expected.to include(expected)
- end
- end
- end
-
- context 'with all cache option keys' do
- let(:config) do
- {
- key: 'a-key',
- paths: ['vendor/ruby'],
- untracked: true,
- policy: 'push',
- when: 'on_success'
- }
- end
-
- it { is_expected.to include(options: { cache: config }) }
- end
-
- context 'with unknown cache option keys' do
- let(:config) do
- {
- key: 'a-key',
- unknown_key: true
- }
- end
-
- it { expect { subject }.to raise_error(ArgumentError, /unknown_key/) }
- end
-
- context 'with empty config' do
- let(:config) { {} }
-
- it { is_expected.to include(options: {}) }
- end
- end
- end
-
describe '#attributes' do
subject { processor.attributes }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index f97935feb86..058fb25807d 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:head_sha) { project.repository.head_commit.id }
+
let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:root_variables) { [] }
let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) }
@@ -89,91 +90,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
-
- context 'with cache:key' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: 'a-value'
- }
- }
- end
-
- it { is_expected.to include(options: { cache: { key: 'a-value' } }) }
- end
-
- context 'with cache:key:files' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- files: ['VERSION']
- }
- }
- }
- end
-
- it 'includes cache options' do
- cache_options = {
- options: {
- cache: { key: 'f155568ad0933d8358f66b846133614f76dd0ca4' }
- }
- }
-
- is_expected.to include(cache_options)
- end
- end
-
- context 'with cache:key:prefix' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- prefix: 'something'
- }
- }
- }
- end
-
- it { is_expected.to include(options: { cache: { key: 'something-default' } }) }
- end
-
- context 'with cache:key:files and prefix' do
- let(:attributes) do
- {
- name: 'rspec',
- ref: 'master',
- cache: {
- key: {
- files: ['VERSION'],
- prefix: 'something'
- }
- }
- }
- end
-
- it 'includes cache options' do
- cache_options = {
- options: {
- cache: { key: 'something-f155568ad0933d8358f66b846133614f76dd0ca4' }
- }
- }
-
- is_expected.to include(cache_options)
- end
- end
- end
-
context 'with cache:key' do
let(:attributes) do
{
diff --git a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
index 1f38c7aec63..9f7281fb714 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Deployment do
let_it_be(:project, refind: true) { create(:project, :repository) }
+
let(:pipeline) do
create(:ci_pipeline, project: project,
sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0')
diff --git a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
index 99196d393c6..175b12637e6 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Environment do
let_it_be(:project) { create(:project) }
+
let(:job) { build(:ci_build, project: project) }
let(:seed) { described_class.new(job) }
let(:attributes) { {} }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb
index b7260599de2..9bd0e014873 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Processable::ResourceGroup do
let_it_be(:project) { create(:project) }
+
let(:job) { build(:ci_build, project: project) }
let(:seed) { described_class.new(job, resource_group_key) }
diff --git a/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb b/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb
index 8b177fa7fc1..73b916da2e9 100644
--- a/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_mr_diff_spec.rb
@@ -9,14 +9,11 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityMrDiff do
let(:degradation_3) { build(:codequality_degradation_3) }
describe '#initialize!' do
- subject(:report) { described_class.new(codequality_report) }
+ subject(:report) { described_class.new(new_degradations) }
context 'when quality has degradations' do
context 'with several degradations on the same line' do
- before do
- codequality_report.add_degradation(degradation_1)
- codequality_report.add_degradation(degradation_2)
- end
+ let(:new_degradations) { [degradation_1, degradation_2] }
it 'generates quality report for mr diff' do
expect(report.files).to match(
@@ -29,11 +26,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityMrDiff do
end
context 'with several degradations on several files' do
- before do
- codequality_report.add_degradation(degradation_1)
- codequality_report.add_degradation(degradation_2)
- codequality_report.add_degradation(degradation_3)
- end
+ let(:new_degradations) { [degradation_1, degradation_2, degradation_3] }
it 'returns quality report for mr diff' do
expect(report.files).to match(
@@ -50,6 +43,8 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityMrDiff do
end
context 'when quality has no degradation' do
+ let(:new_degradations) { [] }
+
it 'returns an empty hash' do
expect(report.files).to match({})
end
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
index 8378d096fcf..e289e59b281 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
let(:base_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:head_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:major_degradation) { build(:codequality_degradation, :major) }
- let(:minor_degradation) { build(:codequality_degradation, :major) }
+ let(:minor_degradation) { build(:codequality_degradation, :minor) }
let(:critical_degradation) { build(:codequality_degradation, :critical) }
let(:blocker_degradation) { build(:codequality_degradation, :blocker) }
diff --git a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
index 9ee55177ca0..21216241cfb 100644
--- a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Ci::Reports::TestFailureHistory, :aggregate_failures do
describe '#load!' do
let_it_be(:project) { create(:project) }
+
let(:failed_rspec) { create_test_case_rspec_failed }
let(:failed_java) { create_test_case_java_failed }
diff --git a/spec/lib/gitlab/ci/status/core_spec.rb b/spec/lib/gitlab/ci/status/core_spec.rb
new file mode 100644
index 00000000000..b68e4f03433
--- /dev/null
+++ b/spec/lib/gitlab/ci/status/core_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Ci::Status::Core do
+ let(:subj) { double("subject", cache_key: "foo") }
+
+ subject(:status) do
+ described_class.new(subj, double("user"))
+ end
+
+ describe "#cache_key" do
+ it "uses the subject's cache key" do
+ expect(status.cache_key).to eq(subj.cache_key)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/syntax_templates_spec.rb b/spec/lib/gitlab/ci/syntax_templates_spec.rb
deleted file mode 100644
index ce3169e17ec..00000000000
--- a/spec/lib/gitlab/ci/syntax_templates_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'ci/syntax_templates' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let(:lint) { Gitlab::Ci::Lint.new(project: project, current_user: user) }
-
- before do
- project.add_developer(user)
- end
-
- subject(:lint_result) { lint.validate(content) }
-
- Dir.glob('lib/gitlab/ci/syntax_templates/**/*.yml').each do |template|
- describe template do
- let(:content) { File.read(template) }
-
- it 'validates the template' do
- expect(lint_result).to be_valid, "got errors: #{lint_result.errors.join(', ')}"
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
index 1f278048ad5..053499344e1 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/build_gitlab_ci_yaml_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'Jobs/Build.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
index 0a76de82421..b23457315cc 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
index 25c88c161ea..1d137ef89e1 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb
@@ -208,7 +208,7 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
index b64959a9917..7fa8d906d07 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
index 03fa45fe0a1..e53d2f4f975 100644
--- a/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Verify/load_performance_testing_gitlab_ci_yaml_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe 'Verify/Load-Performance-Testing.gitlab-ci.yml' do
end
context 'on merge request' do
- let(:service) { MergeRequests::CreatePipelineService.new(project, user) }
+ let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) }
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request) }
diff --git a/spec/lib/gitlab/ci/templates/templates_spec.rb b/spec/lib/gitlab/ci/templates/templates_spec.rb
index 768256ee6b3..56443e611e8 100644
--- a/spec/lib/gitlab/ci/templates/templates_spec.rb
+++ b/spec/lib/gitlab/ci/templates/templates_spec.rb
@@ -22,14 +22,34 @@ RSpec.describe 'CI YML Templates' do
with_them do
let(:content) do
- <<~EOS
- include:
- - template: #{template_name}
+ if template_name == 'Security/DAST-API.gitlab-ci.yml'
+        # The DAST-API template purposely excludes a stages
+        # definition.
- concrete_build_implemented_by_a_user:
- stage: test
- script: do something
- EOS
+ <<~EOS
+ include:
+ - template: #{template_name}
+
+ stages:
+ - build
+ - test
+ - deploy
+ - dast
+
+ concrete_build_implemented_by_a_user:
+ stage: test
+ script: do something
+ EOS
+ else
+ <<~EOS
+ include:
+ - template: #{template_name}
+
+ concrete_build_implemented_by_a_user:
+ stage: test
+ script: do something
+ EOS
+ end
end
it 'is valid' do
diff --git a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
index f09e03b4d55..f878d24fe4b 100644
--- a/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
+++ b/spec/lib/gitlab/ci/trace/chunked_io_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
include ChunkedIOHelpers
let_it_be(:build) { create(:ci_build, :running) }
+
let(:chunked_io) { described_class.new(build) }
before do
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 0fe7c731f27..69f56871740 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_default: :keep do
let_it_be(:project) { create_default(:project).freeze }
let_it_be_with_reload(:build) { create(:ci_build, :success) }
+
let(:trace) { described_class.new(build) }
describe "associations" do
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index ad94dfc9160..94ab4819361 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -1419,155 +1419,6 @@ module Gitlab
end
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
- describe 'cache' do
- context 'when cache definition has unknown keys' do
- let(:config) do
- YAML.dump(
- { cache: { untracked: true, invalid: 'key' },
- rspec: { script: 'rspec' } })
- end
-
- it_behaves_like 'returns errors', 'cache config contains unknown keys: invalid'
- end
-
- it "returns cache when defined globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
- rspec: {
- script: "rspec"
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: 'key',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it "returns cache when defined in default context" do
- config = YAML.dump(
- {
- default: {
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: { files: ['file'] } }
- },
- rspec: {
- script: "rspec"
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["logs/", "binaries/"],
- untracked: true,
- key: { files: ['file'] },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache key when defined in a job' do
- config = YAML.dump({
- rspec: {
- cache: { paths: ['logs/', 'binaries/'], untracked: true, key: 'key' },
- script: 'rspec'
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: 'key',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache files' do
- config = YAML.dump(
- rspec: {
- cache: {
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'] }
- },
- script: 'rspec'
- }
- )
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'] },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it 'returns cache files with prefix' do
- config = YAML.dump(
- rspec: {
- cache: {
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'], prefix: 'prefix' }
- },
- script: 'rspec'
- }
- )
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes('test').size).to eq(1)
- expect(config_processor.stage_builds_attributes('test').first[:cache]).to eq(
- paths: ['logs/', 'binaries/'],
- untracked: true,
- key: { files: ['file'], prefix: 'prefix' },
- policy: 'pull-push',
- when: 'on_success'
- )
- end
-
- it "overwrite cache when defined for a job and globally" do
- config = YAML.dump({
- cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
- rspec: {
- script: "rspec",
- cache: { paths: ["test/"], untracked: false, key: 'local' }
- }
- })
-
- config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
-
- expect(config_processor.stage_builds_attributes("test").size).to eq(1)
- expect(config_processor.stage_builds_attributes("test").first[:cache]).to eq(
- paths: ["test/"],
- untracked: false,
- key: 'local',
- policy: 'pull-push',
- when: 'on_success'
- )
- end
- end
- end
-
describe 'cache' do
context 'when cache definition has unknown keys' do
let(:config) do
diff --git a/spec/lib/gitlab/class_attributes_spec.rb b/spec/lib/gitlab/class_attributes_spec.rb
index f8766f20495..ac2a18a1860 100644
--- a/spec/lib/gitlab/class_attributes_spec.rb
+++ b/spec/lib/gitlab/class_attributes_spec.rb
@@ -6,36 +6,62 @@ RSpec.describe Gitlab::ClassAttributes do
Class.new do
include Gitlab::ClassAttributes
- def self.get_attribute(name)
- get_class_attribute(name)
+ class << self
+ attr_reader :counter_1, :counter_2
+
+ # get_class_attribute and set_class_attribute are protected,
+      # so these public wrappers exist for testing purposes
+ def get_attribute(name)
+ get_class_attribute(name)
+ end
+
+ def set_attribute(name, value)
+ set_class_attribute(name, value)
+ end
+ end
+
+ after_set_class_attribute do
+ @counter_1 ||= 0
+ @counter_1 += 1
end
- def self.set_attribute(name, value)
- class_attributes[name] = value
+ after_set_class_attribute do
+ @counter_2 ||= 0
+ @counter_2 += 2
end
end
end
let(:subclass) { Class.new(klass) }
- describe ".get_class_attribute" do
- it "returns values set on the class" do
- klass.set_attribute(:foo, :bar)
+ it "returns values set on the class" do
+ klass.set_attribute(:foo, :bar)
- expect(klass.get_attribute(:foo)).to eq(:bar)
- end
+ expect(klass.get_attribute(:foo)).to eq(:bar)
+ end
- it "returns values set on a superclass" do
- klass.set_attribute(:foo, :bar)
+ it "returns values set on a superclass" do
+ klass.set_attribute(:foo, :bar)
- expect(subclass.get_attribute(:foo)).to eq(:bar)
- end
+ expect(subclass.get_attribute(:foo)).to eq(:bar)
+ end
- it "returns values from the subclass over attributes from a superclass" do
- klass.set_attribute(:foo, :baz)
- subclass.set_attribute(:foo, :bar)
+ it "returns values from the subclass over attributes from a superclass" do
+ klass.set_attribute(:foo, :baz)
+ subclass.set_attribute(:foo, :bar)
- expect(subclass.get_attribute(:foo)).to eq(:bar)
- end
+ expect(klass.get_attribute(:foo)).to eq(:baz)
+ expect(subclass.get_attribute(:foo)).to eq(:bar)
+ end
+
+  it "triggers after hooks whenever a class attribute value is set" do
+ expect(klass.counter_1).to be(nil)
+ expect(klass.counter_2).to be(nil)
+
+ klass.set_attribute(:foo, :bar)
+ klass.set_attribute(:foo, :bar)
+
+ expect(klass.counter_1).to eq(2)
+ expect(klass.counter_2).to eq(4)
end
end
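The rewritten examples above drive an `after_set_class_attribute` hook that fires on every write to a class attribute, but the hook mechanism itself sits outside the diff. A minimal sketch of how such a hook could be wired up, assuming hooks are stored per class and invoked from `set_class_attribute`; apart from the helper names shown in the spec, everything here is hypothetical:

```ruby
# Hypothetical sketch of the hook mechanism the examples above exercise;
# the real module lives in lib/gitlab/class_attributes.rb and may differ.
module ClassAttributesSketch
  def self.included(base)
    base.extend(ClassMethods)
  end

  module ClassMethods
    # Register a block that runs after every set_class_attribute call.
    def after_set_class_attribute(&block)
      after_set_hooks << block
    end

    protected

    def get_class_attribute(name)
      return class_attributes[name] if class_attributes.key?(name)

      superclass.send(:get_class_attribute, name) if superclass.respond_to?(:get_class_attribute, true)
    end

    def set_class_attribute(name, value)
      class_attributes[name] = value
      after_set_hooks.each { |hook| instance_exec(&hook) }
      value
    end

    private

    def class_attributes
      @class_attributes ||= {}
    end

    def after_set_hooks
      @after_set_hooks ||= []
    end
  end
end
```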
diff --git a/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb b/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
index 5b69b34d04b..05b67a8a93f 100644
--- a/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
+++ b/spec/lib/gitlab/cluster/mixins/puma_cluster_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe Gitlab::Cluster::Mixins::PumaCluster do
line = process.readline
puts "PUMA_DEBUG: #{line}" if ENV['PUMA_DEBUG']
end
- rescue
+ rescue StandardError
end
end
end
diff --git a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
index 0aaca0a79c2..7f7c95b2527 100644
--- a/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
+++ b/spec/lib/gitlab/cluster/mixins/unicorn_http_server_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe Gitlab::Cluster::Mixins::UnicornHttpServer do
line = process.readline
puts "UNICORN_DEBUG: #{line}" if ENV['UNICORN_DEBUG']
end
- rescue
+ rescue StandardError
end
end
end
diff --git a/spec/lib/gitlab/conan_token_spec.rb b/spec/lib/gitlab/conan_token_spec.rb
index 00683cf6e47..b6180f69044 100644
--- a/spec/lib/gitlab/conan_token_spec.rb
+++ b/spec/lib/gitlab/conan_token_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::ConanToken do
JSONWebToken::HMACToken.new(jwt_secret).tap do |jwt|
jwt['access_token'] = access_token_id
jwt['user_id'] = user_id || user_id
- jwt.expire_time = expire_time || jwt.issued_at + 1.hour
+ jwt.expire_time = expire_time || jwt.issued_at + ::Gitlab::ConanToken::CONAN_TOKEN_EXPIRE_TIME
end
end
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::ConanToken do
it 'returns nil for expired JWT' do
jwt = build_jwt(access_token_id: 123,
user_id: 456,
- expire_time: Time.zone.now - 2.hours)
+ expire_time: Time.zone.now - (::Gitlab::ConanToken::CONAN_TOKEN_EXPIRE_TIME + 1.hour))
expect(described_class.decode(jwt.encoded)).to be_nil
end
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index 46e5334cd81..f8a007cdd75 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -298,12 +298,12 @@ RSpec.describe Gitlab::Conflict::File do
end
it 'creates context sections of the correct length' do
- expect(sections[0][:lines].reject(&:type).length).to eq(3)
- expect(sections[2][:lines].reject(&:type).length).to eq(3)
- expect(sections[3][:lines].reject(&:type).length).to eq(3)
- expect(sections[5][:lines].reject(&:type).length).to eq(3)
- expect(sections[6][:lines].reject(&:type).length).to eq(3)
- expect(sections[8][:lines].reject(&:type).length).to eq(1)
+ expect(sections[0][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[2][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[3][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[5][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[6][:lines].count { |line| line.type.nil? }).to eq(3)
+ expect(sections[8][:lines].count { |line| line.type.nil? }).to eq(1)
end
end
end
diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
index a94fd6acd32..41a6c06f9c9 100644
--- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
+++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb
@@ -20,15 +20,34 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do
end
describe '.default_settings_hash' do
- it 'returns empty defaults' do
+ it 'returns defaults for all keys' do
settings = described_class.default_settings_hash
- expect(settings['enabled']).to be_falsey
+ expect(settings['enabled']).to be_truthy
expect(settings['report_only']).to be_falsey
- described_class::DIRECTIVES.each do |directive|
- expect(settings['directives'].has_key?(directive)).to be_truthy
- expect(settings['directives'][directive]).to be_nil
+ directives = settings['directives']
+ directive_names = (described_class::DIRECTIVES - ['report_uri'])
+ directive_names.each do |directive|
+ expect(directives.has_key?(directive)).to be_truthy
+ expect(directives[directive]).to be_truthy
+ end
+
+ expect(directives.has_key?('report_uri')).to be_truthy
+ expect(directives['report_uri']).to be_nil
+ end
+
+ context 'when GITLAB_CDN_HOST is set' do
+ before do
+ stub_env('GITLAB_CDN_HOST', 'https://example.com')
+ end
+
+ it 'adds GITLAB_CDN_HOST to CSP' do
+ settings = described_class.default_settings_hash
+ directives = settings['directives']
+
+ expect(directives['script_src']).to eq("'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.recaptcha.net https://apis.google.com https://example.com")
+ expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://example.com")
end
end
end
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index 932238f281e..a31e5a1d1e2 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe Gitlab::DataBuilder::Build do
let(:build) { create(:ci_build, :running, runner: runner, user: user) }
describe '.build' do
+ around do |example|
+ travel_to(Time.current) { example.run }
+ end
+
let(:data) do
described_class.build(build)
end
@@ -22,6 +26,8 @@ RSpec.describe Gitlab::DataBuilder::Build do
it { expect(data[:build_created_at]).to eq(build.created_at) }
it { expect(data[:build_started_at]).to eq(build.started_at) }
it { expect(data[:build_finished_at]).to eq(build.finished_at) }
+ it { expect(data[:build_duration]).to eq(build.duration) }
+ it { expect(data[:build_queued_duration]).to eq(build.queued_duration) }
it { expect(data[:build_allow_failure]).to eq(false) }
it { expect(data[:build_failure_reason]).to eq(build.failure_reason) }
it { expect(data[:project_id]).to eq(build.project.id) }
diff --git a/spec/lib/gitlab/data_builder/deployment_spec.rb b/spec/lib/gitlab/data_builder/deployment_spec.rb
index 8fb7ab25b17..d64dfc957ca 100644
--- a/spec/lib/gitlab/data_builder/deployment_spec.rb
+++ b/spec/lib/gitlab/data_builder/deployment_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
it 'returns the object kind for a deployment' do
deployment = build(:deployment, deployable: nil, environment: create(:environment))
- data = described_class.build(deployment)
+ data = described_class.build(deployment, Time.current)
expect(data[:object_kind]).to eq('deployment')
end
@@ -21,10 +21,12 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
expected_deployable_url = Gitlab::Routing.url_helpers.project_job_url(deployable.project, deployable)
expected_user_url = Gitlab::Routing.url_helpers.user_url(deployment.deployed_by)
expected_commit_url = Gitlab::UrlBuilder.build(commit)
+ status_changed_at = Time.current
- data = described_class.build(deployment)
+ data = described_class.build(deployment, status_changed_at)
expect(data[:status]).to eq('failed')
+ expect(data[:status_changed_at]).to eq(status_changed_at)
expect(data[:deployable_id]).to eq(deployable.id)
expect(data[:deployable_url]).to eq(expected_deployable_url)
expect(data[:environment]).to eq("somewhere")
@@ -38,7 +40,7 @@ RSpec.describe Gitlab::DataBuilder::Deployment do
it 'does not include the deployable URL when there is no deployable' do
deployment = create(:deployment, status: :failed, deployable: nil)
- data = described_class.build(deployment)
+ data = described_class.build(deployment, Time.current)
expect(data[:deployable_url]).to be_nil
end
diff --git a/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb b/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb
new file mode 100644
index 00000000000..95863ce3765
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batch_optimizer_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchOptimizer do
+ describe '#optimize' do
+ subject { described_class.new(migration, number_of_jobs: number_of_jobs, ema_alpha: ema_alpha).optimize! }
+
+ let(:migration) { create(:batched_background_migration, batch_size: batch_size, sub_batch_size: 100, interval: 120) }
+
+ let(:batch_size) { 10_000 }
+
+ let_it_be(:number_of_jobs) { 5 }
+ let_it_be(:ema_alpha) { 0.4 }
+
+ let_it_be(:target_efficiency) { described_class::TARGET_EFFICIENCY.max }
+
+ def mock_efficiency(eff)
+ expect(migration).to receive(:smoothed_time_efficiency).with(number_of_jobs: number_of_jobs, alpha: ema_alpha).and_return(eff)
+ end
+
+    it 'keeps the batch size when the time efficiency is unknown' do
+ mock_efficiency(nil)
+
+ expect { subject }.not_to change { migration.reload.batch_size }
+ end
+
+    it 'keeps the batch size at a time efficiency of 95%' do
+ mock_efficiency(0.95)
+
+ expect { subject }.not_to change { migration.reload.batch_size }
+ end
+
+    it 'keeps the batch size at a time efficiency of 90%' do
+ mock_efficiency(0.9)
+
+ expect { subject }.not_to change { migration.reload.batch_size }
+ end
+
+    it 'increases the batch size at a time efficiency of 85%' do
+ time_efficiency = 0.85
+
+ mock_efficiency(time_efficiency)
+
+ new_batch_size = ((target_efficiency / time_efficiency) * batch_size).to_i
+
+ expect { subject }.to change { migration.reload.batch_size }.from(batch_size).to(new_batch_size)
+ end
+
+    it 'decreases the batch size at a time efficiency of 110%' do
+ time_efficiency = 1.1
+
+ mock_efficiency(time_efficiency)
+
+ new_batch_size = ((target_efficiency / time_efficiency) * batch_size).to_i
+
+ expect { subject }.to change { migration.reload.batch_size }.from(batch_size).to(new_batch_size)
+ end
+
+ context 'reaching the upper limit for an increase' do
+ it 'caps the batch size multiplier at 20% when increasing' do
+ time_efficiency = 0.1 # this would result in a factor of 10 if not limited
+
+ mock_efficiency(time_efficiency)
+
+ new_batch_size = (1.2 * batch_size).to_i
+
+ expect { subject }.to change { migration.reload.batch_size }.from(batch_size).to(new_batch_size)
+ end
+
+ it 'does not limit the decrease multiplier' do
+ time_efficiency = 10
+
+ mock_efficiency(time_efficiency)
+
+ new_batch_size = (0.1 * batch_size).to_i
+
+ expect { subject }.to change { migration.reload.batch_size }.from(batch_size).to(new_batch_size)
+ end
+ end
+
+ context 'reaching the upper limit for the batch size' do
+ let(:batch_size) { 1_950_000 }
+
+      it 'caps the batch size at 2M' do
+ mock_efficiency(0.7)
+
+ expect { subject }.to change { migration.reload.batch_size }.to(2_000_000)
+ end
+ end
+
+ context 'reaching the lower limit for the batch size' do
+ let(:batch_size) { 1_050 }
+
+ it 'caps the batch size at 1k' do
+ mock_efficiency(1.1)
+
+ expect { subject }.to change { migration.reload.batch_size }.to(1_000)
+ end
+ end
+ end
+end
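Taken together, the expectations in this new spec pin down a resizing rule without showing it: scale the batch size by `TARGET_EFFICIENCY.max / smoothed_time_efficiency`, never grow it by more than 20% in one step, and clamp the result to a fixed range (1,000 to 2,000,000 rows per the expectations). A sketch of that rule, with the constant names and values assumed from the expectations rather than read from the shipped implementation:

```ruby
# Rough sketch of the resizing rule the spec above encodes; constants are
# assumptions inferred from the expectations, not the real implementation.
class BatchOptimizerSketch
  TARGET_EFFICIENCY = 0.9..0.95   # efficiencies in this band leave the batch size alone
  MAX_INCREASE_FACTOR = 1.2       # never grow a batch by more than 20% at once
  BATCH_SIZE_RANGE = 1_000..2_000_000

  def initialize(migration, number_of_jobs: 10, ema_alpha: 0.4)
    @migration = migration
    @number_of_jobs = number_of_jobs
    @ema_alpha = ema_alpha
  end

  def optimize!
    efficiency = @migration.smoothed_time_efficiency(number_of_jobs: @number_of_jobs, alpha: @ema_alpha)
    return if efficiency.nil? || TARGET_EFFICIENCY.include?(efficiency)

    factor = [TARGET_EFFICIENCY.max / efficiency, MAX_INCREASE_FACTOR].min
    new_size = (factor * @migration.batch_size).to_i.clamp(BATCH_SIZE_RANGE.min, BATCH_SIZE_RANGE.max)

    @migration.update!(batch_size: new_size)
  end
end
```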
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index 1020aafcf08..78e0b7627e9 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -9,6 +9,42 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
it { is_expected.to belong_to(:batched_migration).with_foreign_key(:batched_background_migration_id) }
end
+ describe 'scopes' do
+ let_it_be(:fixed_time) { Time.new(2021, 04, 27, 10, 00, 00, 00) }
+
+ let_it_be(:pending_job) { create(:batched_background_migration_job, status: :pending, updated_at: fixed_time) }
+ let_it_be(:running_job) { create(:batched_background_migration_job, status: :running, updated_at: fixed_time) }
+ let_it_be(:stuck_job) { create(:batched_background_migration_job, status: :pending, updated_at: fixed_time - described_class::STUCK_JOBS_TIMEOUT) }
+ let_it_be(:failed_job) { create(:batched_background_migration_job, status: :failed, attempts: 1) }
+
+ before_all do
+ create(:batched_background_migration_job, status: :failed, attempts: described_class::MAX_ATTEMPTS)
+ create(:batched_background_migration_job, status: :succeeded)
+ end
+
+ before do
+ travel_to fixed_time
+ end
+
+ describe '.active' do
+ it 'returns active jobs' do
+ expect(described_class.active).to contain_exactly(pending_job, running_job, stuck_job)
+ end
+ end
+
+ describe '.stuck' do
+ it 'returns stuck jobs' do
+ expect(described_class.stuck).to contain_exactly(stuck_job)
+ end
+ end
+
+ describe '.retriable' do
+ it 'returns retriable jobs' do
+ expect(described_class.retriable).to contain_exactly(failed_job, stuck_job)
+ end
+ end
+ end
+
describe 'delegated batched_migration attributes' do
let(:batched_job) { build(:batched_background_migration_job) }
let(:batched_migration) { batched_job.batched_migration }
@@ -47,4 +83,55 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
end
end
+
+ describe '#time_efficiency' do
+ subject { job.time_efficiency }
+
+ let(:migration) { build(:batched_background_migration, interval: 120.seconds) }
+ let(:job) { build(:batched_background_migration_job, status: :succeeded, batched_migration: migration) }
+
+ context 'when job has not yet succeeded' do
+ let(:job) { build(:batched_background_migration_job, status: :running) }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when finished_at is not set' do
+ it 'returns nil' do
+ job.started_at = Time.zone.now
+
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when started_at is not set' do
+ it 'returns nil' do
+ job.finished_at = Time.zone.now
+
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when job has finished' do
+ it 'returns ratio of duration to interval, here: 0.5' do
+ freeze_time do
+ job.started_at = Time.zone.now - migration.interval / 2
+ job.finished_at = Time.zone.now
+
+ expect(subject).to eq(0.5)
+ end
+ end
+
+ it 'returns ratio of duration to interval, here: 1' do
+ freeze_time do
+ job.started_at = Time.zone.now - migration.interval
+ job.finished_at = Time.zone.now
+
+ expect(subject).to eq(1)
+ end
+ end
+ end
+ end
end
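The new scope and `#time_efficiency` examples describe a small, self-contained model surface. The sketch below is inferred purely from those expectations; the scope bodies, enum values, and constant values are assumptions:

```ruby
# Sketch inferred from the expectations above; nothing here is copied from
# the real BatchedJob model.
class BatchedJobSketch < ApplicationRecord
  self.table_name = :batched_background_migration_jobs

  MAX_ATTEMPTS = 3            # assumed value
  STUCK_JOBS_TIMEOUT = 1.hour # assumed value

  belongs_to :batched_migration, foreign_key: :batched_background_migration_id

  enum status: { pending: 0, running: 1, failed: 2, succeeded: 3 }

  scope :active, -> { where(status: [:pending, :running]) }
  scope :stuck, -> { active.where('updated_at <= ?', STUCK_JOBS_TIMEOUT.ago) }
  scope :retriable, -> { failed.where('attempts < ?', MAX_ATTEMPTS).or(stuck) }

  # Ratio of how long the job actually ran to its migration's scheduling
  # interval: 0.5 means the job used half of its slot, > 1 means it overran.
  def time_efficiency
    return unless succeeded? && started_at && finished_at

    (finished_at - started_at) / batched_migration.interval.to_f
  end
end
```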
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index 7d0e10b62c6..9f0493ab0d7 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -17,9 +17,9 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
it 'marks the migration as finished' do
- relation = Gitlab::Database::BackgroundMigration::BatchedMigration.finished.where(id: migration.id)
+ runner.run_migration_job(migration)
- expect { runner.run_migration_job(migration) }.to change { relation.count }.by(1)
+ expect(migration.reload).to be_finished
end
end
@@ -50,6 +50,15 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
batch_size: migration.batch_size,
sub_batch_size: migration.sub_batch_size)
end
+
+ it 'optimizes the migration after executing the job' do
+ migration.update!(min_value: event1.id, max_value: event2.id)
+
+ expect(migration_wrapper).to receive(:perform).ordered
+ expect(migration).to receive(:optimize!).ordered
+
+ runner.run_migration_job(migration)
+ end
end
context 'when the batch maximum exceeds the migration maximum' do
@@ -83,7 +92,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
let!(:event3) { create(:event) }
let!(:migration) do
- create(:batched_background_migration, :active, batch_size: 2, min_value: event1.id, max_value: event3.id)
+ create(:batched_background_migration, :active, batch_size: 2, min_value: event1.id, max_value: event2.id)
end
let!(:previous_job) do
@@ -92,14 +101,24 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
min_value: event1.id,
max_value: event2.id,
batch_size: 2,
- sub_batch_size: 1)
+ sub_batch_size: 1,
+ status: :succeeded
+ )
end
let(:job_relation) do
Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: migration.id)
end
+ context 'when the migration has no batches remaining' do
+ it_behaves_like 'it has completed the migration'
+ end
+
context 'when the migration has batches to process' do
+ before do
+ migration.update!(max_value: event3.id)
+ end
+
it 'runs the migration job for the next batch' do
expect(migration_wrapper).to receive(:perform) do |job_record|
expect(job_record).to eq(job_relation.last)
@@ -123,17 +142,82 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
end
- context 'when the migration has no batches remaining' do
+ context 'when migration has failed jobs' do
before do
- create(:batched_background_migration_job,
- batched_migration: migration,
- min_value: event3.id,
- max_value: event3.id,
- batch_size: 2,
- sub_batch_size: 1)
+ previous_job.update!(status: :failed)
end
- it_behaves_like 'it has completed the migration'
+ it 'retries the failed job' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(previous_job)
+ end
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
+ end
+
+ context 'when failed job has reached the maximum number of attempts' do
+ before do
+ previous_job.update!(attempts: Gitlab::Database::BackgroundMigration::BatchedJob::MAX_ATTEMPTS)
+ end
+
+ it 'marks the migration as failed' do
+ expect(migration_wrapper).not_to receive(:perform)
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
+
+ expect(migration).to be_failed
+ end
+ end
+ end
+
+ context 'when migration has stuck jobs' do
+ before do
+ previous_job.update!(status: :running, updated_at: 1.hour.ago - Gitlab::Database::BackgroundMigration::BatchedJob::STUCK_JOBS_TIMEOUT)
+ end
+
+ it 'retries the stuck job' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(previous_job)
+ end
+
+ expect { runner.run_migration_job(migration.reload) }.to change { job_relation.count }.by(0)
+ end
+ end
+
+ context 'when migration has possible stuck jobs' do
+ before do
+ previous_job.update!(status: :running, updated_at: 1.hour.from_now - Gitlab::Database::BackgroundMigration::BatchedJob::STUCK_JOBS_TIMEOUT)
+ end
+
+ it 'keeps the migration active' do
+ expect(migration_wrapper).not_to receive(:perform)
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(0)
+
+ expect(migration.reload).to be_active
+ end
+ end
+
+ context 'when the migration has batches to process and failed jobs' do
+ before do
+ migration.update!(max_value: event3.id)
+ previous_job.update!(status: :failed)
+ end
+
+ it 'runs next batch then retries the failed job' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.last)
+ job_record.update!(status: :succeeded)
+ end
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(1)
+
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(previous_job)
+ end
+
+ expect { runner.run_migration_job(migration.reload) }.to change { job_relation.count }.by(0)
+ end
end
end
end
@@ -180,10 +264,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
it 'runs all jobs inline until finishing the migration' do
expect(migration_wrapper).to receive(:perform) do |job_record|
expect(job_record).to eq(job_relation.first)
+ job_record.update!(status: :succeeded)
end
expect(migration_wrapper).to receive(:perform) do |job_record|
expect(job_record).to eq(job_relation.last)
+ job_record.update!(status: :succeeded)
end
expect { runner.run_entire_migration(migration) }.to change { job_relation.count }.by(2)
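Read together, these runner examples fix the selection order: run the next fresh batch while one remains, otherwise retry a retriable (failed or stuck) job, and only finish or fail the migration once no active jobs are left. A hypothetical outline of that flow; the helper names are inferred from the expectations, not taken from the runner:

```ruby
# Hypothetical outline of the behaviour the examples above pin down;
# migration_wrapper and find_next_batch_range are assumed collaborators.
def run_migration_job(migration)
  if (job = find_next_batched_job(migration))
    migration_wrapper.perform(job)
    migration.optimize!
  else
    finish_active_migration(migration)
  end
end

def find_next_batched_job(migration)
  if (range = find_next_batch_range(migration))
    migration.create_batched_job!(range.min, range.max)  # fresh batches run first
  else
    migration.batched_jobs.retriable.first               # then failed or stuck jobs are retried
  end
end

def finish_active_migration(migration)
  return if migration.batched_jobs.active.exists?        # a pending/running job keeps it active

  if migration.batched_jobs.failed.exists?
    migration.failed!
  else
    migration.finished!
  end
end
```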
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 261e23d0745..43e34325419 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -119,7 +119,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '#create_batched_job!' do
- let(:batched_migration) { create(:batched_background_migration) }
+ let(:batched_migration) do
+ create(:batched_background_migration,
+ batch_size: 999,
+ sub_batch_size: 99,
+ pause_ms: 250
+ )
+ end
it 'creates a batched_job with the correct batch configuration' do
batched_job = batched_migration.create_batched_job!(1, 5)
@@ -128,7 +134,9 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
min_value: 1,
max_value: 5,
batch_size: batched_migration.batch_size,
- sub_batch_size: batched_migration.sub_batch_size)
+ sub_batch_size: batched_migration.sub_batch_size,
+ pause_ms: 250
+ )
end
end
@@ -196,6 +204,22 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
it_behaves_like 'an attr_writer that demodulizes assigned class names', :batch_class_name
end
+ describe '#migrated_tuple_count' do
+ subject { batched_migration.migrated_tuple_count }
+
+ let(:batched_migration) { create(:batched_background_migration) }
+
+ before do
+ create_list(:batched_background_migration_job, 5, status: :succeeded, batch_size: 1_000, batched_migration: batched_migration)
+ create_list(:batched_background_migration_job, 1, status: :running, batch_size: 1_000, batched_migration: batched_migration)
+ create_list(:batched_background_migration_job, 1, status: :failed, batch_size: 1_000, batched_migration: batched_migration)
+ end
+
+ it 'sums the batch_size of succeeded jobs' do
+ expect(subject).to eq(5_000)
+ end
+ end
+
describe '#prometheus_labels' do
let(:batched_migration) { create(:batched_background_migration, job_class_name: 'TestMigration', table_name: 'foo', column_name: 'bar') }
@@ -208,4 +232,96 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
expect(batched_migration.prometheus_labels).to eq(labels)
end
end
+
+ describe '#smoothed_time_efficiency' do
+ let(:migration) { create(:batched_background_migration, interval: 120.seconds) }
+ let(:end_time) { Time.zone.now }
+
+ around do |example|
+ freeze_time do
+ example.run
+ end
+ end
+
+ let(:common_attrs) do
+ {
+ status: :succeeded,
+ batched_migration: migration,
+ finished_at: end_time
+ }
+ end
+
+ context 'when there are not enough jobs' do
+ subject { migration.smoothed_time_efficiency(number_of_jobs: 10) }
+
+ it 'returns nil' do
+ create_list(:batched_background_migration_job, 9, **common_attrs)
+
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when there are enough jobs' do
+ subject { migration.smoothed_time_efficiency(number_of_jobs: number_of_jobs) }
+
+ let!(:jobs) { create_list(:batched_background_migration_job, number_of_jobs, **common_attrs.merge(batched_migration: migration)) }
+ let(:number_of_jobs) { 10 }
+
+ before do
+ expect(migration).to receive_message_chain(:batched_jobs, :successful_in_execution_order, :reverse_order, :limit).with(no_args).with(no_args).with(number_of_jobs).and_return(jobs)
+ end
+
+ def mock_efficiencies(*effs)
+ effs.each_with_index do |eff, i|
+ expect(jobs[i]).to receive(:time_efficiency).and_return(eff)
+ end
+ end
+
+ context 'example 1: increasing trend, but only recently crossed threshold' do
+ it 'returns the smoothed time efficiency' do
+ mock_efficiencies(1.1, 1, 0.95, 0.9, 0.8, 0.95, 0.9, 0.8, 0.9, 0.95)
+
+ expect(subject).to be_within(0.05).of(0.95)
+ end
+ end
+
+ context 'example 2: increasing trend, crossed threshold a while ago' do
+ it 'returns the smoothed time efficiency' do
+ mock_efficiencies(1.2, 1.1, 1, 1, 1.1, 1, 0.95, 0.9, 0.95, 0.9)
+
+ expect(subject).to be_within(0.05).of(1.1)
+ end
+ end
+
+ context 'example 3: decreasing trend, but only recently crossed threshold' do
+ it 'returns the smoothed time efficiency' do
+ mock_efficiencies(0.9, 0.95, 1, 1.2, 1.1, 1.2, 1.1, 1.0, 1.1, 1.0)
+
+ expect(subject).to be_within(0.05).of(1.0)
+ end
+ end
+
+ context 'example 4: latest run spiked' do
+ it 'returns the smoothed time efficiency' do
+ mock_efficiencies(1.2, 0.9, 0.8, 0.9, 0.95, 0.9, 0.92, 0.9, 0.95, 0.9)
+
+ expect(subject).to be_within(0.02).of(0.96)
+ end
+ end
+ end
+ end
+
+ describe '#optimize!' do
+ subject { batched_migration.optimize! }
+
+ let(:batched_migration) { create(:batched_background_migration) }
+ let(:optimizer) { instance_double('Gitlab::Database::BackgroundMigration::BatchOptimizer') }
+
+ it 'calls the BatchOptimizer' do
+ expect(Gitlab::Database::BackgroundMigration::BatchOptimizer).to receive(:new).with(batched_migration).and_return(optimizer)
+ expect(optimizer).to receive(:optimize!)
+
+ subject
+ end
+ end
end
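The four worked examples only make sense once the smoothing is spelled out: a normalized, exponentially weighted average of the most recent jobs' `time_efficiency` values, with the newest job weighted highest. The sketch below reproduces the expected values above to within their stated tolerances; the default `alpha` is an assumption:

```ruby
# Sketch of the smoothing the examples above imply; the scopes and the
# default alpha of 0.2 are assumptions, not the model's real internals.
def smoothed_time_efficiency(number_of_jobs: 10, alpha: 0.2)
  jobs = batched_jobs.successful_in_execution_order.reverse_order.limit(number_of_jobs)
  return if jobs.size < number_of_jobs

  efficiencies = jobs.map(&:time_efficiency).compact

  weighted_sum = 0.0
  weight_total = 0.0

  efficiencies.each_with_index do |value, index|
    weight = (1 - alpha)**index   # index 0 is the most recent job
    weighted_sum += value * weight
    weight_total += weight
  end

  return if weight_total == 0

  weighted_sum / weight_total
end
```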
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
index 00d13f23d36..c1183a15e37 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -7,9 +7,16 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
+ let_it_be(:pause_ms) { 250 }
let_it_be(:active_migration) { create(:batched_background_migration, :active, job_arguments: [:id, :other_id]) }
- let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) }
+ let!(:job_record) do
+ create(:batched_background_migration_job,
+ batched_migration: active_migration,
+ pause_ms: pause_ms
+ )
+ end
+
let(:job_instance) { double('job instance', batch_metrics: {}) }
before do
@@ -17,7 +24,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
end
it 'runs the migration job' do
- expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id')
subject
end
@@ -42,6 +49,42 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
end
end
+ context 'when running a job that failed previously' do
+ let!(:job_record) do
+ create(:batched_background_migration_job,
+ batched_migration: active_migration,
+ pause_ms: pause_ms,
+ attempts: 1,
+ status: :failed,
+ finished_at: 1.hour.ago,
+ metrics: { 'my_metrics' => 'some_value' }
+ )
+ end
+
+ it 'increments attempts and updates other fields' do
+ updated_metrics = { 'updated_metrics' => 'some_value' }
+
+ expect(job_instance).to receive(:perform)
+ expect(job_instance).to receive(:batch_metrics).and_return(updated_metrics)
+
+ expect(job_record).to receive(:update!).with(
+ hash_including(attempts: 2, status: :running, finished_at: nil, metrics: {})
+ ).and_call_original
+
+ freeze_time do
+ subject
+
+ job_record.reload
+
+ expect(job_record).not_to be_failed
+ expect(job_record.attempts).to eq(2)
+ expect(job_record.started_at).to eq(Time.current)
+ expect(job_record.finished_at).to eq(Time.current)
+ expect(job_record.metrics).to eq(updated_metrics)
+ end
+ end
+ end
+
context 'reporting prometheus metrics' do
let(:labels) { job_record.batched_migration.prometheus_labels }
@@ -61,12 +104,26 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
subject
end
+ it 'reports interval' do
+ expect(described_class.metrics[:gauge_interval]).to receive(:set).with(labels, job_record.batched_migration.interval)
+
+ subject
+ end
+
it 'reports updated tuples (currently based on batch_size)' do
expect(described_class.metrics[:counter_updated_tuples]).to receive(:increment).with(labels, job_record.batch_size)
subject
end
+ it 'reports migrated tuples' do
+ count = double
+ expect(job_record.batched_migration).to receive(:migrated_tuple_count).and_return(count)
+ expect(described_class.metrics[:gauge_migrated_tuples]).to receive(:set).with(labels, count)
+
+ subject
+ end
+
it 'reports summary of query timings' do
metrics = { 'timings' => { 'update_all' => [1, 2, 3, 4, 5] } }
@@ -82,14 +139,26 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
subject
end
- it 'reports time efficiency' do
+ it 'reports job duration' do
freeze_time do
expect(Time).to receive(:current).and_return(Time.zone.now - 5.seconds).ordered
- expect(Time).to receive(:current).and_return(Time.zone.now).ordered
+ allow(Time).to receive(:current).and_call_original
+
+ expect(described_class.metrics[:gauge_job_duration]).to receive(:set).with(labels, 5.seconds)
- ratio = 5 / job_record.batched_migration.interval.to_f
+ subject
+ end
+ end
- expect(described_class.metrics[:histogram_time_efficiency]).to receive(:observe).with(labels, ratio)
+ it 'reports the total tuple count for the migration' do
+ expect(described_class.metrics[:gauge_total_tuple_count]).to receive(:set).with(labels, job_record.batched_migration.total_tuple_count)
+
+ subject
+ end
+
+ it 'reports last updated at timestamp' do
+ freeze_time do
+ expect(described_class.metrics[:gauge_last_update_time]).to receive(:set).with(labels, Time.current.to_i)
subject
end
@@ -98,7 +167,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
context 'when the migration job does not raise an error' do
it 'marks the tracking record as succeeded' do
- expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id')
freeze_time do
subject
@@ -112,19 +181,24 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
end
context 'when the migration job raises an error' do
- it 'marks the tracking record as failed before raising the error' do
- expect(job_instance).to receive(:perform)
- .with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- .and_raise(RuntimeError, 'Something broke!')
+ shared_examples 'an error is raised' do |error_class|
+ it 'marks the tracking record as failed' do
+ expect(job_instance).to receive(:perform)
+ .with(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id')
+ .and_raise(error_class)
- freeze_time do
- expect { subject }.to raise_error(RuntimeError, 'Something broke!')
+ freeze_time do
+ expect { subject }.to raise_error(error_class)
- reloaded_job_record = job_record.reload
+ reloaded_job_record = job_record.reload
- expect(reloaded_job_record).to be_failed
- expect(reloaded_job_record.finished_at).to eq(Time.current)
+ expect(reloaded_job_record).to be_failed
+ expect(reloaded_job_record.finished_at).to eq(Time.current)
+ end
end
end
+
+ it_behaves_like 'an error is raised', RuntimeError.new('Something broke!')
+ it_behaves_like 'an error is raised', SignalException.new('SIGTERM')
end
end
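The repeated expectation `perform(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id')` encodes the positional arguments the wrapper hands to the job class. Spelled out against the factory defaults this spec relies on; the attribute-to-position mapping is inferred, not quoted from the wrapper:

```ruby
# Inferred argument layout for the expectation above; the factory supplies
# min_value: 1, max_value: 10, sub_batch_size: 1, the migration supplies
# table_name 'events', column_name 'id' and job_arguments ['id', 'other_id'].
job_instance.perform(
  job_record.min_value,                        # 1
  job_record.max_value,                        # 10
  job_record.batched_migration.table_name,     # 'events'
  job_record.batched_migration.column_name,    # 'id'
  job_record.sub_batch_size,                   # 1
  job_record.pause_ms,                         # 250 in this spec
  *job_record.batched_migration.job_arguments  # 'id', 'other_id'
)
```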
diff --git a/spec/lib/gitlab/database/migration_helpers/cascading_namespace_settings_spec.rb b/spec/lib/gitlab/database/migration_helpers/cascading_namespace_settings_spec.rb
new file mode 100644
index 00000000000..e11ffe53c61
--- /dev/null
+++ b/spec/lib/gitlab/database/migration_helpers/cascading_namespace_settings_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::MigrationHelpers::CascadingNamespaceSettings do
+ let(:migration) do
+ ActiveRecord::Migration.new.extend(described_class)
+ end
+
+ describe '#add_cascading_namespace_setting' do
+ it 'creates the required columns', :aggregate_failures do
+ expect(migration).to receive(:add_column).with(:namespace_settings, :some_setting, :integer, null: true, default: nil)
+ expect(migration).to receive(:add_column).with(:namespace_settings, :lock_some_setting, :boolean, null: false, default: false)
+
+ expect(migration).to receive(:add_column).with(:application_settings, :some_setting, :integer, null: false, default: 5)
+ expect(migration).to receive(:add_column).with(:application_settings, :lock_some_setting, :boolean, null: false, default: false)
+
+ migration.add_cascading_namespace_setting(:some_setting, :integer, null: false, default: 5)
+ end
+
+ context 'when columns already exist' do
+ before do
+ migration.add_column(:namespace_settings, :cascading_setting, :integer)
+ migration.add_column(:application_settings, :lock_cascading_setting, :boolean)
+ end
+
+ it 'raises an error when some columns already exist' do
+ expect do
+ migration.add_cascading_namespace_setting(:cascading_setting, :integer)
+ end.to raise_error %r/Existing columns: namespace_settings.cascading_setting, application_settings.lock_cascading_setting/
+ end
+ end
+ end
+
+ describe '#remove_cascading_namespace_setting' do
+ before do
+ allow(migration).to receive(:column_exists?).and_return(true)
+ end
+
+ it 'removes the columns', :aggregate_failures do
+ expect(migration).to receive(:remove_column).with(:namespace_settings, :some_setting)
+ expect(migration).to receive(:remove_column).with(:namespace_settings, :lock_some_setting)
+
+ expect(migration).to receive(:remove_column).with(:application_settings, :some_setting)
+ expect(migration).to receive(:remove_column).with(:application_settings, :lock_some_setting)
+
+ migration.remove_cascading_namespace_setting(:some_setting)
+ end
+ end
+end
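The new helper adds a setting column plus a `lock_*` companion to both `namespace_settings` and `application_settings`, with the application-level column carrying the default. A usage sketch in an ordinary migration; the class name and the `:some_setting` column are illustrative only:

```ruby
# Hypothetical migration using the helper exercised above.
class AddSomeSettingCascadingSetting < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers::CascadingNamespaceSettings

  def up
    # Creates some_setting and lock_some_setting on namespace_settings and
    # application_settings, with the default stored at the application level.
    add_cascading_namespace_setting(:some_setting, :integer, default: 5, null: false)
  end

  def down
    remove_cascading_namespace_setting(:some_setting)
  end
end
```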
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 44293086e79..40720628a89 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::MigrationHelpers do
include Database::TableSchemaHelpers
+ include Database::TriggerHelpers
let(:model) do
ActiveRecord::Migration.new.extend(described_class)
@@ -834,7 +835,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'renames a column concurrently' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:install_rename_triggers_for_postgresql)
+ expect(model).to receive(:install_rename_triggers)
.with(:users, :old, :new)
expect(model).to receive(:add_column)
@@ -946,7 +947,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'reverses the operations of rename_column_concurrently' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:remove_rename_triggers_for_postgresql)
+ expect(model).to receive(:remove_rename_triggers)
.with(:users, /trigger_.{12}/)
expect(model).to receive(:remove_column).with(:users, :new)
@@ -959,7 +960,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'cleans up the renaming procedure' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:remove_rename_triggers_for_postgresql)
+ expect(model).to receive(:remove_rename_triggers)
.with(:users, /trigger_.{12}/)
expect(model).to receive(:remove_column).with(:users, :old)
@@ -999,7 +1000,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'reverses the operations of cleanup_concurrent_column_rename' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:install_rename_triggers_for_postgresql)
+ expect(model).to receive(:install_rename_triggers)
.with(:users, :old, :new)
expect(model).to receive(:add_column)
@@ -1094,7 +1095,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'reverses the operations of change_column_type_concurrently' do
expect(model).to receive(:check_trigger_permissions!).with(:users)
- expect(model).to receive(:remove_rename_triggers_for_postgresql)
+ expect(model).to receive(:remove_rename_triggers)
.with(:users, /trigger_.{12}/)
expect(model).to receive(:remove_column).with(:users, "old_for_type_change")
@@ -1159,7 +1160,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:rename_column)
.with(:users, temp_undo_cleanup_column, :old)
- expect(model).to receive(:install_rename_triggers_for_postgresql)
+ expect(model).to receive(:install_rename_triggers)
.with(:users, :old, 'old_for_type_change')
model.undo_cleanup_concurrent_column_type_change(:users, :old, :string)
@@ -1185,7 +1186,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:rename_column)
.with(:users, temp_undo_cleanup_column, :old)
- expect(model).to receive(:install_rename_triggers_for_postgresql)
+ expect(model).to receive(:install_rename_triggers)
.with(:users, :old, 'old_for_type_change')
model.undo_cleanup_concurrent_column_type_change(
@@ -1206,8 +1207,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- describe '#install_rename_triggers_for_postgresql' do
- it 'installs the triggers for PostgreSQL' do
+ describe '#install_rename_triggers' do
+ it 'installs the triggers' do
copy_trigger = double('copy trigger')
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
@@ -1215,11 +1216,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(copy_trigger).to receive(:create).with(:old, :new, trigger_name: 'foo')
- model.install_rename_triggers_for_postgresql(:users, :old, :new, trigger_name: 'foo')
+ model.install_rename_triggers(:users, :old, :new, trigger_name: 'foo')
end
end
- describe '#remove_rename_triggers_for_postgresql' do
+ describe '#remove_rename_triggers' do
it 'removes the function and trigger' do
copy_trigger = double('copy trigger')
@@ -1228,7 +1229,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(copy_trigger).to receive(:drop).with('foo')
- model.remove_rename_triggers_for_postgresql('bar', 'foo')
+ model.remove_rename_triggers('bar', 'foo')
end
end
@@ -1702,10 +1703,16 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ describe '#convert_to_bigint_column' do
+ it 'returns the name of the temporary column used to convert to bigint' do
+ expect(model.convert_to_bigint_column(:id)).to eq('id_convert_to_bigint')
+ end
+ end
+
describe '#initialize_conversion_of_integer_to_bigint' do
let(:table) { :test_table }
let(:column) { :id }
- let(:tmp_column) { "#{column}_convert_to_bigint" }
+ let(:tmp_column) { model.convert_to_bigint_column(column) }
before do
model.create_table table, id: false do |t|
@@ -1730,12 +1737,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
- context 'when the column to convert does not exist' do
- let(:column) { :foobar }
-
+ context 'when the column to migrate does not exist' do
it 'raises an error' do
- expect { model.initialize_conversion_of_integer_to_bigint(table, column) }
- .to raise_error("Column #{column} does not exist on #{table}")
+ expect { model.initialize_conversion_of_integer_to_bigint(table, :this_column_is_not_real) }
+ .to raise_error(ArgumentError, "Column this_column_is_not_real does not exist on #{table}")
end
end
@@ -1743,7 +1748,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'creates a not-null bigint column and installs triggers' do
expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false)
- expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+ expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column])
model.initialize_conversion_of_integer_to_bigint(table, column)
end
@@ -1755,7 +1760,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'creates a not-null bigint column and installs triggers' do
expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false)
- expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+ expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column])
model.initialize_conversion_of_integer_to_bigint(table, column)
end
@@ -1767,22 +1772,83 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'creates a nullable bigint column and installs triggers' do
expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: nil)
- expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+ expect(model).to receive(:install_rename_triggers).with(table, [column], [tmp_column])
model.initialize_conversion_of_integer_to_bigint(table, column)
end
end
+
+ context 'when multiple columns are given' do
+ it 'creates the correct columns and installs the trigger' do
+ columns_to_convert = %i[id non_nullable_column nullable_column]
+ temporary_columns = columns_to_convert.map { |column| model.convert_to_bigint_column(column) }
+
+ expect(model).to receive(:add_column).with(table, temporary_columns[0], :bigint, default: 0, null: false)
+ expect(model).to receive(:add_column).with(table, temporary_columns[1], :bigint, default: 0, null: false)
+ expect(model).to receive(:add_column).with(table, temporary_columns[2], :bigint, default: nil)
+
+ expect(model).to receive(:install_rename_triggers).with(table, columns_to_convert, temporary_columns)
+
+ model.initialize_conversion_of_integer_to_bigint(table, columns_to_convert)
+ end
+ end
+ end
+
+ describe '#revert_initialize_conversion_of_integer_to_bigint' do
+ let(:table) { :test_table }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer :id, primary_key: true
+ t.integer :other_id
+ end
+
+ model.initialize_conversion_of_integer_to_bigint(table, columns)
+ end
+
+ context 'when single column is given' do
+ let(:columns) { :id }
+
+ it 'removes column, trigger, and function' do
+ temporary_column = model.convert_to_bigint_column(:id)
+ trigger_name = model.rename_trigger_name(table, :id, temporary_column)
+
+ model.revert_initialize_conversion_of_integer_to_bigint(table, columns)
+
+ expect(model.column_exists?(table, temporary_column)).to eq(false)
+ expect_trigger_not_to_exist(table, trigger_name)
+ expect_function_not_to_exist(trigger_name)
+ end
+ end
+
+ context 'when multiple columns are given' do
+ let(:columns) { [:id, :other_id] }
+
+ it 'removes column, trigger, and function' do
+ temporary_columns = columns.map { |column| model.convert_to_bigint_column(column) }
+ trigger_name = model.rename_trigger_name(table, columns, temporary_columns)
+
+ model.revert_initialize_conversion_of_integer_to_bigint(table, columns)
+
+ temporary_columns.each do |column|
+ expect(model.column_exists?(table, column)).to eq(false)
+ end
+ expect_trigger_not_to_exist(table, trigger_name)
+ expect_function_not_to_exist(trigger_name)
+ end
+ end
end
describe '#backfill_conversion_of_integer_to_bigint' do
let(:table) { :_test_backfill_table }
let(:column) { :id }
- let(:tmp_column) { "#{column}_convert_to_bigint" }
+ let(:tmp_column) { model.convert_to_bigint_column(column) }
before do
model.create_table table, id: false do |t|
t.integer :id, primary_key: true
t.text :message, null: false
+ t.integer :other_id
t.timestamps
end
@@ -1808,14 +1874,14 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'raises an error' do
expect { model.backfill_conversion_of_integer_to_bigint(table, column) }
- .to raise_error("Column #{column} does not exist on #{table}")
+ .to raise_error(ArgumentError, "Column #{column} does not exist on #{table}")
end
end
context 'when the temporary column does not exist' do
it 'raises an error' do
expect { model.backfill_conversion_of_integer_to_bigint(table, column) }
- .to raise_error('The temporary column does not exist, initialize it with `initialize_conversion_of_integer_to_bigint`')
+ .to raise_error(ArgumentError, "Column #{tmp_column} does not exist on #{table}")
end
end
@@ -1829,48 +1895,112 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:migration_relation) { Gitlab::Database::BackgroundMigration::BatchedMigration.active }
before do
- model.initialize_conversion_of_integer_to_bigint(table, column)
+ model.initialize_conversion_of_integer_to_bigint(table, columns)
model_class.create!(message: 'hello')
model_class.create!(message: 'so long')
end
- it 'creates the batched migration tracking record' do
- last_record = model_class.create!(message: 'goodbye')
+ context 'when a single column is being converted' do
+ let(:columns) { column }
- expect do
- model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
- end.to change { migration_relation.count }.by(1)
-
- expect(migration_relation.last).to have_attributes(
- job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
- table_name: table.to_s,
- column_name: column.to_s,
- min_value: 1,
- max_value: last_record.id,
- interval: 120,
- batch_size: 2,
- sub_batch_size: 1,
- job_arguments: [column.to_s, "#{column}_convert_to_bigint"]
- )
+ it 'creates the batched migration tracking record' do
+ last_record = model_class.create!(message: 'goodbye')
+
+ expect do
+ model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
+ end.to change { migration_relation.count }.by(1)
+
+ expect(migration_relation.last).to have_attributes(
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: table.to_s,
+ column_name: column.to_s,
+ min_value: 1,
+ max_value: last_record.id,
+ interval: 120,
+ batch_size: 2,
+ sub_batch_size: 1,
+ job_arguments: [[column.to_s], [model.convert_to_bigint_column(column)]]
+ )
+ end
end
- context 'when the migration should be performed inline' do
- it 'calls the runner to run the entire migration' do
- expect(model).to receive(:perform_background_migration_inline?).and_return(true)
+ context 'when multiple columns are being converted' do
+ let(:other_column) { :other_id }
+ let(:other_tmp_column) { model.convert_to_bigint_column(other_column) }
+ let(:columns) { [column, other_column] }
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |scheduler|
- expect(scheduler).to receive(:run_entire_migration) do |batched_migration|
- expect(batched_migration).to eq(migration_relation.last)
- end
- end
+ it 'creates the batched migration tracking record' do
+ last_record = model_class.create!(message: 'goodbye', other_id: 50)
- model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
+ expect do
+ model.backfill_conversion_of_integer_to_bigint(table, columns, batch_size: 2, sub_batch_size: 1)
+ end.to change { migration_relation.count }.by(1)
+
+ expect(migration_relation.last).to have_attributes(
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: table.to_s,
+ column_name: column.to_s,
+ min_value: 1,
+ max_value: last_record.id,
+ interval: 120,
+ batch_size: 2,
+ sub_batch_size: 1,
+ job_arguments: [[column.to_s, other_column.to_s], [tmp_column, other_tmp_column]]
+ )
end
end
end
end
+ describe '#revert_backfill_conversion_of_integer_to_bigint' do
+ let(:table) { :_test_backfill_table }
+ let(:primary_key) { :id }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer primary_key, primary_key: true
+ t.text :message, null: false
+ t.integer :other_id
+ t.timestamps
+ end
+
+ model.initialize_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
+ model.backfill_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
+ end
+
+ context 'when a single column is being converted' do
+ let(:columns) { :id }
+
+ it 'deletes the batched migration tracking record' do
+ expect do
+ model.revert_backfill_conversion_of_integer_to_bigint(table, columns)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1)
+ end
+ end
+
+  context 'when multiple columns are being converted' do
+ let(:columns) { [:id, :other_id] }
+
+ it 'deletes the batched migration tracking record' do
+ expect do
+ model.revert_backfill_conversion_of_integer_to_bigint(table, columns)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1)
+ end
+ end
+
+ context 'when primary key column has custom name' do
+ let(:primary_key) { :other_pk }
+ let(:columns) { :other_id }
+
+ it 'deletes the batched migration tracking record' do
+ expect do
+ model.revert_backfill_conversion_of_integer_to_bigint(table, columns, primary_key: primary_key)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(-1)
+ end
+ end
+ end
+
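
As background for the helpers exercised above, here is a minimal usage sketch from a hypothetical post-deployment migration; the table, column names, and migration class are illustrative, only the helper calls mirror the spec:

    # Illustrative only: stage a bigint conversion for two integer columns.
    class InitializeConversionOfEventsToBigint < ActiveRecord::Migration[6.0]
      include Gitlab::Database::MigrationHelpers

      TABLE = :events
      COLUMNS = %i(id target_id)

      def up
        # Adds <column>_convert_to_bigint columns and installs the sync triggers.
        initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
      end

      def down
        revert_initialize_conversion_of_integer_to_bigint(TABLE, COLUMNS)
      end
    end

    # A follow-up migration would then schedule the batched copy via
    #   backfill_conversion_of_integer_to_bigint(TABLE, COLUMNS, batch_size: 2000, sub_batch_size: 100)
    # with revert_backfill_conversion_of_integer_to_bigint(TABLE, COLUMNS) in its `down`.
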
describe '#index_exists_by_name?' do
it 'returns true if an index exists' do
ActiveRecord::Base.connection.execute(
diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
index 3804dc52a77..6d047eed3bb 100644
--- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
+++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
before do
instance.observe(migration) { raise 'something went wrong' }
- rescue
+ rescue StandardError
# ignore
end
diff --git a/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
new file mode 100644
index 00000000000..195e7114582
--- /dev/null
+++ b/spec/lib/gitlab/database/migrations/observers/query_log_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Migrations::Observers::QueryLog do
+ subject { described_class.new }
+
+ let(:observation) { Gitlab::Database::Migrations::Observation.new(migration) }
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:query) { 'select 1' }
+ let(:directory_path) { Dir.mktmpdir }
+ let(:log_file) { "#{directory_path}/current.log" }
+ let(:migration) { 20210422152437 }
+
+ before do
+ stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', directory_path)
+ end
+
+ after do
+ FileUtils.remove_entry(directory_path)
+ end
+
+ it 'writes a file with the query log' do
+ observe
+
+ expect(File.read("#{directory_path}/#{migration}.log")).to include(query)
+ end
+
+ it 'does not change the default logger' do
+ expect { observe }.not_to change { ActiveRecord::Base.logger }
+ end
+
+ def observe
+ subject.before
+ connection.execute(query)
+ subject.after
+ subject.record(observation)
+ end
+end
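
For orientation, the observer above is driven in the same shape as the spec's `observe` helper; a rough sketch of that flow (the instrumentation loop around it is assumed, not shown in this diff):

    observer = Gitlab::Database::Migrations::Observers::QueryLog.new
    observation = Gitlab::Database::Migrations::Observation.new(20210422152437)

    observer.before                                     # start logging queries into the result directory
    ActiveRecord::Base.connection.execute('select 1')   # the migration's queries run here
    observer.after                                      # stop logging
    observer.record(observation)                        # finalizes <RESULT_DIR>/<migration_version>.log
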
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index 93dbd9d7c30..83f2436043c 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
end
let_it_be(:connection) { ActiveRecord::Base.connection }
+
let(:referenced_table) { :issues }
let(:function_name) { '_test_partitioned_foreign_keys_function' }
let(:trigger_name) { '_test_partitioned_foreign_keys_trigger' }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
index 603f3dc41af..c3edc3a0c87 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/index_helpers_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::IndexHelpers do
def expect_add_concurrent_index_and_call_original(table, column, index)
expect(migration).to receive(:add_concurrent_index).ordered.with(table, column, name: index)
- .and_wrap_original { |_, table, column, options| connection.add_index(table, column, options) }
+ .and_wrap_original { |_, table, column, options| connection.add_index(table, column, **options) }
end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 5b2a29d1d2d..79ddb450d7a 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
let_it_be(:connection) { ActiveRecord::Base.connection }
+
let(:source_table) { :_test_original_table }
let(:partitioned_table) { '_test_migration_partitioned_table' }
let(:function_name) { '_test_migration_function_name' }
diff --git a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
index 51fc7c6620b..d9077969003 100644
--- a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
end
it 'replaces the existing index with an identical index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').twice
+ expect(connection).to receive(:execute).with('SET statement_timeout TO \'32400s\'')
expect_to_execute_concurrently_in_order(create_index)
@@ -123,6 +123,10 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_index_rename(replacement_name, index.name)
expect_index_rename(replaced_name, replacement_name)
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
+ end
+
expect_to_execute_concurrently_in_order(drop_index)
subject.perform
@@ -136,7 +140,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
end
it 'rebuilds table statistics before dropping the original index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').twice
+ expect(connection).to receive(:execute).with('SET statement_timeout TO \'32400s\'')
expect_to_execute_concurrently_in_order(create_index)
@@ -152,7 +156,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_index_rename(replacement_name, index.name)
expect_index_rename(replaced_name, replacement_name)
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
subject.perform
@@ -166,9 +170,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
end
it 'replaces the existing index with an identical index' do
- expect(connection).to receive(:execute).with('SET statement_timeout TO \'21600s\'').exactly(3).times
-
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
expect_to_execute_concurrently_in_order(create_index)
expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
@@ -179,7 +181,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_index_rename(replacement_name, index.name)
expect_index_rename(replaced_name, replacement_name)
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
subject.perform
@@ -192,6 +194,10 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect(connection).to receive(:execute).with(create_index).ordered
.and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
+ end
+
expect_to_execute_concurrently_in_order(drop_index)
expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
@@ -207,6 +213,10 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_to_execute_concurrently_in_order(create_index)
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
+ end
+
expect_to_execute_concurrently_in_order(drop_index)
expect { subject.perform }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
@@ -228,7 +238,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
expect_index_rename(index.name, replaced_name).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
@@ -245,7 +255,7 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
.and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
end
- expect_to_execute_concurrently_in_order(drop_index)
+ expect_index_drop(drop_index)
expect { subject.perform }.to raise_error(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, /exhausted/)
@@ -270,6 +280,14 @@ RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
SQL
end
+ def expect_index_drop(drop_index)
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: false).and_yield
+ end
+
+ expect_to_execute_concurrently_in_order(drop_index)
+ end
+
def find_index_create_statement
ActiveRecord::Base.connection.select_value(<<~SQL)
SELECT indexdef
diff --git a/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb b/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb
new file mode 100644
index 00000000000..8c0c4155ccc
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaCacheWithRenamedTable do
+ let(:old_model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'projects'
+ end
+ end
+
+ let(:new_model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'projects_new'
+ end
+ end
+
+ before do
+ stub_const('Gitlab::Database::TABLES_TO_BE_RENAMED', { 'projects' => 'projects_new' })
+ end
+
+ context 'when table is not renamed yet' do
+ before do
+ old_model.reset_column_information
+ ActiveRecord::Base.connection.schema_cache.clear!
+ end
+
+ it 'uses the original table to look up metadata' do
+ expect(old_model.primary_key).to eq('id')
+ end
+ end
+
+ context 'when table is renamed' do
+ before do
+ ActiveRecord::Base.connection.execute("ALTER TABLE projects RENAME TO projects_new")
+ ActiveRecord::Base.connection.execute("CREATE VIEW projects AS SELECT * FROM projects_new")
+
+ old_model.reset_column_information
+ ActiveRecord::Base.connection.schema_cache.clear!
+ end
+
+ it 'uses the renamed table to look up metadata' do
+ expect(old_model.primary_key).to eq('id')
+ end
+
+ it 'has primary key' do
+ expect(old_model.primary_key).to eq('id')
+ expect(old_model.primary_key).to eq(new_model.primary_key)
+ end
+
+ it 'has the same column definitions' do
+ expect(old_model.columns).to eq(new_model.columns)
+ end
+
+ it 'has the same indexes' do
+ indexes_for_old_table = ActiveRecord::Base.connection.schema_cache.indexes('projects')
+ indexes_for_new_table = ActiveRecord::Base.connection.schema_cache.indexes('projects_new')
+
+ expect(indexes_for_old_table).to eq(indexes_for_new_table)
+ end
+
+ it 'has the same column_hash' do
+ columns_hash_for_old_table = ActiveRecord::Base.connection.schema_cache.columns_hash('projects')
+ columns_hash_for_new_table = ActiveRecord::Base.connection.schema_cache.columns_hash('projects_new')
+
+ expect(columns_hash_for_old_table).to eq(columns_hash_for_new_table)
+ end
+
+ describe 'when the table behind a model is actually a view' do
+ let(:group) { create(:group) }
+ let(:project_attributes) { attributes_for(:project, namespace_id: group.id).except(:creator) }
+ let(:record) { old_model.create!(project_attributes) }
+
+ it 'can persist records' do
+ expect(record.reload.attributes).to eq(new_model.find(record.id).attributes)
+ end
+
+ it 'can find records' do
+ expect(old_model.find_by_id(record.id)).not_to be_nil
+ end
+ end
+ end
+end
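
The setup this spec depends on is the rename-plus-compatibility-view pattern, with Gitlab::Database::TABLES_TO_BE_RENAMED pointing the schema cache at the new name; sketched from the spec's own SQL:

    # Illustrative: the old table name keeps resolving through a view.
    ActiveRecord::Base.connection.execute('ALTER TABLE projects RENAME TO projects_new')
    ActiveRecord::Base.connection.execute('CREATE VIEW projects AS SELECT * FROM projects_new')

    # With TABLES_TO_BE_RENAMED set, models using either name see the same columns,
    # indexes, and primary key, and can still read and write records.
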
diff --git a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
new file mode 100644
index 00000000000..e93d8ab590d
--- /dev/null
+++ b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
@@ -0,0 +1,244 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction do
+ let(:env) { {} }
+ let(:logger) { Gitlab::Database::WithLockRetries::NULL_LOGGER }
+ let(:subject) { described_class.new(env: env, logger: logger, timing_configuration: timing_configuration) }
+
+ let(:timing_configuration) do
+ [
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second],
+ [1.second, 1.second]
+ ]
+ end
+
+ describe '#run' do
+ it 'requires block' do
+ expect { subject.run }.to raise_error(StandardError, 'no block given')
+ end
+
+ context 'when DISABLE_LOCK_RETRIES is set' do
+ let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
+
+ it 'executes the passed block without retrying' do
+ object = double
+
+ expect(object).to receive(:method).once
+
+ subject.run { object.method }
+ end
+ end
+
+ context 'when lock retry is enabled' do
+ let(:lock_fiber) do
+ Fiber.new do
+ # Initiating a second DB connection for the lock
+ conn = ActiveRecordSecond.establish_connection(Rails.configuration.database_configuration[Rails.env]).connection
+ conn.transaction do
+ conn.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+
+ Fiber.yield
+ end
+ ActiveRecordSecond.remove_connection # force disconnect
+ end
+ end
+
+ before do
+ stub_const('ActiveRecordSecond', Class.new(ActiveRecord::Base))
+
+ lock_fiber.resume # start the transaction and lock the table
+ end
+
+ after do
+ lock_fiber.resume if lock_fiber.alive?
+ end
+
+ context 'lock_fiber' do
+ it 'acquires lock successfully' do
+          check_exclusive_lock_query = <<~SQL
+            SELECT 1
+            FROM pg_locks l
+            JOIN pg_class t ON l.relation = t.oid
+            WHERE t.relkind = 'r' AND l.mode = 'ExclusiveLock' AND t.relname = '#{Project.table_name}'
+          SQL
+
+ expect(ActiveRecord::Base.connection.execute(check_exclusive_lock_query).to_a).to be_present
+ end
+ end
+
+ shared_examples 'retriable exclusive lock on `projects`' do
+ it 'succeeds executing the given block' do
+ lock_attempts = 0
+ lock_acquired = false
+
+ # the actual number of attempts to run_block_with_lock_timeout can never exceed the number of
+ # timings_configurations, so here we limit the retry_count if it exceeds that value
+ #
+ # also, there is no call to sleep after the final attempt, which is why it will always be one less
+ expected_runs_with_timeout = [retry_count, timing_configuration.size].min
+ expect(subject).to receive(:sleep).exactly(expected_runs_with_timeout - 1).times
+
+ expect(subject).to receive(:run_block_with_lock_timeout).exactly(expected_runs_with_timeout).times.and_wrap_original do |method|
+ lock_fiber.resume if lock_attempts == retry_count
+
+ method.call
+ end
+
+ subject.run do
+ lock_attempts += 1
+
+          if lock_attempts == retry_count # we reached the last retry iteration; releasing the lock here (by resuming the lock fiber) means the last try (no lock_timeout) will succeed
+ lock_fiber.resume
+ end
+
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ lock_acquired = true
+ end
+ end
+
+ expect(lock_attempts).to eq(retry_count)
+ expect(lock_acquired).to eq(true)
+ end
+ end
+
+ context 'after 3 iterations' do
+ it_behaves_like 'retriable exclusive lock on `projects`' do
+ let(:retry_count) { 4 }
+ end
+
+ context 'setting the idle transaction timeout' do
+ context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
+ it 'does not disable the idle transaction timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_lock_timeout).once
+
+ expect(subject).not_to receive(:disable_idle_in_transaction_timeout)
+
+ subject.run {}
+ end
+ end
+
+ context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
+ it 'disables the idle transaction timeout so the code can sleep and retry' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+
+ n = 0
+ allow(subject).to receive(:run_block_with_lock_timeout).twice do
+ n += 1
+ raise(ActiveRecord::LockWaitTimeout) if n == 1
+ end
+
+ expect(subject).to receive(:disable_idle_in_transaction_timeout).once
+
+ subject.run {}
+ end
+ end
+ end
+ end
+
+ context 'after the retries are exhausted' do
+ let(:timing_configuration) do
+ [
+ [1.second, 1.second]
+ ]
+ end
+
+ it 'disables the lock_timeout' do
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
+
+ expect(subject).to receive(:disable_lock_timeout)
+
+ subject.run {}
+ end
+ end
+
+ context 'after the retries, without setting lock_timeout' do
+ let(:retry_count) { timing_configuration.size + 1 }
+
+ it_behaves_like 'retriable exclusive lock on `projects`' do
+ before do
+ expect(subject).to receive(:run_block_without_lock_timeout).and_call_original
+ end
+ end
+ end
+
+ context 'after the retries, when requested to raise an error' do
+ let(:expected_attempts_with_timeout) { timing_configuration.size }
+ let(:retry_count) { timing_configuration.size + 1 }
+
+ it 'raises an error instead of waiting indefinitely for the lock' do
+ lock_attempts = 0
+ lock_acquired = false
+
+ expect(subject).to receive(:sleep).exactly(expected_attempts_with_timeout - 1).times
+ expect(subject).to receive(:run_block_with_lock_timeout).exactly(expected_attempts_with_timeout).times.and_call_original
+
+ expect do
+ subject.run(raise_on_exhaustion: true) do
+ lock_attempts += 1
+
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ lock_acquired = true
+ end
+ end
+ end.to raise_error(described_class::AttemptsExhaustedError)
+
+ expect(lock_attempts).to eq(retry_count - 1)
+ expect(lock_acquired).to eq(false)
+ end
+ end
+
+ context 'when statement timeout is reached' do
+ it 'raises StatementInvalid error' do
+ lock_acquired = false
+ ActiveRecord::Base.connection.execute("SET statement_timeout='100ms'")
+
+ expect do
+ subject.run do
+ ActiveRecord::Base.connection.execute("SELECT 1 FROM pg_sleep(0.11)") # 110ms
+ lock_acquired = true
+ end
+ end.to raise_error(ActiveRecord::StatementInvalid)
+
+ expect(lock_acquired).to eq(false)
+ end
+ end
+ end
+ end
+
+ context 'restore local database variables' do
+ it do
+ expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW lock_timeout").to_a }
+ end
+
+ it do
+ expect { subject.run {} }.not_to change { ActiveRecord::Base.connection.execute("SHOW idle_in_transaction_session_timeout").to_a }
+ end
+ end
+
+ context 'casting durations correctly' do
+ let(:timing_configuration) { [[0.015.seconds, 0.025.seconds], [0.015.seconds, 0.025.seconds]] } # 15ms, 25ms
+
+ it 'executes `SET lock_timeout` using the configured timeout value in milliseconds' do
+ expect(ActiveRecord::Base.connection).to receive(:execute).with('RESET idle_in_transaction_session_timeout; RESET lock_timeout').and_call_original
+ expect(ActiveRecord::Base.connection).to receive(:execute).with("SET lock_timeout TO '15ms'").and_call_original
+
+ subject.run { }
+ end
+
+ it 'calls `sleep` after the first iteration fails, using the configured sleep time' do
+ expect(subject).to receive(:run_block_with_lock_timeout).and_raise(ActiveRecord::LockWaitTimeout).twice
+ expect(subject).to receive(:sleep).with(0.025)
+
+ subject.run { }
+ end
+ end
+end
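
A minimal usage sketch for the class under test, based on the constructor keywords and run options used in this spec; the logger choice and the DDL inside the block are illustrative:

    timing = [[1.second, 1.second], [5.seconds, 10.seconds]]  # [lock_timeout, sleep] pairs

    Gitlab::Database::WithLockRetriesOutsideTransaction
      .new(env: ENV, logger: Gitlab::AppLogger, timing_configuration: timing)
      .run(raise_on_exhaustion: true) do
        # DDL that needs an exclusive lock; retried with the configured lock_timeout values.
        ActiveRecord::Base.connection.execute('LOCK TABLE projects IN EXCLUSIVE MODE')
      end
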
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 563399ff0d9..b08f39fc92a 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -76,14 +76,14 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_attempts = 0
lock_acquired = false
- # the actual number of attempts to run_block_with_transaction can never exceed the number of
+ # the actual number of attempts to run_block_with_lock_timeout can never exceed the number of
# timings_configurations, so here we limit the retry_count if it exceeds that value
#
# also, there is no call to sleep after the final attempt, which is why it will always be one less
expected_runs_with_timeout = [retry_count, timing_configuration.size].min
expect(subject).to receive(:sleep).exactly(expected_runs_with_timeout - 1).times
- expect(subject).to receive(:run_block_with_transaction).exactly(expected_runs_with_timeout).times.and_wrap_original do |method|
+ expect(subject).to receive(:run_block_with_lock_timeout).exactly(expected_runs_with_timeout).times.and_wrap_original do |method|
lock_fiber.resume if lock_attempts == retry_count
method.call
@@ -116,8 +116,8 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the idle transaction timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
- allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
- allow(subject).to receive(:run_block_with_transaction).once
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_lock_timeout).once
expect(subject).not_to receive(:disable_idle_in_transaction_timeout)
@@ -130,7 +130,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
n = 0
- allow(subject).to receive(:run_block_with_transaction).twice do
+ allow(subject).to receive(:run_block_with_lock_timeout).twice do
n += 1
raise(ActiveRecord::LockWaitTimeout) if n == 1
end
@@ -153,7 +153,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
it 'does not disable the lock_timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
- allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).not_to receive(:disable_lock_timeout)
@@ -164,7 +164,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
context 'when there is outer transaction: disable_ddl_transaction! is not set in the migration' do
it 'disables the lock_timeout' do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
- allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_lock_timeout).once.and_raise(ActiveRecord::LockWaitTimeout)
expect(subject).to receive(:disable_lock_timeout)
@@ -192,7 +192,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_acquired = false
expect(subject).to receive(:sleep).exactly(expected_attempts_with_timeout - 1).times
- expect(subject).to receive(:run_block_with_transaction).exactly(expected_attempts_with_timeout).times.and_call_original
+ expect(subject).to receive(:run_block_with_lock_timeout).exactly(expected_attempts_with_timeout).times.and_call_original
expect do
subject.run(raise_on_exhaustion: true) do
@@ -251,7 +251,7 @@ RSpec.describe Gitlab::Database::WithLockRetries do
end
it 'calls `sleep` after the first iteration fails, using the configured sleep time' do
- expect(subject).to receive(:run_block_with_transaction).and_raise(ActiveRecord::LockWaitTimeout).twice
+ expect(subject).to receive(:run_block_with_lock_timeout).and_raise(ActiveRecord::LockWaitTimeout).twice
expect(subject).to receive(:sleep).with(0.025)
subject.run { }
diff --git a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
index 417bf3e363a..28291508ac0 100644
--- a/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
+++ b/spec/lib/gitlab/database_importers/self_monitoring/project/create_service_spec.rb
@@ -137,11 +137,11 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
it 'creates a Prometheus service' do
expect(result[:status]).to eq(:success)
- services = result[:project].reload.services
+ integrations = result[:project].reload.integrations
- expect(services.count).to eq(1)
+ expect(integrations.count).to eq(1)
# Ensures PrometheusService#self_monitoring_project? is true
- expect(services.first.allow_local_api_url?).to be_truthy
+ expect(integrations.first.allow_local_api_url?).to be_truthy
end
it 'creates an environment for the project' do
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index b735ac7940b..663c8d69328 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -15,9 +15,29 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.default_pool_size' do
+ before do
+ allow(Gitlab::Runtime).to receive(:max_threads).and_return(7)
+ end
+
+ it 'returns the max thread size plus a fixed headroom of 10' do
+ expect(described_class.default_pool_size).to eq(17)
+ end
+
+ it 'returns the max thread size plus a DB_POOL_HEADROOM if this env var is present' do
+ stub_env('DB_POOL_HEADROOM', '7')
+
+ expect(described_class.default_pool_size).to eq(14)
+ end
+ end
+
describe '.config' do
- it 'returns a Hash' do
- expect(described_class.config).to be_an_instance_of(Hash)
+ it 'returns a HashWithIndifferentAccess' do
+ expect(described_class.config).to be_an_instance_of(HashWithIndifferentAccess)
+ end
+
+ it 'returns a default pool size' do
+ expect(described_class.config).to include(pool: described_class.default_pool_size)
end
end
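
The behavior pinned down above amounts to the maximum thread count plus a headroom; a sketch of that calculation, not necessarily the actual method body, with the DB_POOL_HEADROOM handling assumed from the spec:

    def self.default_pool_size
      headroom = (ENV['DB_POOL_HEADROOM'].presence || 10).to_i

      Gitlab::Runtime.max_threads + headroom  # 7 threads => 17 by default, or 14 with DB_POOL_HEADROOM=7
    end
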
@@ -176,7 +196,7 @@ RSpec.describe Gitlab::Database do
closed_pool = pool
- raise error.new('boom')
+ raise error, 'boom'
end
rescue error
end
@@ -395,13 +415,13 @@ RSpec.describe Gitlab::Database do
allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
end
- it 'detects a read only database' do
+ it 'detects a read-only database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "t" }])
expect(described_class.db_read_only?).to be_truthy
end
- it 'detects a read only database' do
+ it 'detects a read-only database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => true }])
expect(described_class.db_read_only?).to be_truthy
diff --git a/spec/lib/gitlab/default_branch_spec.rb b/spec/lib/gitlab/default_branch_spec.rb
new file mode 100644
index 00000000000..b066815612c
--- /dev/null
+++ b/spec/lib/gitlab/default_branch_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# The main_branch_over_master feature flag is disabled for tests
+# in order to keep branch usage consistent.
+# Once the default branch name is migrated to main, we can enable it.
+RSpec.describe Gitlab::DefaultBranch do
+ context 'main_branch_over_master is enabled' do
+ before do
+ stub_feature_flags(main_branch_over_master: true)
+ end
+
+ it 'returns main' do
+ expect(described_class.value).to eq('main')
+ end
+ end
+
+ context 'main_branch_over_master is disabled' do
+ it 'returns master' do
+ expect(described_class.value).to eq('master')
+ end
+ end
+end
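
A sketch of the value the spec describes, assuming the method simply branches on the feature flag (the real implementation may differ):

    module Gitlab
      module DefaultBranch
        def self.value
          Feature.enabled?(:main_branch_over_master) ? 'main' : 'master'
        end
      end
    end
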
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index 32ca6e4fde6..94d3b2ad0b3 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Diff::Highlight do
include RepoHelpers
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:commit) { project.commit(sample_commit.id) }
let(:diff) { commit.raw_diffs.first }
let(:diff_file) { Gitlab::Diff::File.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
@@ -156,5 +157,34 @@ RSpec.describe Gitlab::Diff::Highlight do
it_behaves_like 'without inline diffs'
end
end
+
+ context 'when blob is too large' do
+ let(:subject) { described_class.new(diff_file, repository: project.repository).highlight }
+
+ before do
+ allow(Gitlab::Highlight).to receive(:too_large?).and_return(true)
+ end
+
+    it 'highlights blobs as plain text without loading all data' do
+ expect(diff_file.blob).not_to receive(:load_all_data!)
+
+ expect(subject[2].rich_text).to eq(%Q{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n})
+ expect(subject[2].rich_text).to be_html_safe
+ end
+
+ context 'when limited_diff_highlighting is disabled' do
+ before do
+ stub_feature_flags(limited_diff_highlighting: false)
+ stub_feature_flags(diff_line_syntax_highlighting: false)
+ end
+
+      it 'highlights blobs as plain text and loads all data' do
+ expect(diff_file.blob).to receive(:load_all_data!).twice
+
+ code = %Q{ <span id="LC7" class="line" lang=""> def popen(cmd, path=nil)</span>\n}
+ expect(subject[2].rich_text).to eq(code)
+ end
+ end
+ end
end
end
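
For reference, the highlighting entry point exercised above, used the same way the spec does (diff_file and project come from the surrounding lets):

    highlighted = Gitlab::Diff::Highlight.new(diff_file, repository: project.repository).highlight

    highlighted.each do |line|
      line.rich_text  # HTML-safe; plain-text markup when the blob is too large to highlight
    end
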
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index 7436765e8ee..6d26b3e1064 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -90,11 +90,6 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
context 'when quick actions are present' do
let(:label) { create(:label, project: project, title: 'label1') }
let(:milestone) { create(:milestone, project: project) }
- let!(:user) { create(:user, username: 'user1') }
-
- before do
- project.add_developer(user)
- end
it 'applies quick action commands present on templates' do
file_content = %(Text from template \n/label ~#{label.title} \n/milestone %"#{milestone.name}"")
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
new file mode 100644
index 00000000000..42d84b3e4de
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do
+ let_it_be(:group) { build(:group) }
+
+ let(:series) { 0 }
+ let(:test_class) { Gitlab::Email::Message::InProductMarketing::Create }
+
+ describe 'initialize' do
+ subject { test_class.new(group: group, series: series) }
+
+ context 'when series does not exist' do
+ let(:series) { 3 }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when series exists' do
+ let(:series) { 0 }
+
+ it 'does not raise error' do
+      expect { subject }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#logo_path' do
+ subject { test_class.new(group: group, series: series).logo_path }
+
+ it { is_expected.to eq('mailers/in_product_marketing/create-0.png') }
+ end
+
+ describe '#unsubscribe' do
+ subject { test_class.new(group: group, series: series).unsubscribe }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to include('%tag_unsubscribe_url%') }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include(Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
+
+ describe '#cta_link' do
+ subject(:cta_link) { test_class.new(group: group, series: series).cta_link }
+
+ it 'renders link' do
+ expect(CGI.unescapeHTML(cta_link)).to include(Gitlab::Routing.url_helpers.group_email_campaigns_url(group, track: :create, series: series))
+ end
+ end
+
+ describe '#progress' do
+ subject { test_class.new(group: group, series: series).progress }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it { is_expected.to include('This is email 1 of 3 in the Create series') }
+ end
+
+ context 'not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it { is_expected.to include('This is email 1 of 3 in the Create series', Gitlab::Routing.url_helpers.profile_notifications_url) }
+ end
+ end
+end
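
A usage sketch for these message classes, using only the constructor and readers exercised in the spec (group is any Group record, series is zero-based):

    message = Gitlab::Email::Message::InProductMarketing::Create.new(group: group, series: 0)

    message.subject_line  # subject for this step of the track
    message.cta_link      # call-to-action URL for the group's email campaign
    message.unsubscribe   # unsubscribe footer (tag URL on gitlab.com, profile URL otherwise)
    message.progress      # e.g. "This is email 1 of 3 in the Create series"
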
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
new file mode 100644
index 00000000000..be8a33b18bd
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Create do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { build(:group) }
+
+  subject(:message) { described_class.new(group: group, series: series) }
+
+ describe "public methods" do
+ where(series: [0, 1, 2])
+
+ with_them do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
new file mode 100644
index 00000000000..6251128f560
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Team do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { build(:group) }
+
+  subject(:message) { described_class.new(group: group, series: series) }
+
+ describe "public methods" do
+ where(series: [0, 1])
+
+ with_them do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+
+ context 'with series 2' do
+ let(:series) { 2 }
+
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_nil
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
new file mode 100644
index 00000000000..2c435490765
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Trial do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:group) { build(:group) }
+
+  subject(:message) { described_class.new(group: group, series: series) }
+
+ describe "public methods" do
+ where(series: [0, 1, 2])
+
+ with_them do
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
new file mode 100644
index 00000000000..73252c0dbdf
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing::Verify do
+ let_it_be(:group) { build(:group) }
+
+  subject(:message) { described_class.new(group: group, series: series) }
+
+ describe "public methods" do
+ context 'with series 0' do
+ let(:series) { 0 }
+
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_nil
+ expect(message.cta_text).to be_present
+ end
+ end
+
+ context 'with series 1' do
+ let(:series) { 1 }
+
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_present
+ expect(message.cta_text).to be_present
+ end
+ end
+
+ context 'with series 2' do
+ let(:series) { 2 }
+
+ it 'returns value for series', :aggregate_failures do
+ expect(message.subject_line).to be_present
+ expect(message.tagline).to be_present
+ expect(message.title).to be_present
+ expect(message.subtitle).to be_present
+ expect(message.body_line1).to be_present
+ expect(message.body_line2).to be_nil
+ expect(message.cta_text).to be_present
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
new file mode 100644
index 00000000000..9ffc4a340a3
--- /dev/null
+++ b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Email::Message::InProductMarketing do
+ describe '.for' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.for(track) }
+
+ context 'when track exists' do
+ where(:track, :expected_class) do
+ :create | described_class::Create
+ :verify | described_class::Verify
+ :trial | described_class::Trial
+ :team | described_class::Team
+ end
+
+ with_them do
+ it { is_expected.to eq(expected_class) }
+ end
+ end
+
+ context 'when track does not exist' do
+ let(:track) { :non_existent }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(described_class::UnknownTrackError)
+ end
+ end
+ end
+end
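
In short, `.for` maps a track symbol to its message class and raises for unknown tracks; a minimal illustration:

    Gitlab::Email::Message::InProductMarketing.for(:verify)
    # => Gitlab::Email::Message::InProductMarketing::Verify

    Gitlab::Email::Message::InProductMarketing.for(:nope)
    # raises Gitlab::Email::Message::InProductMarketing::UnknownTrackError
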
diff --git a/spec/lib/gitlab/email/receiver_spec.rb b/spec/lib/gitlab/email/receiver_spec.rb
index ccff902d290..9b05c12ef57 100644
--- a/spec/lib/gitlab/email/receiver_spec.rb
+++ b/spec/lib/gitlab/email/receiver_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Gitlab::Email::Receiver do
context 'when the email contains a valid email address in a header' do
let(:handler) { double(:handler) }
+ let(:metadata) { receiver.mail_metadata }
before do
allow(handler).to receive(:execute)
@@ -22,24 +23,38 @@ RSpec.describe Gitlab::Email::Receiver do
allow(handler).to receive(:metrics_event)
stub_incoming_email_setting(enabled: true, address: "incoming+%{key}@appmail.example.com")
+
+ expect(receiver.mail_metadata.keys).to match_array(%i(mail_uid from_address to_address mail_key references delivered_to envelope_to x_envelope_to))
end
context 'when in a Delivered-To header' do
let(:email_raw) { fixture_file('emails/forwarded_new_issue.eml') }
it_behaves_like 'correctly finds the mail key'
+
+ it 'parses the metadata' do
+        expect(metadata[:delivered_to]).to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com", "support@example.com"])
+ end
end
context 'when in an Envelope-To header' do
let(:email_raw) { fixture_file('emails/envelope_to_header.eml') }
it_behaves_like 'correctly finds the mail key'
+
+ it 'parses the metadata' do
+        expect(metadata[:envelope_to]).to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"])
+ end
end
context 'when in an X-Envelope-To header' do
let(:email_raw) { fixture_file('emails/x_envelope_to_header.eml') }
it_behaves_like 'correctly finds the mail key'
+
+ it 'parses the metadata' do
+        expect(metadata[:x_envelope_to]).to eq(["incoming+gitlabhq/gitlabhq+auth_token@appmail.example.com"])
+ end
end
context 'when enclosed with angle brackets in an Envelope-To header' do
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 0ea974921bc..cf0d1577314 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -216,4 +216,63 @@ RSpec.describe Gitlab::EncodingHelper do
expect(test).not_to eq(io_stream)
end
end
+
+ describe '#detect_encoding' do
+ subject { ext_class.detect_encoding(data, **kwargs) }
+
+ let(:data) { binary_string }
+ let(:kwargs) { {} }
+
+ shared_examples 'detects encoding' do
+ it { is_expected.to be_a(Hash) }
+
+ it 'correctly detects the binary' do
+ expect(subject[:type]).to eq(:binary)
+ end
+
+ context 'data is nil' do
+ let(:data) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'limit is provided' do
+ let(:kwargs) do
+ { limit: 10 }
+ end
+
+ it 'correctly detects the binary' do
+ expect(subject[:type]).to eq(:binary)
+ end
+ end
+ end
+
+ context 'cached_encoding_detection is enabled' do
+ before do
+ stub_feature_flags(cached_encoding_detection: true)
+ end
+
+ it_behaves_like 'detects encoding'
+
+ context 'cache_key is provided' do
+ let(:kwargs) do
+ { cache_key: %w(foo bar) }
+ end
+
+ it 'uses that cache_key to serve from the cache' do
+ expect(Rails.cache).to receive(:fetch).with([:detect_binary, CharlockHolmes::VERSION, %w(foo bar)], expires_in: 1.week).and_call_original
+
+ expect(subject[:type]).to eq(:binary)
+ end
+ end
+ end
+
+ context 'cached_encoding_detection is disabled' do
+ before do
+ stub_feature_flags(cached_encoding_detection: false)
+ end
+
+ it_behaves_like 'detects encoding'
+ end
+ end
end
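
A usage sketch for the new detection API, assuming a caller that extends Gitlab::EncodingHelper the way the spec's ext_class does; the keyword arguments mirror the spec, the cache key is illustrative:

    klass = Class.new { extend Gitlab::EncodingHelper }

    detection = klass.detect_encoding(data, limit: 10, cache_key: %w(blob deadbeef))
    detection[:type]  # => :binary for binary data; detect_encoding returns nil when data is nil
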
diff --git a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
index 24f5299d357..584eadb24a7 100644
--- a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::Processor::ContextPayloadProcessor do
- shared_examples 'processing an exception' do
+ describe '.call' do
+ let(:event) { Raven::Event.new(payload) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
before do
allow_next_instance_of(Gitlab::ErrorTracking::ContextPayloadGenerator) do |generator|
allow(generator).to receive(:generate).and_return(
@@ -37,30 +40,4 @@ RSpec.describe Gitlab::ErrorTracking::Processor::ContextPayloadProcessor do
sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] })
end
end
-
- describe '.call' do
- let(:event) { Raven::Event.new(payload) }
- let(:result_hash) { described_class.call(event).to_hash }
-
- it_behaves_like 'processing an exception'
-
- context 'when followed by #process' do
- let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
-
- it_behaves_like 'processing an exception'
- end
- end
-
- describe '#process' do
- let(:event) { Raven::Event.new(payload) }
- let(:result_hash) { described_class.new.process(event.to_hash) }
-
- context 'with sentry_processors_before_send disabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: false)
- end
-
- it_behaves_like 'processing an exception'
- end
- end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 4808fdf2f06..727b603feda 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -3,7 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
- shared_examples 'processing an exception' do
+ describe '.call' do
+ let(:event) { Raven::Event.from_exception(exception, data) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
context 'when there is no GRPC exception' do
let(:exception) { RuntimeError.new }
let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
@@ -56,30 +59,4 @@ RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
end
end
end
-
- describe '.call' do
- let(:event) { Raven::Event.from_exception(exception, data) }
- let(:result_hash) { described_class.call(event).to_hash }
-
- it_behaves_like 'processing an exception'
-
- context 'when followed by #process' do
- let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
-
- it_behaves_like 'processing an exception'
- end
- end
-
- describe '#process' do
- let(:event) { Raven::Event.from_exception(exception, data) }
- let(:result_hash) { described_class.new.process(event.to_hash) }
-
- context 'with sentry_processors_before_send disabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: false)
- end
-
- it_behaves_like 'processing an exception'
- end
- end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
index 20fd5d085a9..c8a362fcf05 100644
--- a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
@@ -94,7 +94,10 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
end
- shared_examples 'processing an exception' do
+ describe '.call' do
+ let(:event) { Raven::Event.new(wrapped_value) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
context 'when there is Sidekiq data' do
let(:wrapped_value) { { extra: { sidekiq: value } } }
@@ -168,30 +171,4 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
end
end
-
- describe '.call' do
- let(:event) { Raven::Event.new(wrapped_value) }
- let(:result_hash) { described_class.call(event).to_hash }
-
- it_behaves_like 'processing an exception'
-
- context 'when followed by #process' do
- let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
-
- it_behaves_like 'processing an exception'
- end
- end
-
- describe '#process' do
- let(:event) { Raven::Event.new(wrapped_value) }
- let(:result_hash) { described_class.new.process(event.to_hash) }
-
- context 'with sentry_processors_before_send disabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: false)
- end
-
- it_behaves_like 'processing an exception'
- end
- end
end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 2e67a9f0874..7ad1f52780a 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -217,7 +217,7 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
- shared_examples 'event processors' do
+ context 'event processors' do
subject(:track_exception) { described_class.track_exception(exception, extra) }
before do
@@ -312,20 +312,4 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
end
-
- context 'with sentry_processors_before_send enabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: true)
- end
-
- include_examples 'event processors'
- end
-
- context 'with sentry_processors_before_send disabled' do
- before do
- stub_feature_flags(sentry_processors_before_send: false)
- end
-
- include_examples 'event processors'
- end
end
diff --git a/spec/lib/gitlab/etag_caching/router_spec.rb b/spec/lib/gitlab/etag_caching/router_spec.rb
index c748ee00721..ce728c41f48 100644
--- a/spec/lib/gitlab/etag_caching/router_spec.rb
+++ b/spec/lib/gitlab/etag_caching/router_spec.rb
@@ -22,6 +22,14 @@ RSpec.describe Gitlab::EtagCaching::Router do
expect(result.name).to eq 'pipelines_graph'
expect(result.router).to eq Gitlab::EtagCaching::Router::Graphql
end
+
+ it 'matches pipeline sha endpoint' do
+ result = match_route('/api/graphql', 'pipelines/sha/4asd12lla2jiwjdqw9as32glm8is8hiu8s2c5jsw')
+
+ expect(result).to be_present
+ expect(result.name).to eq 'ci_editor'
+ expect(result.router).to eq Gitlab::EtagCaching::Router::Graphql
+ end
end
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
index 3678aeb18b0..5419a01ea3e 100644
--- a/spec/lib/gitlab/experimentation/controller_concern_spec.rb
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -19,12 +19,15 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
}
)
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(is_gitlab_com)
+
Feature.enable_percentage_of_time(:backwards_compatible_test_experiment_experiment_percentage, enabled_percentage)
Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
end
let(:enabled_percentage) { 10 }
let(:rollout_strategy) { nil }
+ let(:is_gitlab_com) { true }
controller(ApplicationController) do
include Gitlab::Experimentation::ControllerConcern
@@ -37,17 +40,17 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
describe '#set_experimentation_subject_id_cookie' do
let(:do_not_track) { nil }
let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] }
+ let(:cookie_value) { nil }
before do
request.headers['DNT'] = do_not_track if do_not_track.present?
+ request.cookies[:experimentation_subject_id] = cookie_value if cookie_value
get :index
end
context 'cookie is present' do
- before do
- cookies[:experimentation_subject_id] = 'test'
- end
+ let(:cookie_value) { 'test' }
it 'does not change the cookie' do
expect(cookies[:experimentation_subject_id]).to eq 'test'
@@ -75,6 +78,24 @@ RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
end
end
end
+
+ context 'when not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ context 'when cookie was set' do
+ let(:cookie_value) { 'test' }
+
+ it 'cookie gets deleted' do
+ expect(cookie).not_to be_present
+ end
+ end
+
+ context 'when no cookie was set before' do
+ it 'does nothing' do
+ expect(cookie).not_to be_present
+ end
+ end
+ end
end
describe '#push_frontend_experiment' do
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 5fef14bd2a0..10bfa9e8d0e 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -7,7 +7,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
it 'temporarily ensures we know what experiments exist for backwards compatibility' do
expected_experiment_keys = [
- :invite_members_version_b,
:invite_members_empty_group_version_a,
:contact_sales_btn_in_app
]
diff --git a/spec/lib/gitlab/external_authorization/access_spec.rb b/spec/lib/gitlab/external_authorization/access_spec.rb
index a6773cc19e1..812ef2b54e9 100644
--- a/spec/lib/gitlab/external_authorization/access_spec.rb
+++ b/spec/lib/gitlab/external_authorization/access_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache
context 'when the request fails' do
before do
allow(fake_client).to receive(:request_access) do
- raise ::Gitlab::ExternalAuthorization::RequestFailed.new('Service unavailable')
+ raise ::Gitlab::ExternalAuthorization::RequestFailed, 'Service unavailable'
end
end
diff --git a/spec/lib/gitlab/external_authorization/client_spec.rb b/spec/lib/gitlab/external_authorization/client_spec.rb
index c08da382486..b907b0bb262 100644
--- a/spec/lib/gitlab/external_authorization/client_spec.rb
+++ b/spec/lib/gitlab/external_authorization/client_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
end
it 'wraps exceptions if the request fails' do
- expect(Gitlab::HTTP).to receive(:post) { raise Gitlab::HTTP::BlockedUrlError.new('the request broke') }
+ expect(Gitlab::HTTP).to receive(:post) { raise Gitlab::HTTP::BlockedUrlError, 'the request broke' }
expect { client.request_access }
.to raise_error(::Gitlab::ExternalAuthorization::RequestFailed)
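Both hunks in this file and the one above switch from `raise SomeError.new('message')` to `raise SomeError, 'message'`. The two forms build the same exception object; the second is simply the style RuboCop's Style/RaiseArgs cop prefers. A self-contained sketch of the equivalence (the error class here is illustrative):

  class RequestFailed < StandardError; end

  begin
    # Same exception as: raise RequestFailed.new('Service unavailable')
    raise RequestFailed, 'Service unavailable'
  rescue RequestFailed => error
    puts error.message # => "Service unavailable"
  end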
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index 67d7b37dd45..495cb16ebab 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -53,7 +53,10 @@ RSpec.describe Gitlab::Git::Blame, :seed_helper do
end
it 'converts to UTF-8' do
- expect(CharlockHolmes::EncodingDetector).to receive(:detect).and_return(nil)
+ expect_next_instance_of(CharlockHolmes::EncodingDetector) do |detector|
+ expect(detector).to receive(:detect).and_return(nil)
+ end
+
data = []
blame.each do |commit, line|
data << {
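The blame spec above now uses GitLab's `expect_next_instance_of` helper instead of stubbing the `CharlockHolmes::EncodingDetector` class, so the expectation applies to the instance the code under test actually constructs. A hedged sketch of the pattern with an illustrative class:

  # Sets the message expectation on the next Foo that gets instantiated,
  # rather than on the class or on every existing instance.
  expect_next_instance_of(Foo) do |instance|
    expect(instance).to receive(:detect).and_return(nil)
  end

  Foo.new.detect # => nil, satisfying the expectation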
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index 9271f635b14..3cc52863976 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -44,6 +44,16 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
end
end
+ describe "#cache_key" do
+ subject { repository.branches.first }
+
+ it "returns a cache key that changes based on changeable values" do
+ digest = Digest::SHA1.hexdigest([subject.name, subject.target, subject.dereferenced_target.sha].join(":"))
+
+ expect(subject.cache_key).to eq("branch:#{digest}")
+ end
+ end
+
describe '#size' do
subject { super().size }
diff --git a/spec/lib/gitlab/git/lfs_changes_spec.rb b/spec/lib/gitlab/git/lfs_changes_spec.rb
index 286c5b98771..416a180ec8b 100644
--- a/spec/lib/gitlab/git/lfs_changes_spec.rb
+++ b/spec/lib/gitlab/git/lfs_changes_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Git::LfsChanges do
let_it_be(:project) { create(:project, :repository) }
+
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:blob_object_id) { '0c304a93cb8430108629bbbcaa27db3343299bc0' }
diff --git a/spec/lib/gitlab/git/merge_base_spec.rb b/spec/lib/gitlab/git/merge_base_spec.rb
index 1410e44a220..fda2232c2c3 100644
--- a/spec/lib/gitlab/git/merge_base_spec.rb
+++ b/spec/lib/gitlab/git/merge_base_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Git::MergeBase do
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
subject(:merge_base) { described_class.new(repository, refs) }
diff --git a/spec/lib/gitlab/git/push_spec.rb b/spec/lib/gitlab/git/push_spec.rb
index 68cef558f6f..a0a4a22699b 100644
--- a/spec/lib/gitlab/git/push_spec.rb
+++ b/spec/lib/gitlab/git/push_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Git::Push do
let_it_be(:project) { create(:project, :repository) }
+
let(:oldrev) { project.commit('HEAD~2').id }
let(:newrev) { project.commit.id }
let(:ref) { 'refs/heads/some-branch' }
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 1e259c9c153..1ddbdda12b5 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -604,29 +604,29 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.to receive(:find_remote_root_ref).and_call_original
- expect(repository.find_remote_root_ref('origin')).to eq 'master'
+ expect(repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to eq 'master'
end
it 'returns UTF-8' do
- expect(repository.find_remote_root_ref('origin')).to be_utf8
+ expect(repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL)).to be_utf8
end
it 'returns nil when remote name is nil' do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.not_to receive(:find_remote_root_ref)
- expect(repository.find_remote_root_ref(nil)).to be_nil
+ expect(repository.find_remote_root_ref(nil, nil)).to be_nil
end
it 'returns nil when remote name is empty' do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
.not_to receive(:find_remote_root_ref)
- expect(repository.find_remote_root_ref('')).to be_nil
+ expect(repository.find_remote_root_ref('', '')).to be_nil
end
it_behaves_like 'wrapping gRPC errors', Gitlab::GitalyClient::RemoteService, :find_remote_root_ref do
- subject { repository.find_remote_root_ref('origin') }
+ subject { repository.find_remote_root_ref('origin', SeedHelper::GITLAB_GIT_TEST_REPO_URL) }
end
end
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 5ef964ac3c1..c44d7e44751 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -10,9 +10,9 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
it { expect(tree).to be_kind_of Array }
it { expect(tree.empty?).to be_falsey }
- it { expect(tree.select(&:dir?).size).to eq(2) }
- it { expect(tree.select(&:file?).size).to eq(10) }
- it { expect(tree.select(&:submodule?).size).to eq(2) }
+ it { expect(tree.count(&:dir?)).to eq(2) }
+ it { expect(tree.count(&:file?)).to eq(10) }
+ it { expect(tree.count(&:submodule?)).to eq(2) }
it 'returns an empty array when called with an invalid ref' do
expect(described_class.where(repository, 'foobar-does-not-exist')).to eq([])
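The tree spec now uses `Enumerable#count` with a block instead of `select(...).size`, which counts matching elements in one pass without building an intermediate array. A standalone example:

  entries = [{ dir: true }, { dir: false }, { dir: true }]

  entries.select { |e| e[:dir] }.size # => 2, allocates a throwaway array
  entries.count { |e| e[:dir] }       # => 2, single pass, no extra allocation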
diff --git a/spec/lib/gitlab/git/wiki_spec.rb b/spec/lib/gitlab/git/wiki_spec.rb
index 36bff42d937..eb7deb08063 100644
--- a/spec/lib/gitlab/git/wiki_spec.rb
+++ b/spec/lib/gitlab/git/wiki_spec.rb
@@ -58,22 +58,6 @@ RSpec.describe Gitlab::Git::Wiki do
end
end
- describe '#delete_page' do
- after do
- destroy_page('page1')
- end
-
- it 'only removes the page with the same path' do
- create_page('page1', 'content')
- create_page('*', 'content')
-
- subject.delete_page('*', commit_details('whatever'))
-
- expect(subject.list_pages.count).to eq 1
- expect(subject.list_pages.first.title).to eq 'page1'
- end
- end
-
describe '#preview_slug' do
where(:title, :format, :expected_slug) do
'The Best Thing' | :markdown | 'The-Best-Thing'
diff --git a/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb b/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
index e448277b307..2c9da0f6606 100644
--- a/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
+++ b/spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Git::WrapsGitalyErrors do
mapping.each do |grpc_error, error|
it "wraps #{grpc_error} in a #{error}" do
- expect { wrapper.wrapped_gitaly_errors { raise grpc_error.new('wrapped') } }
+ expect { wrapper.wrapped_gitaly_errors { raise grpc_error, 'wrapped' } }
.to raise_error(error)
end
end
diff --git a/spec/lib/gitlab/git_access_design_spec.rb b/spec/lib/gitlab/git_access_design_spec.rb
index ee25f6c2979..9fd1f2dcb0c 100644
--- a/spec/lib/gitlab/git_access_design_spec.rb
+++ b/spec/lib/gitlab/git_access_design_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::GitAccessDesign do
let_it_be(:project) { create(:project) }
let_it_be(:user) { project.owner }
+
let(:protocol) { 'web' }
let(:actor) { user }
diff --git a/spec/lib/gitlab/git_access_project_spec.rb b/spec/lib/gitlab/git_access_project_spec.rb
index 953b74cf1a9..ad593cbf005 100644
--- a/spec/lib/gitlab/git_access_project_spec.rb
+++ b/spec/lib/gitlab/git_access_project_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccessProject do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
+
let(:container) { project }
let(:actor) { user }
let(:project_path) { project.path }
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 9a1ecfe6459..ae9c697e0b9 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -554,19 +554,19 @@ RSpec.describe Gitlab::GitAccess do
context 'when the repository is public' do
let(:options) { %i[repository_enabled] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
context 'when the repository is private' do
let(:options) { %i[repository_private] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
context 'when the repository is disabled' do
let(:options) { %i[repository_disabled] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
end
end
@@ -596,13 +596,13 @@ RSpec.describe Gitlab::GitAccess do
context 'when the repository is private' do
let(:options) { %i[repository_private] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
context 'when the repository is disabled' do
let(:options) { %i[repository_disabled] }
- it { expect { pull_access_check }.to raise_error('The project you were looking for could not be found.') }
+ it { expect { pull_access_check }.to raise_error("The project you were looking for could not be found or you don't have permission to view it.") }
end
end
end
@@ -1034,7 +1034,7 @@ RSpec.describe Gitlab::GitAccess do
end
end
- context 'when the repository is read only' do
+ context 'when the repository is read-only' do
let(:project) { create(:project, :repository, :read_only) }
it 'denies push access' do
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index b78d99269d3..5ada8a6ef40 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::GitAccessWiki do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :wiki_repo) }
let_it_be(:wiki) { create(:project_wiki, project: project) }
+
let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] }
let(:authentication_abilities) { %i[read_project download_code push_code] }
let(:redirected_path) { nil }
diff --git a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
index b9ef76e1f41..70fc4fe4416 100644
--- a/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/remote_service_spec.rb
@@ -35,22 +35,50 @@ RSpec.describe Gitlab::GitalyClient::RemoteService do
end
describe '#find_remote_root_ref' do
- it 'sends an find_remote_root_ref message and returns the root ref' do
- expect_any_instance_of(Gitaly::RemoteService::Stub)
- .to receive(:find_remote_root_ref)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(ref: 'master'))
+ let(:remote) { 'origin' }
+ let(:url) { 'http://git.example.com/my-repo.git' }
+ let(:auth) { 'Basic secret' }
+
+ shared_examples 'a find_remote_root_ref call' do
+ it 'sends a find_remote_root_ref message and returns the root ref' do
+ expect_any_instance_of(Gitaly::RemoteService::Stub)
+ .to receive(:find_remote_root_ref)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return(double(ref: 'master'))
+
+ expect(client.find_remote_root_ref(remote, url, auth)).to eq 'master'
+ end
+
+ it 'ensures ref is a valid UTF-8 string' do
+ expect_any_instance_of(Gitaly::RemoteService::Stub)
+ .to receive(:find_remote_root_ref)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return(double(ref: "an_invalid_ref_\xE5"))
- expect(client.find_remote_root_ref('origin')).to eq 'master'
+ expect(client.find_remote_root_ref(remote, url, auth)).to eq "an_invalid_ref_å"
+ end
end
- it 'ensure ref is a valid UTF-8 string' do
- expect_any_instance_of(Gitaly::RemoteService::Stub)
- .to receive(:find_remote_root_ref)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(ref: "an_invalid_ref_\xE5"))
+ context 'with inmemory feature enabled' do
+ before do
+ stub_feature_flags(find_remote_root_refs_inmemory: true)
+ end
+
+ it_behaves_like 'a find_remote_root_ref call' do
+ let(:expected_params) { { remote_url: url, http_authorization_header: auth } }
+ end
+ end
- expect(client.find_remote_root_ref('origin')).to eq "an_invalid_ref_å"
+ context 'with inmemory feature disabled' do
+ before do
+ stub_feature_flags(find_remote_root_refs_inmemory: false)
+ end
+
+ it_behaves_like 'a find_remote_root_ref call' do
+ let(:expected_params) { { remote: remote } }
+ end
end
end
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 4000e0b2611..194dfb228ee 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -32,8 +32,9 @@ RSpec.describe Gitlab::GithubImport::Client do
it 'returns the pull request reviews' do
client = described_class.new('foo')
- expect(client.octokit).to receive(:pull_request_reviews).with('foo/bar', 999)
- expect(client).to receive(:with_rate_limit).and_yield
+ expect(client)
+ .to receive(:each_object)
+ .with(:pull_request_reviews, 'foo/bar', 999)
client.pull_request_reviews('foo/bar', 999)
end
diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
index 3bb57e152fe..ef0bb90db4a 100644
--- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb
@@ -8,13 +8,14 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
let(:user) { create(:user) }
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
+ let(:note_body) { 'This is my note' }
let(:github_note) do
Gitlab::GithubImport::Representation::Note.new(
noteable_id: 1,
noteable_type: 'Issue',
author: Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
- note: 'This is my note',
+ note: note_body,
created_at: created_at,
updated_at: updated_at,
github_id: 1
@@ -92,6 +93,24 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do
importer.execute
end
end
+
+ context 'when the note has invalid chars' do
+ let(:note_body) { %{There was an invalid char "\u0000" <= right here} }
+
+ it 'removes invalid chars' do
+ expect(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(github_note)
+ .and_return([user.id, true])
+
+ expect { importer.execute }
+ .to change(project.notes, :count)
+ .by(1)
+
+ expect(project.notes.last.note)
+ .to eq('There was an invalid char "" <= right here')
+ end
+ end
end
context 'when the noteable does not exist' do
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
index 5002e0384f3..fa8b5e6ccf0 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
.to change(Note, :count).by(1)
last_note = merge_request.notes.last
- expect(last_note.note).to eq("*Created by author*\n\n**Review:** Approved")
+ expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Approved")
expect(last_note.author).to eq(project.creator)
expect(last_note.created_at).to eq(submitted_at)
end
@@ -153,6 +153,20 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
end
end
+ context 'when original author was deleted in github' do
+ let(:review) { create_review(type: 'APPROVED', note: '', author: nil) }
+
+ it 'creates a note for the review without the author information' do
+ expect { subject.execute }
+ .to change(Note, :count).by(1)
+
+ last_note = merge_request.notes.last
+ expect(last_note.note).to eq('**Review:** Approved')
+ expect(last_note.author).to eq(project.creator)
+ expect(last_note.created_at).to eq(submitted_at)
+ end
+ end
+
context 'when the review has a note text' do
context 'when the review is "APPROVED"' do
let(:review) { create_review(type: 'APPROVED') }
@@ -163,7 +177,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
last_note = merge_request.notes.last
- expect(last_note.note).to eq("*Created by author*\n\n**Review:** Approved\n\nnote")
+ expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Approved\n\nnote")
expect(last_note.author).to eq(project.creator)
expect(last_note.created_at).to eq(submitted_at)
end
@@ -178,7 +192,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
last_note = merge_request.notes.last
- expect(last_note.note).to eq("*Created by author*\n\n**Review:** Commented\n\nnote")
+ expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Commented\n\nnote")
expect(last_note.author).to eq(project.creator)
expect(last_note.created_at).to eq(submitted_at)
end
@@ -193,7 +207,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
last_note = merge_request.notes.last
- expect(last_note.note).to eq("*Created by author*\n\n**Review:** Changes requested\n\nnote")
+ expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Changes requested\n\nnote")
expect(last_note.author).to eq(project.creator)
expect(last_note.created_at).to eq(submitted_at)
end
@@ -201,13 +215,13 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
end
end
- def create_review(type:, note: 'note')
+ def create_review(type:, note: 'note', author: { id: 999, login: 'author' })
Gitlab::GithubImport::Representation::PullRequestReview.from_json_hash(
merge_request_id: merge_request.id,
review_type: type,
note: note,
submitted_at: submitted_at.to_s,
- author: { id: 999, login: 'author' }
+ author: author
)
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
index b859cc727a6..4a47d103cde 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
@@ -23,12 +23,11 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do
end
describe '#id_for_already_imported_cache' do
- it { expect(subject.id_for_already_imported_cache(double(number: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
end
- describe '#each_object_to_import' do
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetchs the merged pull requests data' do
- pull_request = double
create(
:merged_merge_request,
iid: 999,
@@ -36,12 +35,18 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do
target_project: project
)
+ pull_request = double
+
allow(client)
.to receive(:pull_request)
+ .exactly(:once) # ensures the result is cached for the second call
.with('http://somegithub.com', 999)
.and_return(pull_request)
- expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(pull_request)
+ expect { |b| subject.each_object_to_import(&b) }
+ .to yield_with_args(pull_request)
+
+ subject.each_object_to_import {}
end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
index 5e2302f9662..f18064f10aa 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
@@ -23,12 +23,18 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
end
describe '#id_for_already_imported_cache' do
- it { expect(subject.id_for_already_imported_cache(double(github_id: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
end
- describe '#each_object_to_import' do
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetchs the merged pull requests data' do
- merge_request = create(:merge_request, source_project: project)
+ merge_request = create(
+ :merged_merge_request,
+ iid: 999,
+ source_project: project,
+ target_project: project
+ )
+
review = double
expect(review)
@@ -37,10 +43,14 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
allow(client)
.to receive(:pull_request_reviews)
+ .exactly(:once) # ensures the result is cached for the second call
.with('github/repo', merge_request.iid)
.and_return([review])
- expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(review)
+ expect { |b| subject.each_object_to_import(&b) }
+ .to yield_with_args(review)
+
+ subject.each_object_to_import {}
end
end
end
diff --git a/spec/lib/gitlab/github_import/markdown_text_spec.rb b/spec/lib/gitlab/github_import/markdown_text_spec.rb
index 4995caa0733..22bf10f36d8 100644
--- a/spec/lib/gitlab/github_import/markdown_text_spec.rb
+++ b/spec/lib/gitlab/github_import/markdown_text_spec.rb
@@ -20,11 +20,25 @@ RSpec.describe Gitlab::GithubImport::MarkdownText do
expect(text.to_s).to eq('Hello')
end
+ it 'returns the text when the author has no login' do
+ author = double(:author, login: nil)
+ text = described_class.new('Hello', author, true)
+
+ expect(text.to_s).to eq('Hello')
+ end
+
it 'returns the text with an extra header when the author was not found' do
author = double(:author, login: 'Alice')
text = described_class.new('Hello', author)
expect(text.to_s).to eq("*Created by: Alice*\n\nHello")
end
+
+ it 'cleans invalid chars' do
+ author = double(:author, login: 'Alice')
+ text = described_class.format("\u0000Hello", author)
+
+ expect(text.to_s).to eq("*Created by: Alice*\n\nHello")
+ end
end
end
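The new `format` example above expects the NULL byte to be stripped from imported text. One way to express that cleanup in plain Ruby, as a hedged sketch rather than the actual MarkdownText implementation:

  # PostgreSQL text columns reject the NULL byte, so imported text is scrubbed first.
  def strip_invalid_chars(text)
    text.gsub("\u0000", '')
  end

  strip_invalid_chars("\u0000Hello") # => "Hello"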
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index 0dd2bd4df45..20e67a784e1 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -61,6 +61,10 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
expect(finder).to receive(:find).with(user.id, user.login).and_return(42)
expect(finder.user_id_for(user)).to eq(42)
end
+
+ it 'does not fail with empty input' do
+ expect(finder.user_id_for(nil)).to eq(nil)
+ end
end
describe '#find' do
diff --git a/spec/lib/gitlab/graphql/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecation_spec.rb
index 8b41145b855..2931e28a6ee 100644
--- a/spec/lib/gitlab/graphql/deprecation_spec.rb
+++ b/spec/lib/gitlab/graphql/deprecation_spec.rb
@@ -164,7 +164,7 @@ RSpec.describe ::Gitlab::Graphql::Deprecation do
context 'when the context is :inline' do
it 'renders on one line' do
- expectation = '**Deprecated** in 10.10. This was renamed. Use: `X.y`.'
+ expectation = '**Deprecated** in 10.10. This was renamed. Use: [`X.y`](#xy).'
expect(deprecation.markdown).to eq(expectation)
expect(deprecation.markdown(context: :inline)).to eq(expectation)
@@ -177,7 +177,7 @@ RSpec.describe ::Gitlab::Graphql::Deprecation do
WARNING:
**Deprecated** in 10.10.
This was renamed.
- Use: `X.y`.
+ Use: [`X.y`](#xy).
MD
expect(deprecation.markdown(context: :block)).to eq(expectation)
diff --git a/spec/lib/gitlab/graphql/docs/renderer_spec.rb b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
index 8c0f7aac081..14db51deb88 100644
--- a/spec/lib/gitlab/graphql/docs/renderer_spec.rb
+++ b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
@@ -1,10 +1,20 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Graphql::Docs::Renderer do
describe '#contents' do
+ shared_examples 'renders correctly as GraphQL documentation' do
+ it 'contains the expected section' do
+ # duplicative - but much better error messages!
+ section.lines.each { |line| expect(contents).to include(line) }
+ expect(contents).to include(section)
+ end
+ end
+
let(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/default.md.haml') }
+ let(:field_description) { 'List of objects.' }
+ let(:type) { ::GraphQL::INT_TYPE }
let(:query_type) do
Class.new(Types::BaseObject) { graphql_name 'Query' }.tap do |t|
@@ -15,6 +25,13 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
+ let(:mutation_root) do
+ Class.new(::Types::BaseObject) do
+ include ::Gitlab::Graphql::MountMutation
+ graphql_name 'Mutation'
+ end
+ end
+
let(:mock_schema) do
Class.new(GraphQL::Schema) do
def resolve_type(obj, ctx)
@@ -23,10 +40,9 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- let(:field_description) { 'List of objects.' }
-
subject(:contents) do
mock_schema.query(query_type)
+ mock_schema.mutation(mutation_root) if mutation_root.fields.any?
described_class.new(
mock_schema,
@@ -36,17 +52,18 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
describe 'headings' do
- let(:type) { ::GraphQL::INT_TYPE }
-
it 'contains the expected sections' do
expect(contents.lines.map(&:chomp)).to include(
'## `Query` type',
+ '## `Mutation` type',
+ '## Connections',
'## Object types',
'## Enumeration types',
'## Scalar types',
'## Abstract types',
'### Unions',
- '### Interfaces'
+ '### Interfaces',
+ '## Input types'
)
end
end
@@ -66,9 +83,11 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
expectation = <<~DOC
### `ArrayTest`
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `foo` | [`#{type_name}`](##{inner_type}) | A description. |
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="arraytestfoo"></a>`foo` | [`#{type_name}`](##{inner_type}) | A description. |
DOC
is_expected.to include(expectation)
@@ -77,7 +96,7 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
describe 'a top level query field' do
let(:expectation) do
<<~DOC
- ### `foo`
+ ### `Query.foo`
List of objects.
@@ -87,7 +106,7 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
| Name | Type | Description |
| ---- | ---- | ----------- |
- | `id` | [`ID`](#id) | ID of the object. |
+ | <a id="queryfooid"></a>`id` | [`ID`](#id) | ID of the object. |
DOC
end
@@ -119,20 +138,123 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
expectation = <<~DOC
### `OrderingTest`
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `bar` | [`String!`](#string) | A description of bar field. |
- | `foo` | [`String!`](#string) | A description of foo field. |
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="orderingtestbar"></a>`bar` | [`String!`](#string) | A description of bar field. |
+ | <a id="orderingtestfoo"></a>`foo` | [`String!`](#string) | A description of foo field. |
DOC
is_expected.to include(expectation)
end
end
+ context 'when a field has a documentation reference' do
+ let(:type) do
+ wibble = Class.new(::Types::BaseObject) do
+ graphql_name 'Wibble'
+ field :x, ::GraphQL::INT_TYPE, null: false
+ end
+
+ Class.new(Types::BaseObject) do
+ graphql_name 'DocRefSpec'
+ description 'Testing doc refs'
+
+ field :foo,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ description: 'The foo.',
+ see: { 'A list of foos' => 'https://example.com/foos' }
+ field :bar,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ description: 'The bar.',
+ see: { 'A list of bars' => 'https://example.com/bars' } do
+ argument :barity, ::GraphQL::INT_TYPE, required: false, description: '?'
+ end
+ field :wibbles,
+ type: wibble.connection_type,
+ null: true,
+ description: 'The wibbles',
+ see: { 'wibblance' => 'https://example.com/wibbles' }
+ end
+ end
+
+ let(:section) do
+ <<~DOC
+ ### `DocRefSpec`
+
+ Testing doc refs.
+
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="docrefspecfoo"></a>`foo` | [`String!`](#string) | The foo. See [A list of foos](https://example.com/foos). |
+ | <a id="docrefspecwibbles"></a>`wibbles` | [`WibbleConnection`](#wibbleconnection) | The wibbles. See [wibblance](https://example.com/wibbles). (see [Connections](#connections)) |
+
+ #### Fields with arguments
+
+ ##### `DocRefSpec.bar`
+
+ The bar. See [A list of bars](https://example.com/bars).
+
+ Returns [`String!`](#string).
+
+ ###### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="docrefspecbarbarity"></a>`barity` | [`Int`](#int) | ?. |
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
+ context 'when an argument is deprecated' do
+ let(:type) do
+ Class.new(Types::BaseObject) do
+ graphql_name 'DeprecatedTest'
+ description 'A thing we used to use, but no longer support'
+
+ field :foo,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ description: 'A description.' do
+ argument :foo_arg, GraphQL::STRING_TYPE,
+ required: false,
+ description: 'The argument.',
+ deprecated: { reason: 'Bad argument', milestone: '101.2' }
+ end
+ end
+ end
+
+ let(:section) do
+ <<~DOC
+ ##### `DeprecatedTest.foo`
+
+ A description.
+
+ Returns [`String!`](#string).
+
+ ###### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="deprecatedtestfoofooarg"></a>`fooArg` **{warning-solid}** | [`String`](#string) | **Deprecated** in 101.2. Bad argument. |
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
context 'when a field is deprecated' do
let(:type) do
Class.new(Types::BaseObject) do
graphql_name 'DeprecatedTest'
+ description 'A thing we used to use, but no longer support'
field :foo,
type: GraphQL::STRING_TYPE,
@@ -142,9 +264,9 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
field :foo_with_args,
type: GraphQL::STRING_TYPE,
null: false,
- deprecated: { reason: 'Do not use', milestone: '1.10' },
+ deprecated: { reason: 'Do not use', milestone: '1.10', replacement: 'X.y' },
description: 'A description.' do
- argument :fooity, ::GraphQL::INT_TYPE, required: false, description: 'X'
+ argument :arg, GraphQL::INT_TYPE, required: false, description: 'Argity'
end
field :bar,
type: GraphQL::STRING_TYPE,
@@ -158,24 +280,44 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- it 'includes the deprecation' do
- expectation = <<~DOC
+ let(:section) do
+ <<~DOC
### `DeprecatedTest`
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `bar` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This was renamed. Use: `Query.boom`. |
- | `foo` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This is deprecated. |
- | `fooWithArgs` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. Do not use. |
- DOC
+ A thing we used to use, but no longer support.
- is_expected.to include(expectation)
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="deprecatedtestbar"></a>`bar` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This was renamed. Use: [`Query.boom`](#queryboom). |
+ | <a id="deprecatedtestfoo"></a>`foo` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This is deprecated. |
+
+ #### Fields with arguments
+
+ ##### `DeprecatedTest.fooWithArgs`
+
+ A description.
+
+ WARNING:
+ **Deprecated** in 1.10.
+ Do not use.
+ Use: [`X.y`](#xy).
+
+ Returns [`String!`](#string).
+
+ ###### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="deprecatedtestfoowithargsarg"></a>`arg` | [`Int`](#int) | Argity. |
+ DOC
end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
end
context 'when a Query.field is deprecated' do
- let(:type) { ::GraphQL::INT_TYPE }
-
before do
query_type.field(
name: :bar,
@@ -186,28 +328,30 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
)
end
- it 'includes the deprecation' do
- expectation = <<~DOC
- ### `bar`
+ let(:type) { ::GraphQL::INT_TYPE }
+ let(:section) do
+ <<~DOC
+ ### `Query.bar`
A bar.
WARNING:
**Deprecated** in 10.11.
This was renamed.
- Use: `Query.foo`.
+ Use: [`Query.foo`](#queryfoo).
Returns [`Int`](#int).
DOC
-
- is_expected.to include(expectation)
end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
end
context 'when a field has an Enumeration type' do
let(:type) do
enum_type = Class.new(Types::BaseEnum) do
graphql_name 'MyEnum'
+ description 'A test of an enum.'
value 'BAZ',
description: 'A description of BAZ.'
@@ -223,18 +367,20 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- it 'includes the description of the Enumeration' do
- expectation = <<~DOC
+ let(:section) do
+ <<~DOC
### `MyEnum`
+ A test of an enum.
+
| Value | Description |
| ----- | ----------- |
- | `BAR` **{warning-solid}** | **Deprecated:** This is deprecated. Deprecated in 1.10. |
- | `BAZ` | A description of BAZ. |
+ | <a id="myenumbar"></a>`BAR` **{warning-solid}** | **Deprecated:** This is deprecated. Deprecated in 1.10. |
+ | <a id="myenumbaz"></a>`BAZ` | A description of BAZ. |
DOC
-
- is_expected.to include(expectation)
end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
end
context 'when a field has a global ID type' do
@@ -247,29 +393,152 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
end
end
- it 'includes the field and the description of the ID, so we can link to it' do
- type_section = <<~DOC
- ### `IDTest`
+ describe 'section for IDTest' do
+ let(:section) do
+ <<~DOC
+ ### `IDTest`
- A test for rendering IDs.
+ A test for rendering IDs.
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `foo` | [`UserID`](#userid) | A user foo. |
- DOC
+ #### Fields
- id_section = <<~DOC
- ### `UserID`
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="idtestfoo"></a>`foo` | [`UserID`](#userid) | A user foo. |
+ DOC
+ end
- A `UserID` is a global ID. It is encoded as a string.
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
- An example `UserID` is: `"gid://gitlab/User/1"`.
- DOC
+ describe 'section for UserID' do
+ let(:section) do
+ <<~DOC
+ ### `UserID`
+
+ A `UserID` is a global ID. It is encoded as a string.
+
+ An example `UserID` is: `"gid://gitlab/User/1"`.
+ DOC
+ end
- is_expected.to include(type_section, id_section)
+ it_behaves_like 'renders correctly as GraphQL documentation'
end
end
+ context 'when there is a mutation' do
+ let(:mutation) do
+ mutation = Class.new(::Mutations::BaseMutation)
+
+ mutation.graphql_name 'MakeItPretty'
+ mutation.description 'Make everything very pretty.'
+
+ mutation.argument :prettiness_factor,
+ type: GraphQL::FLOAT_TYPE,
+ required: true,
+ description: 'How much prettier?'
+
+ mutation.argument :pulchritude,
+ type: GraphQL::FLOAT_TYPE,
+ required: false,
+ description: 'How much prettier?',
+ deprecated: {
+ reason: :renamed,
+ replacement: 'prettinessFactor',
+ milestone: '72.34'
+ }
+
+ mutation.field :everything,
+ type: GraphQL::STRING_TYPE,
+ null: true,
+ description: 'What we made prettier.'
+
+ mutation.field :omnis,
+ type: GraphQL::STRING_TYPE,
+ null: true,
+ description: 'What we made prettier.',
+ deprecated: {
+ reason: :renamed,
+ replacement: 'everything',
+ milestone: '72.34'
+ }
+
+ mutation
+ end
+
+ before do
+ mutation_root.mount_mutation mutation
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation' do
+ let(:section) do
+ <<~DOC
+ ### `Mutation.makeItPretty`
+
+ Make everything very pretty.
+
+ Input type: `MakeItPrettyInput`
+
+ #### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="mutationmakeitprettyclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+ | <a id="mutationmakeitprettyprettinessfactor"></a>`prettinessFactor` | [`Float!`](#float) | How much prettier?. |
+ | <a id="mutationmakeitprettypulchritude"></a>`pulchritude` **{warning-solid}** | [`Float`](#float) | **Deprecated:** This was renamed. Please use `prettinessFactor`. Deprecated in 72.34. |
+
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="mutationmakeitprettyclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
+ | <a id="mutationmakeitprettyerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
+ | <a id="mutationmakeitprettyeverything"></a>`everything` | [`String`](#string) | What we made prettier. |
+ | <a id="mutationmakeitprettyomnis"></a>`omnis` **{warning-solid}** | [`String`](#string) | **Deprecated:** This was renamed. Please use `everything`. Deprecated in 72.34. |
+ DOC
+ end
+ end
+
+ it 'does not render the automatically generated payload type' do
+ expect(contents).not_to include('MakeItPrettyPayload')
+ end
+
+ it 'does not render the automatically generated input type as its own section' do
+ expect(contents).not_to include('# `MakeItPrettyInput`')
+ end
+ end
+
+ context 'when there is an input type' do
+ let(:type) do
+ Class.new(::Types::BaseObject) do
+ graphql_name 'Foo'
+ field :wibble, type: ::GraphQL::INT_TYPE, null: true do
+ argument :date_range,
+ type: ::Types::TimeframeInputType,
+ required: true,
+ description: 'When the foo happened.'
+ end
+ end
+ end
+
+ let(:section) do
+ <<~DOC
+ ### `Timeframe`
+
+ A time-frame defined as a closed inclusive range of two dates.
+
+ #### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="timeframeend"></a>`end` | [`Date!`](#date) | The end of the range. |
+ | <a id="timeframestart"></a>`start` | [`Date!`](#date) | The start of the range. |
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
context 'when there is an interface and a union' do
let(:type) do
user = Class.new(::Types::BaseObject)
@@ -297,7 +566,7 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
interface.orphan_types african_swallow
Class.new(::Types::BaseObject) do
- graphql_name 'AbstactTypeTest'
+ graphql_name 'AbstractTypeTest'
description 'A test for abstract types.'
field :foo, union, null: true, description: 'The foo.'
@@ -307,14 +576,16 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
it 'lists the fields correctly, and includes descriptions of all the types' do
type_section = <<~DOC
- ### `AbstactTypeTest`
+ ### `AbstractTypeTest`
A test for abstract types.
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `flying` | [`Flying`](#flying) | A flying thing. |
- | `foo` | [`UserOrGroup`](#userorgroup) | The foo. |
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="abstracttypetestflying"></a>`flying` | [`Flying`](#flying) | A flying thing. |
+ | <a id="abstracttypetestfoo"></a>`foo` | [`UserOrGroup`](#userorgroup) | The foo. |
DOC
union_section = <<~DOC
@@ -337,9 +608,11 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
- [`AfricanSwallow`](#africanswallow)
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `flightSpeed` | [`Int`](#int) | Speed in mph. |
+ ##### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="flyingflightspeed"></a>`flightSpeed` | [`Int`](#int) | Speed in mph. |
DOC
implementation_section = <<~DOC
@@ -347,9 +620,11 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
A swallow from Africa.
- | Field | Type | Description |
- | ----- | ---- | ----------- |
- | `flightSpeed` | [`Int`](#int) | Speed in mph. |
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="africanswallowflightspeed"></a>`flightSpeed` | [`Int`](#int) | Speed in mph. |
DOC
is_expected.to include(
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 839ad9110cc..03030728834 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -357,9 +357,10 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
it 'is added to end' do
sliced = subject.sliced_nodes
- last_order_name = sliced.order_values.last.expr.name
- expect(last_order_name).to eq sliced.primary_key
+ order_sql = sliced.order_values.last.to_sql
+
+ expect(order_sql).to end_with(Project.arel_table[:id].desc.to_sql)
end
end
diff --git a/spec/lib/gitlab/graphql/present/field_extension_spec.rb b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
index 5e66e16d655..6ea313d30b3 100644
--- a/spec/lib/gitlab/graphql/present/field_extension_spec.rb
+++ b/spec/lib/gitlab/graphql/present/field_extension_spec.rb
@@ -33,6 +33,48 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
end
end
+ context 'when the field is declared on an interface, and implemented by a presenter' do
+ let(:interface) do
+ Module.new do
+ include ::Types::BaseInterface
+
+ field :interface_field, GraphQL::STRING_TYPE, null: true
+ end
+ end
+
+ let(:implementation) do
+ type = fresh_object_type('Concrete')
+ type.present_using(concrete_impl)
+ type.implements(interface)
+ type
+ end
+
+ def concrete_impl
+ Class.new(base_presenter) do
+ def interface_field
+ 'made of concrete'
+ end
+ end
+ end
+
+ it 'resolves the interface field using the implementation from the presenter' do
+ field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
+ value = resolve_field(field, object, object_type: implementation)
+
+ expect(value).to eq 'made of concrete'
+ end
+
+ context 'when the implementation is inherited' do
+ it 'resolves the interface field using the implementation from the presenter' do
+ subclass = Class.new(implementation) { graphql_name 'Subclass' }
+ field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
+ value = resolve_field(field, object, object_type: subclass)
+
+ expect(value).to eq 'made of concrete'
+ end
+ end
+ end
+
describe 'interactions with inheritance' do
def parent
type = fresh_object_type('Parent')
diff --git a/spec/lib/gitlab/graphql/queries_spec.rb b/spec/lib/gitlab/graphql/queries_spec.rb
index a140a283c1b..a1cd2cdb2de 100644
--- a/spec/lib/gitlab/graphql/queries_spec.rb
+++ b/spec/lib/gitlab/graphql/queries_spec.rb
@@ -1,5 +1,6 @@
# frozen_string_literal: true
+require 'spec_helper'
require 'fast_spec_helper'
require "test_prof/recipes/rspec/let_it_be"
@@ -124,6 +125,18 @@ RSpec.describe Gitlab::Graphql::Queries do
expect(described_class.find(path)).to be_empty
end
+ it 'ignores customer.query.graphql' do
+ path = root / 'plans.customer.query.graphql'
+
+ expect(described_class.find(path)).to be_empty
+ end
+
+ it 'ignores customer.mutation.graphql' do
+ path = root / 'plans.customer.mutation.graphql'
+
+ expect(described_class.find(path)).to be_empty
+ end
+
it 'finds all query definitions under a root directory' do
found = described_class.find(root)
@@ -137,7 +150,9 @@ RSpec.describe Gitlab::Graphql::Queries do
expect(found).not_to include(
definition_of(root / 'typedefs.graphql'),
- definition_of(root / 'author.fragment.graphql')
+ definition_of(root / 'author.fragment.graphql'),
+ definition_of(root / 'plans.customer.query.graphql'),
+ definition_of(root / 'plans.customer.mutation.graphql')
)
end
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 1a929373716..a5e4d37d306 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -46,11 +46,20 @@ RSpec.describe Gitlab::Highlight do
expect(result).to eq(%[<span id="LC1" class="line" lang="plaintext">plain text contents</span>])
end
- it 'returns plain version for long content' do
- stub_const('Gitlab::Highlight::MAXIMUM_TEXT_HIGHLIGHT_SIZE', 1)
- result = described_class.highlight(file_name, content)
+ context 'when content is too long to be highlighted' do
+ let(:result) { described_class.highlight(file_name, content) } # content is 44 bytes
- expect(result).to eq(%[<span id="LC1" class="line" lang="">(make-pathname :defaults name</span>\n<span id="LC2" class="line" lang="">:type "assem")</span>])
+ before do
+ stub_config(extra: { 'maximum_text_highlight_size_kilobytes' => 0.0001 } ) # 1.024 bytes
+ end
+
+ it 'increments the metric for oversized files' do
+ expect { result }.to change { over_highlight_size_limit('text highlighter') }.by(1)
+ end
+
+ it 'returns plain version for long content' do
+ expect(result).to eq(%[<span id="LC1" class="line" lang="">(make-pathname :defaults name</span>\n<span id="LC2" class="line" lang="">:type "assem")</span>])
+ end
end
it 'highlights multi-line comments' do
@@ -132,5 +141,46 @@ RSpec.describe Gitlab::Highlight do
subject.highlight("Content")
end
end
+
+ describe 'highlight timeouts' do
+ context 'when there is a timeout error while highlighting' do
+ let(:result) { described_class.highlight(file_name, content) }
+
+ before do
+ allow(Timeout).to receive(:timeout).twice.and_raise(Timeout::Error)
+ # Timeout is stubbed twice because the error is rescued first and then
+ # the original exception is re-raised
+ end
+
+ it "increments the foreground counter if it's in the foreground" do
+ expect { result }
+ .to raise_error(Timeout::Error)
+ .and change { highlight_timeout_total('foreground') }.by(1)
+ .and not_change { highlight_timeout_total('background') }
+ end
+
+ it "increments the background counter if it's in the background" do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+
+ expect { result }
+ .to raise_error(Timeout::Error)
+ .and change { highlight_timeout_total('background') }.by(1)
+ .and not_change { highlight_timeout_total('foreground') }
+ end
+ end
+ end
+ end
+
+ def highlight_timeout_total(source)
+ Gitlab::Metrics
+ .counter(:highlight_timeout, 'Counts the times highlights have timed out')
+ .get(source: source)
+ end
+
+ def over_highlight_size_limit(source)
+ Gitlab::Metrics
+ .counter(:over_highlight_size_limit,
+ 'Count the times text has been over the highlight size limit')
+ .get(source: source)
end
end
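Both helpers above read Prometheus counters through `Gitlab::Metrics.counter(...).get`. A hedged sketch of what the write side might look like; the method name and call site are assumptions, only the counter name and docstring come from the spec:

  # Assumed instrumentation sketch; the spec above only exercises the read side via #get.
  def track_highlight_timeout
    source = Gitlab::Runtime.sidekiq? ? 'background' : 'foreground'

    Gitlab::Metrics
      .counter(:highlight_timeout, 'Counts the times highlights have timed out')
      .increment(source: source)
  end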
diff --git a/spec/lib/gitlab/hook_data/key_builder_spec.rb b/spec/lib/gitlab/hook_data/key_builder_spec.rb
new file mode 100644
index 00000000000..86f33df115f
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/key_builder_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::KeyBuilder do
+ let_it_be(:personal_key) { create(:personal_key) }
+ let_it_be(:other_key) { create(:key) }
+
+ describe '#build' do
+ let(:data) { described_class.new(key).build(event) }
+ let(:event_name) { data[:event_name] }
+ let(:common_attributes) do
+ [
+ :event_name, :created_at, :updated_at, :key, :id
+ ]
+ end
+
+ shared_examples_for 'includes the required attributes' do
+ it 'includes the required attributes' do
+ expect(data.keys).to contain_exactly(*attributes)
+
+ expect(data[:key]).to eq(key.key)
+ expect(data[:id]).to eq(key.id)
+ expect(data[:created_at]).to eq(key.created_at.xmlschema)
+ expect(data[:updated_at]).to eq(key.updated_at.xmlschema)
+ end
+ end
+
+ context 'for keys that belong to a user' do
+ let(:key) { personal_key }
+ let(:attributes) { common_attributes.append(:username) }
+
+ context 'data' do
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('key_create') }
+ it { expect(data[:username]).to eq(key.user.username) }
+ it_behaves_like 'includes the required attributes'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('key_destroy') }
+ it { expect(data[:username]).to eq(key.user.username) }
+ it_behaves_like 'includes the required attributes'
+ end
+ end
+ end
+
+ context 'for keys that do not belong to a user' do
+ let(:key) { other_key }
+ let(:attributes) { common_attributes }
+
+ context 'data' do
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('key_create') }
+ it_behaves_like 'includes the required attributes'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('key_destroy') }
+ it_behaves_like 'includes the required attributes'
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/hook_data/project_builder_spec.rb b/spec/lib/gitlab/hook_data/project_builder_spec.rb
new file mode 100644
index 00000000000..672dbab918f
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/project_builder_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::ProjectBuilder do
+ let_it_be(:user) { create(:user, name: 'John', email: 'john@example.com') }
+ let_it_be(:namespace) { create(:namespace, owner: user) }
+ let_it_be(:project) { create(:project, :internal, name: 'my_project', namespace: namespace) }
+
+ describe '#build' do
+ let(:data) { described_class.new(project).build(event) }
+ let(:event_name) { data[:event_name] }
+ let(:attributes) do
+ [
+ :event_name, :created_at, :updated_at, :name, :path, :path_with_namespace, :project_id,
+ :owner_name, :owner_email, :project_visibility
+ ]
+ end
+
+ context 'data' do
+ shared_examples_for 'includes the required attributes' do
+ it 'includes the required attributes' do
+ expect(data).to include(*attributes)
+
+ expect(data[:created_at]).to eq(project.created_at.xmlschema)
+ expect(data[:updated_at]).to eq(project.updated_at.xmlschema)
+ expect(data[:name]).to eq('my_project')
+ expect(data[:path]).to eq(project.path)
+ expect(data[:path_with_namespace]).to eq(project.full_path)
+ expect(data[:project_id]).to eq(project.id)
+ expect(data[:owner_name]).to eq('John')
+ expect(data[:owner_email]).to eq('john@example.com')
+ expect(data[:project_visibility]).to eq('internal')
+ end
+ end
+
+ shared_examples_for 'does not include `old_path_with_namespace` attribute' do
+ it 'does not include `old_path_with_namespace` attribute' do
+ expect(data).not_to include(:old_path_with_namespace)
+ end
+ end
+
+ shared_examples_for 'includes `old_path_with_namespace` attribute' do
+ it 'includes `old_path_with_namespace` attribute' do
+ allow(project).to receive(:old_path_with_namespace).and_return('old-path-with-namespace')
+ expect(data[:old_path_with_namespace]).to eq('old-path-with-namespace')
+ end
+ end
+
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('project_create') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('project_destroy') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include `old_path_with_namespace` attribute'
+ end
+
+ context 'on rename' do
+ let(:event) { :rename }
+
+ it { expect(event_name).to eq('project_rename') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
+
+ context 'on transfer' do
+ let(:event) { :transfer }
+
+ it { expect(event_name).to eq('project_transfer') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'includes `old_path_with_namespace` attribute'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/i18n_spec.rb b/spec/lib/gitlab/i18n_spec.rb
index ee10739195a..aae4a13bd73 100644
--- a/spec/lib/gitlab/i18n_spec.rb
+++ b/spec/lib/gitlab/i18n_spec.rb
@@ -3,13 +3,18 @@
require 'spec_helper'
RSpec.describe Gitlab::I18n do
- let(:user) { create(:user, preferred_language: 'es') }
+ let(:user) { create(:user, preferred_language: :es) }
describe '.selectable_locales' do
- it 'does not return languages that should not be available in the UI' do
- Gitlab::I18n::NOT_AVAILABLE_IN_UI.each do |language|
- expect(described_class.selectable_locales).not_to include(language)
- end
+ include StubLanguagesTranslationPercentage
+
+ it 'does not return languages with low translation levels' do
+ stub_languages_translation_percentage(pt_BR: 0, en: 100, es: 65)
+
+ expect(described_class.selectable_locales).to eq({
+ 'en' => 'English',
+ 'es' => 'Spanish - español'
+ })
end
end
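The rewritten example above stubs translation levels per locale and expects only sufficiently translated locales to be selectable. A hedged sketch of that kind of filter; the threshold constant and method shape are assumptions consistent with the stubbed data, not the real Gitlab::I18n code:

  MIN_TRANSLATION_LEVEL = 60 # assumed cut-off; es (65%) passes, pt_BR (0%) does not

  def selectable_locales(available_locales, translation_levels)
    available_locales.select do |code, _name|
      translation_levels.fetch(code.to_sym, 0) >= MIN_TRANSLATION_LEVEL
    end
  end

  selectable_locales({ 'en' => 'English', 'es' => 'Spanish - español' }, { en: 100, es: 65 })
  # => { "en" => "English", "es" => "Spanish - español" }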
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 5d1e3c79474..f81db1413c2 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -113,6 +113,7 @@ releases:
- author
- project
- links
+- sorted_links
- milestone_releases
- milestones
- evidences
@@ -124,6 +125,7 @@ project_members:
- source
- project
merge_requests:
+- status_check_responses
- subscriptions
- award_emoji
- author
@@ -258,6 +260,8 @@ ci_pipelines:
- latest_statuses
- dast_profile
- dast_profiles_pipeline
+- dast_site_profile
+- dast_site_profiles_pipeline
ci_refs:
- project
- ci_pipelines
@@ -302,7 +306,7 @@ deploy_keys:
- user
- deploy_keys_projects
- projects
-services:
+integrations:
- project
- service_hook
- jira_tracker_data
@@ -341,8 +345,9 @@ project:
- external_approval_rules
- taggings
- base_tags
-- tag_taggings
- tags
+- topic_taggings
+- topics
- chat_services
- cluster
- clusters
@@ -350,12 +355,13 @@ project:
- cluster_project
- creator
- cycle_analytics_stages
+- value_streams
- group
- namespace
- management_clusters
- boards
- last_event
-- services
+- integrations
- campfire_service
- confluence_service
- datadog_service
@@ -369,7 +375,6 @@ project:
- packagist_service
- pivotaltracker_service
- prometheus_service
-- hipchat_service
- flowdock_service
- assembla_service
- asana_service
@@ -569,6 +574,7 @@ project:
- debian_distributions
- merge_request_metrics
- security_orchestration_policy_configuration
+- timelogs
award_emoji:
- awardable
- user
@@ -745,3 +751,5 @@ issuable_sla:
- issue
push_rule:
- group
+bulk_import_export:
+ - group
diff --git a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
index 0581f07dd3f..7e17d56def0 100644
--- a/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
+++ b/spec/lib/gitlab/import_export/attribute_configuration_spec.rb
@@ -35,8 +35,9 @@ RSpec.describe 'Import/Export attribute configuration' do
<<-MSG
It looks like #{relation_class}, which is exported using the project Import/Export, has new attributes: #{new_attributes.join(',')}
- Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if you consider this can be exported.
- Please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its correspondent
+ Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if they can be exported.
+
+ Please denylist the attribute(s) in IMPORT_EXPORT_CONFIG by adding them to the corresponding
model in the +excluded_attributes+ section.
SAFE_MODEL_ATTRIBUTES: #{File.expand_path(safe_attributes_file)}
diff --git a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
index 09e6e5a03bb..df33b4896a4 100644
--- a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationFactory do
subject do
described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash,
+ relation_index: 1,
object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
members_mapper: members_mapper,
user: user,
diff --git a/spec/lib/gitlab/import_export/command_line_util_spec.rb b/spec/lib/gitlab/import_export/command_line_util_spec.rb
index b00a2597681..4000e303816 100644
--- a/spec/lib/gitlab/import_export/command_line_util_spec.rb
+++ b/spec/lib/gitlab/import_export/command_line_util_spec.rb
@@ -35,4 +35,19 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil do
it 'has the right mask for uploads' do
expect(file_permissions("#{path}/uploads")).to eq(0755) # originally 555
end
+
+ describe '#gzip' do
+ it 'compresses specified file' do
+ tempfile = Tempfile.new('test', path)
+ filename = File.basename(tempfile.path)
+
+ subject.gzip(dir: path, filename: filename)
+ end
+
+ context 'when exception occurs' do
+ it 'raises an exception' do
+ expect { subject.gzip(dir: path, filename: 'test') }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
+ end
end
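
For readers unfamiliar with the helper exercised by the new '#gzip' examples above: a minimal, self-contained sketch of the behaviour those examples imply, assuming (not shown in the patch) that the helper shells out to gzip(1) and leaves "<filename>.gz" in the given directory. The system('gzip', ...) call is a stand-in for subject.gzip(dir:, filename:).

    # Hedged sketch, not part of the patch; assumes a gzip(1)-style side effect.
    require 'tmpdir'
    require 'tempfile'
    require 'zlib'

    Dir.mktmpdir do |dir|
      tempfile = Tempfile.new('payload', dir)
      tempfile.write('hello world')
      tempfile.flush

      filename = File.basename(tempfile.path)
      system('gzip', File.join(dir, filename)) || raise('gzip failed') # stand-in for subject.gzip(dir: dir, filename: filename)

      Zlib::GzipReader.open("#{tempfile.path}.gz") do |gz|
        raise 'round-trip failed' unless gz.read == 'hello world'
      end
    end
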
diff --git a/spec/lib/gitlab/import_export/config_spec.rb b/spec/lib/gitlab/import_export/config_spec.rb
index 40cf75779b6..7ad5d3d846c 100644
--- a/spec/lib/gitlab/import_export/config_spec.rb
+++ b/spec/lib/gitlab/import_export/config_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::ImportExport::Config do
expect { subject }.not_to raise_error
expect(subject).to be_a(Hash)
expect(subject.keys).to contain_exactly(
- :tree, :excluded_attributes, :included_attributes, :methods, :preloads)
+ :tree, :excluded_attributes, :included_attributes, :methods, :preloads, :export_reorders)
end
end
end
diff --git a/spec/lib/gitlab/import_export/file_importer_spec.rb b/spec/lib/gitlab/import_export/file_importer_spec.rb
index dc668e972cf..ed4436b7257 100644
--- a/spec/lib/gitlab/import_export/file_importer_spec.rb
+++ b/spec/lib/gitlab/import_export/file_importer_spec.rb
@@ -71,6 +71,22 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
it 'creates the file in the right subfolder' do
expect(shared.export_path).to include('test/abcd')
end
+
+ context 'when the import file is remote' do
+ include AfterNextHelpers
+
+ it 'downloads the file from a remote object storage' do
+ file_url = 'https://remote.url/file'
+ import_export_upload = build(:import_export_upload, remote_import_url: file_url)
+ project = build( :project, import_export_upload: import_export_upload)
+
+ expect_next(described_class)
+ .to receive(:download)
+ .with(file_url, kind_of(String))
+
+ described_class.import(importable: project, archive_file: nil, shared: shared)
+ end
+ end
end
context 'error' do
diff --git a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
index 6b2f80cc80a..63286fc0719 100644
--- a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
described_class.create(
relation_sym: relation_sym,
relation_hash: relation_hash,
+ relation_index: 1,
members_mapper: members_mapper,
object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
user: importer_user,
diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb
index 9c7f41cbb89..e092891f236 100644
--- a/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb
+++ b/spec/lib/gitlab/import_export/json/legacy_reader/file_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative 'shared_example.rb'
+require_relative 'shared_example'
RSpec.describe Gitlab::ImportExport::JSON::LegacyReader::File do
it_behaves_like 'import/export json legacy reader' do
diff --git a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb
index d0899accf59..e47122b6151 100644
--- a/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb
+++ b/spec/lib/gitlab/import_export/json/legacy_reader/hash_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative 'shared_example.rb'
+require_relative 'shared_example'
RSpec.describe Gitlab::ImportExport::JSON::LegacyReader::Hash do
it_behaves_like 'import/export json legacy reader' do
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 762687beedb..a0b2faaecfe 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -30,12 +30,14 @@ RSpec.describe Gitlab::ImportExport::JSON::StreamingSerializer do
let(:json_writer) { instance_double('Gitlab::ImportExport::JSON::LegacyWriter') }
let(:hash) { { name: exportable.name, description: exportable.description }.stringify_keys }
let(:include) { [] }
+ let(:custom_orderer) { nil }
let(:relations_schema) do
{
only: [:name, :description],
include: include,
- preload: { issues: nil }
+ preload: { issues: nil },
+ export_reorder: custom_orderer
}
end
@@ -57,19 +59,63 @@ RSpec.describe Gitlab::ImportExport::JSON::StreamingSerializer do
[{ issues: { include: [] } }]
end
+ before do
+ create_list(:issue, 3, project: exportable, relative_position: 10000) # ascending ids, same position positive
+ create_list(:issue, 3, project: exportable, relative_position: -5000) # ascending ids, same position negative
+ create_list(:issue, 3, project: exportable, relative_position: 0) # ascending ids, duplicate positions
+ create_list(:issue, 3, project: exportable, relative_position: nil) # no position
+ create_list(:issue, 3, :with_desc_relative_position, project: exportable ) # ascending ids, descending position
+ end
+
it 'calls json_writer.write_relation_array with proper params' do
expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, array_including(issue.to_json))
subject.execute
end
- context 'relation ordering' do
- before do
- create_list(:issue, 5, project: exportable)
+ context 'default relation ordering' do
+ it 'orders exported issues by primary key(:id)' do
+ expected_issues = exportable.issues.reorder(:id).map(&:to_json)
+
+ expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, expected_issues)
+
+ subject.execute
end
+ end
- it 'orders exported issues by primary key' do
- expected_issues = exportable.issues.reorder(:id).map(&:to_json)
+ context 'custom relation ordering ascending' do
+ let(:custom_orderer) do
+ {
+ issues: {
+ column: :relative_position,
+ direction: :asc,
+ nulls_position: :nulls_last
+ }
+ }
+ end
+
+ it 'orders exported issues by custom column(relative_position)' do
+ expected_issues = exportable.issues.reorder(:relative_position, :id).map(&:to_json)
+
+ expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, expected_issues)
+
+ subject.execute
+ end
+ end
+
+ context 'custom relation ordering descending' do
+ let(:custom_orderer) do
+ {
+ issues: {
+ column: :relative_position,
+ direction: :desc,
+ nulls_position: :nulls_first
+ }
+ }
+ end
+
+ it 'orders exported issues by custom column(relative_position)' do
+ expected_issues = exportable.issues.order_relative_position_desc.order(id: :desc).map(&:to_json)
expect(json_writer).to receive(:write_relation_array).with(exportable_path, :issues, expected_issues)
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 56ba730e893..38e700e8f9e 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
+RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_memory_store_caching do
let(:group) { create(:group) }
let(:project) { create(:project, :repository, group: group) }
let(:members_mapper) { double('members_mapper').as_null_object }
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
described_class.create(
relation_sym: relation_sym,
relation_hash: relation_hash,
+ relation_index: 1,
object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
members_mapper: members_mapper,
user: importer_user,
@@ -171,6 +172,75 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
end
end
+ context 'issue object' do
+ let(:relation_sym) { :issues }
+
+ let(:exported_member) do
+ {
+ "id" => 111,
+ "access_level" => 30,
+ "source_id" => 1,
+ "source_type" => "Project",
+ "user_id" => 3,
+ "notification_level" => 3,
+ "created_at" => "2016-11-18T09:29:42.634Z",
+ "updated_at" => "2016-11-18T09:29:42.634Z",
+ "user" => {
+ "id" => admin.id,
+ "email" => admin.email,
+ "username" => admin.username
+ }
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member],
+ user: importer_user,
+ importable: project)
+ end
+
+ let(:relation_hash) do
+ {
+ 'id' => 20,
+ 'target_branch' => "feature",
+ 'source_branch' => "feature_conflict",
+ 'project_id' => project.id,
+ 'author_id' => admin.id,
+ 'assignee_id' => admin.id,
+ 'updated_by_id' => admin.id,
+ 'title' => "Issue 1",
+ 'created_at' => "2016-06-14T15:02:36.568Z",
+ 'updated_at' => "2016-06-14T15:02:56.815Z",
+ 'state' => "opened",
+ 'description' => "Description",
+ "relative_position" => 25111 # just a random position
+ }
+ end
+
+ it 'has preloaded project' do
+ expect(created_object.project).to equal(project)
+ end
+
+ context 'computing relative position' do
+ context 'when max relative position in the hierarchy is not cached' do
+ it 'has computed new relative_position' do
+ expect(created_object.relative_position).to equal(1026) # 513*2 - ideal distance
+ end
+ end
+
+ context 'when max relative position in the hierarchy is cached' do
+ before do
+ Rails.cache.write("import:#{project.model_name.plural}:#{project.id}:hierarchy_max_issues_relative_position", 10000)
+ end
+
+ it 'has computed new relative_position' do
+ expect(created_object.relative_position).to equal(10000 + 1026) # 513*2 - ideal distance
+ end
+ end
+ end
+ end
+
context 'label object' do
let(:relation_sym) { :labels }
let(:relation_hash) do
diff --git a/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb
index 86d5f2402f8..9dde09a7602 100644
--- a/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/sample/relation_factory_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::ImportExport::Project::Sample::RelationFactory do
described_class.create( # rubocop:disable Rails/SaveBang
relation_sym: relation_sym,
relation_hash: relation_hash,
+ relation_index: 1,
object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
members_mapper: members_mapper,
user: importer_user,
diff --git a/spec/lib/gitlab/import_export/references_configuration_spec.rb b/spec/lib/gitlab/import_export/references_configuration_spec.rb
index 2934d0059ee..6320fbed975 100644
--- a/spec/lib/gitlab/import_export/references_configuration_spec.rb
+++ b/spec/lib/gitlab/import_export/references_configuration_spec.rb
@@ -38,8 +38,9 @@ RSpec.describe 'Import/Export Project configuration' do
<<-MSG
It looks like #{relation_class}, which is exported using the project Import/Export, has references: #{prohibited_keys.join(',')}
- Please replace it with actual relation in IMPORT_EXPORT_CONFIG if you consider this can be exported.
- Please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its correspondent
+ Please replace it with actual relation in IMPORT_EXPORT_CONFIG if they can be exported.
+
+ Please denylist the attribute(s) in IMPORT_EXPORT_CONFIG by adding it to its corresponding
model in the +excluded_attributes+ section.
IMPORT_EXPORT_CONFIG: #{Gitlab::ImportExport.config_file}
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index b159d0cfc76..70ebff2a54e 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -102,6 +102,7 @@ ProjectLabel:
- template
- description
- priority
+- remove_on_close
Milestone:
- id
- title
@@ -208,6 +209,7 @@ MergeRequest:
- discussion_locked
- allow_maintainer_to_push
- merge_ref_sha
+- draft
MergeRequestDiff:
- id
- state
@@ -640,6 +642,7 @@ Timelog:
- time_spent
- merge_request_id
- user_id
+- project_id
- spent_at
- created_at
- updated_at
diff --git a/spec/lib/gitlab/integrations/sti_type_spec.rb b/spec/lib/gitlab/integrations/sti_type_spec.rb
new file mode 100644
index 00000000000..3154872ed04
--- /dev/null
+++ b/spec/lib/gitlab/integrations/sti_type_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Integrations::StiType do
+ let(:types) { ['AsanaService', 'Integrations::Asana', Integrations::Asana] }
+
+ describe '#serialize' do
+ context 'SQL SELECT' do
+ let(:expected_sql) do
+ <<~SQL.strip
+ SELECT "services".* FROM "services" WHERE "services"."type" = 'AsanaService'
+ SQL
+ end
+
+ it 'forms SQL SELECT statements correctly' do
+ sql_statements = types.map do |type|
+ Integration.where(type: type).to_sql
+ end
+
+ expect(sql_statements).to all(eq(expected_sql))
+ end
+ end
+
+ context 'SQL CREATE' do
+ let(:expected_sql) do
+ <<~SQL.strip
+ INSERT INTO "services" ("type") VALUES ('AsanaService')
+ SQL
+ end
+
+ it 'forms SQL CREATE statements correctly' do
+ sql_statements = types.map do |type|
+ record = ActiveRecord::QueryRecorder.new { Integration.insert({ type: type }) }
+ record.log.first
+ end
+
+ expect(sql_statements).to all(include(expected_sql))
+ end
+ end
+
+ context 'SQL UPDATE' do
+ let(:expected_sql) do
+ <<~SQL.strip
+ UPDATE "services" SET "type" = 'AsanaService'
+ SQL
+ end
+
+ let_it_be(:service) { create(:service) }
+
+ it 'forms SQL UPDATE statements correctly' do
+ sql_statements = types.map do |type|
+ record = ActiveRecord::QueryRecorder.new { service.update_column(:type, type) }
+ record.log.first
+ end
+
+ expect(sql_statements).to all(include(expected_sql))
+ end
+ end
+
+ context 'SQL DELETE' do
+ let(:expected_sql) do
+ <<~SQL.strip
+ DELETE FROM "services" WHERE "services"."type" = 'AsanaService'
+ SQL
+ end
+
+ let(:service) { create(:service) }
+
+ it 'forms SQL DELETE statements correctly' do
+ sql_statements = types.map do |type|
+ record = ActiveRecord::QueryRecorder.new { Integration.delete_by(type: type) }
+ record.log.first
+ end
+
+ expect(sql_statements).to all(match(expected_sql))
+ end
+ end
+ end
+
+ describe '#deserialize' do
+ specify 'it deserializes type correctly', :aggregate_failures do
+ types.each do |type|
+ service = create(:service, type: type)
+
+ expect(service.type).to eq('AsanaService')
+ end
+ end
+ end
+
+ describe '#cast' do
+ it 'casts type as model correctly', :aggregate_failures do
+ create(:service, type: 'AsanaService')
+
+ types.each do |type|
+ expect(Integration.find_by(type: type)).to be_kind_of(Integrations::Asana)
+ end
+ end
+ end
+
+ describe '#changed?' do
+ it 'detects changes correctly', :aggregate_failures do
+ service = create(:service, type: 'AsanaService')
+
+ types.each do |type|
+ service.type = type
+
+ expect(service).not_to be_changed
+ end
+
+ service.type = 'NewType'
+
+ expect(service).to be_changed
+ end
+ end
+end
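
The spec above drives a custom ActiveRecord attribute type that maps legacy '*Service' STI names to the new Integrations::* classes in both directions. A hedged sketch of how such a type can be written with the standard ActiveModel::Type::Value hooks (cast, serialize, deserialize, changed_in_place?); the class name and mapping table here are illustrative, not GitLab's actual Gitlab::Integrations::StiType.

    # Hedged sketch of an STI-bridging attribute type.
    require 'active_model'

    class LegacyStiType < ActiveModel::Type::Value
      NAMESPACED_TO_LEGACY = { 'Integrations::Asana' => 'AsanaService' }.freeze

      def cast(value)          # used on assignment: service.type = Integrations::Asana
        legacy_name(value)
      end

      def serialize(value)     # used in SQL: WHERE "type" = 'AsanaService'
        legacy_name(value)
      end

      def deserialize(value)   # rows keep storing the legacy name as-is
        value
      end

      def changed_in_place?(raw_old_value, new_value)
        raw_old_value != serialize(new_value) # 'AsanaService' vs Integrations::Asana => unchanged
      end

      private

      def legacy_name(value)
        name = value.is_a?(Class) ? value.name : value.to_s
        NAMESPACED_TO_LEGACY.fetch(name, name)
      end
    end
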
diff --git a/spec/lib/gitlab/jwt_token_spec.rb b/spec/lib/gitlab/jwt_token_spec.rb
new file mode 100644
index 00000000000..d89ca127393
--- /dev/null
+++ b/spec/lib/gitlab/jwt_token_spec.rb
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::JWTToken do
+ it_behaves_like 'a gitlab jwt token'
+end
diff --git a/spec/lib/gitlab/kas_spec.rb b/spec/lib/gitlab/kas_spec.rb
index 01ced407883..e323f76b42e 100644
--- a/spec/lib/gitlab/kas_spec.rb
+++ b/spec/lib/gitlab/kas_spec.rb
@@ -33,6 +33,46 @@ RSpec.describe Gitlab::Kas do
end
end
+ describe '.enabled?' do
+ before do
+ allow(Gitlab).to receive(:config).and_return(gitlab_config)
+ end
+
+ subject { described_class.enabled? }
+
+ context 'gitlab_config is not enabled' do
+ let(:gitlab_config) { { 'gitlab_kas' => { 'enabled' => false } } }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'gitlab_config is enabled' do
+ let(:gitlab_config) { { 'gitlab_kas' => { 'enabled' => true } } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'enabled is unset' do
+ let(:gitlab_config) { { 'gitlab_kas' => {} } }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '.external_url' do
+ it 'returns gitlab_kas external_url config' do
+ expect(described_class.external_url).to eq(Gitlab.config.gitlab_kas.external_url)
+ end
+ end
+
+ describe '.version' do
+ it 'returns gitlab_kas version config' do
+ version_file = Rails.root.join(described_class::VERSION_FILE)
+
+ expect(described_class.version).to eq(version_file.read.chomp)
+ end
+ end
+
describe '.ensure_secret!' do
context 'secret file exists' do
before do
diff --git a/spec/lib/gitlab/lfs/client_spec.rb b/spec/lib/gitlab/lfs/client_spec.rb
index 1c50a2a7500..0f9637e8ca4 100644
--- a/spec/lib/gitlab/lfs/client_spec.rb
+++ b/spec/lib/gitlab/lfs/client_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Lfs::Client do
let(:base_url) { "https://example.com" }
let(:username) { 'user' }
let(:password) { 'password' }
- let(:credentials) { { user: username, password: password, auth_method: 'password' } }
+ let(:credentials) { { user: username, password: password } }
let(:git_lfs_content_type) { 'application/vnd.git-lfs+json' }
let(:git_lfs_user_agent) { "GitLab #{Gitlab::VERSION} LFS client" }
diff --git a/spec/lib/gitlab/memory/instrumentation_spec.rb b/spec/lib/gitlab/memory/instrumentation_spec.rb
index 6b53550a3d0..0dbe9a8e275 100644
--- a/spec/lib/gitlab/memory/instrumentation_spec.rb
+++ b/spec/lib/gitlab/memory/instrumentation_spec.rb
@@ -69,10 +69,12 @@ RSpec.describe Gitlab::Memory::Instrumentation do
end
it 'a hash is returned' do
- is_expected.to include(
+ result = subject
+ expect(result).to include(
mem_objects: be > 1000,
mem_mallocs: be > 1000,
- mem_bytes: be > 100_000 # 100 items * 100 bytes each
+ mem_bytes: be > 100_000, # 100 items * 100 bytes each
+ mem_total_bytes: eq(result[:mem_bytes] + 40 * result[:mem_objects])
)
end
end
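
The added mem_total_bytes expectation folds the object slots themselves into the total: 40 bytes is the size of one RVALUE slot on 64-bit MRI, so the figure is roughly "malloc'd bytes plus slot bytes". A small illustration of the arithmetic with made-up numbers:

    # Illustrative numbers only; 40 is the 64-bit MRI RVALUE slot size.
    rvalue_size = 40
    mem_objects = 1_500     # objects allocated while the instrumented block ran
    mem_bytes   = 150_000   # bytes malloc'd outside the slots (string/array payloads, ...)

    mem_total_bytes = mem_bytes + rvalue_size * mem_objects
    # => 210_000, i.e. the spec's mem_bytes + 40 * mem_objects
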
diff --git a/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb b/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb
index b9d00b556c5..b868207e67c 100644
--- a/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb
+++ b/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb
@@ -42,44 +42,20 @@ RSpec.describe Gitlab::Middleware::RackMultipartTempfileFactory do
context 'for a multipart request' do
let(:env) { Rack::MockRequest.env_for('/', multipart_fixture) }
- context 'when the environment variable is enabled' do
- before do
- stub_env('GITLAB_TEMPFILE_IMMEDIATE_UNLINK', '1')
- end
-
- it 'immediately unlinks the temporary file' do
- tempfile = Tempfile.new('foo')
-
- expect(tempfile.path).not_to be(nil)
- expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
- expect(tempfile).to receive(:unlink).and_call_original
+ it 'immediately unlinks the temporary file' do
+ tempfile = Tempfile.new('foo')
- subject.call(env)
+ expect(tempfile.path).not_to be(nil)
+ expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
+ expect(tempfile).to receive(:unlink).and_call_original
- expect(tempfile.path).to be(nil)
- end
+ subject.call(env)
- it 'processes the request as normal' do
- expect(subject.call(env)).to eq([200, { 'Content-Type' => 'image/jpeg' }, [file_contents]])
- end
+ expect(tempfile.path).to be(nil)
end
- context 'when the environment variable is disabled' do
- it 'does not immediately unlink the temporary file' do
- tempfile = Tempfile.new('foo')
-
- expect(tempfile.path).not_to be(nil)
- expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
- expect(tempfile).not_to receive(:unlink).and_call_original
-
- subject.call(env)
-
- expect(tempfile.path).not_to be(nil)
- end
-
- it 'processes the request as normal' do
- expect(subject.call(env)).to eq([200, { 'Content-Type' => 'image/jpeg' }, [file_contents]])
- end
+ it 'processes the request as normal' do
+ expect(subject.call(env)).to eq([200, { 'Content-Type' => 'image/jpeg' }, [file_contents]])
end
end
diff --git a/spec/lib/gitlab/middleware/speedscope_spec.rb b/spec/lib/gitlab/middleware/speedscope_spec.rb
new file mode 100644
index 00000000000..bb830a2fbda
--- /dev/null
+++ b/spec/lib/gitlab/middleware/speedscope_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'stackprof'
+
+RSpec.describe Gitlab::Middleware::Speedscope do
+ let(:app) { proc { |env| [200, { 'Content-Type' => 'text/plain' }, ['Hello world!']] } }
+ let(:middleware) { described_class.new(app) }
+
+ describe '#call' do
+ shared_examples 'returns original response' do
+ it 'returns original response' do
+ expect(StackProf).not_to receive(:run)
+
+ status, headers, body = middleware.call(env)
+
+ expect(status).to eq(200)
+ expect(headers).to eq({ 'Content-Type' => 'text/plain' })
+ expect(body.first).to eq('Hello world!')
+ end
+ end
+
+ context 'when flamegraph is not requested' do
+ let(:env) { Rack::MockRequest.env_for('/') }
+
+ it_behaves_like 'returns original response'
+ end
+
+ context 'when flamegraph requested' do
+ let(:env) { Rack::MockRequest.env_for('/', params: { 'performance_bar' => 'flamegraph' }) }
+
+ before do
+ allow(env).to receive(:[]).and_call_original
+ end
+
+ context 'when user is not allowed' do
+ before do
+ allow(env).to receive(:[]).with('warden').and_return(double('Warden', user: create(:user)))
+ end
+
+ it_behaves_like 'returns original response'
+ end
+
+ context 'when user is allowed' do
+ before do
+ allow(env).to receive(:[]).with('warden').and_return(double('Warden', user: create(:admin)))
+ end
+
+ it 'runs StackProf and returns a flamegraph' do
+ expect(StackProf).to receive(:run).and_call_original
+
+ status, headers, body = middleware.call(env)
+
+ expect(status).to eq(200)
+ expect(headers).to eq({ 'Content-Type' => 'text/html' })
+ expect(body.first).to include('speedscope-iframe')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb b/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb
new file mode 100644
index 00000000000..26f9ea3a637
--- /dev/null
+++ b/spec/lib/gitlab/nav/top_nav_menu_item_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Gitlab::Nav::TopNavMenuItem do
+ describe '.build' do
+ it 'builds a hash from the given args' do
+ item = {
+ id: 'id',
+ title: 'Title',
+ active: true,
+ icon: 'icon',
+ href: 'href',
+ method: 'method',
+ view: 'view',
+ css_class: 'css_class',
+ data: {}
+ }
+
+ expect(described_class.build(**item)).to eq(item)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/object_hierarchy_spec.rb b/spec/lib/gitlab/object_hierarchy_spec.rb
index eebd67695e0..7615b37521a 100644
--- a/spec/lib/gitlab/object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/object_hierarchy_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::ObjectHierarchy do
- let_it_be(:parent) { create(:group) }
+ let_it_be(:parent, reload: true) { create(:group) }
let_it_be(:child1) { create(:group, parent: parent) }
let_it_be(:child2) { create(:group, parent: child1) }
diff --git a/spec/lib/gitlab/pages/settings_spec.rb b/spec/lib/gitlab/pages/settings_spec.rb
index c89bf9ff206..1a7c808d1bf 100644
--- a/spec/lib/gitlab/pages/settings_spec.rb
+++ b/spec/lib/gitlab/pages/settings_spec.rb
@@ -47,12 +47,4 @@ RSpec.describe Gitlab::Pages::Settings do
end
end
end
-
- describe '#local_store' do
- subject(:local_store) { described_class.new(settings).local_store }
-
- it 'is an instance of Gitlab::Pages::Stores::LocalStore' do
- expect(local_store).to be_a(Gitlab::Pages::Stores::LocalStore)
- end
- end
end
diff --git a/spec/lib/gitlab/pages/stores/local_store_spec.rb b/spec/lib/gitlab/pages/stores/local_store_spec.rb
deleted file mode 100644
index adab81b2589..00000000000
--- a/spec/lib/gitlab/pages/stores/local_store_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Pages::Stores::LocalStore do
- describe '#enabled' do
- let(:local_store) { double(enabled: true) }
-
- subject(:local_store_enabled) { described_class.new(local_store).enabled }
-
- context 'when the pages_update_legacy_storage FF is disabled' do
- before do
- stub_feature_flags(pages_update_legacy_storage: false)
- end
-
- it { is_expected.to be_falsey }
- end
-
- context 'when the pages_update_legacy_storage FF is enabled' do
- it 'is equal to the original value' do
- expect(local_store_enabled).to eq(local_store.enabled)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/pagination/keyset/iterator_spec.rb b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
new file mode 100644
index 00000000000..656ae73945e
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/iterator_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::Iterator do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue_list_with_same_pos) { create_list(:issue, 3, project: project, relative_position: 100, updated_at: 1.day.ago) }
+ let_it_be(:issue_list_with_null_pos) { create_list(:issue, 3, project: project, relative_position: nil, updated_at: 1.day.ago) }
+ let_it_be(:issue_list_with_asc_pos) { create_list(:issue, 3, :with_asc_relative_position, project: project, updated_at: 1.day.ago) }
+
+ let(:klass) { Issue }
+ let(:column) { 'relative_position' }
+ let(:direction) { :asc }
+ let(:reverse_direction) { ::Gitlab::Pagination::Keyset::ColumnOrderDefinition::REVERSED_ORDER_DIRECTIONS[direction] }
+ let(:nulls_position) { :nulls_last }
+ let(:reverse_nulls_position) { ::Gitlab::Pagination::Keyset::ColumnOrderDefinition::REVERSED_NULL_POSITIONS[nulls_position] }
+ let(:custom_reorder) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: column,
+ column_expression: klass.arel_table[column],
+ order_expression: ::Gitlab::Database.nulls_order(column, direction, nulls_position),
+ reversed_order_expression: ::Gitlab::Database.nulls_order(column, reverse_direction, reverse_nulls_position),
+ order_direction: direction,
+ nullable: nulls_position,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: klass.arel_table[:id].send(direction),
+ add_to_projections: true
+ )
+ ])
+ end
+
+ let(:scope) { project.issues.reorder(custom_reorder) }
+
+ subject { described_class.new(scope: scope) }
+
+ describe '.each_batch' do
+ it 'yields an ActiveRecord::Relation when a block is given' do
+ subject.each_batch(of: 1) do |relation|
+ expect(relation).to be_a_kind_of(ActiveRecord::Relation)
+ end
+ end
+
+ it 'accepts a custom batch size' do
+ count = 0
+
+ subject.each_batch(of: 2) { |relation| count += relation.count(:all) }
+
+ expect(count).to eq(9)
+ end
+
+ it 'allows updating of the yielded relations' do
+ time = Time.current
+
+ subject.each_batch(of: 2) do |relation|
+ relation.update_all(updated_at: time)
+ end
+
+ expect(Issue.where(updated_at: time).count).to eq(9)
+ end
+
+ context 'with ordering direction' do
+ context 'when ordering asc' do
+ it 'orders ascending by default, including secondary order column' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+
+ expect(positions).to eq(project.issues.order_relative_position_asc.order(id: :asc).pluck(:relative_position, :id))
+ end
+ end
+
+ context 'when reversing asc order' do
+ let(:scope) { project.issues.order(custom_reorder.reversed_order) }
+
+ it 'orders in reverse of ascending' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+
+ expect(positions).to eq(project.issues.order_relative_position_desc.order(id: :desc).pluck(:relative_position, :id))
+ end
+ end
+
+ context 'when asc order, with nulls first' do
+ let(:nulls_position) { :nulls_first }
+
+ it 'orders ascending with nulls first' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_first_order('relative_position', 'ASC')).order(id: :asc).pluck(:relative_position, :id))
+ end
+ end
+
+ context 'when ordering desc' do
+ let(:direction) { :desc }
+ let(:nulls_position) { :nulls_last }
+
+ it 'orders descending' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:relative_position, :id)) }
+
+ expect(positions).to eq(project.issues.reorder(::Gitlab::Database.nulls_last_order('relative_position', 'DESC')).order(id: :desc).pluck(:relative_position, :id))
+ end
+ end
+
+ context 'when ordering by columns are repeated twice' do
+ let(:direction) { :desc }
+ let(:column) { :id }
+
+ it 'orders descending' do
+ positions = []
+
+ subject.each_batch(of: 2) { |rel| positions.concat(rel.pluck(:id)) }
+
+ expect(positions).to eq(project.issues.reorder(id: :desc).pluck(:id))
+ end
+ end
+ end
+ end
+end
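
The iterator spec'd above batches over a keyset-aware scope instead of using OFFSET: the last row of each batch becomes a cursor, and the next batch is fetched with a condition built from that cursor. A conceptual sketch of the idea over a plain array, ordered by (relative_position ASC NULLS LAST, id ASC); this is not the real Gitlab::Pagination::Keyset::Iterator.

    # Conceptual keyset batching sketch; Row and the data are made up.
    Row = Struct.new(:id, :relative_position, keyword_init: true)

    records = [
      Row.new(id: 1, relative_position: 100),
      Row.new(id: 2, relative_position: nil),
      Row.new(id: 3, relative_position: 100),
      Row.new(id: 4, relative_position: 50)
    ]

    # NULLS LAST: non-null positions sort first, ties broken by id.
    key = ->(r) { [r.relative_position.nil? ? 1 : 0, r.relative_position || 0, r.id] }
    sorted = records.sort_by(&key)

    cursor = nil
    loop do
      batch = sorted.select { |r| cursor.nil? || (key.call(r) <=> cursor) == 1 }.first(2)
      break if batch.empty?

      p batch.map(&:id)             # => [4, 1], then [3, 2]
      cursor = key.call(batch.last) # the "WHERE (position, id) > cursor" of the next page
    end
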
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index 06a8aee1048..26f52745b54 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -3,76 +3,77 @@
require 'spec_helper'
RSpec.describe Gitlab::Pagination::Keyset::Order do
- let(:table) { Arel::Table.new(:my_table) }
- let(:order) { nil }
+ describe 'paginate over items correctly' do
+ let(:table) { Arel::Table.new(:my_table) }
+ let(:order) { nil }
- def run_query(query)
- ActiveRecord::Base.connection.execute(query).to_a
- end
+ def run_query(query)
+ ActiveRecord::Base.connection.execute(query).to_a
+ end
- def build_query(order:, where_conditions: nil, limit: nil)
- <<-SQL
+ def build_query(order:, where_conditions: nil, limit: nil)
+ <<-SQL
SELECT id, year, month
FROM (#{table_data}) my_table (id, year, month)
WHERE #{where_conditions || '1=1'}
ORDER BY #{order}
LIMIT #{limit || 999};
- SQL
- end
-
- def iterate_and_collect(order:, page_size:, where_conditions: nil)
- all_items = []
-
- loop do
- paginated_items = run_query(build_query(order: order, where_conditions: where_conditions, limit: page_size))
- break if paginated_items.empty?
-
- all_items.concat(paginated_items)
- last_item = paginated_items.last
- cursor_attributes = order.cursor_attributes_for_node(last_item)
- where_conditions = order.build_where_values(cursor_attributes).to_sql
+ SQL
end
- all_items
- end
+ def iterate_and_collect(order:, page_size:, where_conditions: nil)
+ all_items = []
- subject do
- run_query(build_query(order: order))
- end
+ loop do
+ paginated_items = run_query(build_query(order: order, where_conditions: where_conditions, limit: page_size))
+ break if paginated_items.empty?
+
+ all_items.concat(paginated_items)
+ last_item = paginated_items.last
+ cursor_attributes = order.cursor_attributes_for_node(last_item)
+ where_conditions = order.where_values_with_or_query(cursor_attributes).to_sql
+ end
- shared_examples 'order examples' do
- it { expect(subject).to eq(expected) }
+ all_items
+ end
- context 'when paginating forwards' do
- subject { iterate_and_collect(order: order, page_size: 2) }
+ subject do
+ run_query(build_query(order: order))
+ end
+ shared_examples 'order examples' do
it { expect(subject).to eq(expected) }
- context 'with different page size' do
- subject { iterate_and_collect(order: order, page_size: 5) }
+ context 'when paginating forwards' do
+ subject { iterate_and_collect(order: order, page_size: 2) }
it { expect(subject).to eq(expected) }
- end
- end
- context 'when paginating backwards' do
- subject do
- last_item = expected.last
- cursor_attributes = order.cursor_attributes_for_node(last_item)
- where_conditions = order.reversed_order.build_where_values(cursor_attributes)
+ context 'with different page size' do
+ subject { iterate_and_collect(order: order, page_size: 5) }
- iterate_and_collect(order: order.reversed_order, page_size: 2, where_conditions: where_conditions.to_sql)
+ it { expect(subject).to eq(expected) }
+ end
end
- it do
- expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject
+ context 'when paginating backwards' do
+ subject do
+ last_item = expected.last
+ cursor_attributes = order.cursor_attributes_for_node(last_item)
+ where_conditions = order.reversed_order.where_values_with_or_query(cursor_attributes)
+
+ iterate_and_collect(order: order.reversed_order, page_size: 2, where_conditions: where_conditions.to_sql)
+ end
+
+ it do
+ expect(subject).to eq(expected.reverse[1..-1]) # removing one item because we used it to calculate cursor data for the "last" page in subject
+ end
end
end
- end
- context 'when ordering by a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 0, 0),
(2, 0, 0),
(3, 0, 0),
@@ -82,41 +83,41 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(7, 0, 0),
(8, 0, 0),
(9, 0, 0)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { "id" => 9, "year" => 0, "month" => 0 },
- { "id" => 8, "year" => 0, "month" => 0 },
- { "id" => 7, "year" => 0, "month" => 0 },
- { "id" => 6, "year" => 0, "month" => 0 },
- { "id" => 5, "year" => 0, "month" => 0 },
- { "id" => 4, "year" => 0, "month" => 0 },
- { "id" => 3, "year" => 0, "month" => 0 },
- { "id" => 2, "year" => 0, "month" => 0 },
- { "id" => 1, "year" => 0, "month" => 0 }
- ]
- end
+ let(:expected) do
+ [
+ { "id" => 9, "year" => 0, "month" => 0 },
+ { "id" => 8, "year" => 0, "month" => 0 },
+ { "id" => 7, "year" => 0, "month" => 0 },
+ { "id" => 6, "year" => 0, "month" => 0 },
+ { "id" => 5, "year" => 0, "month" => 0 },
+ { "id" => 4, "year" => 0, "month" => 0 },
+ { "id" => 3, "year" => 0, "month" => 0 },
+ { "id" => 2, "year" => 0, "month" => 0 },
+ { "id" => 1, "year" => 0, "month" => 0 }
+ ]
+ end
- it_behaves_like 'order examples'
- end
+ it_behaves_like 'order examples'
+ end
- context 'when ordering by two non-nullable columns and a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by two non-nullable columns and a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 2010, 2),
(2, 2011, 1),
(3, 2009, 2),
@@ -126,55 +127,55 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(7, 2010, 3),
(8, 2012, 4),
(9, 2013, 5)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: table['month'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: table['month'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { 'year' => 2009, 'month' => 2, 'id' => 3 },
- { 'year' => 2009, 'month' => 2, 'id' => 6 },
- { 'year' => 2010, 'month' => 2, 'id' => 1 },
- { 'year' => 2010, 'month' => 3, 'id' => 7 },
- { 'year' => 2011, 'month' => 1, 'id' => 2 },
- { 'year' => 2011, 'month' => 1, 'id' => 4 },
- { 'year' => 2011, 'month' => 1, 'id' => 5 },
- { 'year' => 2012, 'month' => 4, 'id' => 8 },
- { 'year' => 2013, 'month' => 5, 'id' => 9 }
- ]
- end
+ let(:expected) do
+ [
+ { 'year' => 2009, 'month' => 2, 'id' => 3 },
+ { 'year' => 2009, 'month' => 2, 'id' => 6 },
+ { 'year' => 2010, 'month' => 2, 'id' => 1 },
+ { 'year' => 2010, 'month' => 3, 'id' => 7 },
+ { 'year' => 2011, 'month' => 1, 'id' => 2 },
+ { 'year' => 2011, 'month' => 1, 'id' => 4 },
+ { 'year' => 2011, 'month' => 1, 'id' => 5 },
+ { 'year' => 2012, 'month' => 4, 'id' => 8 },
+ { 'year' => 2013, 'month' => 5, 'id' => 9 }
+ ]
+ end
- it_behaves_like 'order examples'
- end
+ it_behaves_like 'order examples'
+ end
- context 'when ordering by nullable columns and a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by nullable columns and a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 2010, null),
(2, 2011, 2),
(3, null, null),
@@ -186,61 +187,61 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(9, null, 2),
(10, null, null),
(11, 2010, 2)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: Gitlab::Database.nulls_last_order('year', :asc),
- reversed_order_expression: Gitlab::Database.nulls_first_order('year', :desc),
- order_direction: :asc,
- nullable: :nulls_last,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: Gitlab::Database.nulls_last_order('month', :asc),
- reversed_order_expression: Gitlab::Database.nulls_first_order('month', :desc),
- order_direction: :asc,
- nullable: :nulls_last,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: Gitlab::Database.nulls_last_order('year', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('year', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: Gitlab::Database.nulls_last_order('month', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_first_order('month', :desc),
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { "id" => 7, "year" => 2010, "month" => 2 },
- { "id" => 11, "year" => 2010, "month" => 2 },
- { "id" => 1, "year" => 2010, "month" => nil },
- { "id" => 5, "year" => 2010, "month" => nil },
- { "id" => 2, "year" => 2011, "month" => 2 },
- { "id" => 6, "year" => 2011, "month" => 2 },
- { "id" => 8, "year" => 2012, "month" => 2 },
- { "id" => 9, "year" => nil, "month" => 2 },
- { "id" => 4, "year" => nil, "month" => 5 },
- { "id" => 3, "year" => nil, "month" => nil },
- { "id" => 10, "year" => nil, "month" => nil }
- ]
- end
+ let(:expected) do
+ [
+ { "id" => 7, "year" => 2010, "month" => 2 },
+ { "id" => 11, "year" => 2010, "month" => 2 },
+ { "id" => 1, "year" => 2010, "month" => nil },
+ { "id" => 5, "year" => 2010, "month" => nil },
+ { "id" => 2, "year" => 2011, "month" => 2 },
+ { "id" => 6, "year" => 2011, "month" => 2 },
+ { "id" => 8, "year" => 2012, "month" => 2 },
+ { "id" => 9, "year" => nil, "month" => 2 },
+ { "id" => 4, "year" => nil, "month" => 5 },
+ { "id" => 3, "year" => nil, "month" => nil },
+ { "id" => 10, "year" => nil, "month" => nil }
+ ]
+ end
- it_behaves_like 'order examples'
- end
+ it_behaves_like 'order examples'
+ end
- context 'when ordering by nullable columns with nulls first ordering and a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by nullable columns with nulls first ordering and a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 2010, null),
(2, 2011, 2),
(3, null, null),
@@ -252,61 +253,61 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(9, null, 2),
(10, null, null),
(11, 2010, 2)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: Gitlab::Database.nulls_first_order('year', :asc),
- reversed_order_expression: Gitlab::Database.nulls_last_order('year', :desc),
- order_direction: :asc,
- nullable: :nulls_first,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'month',
- column_expression: table['month'],
- order_expression: Gitlab::Database.nulls_first_order('month', :asc),
- order_direction: :asc,
- reversed_order_expression: Gitlab::Database.nulls_last_order('month', :desc),
- nullable: :nulls_first,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].asc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: Gitlab::Database.nulls_first_order('year', :asc),
+ reversed_order_expression: Gitlab::Database.nulls_last_order('year', :desc),
+ order_direction: :asc,
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'month',
+ column_expression: table['month'],
+ order_expression: Gitlab::Database.nulls_first_order('month', :asc),
+ order_direction: :asc,
+ reversed_order_expression: Gitlab::Database.nulls_last_order('month', :desc),
+ nullable: :nulls_first,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].asc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { "id" => 3, "year" => nil, "month" => nil },
- { "id" => 10, "year" => nil, "month" => nil },
- { "id" => 9, "year" => nil, "month" => 2 },
- { "id" => 4, "year" => nil, "month" => 5 },
- { "id" => 1, "year" => 2010, "month" => nil },
- { "id" => 5, "year" => 2010, "month" => nil },
- { "id" => 7, "year" => 2010, "month" => 2 },
- { "id" => 11, "year" => 2010, "month" => 2 },
- { "id" => 2, "year" => 2011, "month" => 2 },
- { "id" => 6, "year" => 2011, "month" => 2 },
- { "id" => 8, "year" => 2012, "month" => 2 }
- ]
- end
+ let(:expected) do
+ [
+ { "id" => 3, "year" => nil, "month" => nil },
+ { "id" => 10, "year" => nil, "month" => nil },
+ { "id" => 9, "year" => nil, "month" => 2 },
+ { "id" => 4, "year" => nil, "month" => 5 },
+ { "id" => 1, "year" => 2010, "month" => nil },
+ { "id" => 5, "year" => 2010, "month" => nil },
+ { "id" => 7, "year" => 2010, "month" => 2 },
+ { "id" => 11, "year" => 2010, "month" => 2 },
+ { "id" => 2, "year" => 2011, "month" => 2 },
+ { "id" => 6, "year" => 2011, "month" => 2 },
+ { "id" => 8, "year" => 2012, "month" => 2 }
+ ]
+ end
- it_behaves_like 'order examples'
- end
+ it_behaves_like 'order examples'
+ end
- context 'when ordering by non-nullable columns with mixed directions and a distinct column' do
- let(:table_data) do
- <<-SQL
+ context 'when ordering by non-nullable columns with mixed directions and a distinct column' do
+ let(:table_data) do
+ <<-SQL
VALUES (1, 2010, 0),
(2, 2011, 0),
(3, 2010, 0),
@@ -318,158 +319,216 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
(9, 2013, 0),
(10, 2014, 0),
(11, 2013, 0)
- SQL
- end
+ SQL
+ end
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- let(:expected) do
- [
- { "id" => 7, "year" => 2010, "month" => 0 },
- { "id" => 4, "year" => 2010, "month" => 0 },
- { "id" => 3, "year" => 2010, "month" => 0 },
- { "id" => 1, "year" => 2010, "month" => 0 },
- { "id" => 8, "year" => 2011, "month" => 0 },
- { "id" => 2, "year" => 2011, "month" => 0 },
- { "id" => 6, "year" => 2012, "month" => 0 },
- { "id" => 5, "year" => 2012, "month" => 0 },
- { "id" => 11, "year" => 2013, "month" => 0 },
- { "id" => 9, "year" => 2013, "month" => 0 },
- { "id" => 10, "year" => 2014, "month" => 0 }
- ]
- end
+ let(:expected) do
+ [
+ { "id" => 7, "year" => 2010, "month" => 0 },
+ { "id" => 4, "year" => 2010, "month" => 0 },
+ { "id" => 3, "year" => 2010, "month" => 0 },
+ { "id" => 1, "year" => 2010, "month" => 0 },
+ { "id" => 8, "year" => 2011, "month" => 0 },
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 },
+ { "id" => 5, "year" => 2012, "month" => 0 },
+ { "id" => 11, "year" => 2013, "month" => 0 },
+ { "id" => 9, "year" => 2013, "month" => 0 },
+ { "id" => 10, "year" => 2014, "month" => 0 }
+ ]
+ end
- it 'takes out a slice between two cursors' do
- after_cursor = { "id" => 8, "year" => 2011 }
- before_cursor = { "id" => 5, "year" => 2012 }
+ it 'takes out a slice between two cursors' do
+ after_cursor = { "id" => 8, "year" => 2011 }
+ before_cursor = { "id" => 5, "year" => 2012 }
- after_conditions = order.build_where_values(after_cursor)
- reversed = order.reversed_order
- before_conditions = reversed.build_where_values(before_cursor)
+ after_conditions = order.where_values_with_or_query(after_cursor)
+ reversed = order.reversed_order
+ before_conditions = reversed.where_values_with_or_query(before_cursor)
- query = build_query(order: order, where_conditions: "(#{after_conditions.to_sql}) AND (#{before_conditions.to_sql})", limit: 100)
+ query = build_query(order: order, where_conditions: "(#{after_conditions.to_sql}) AND (#{before_conditions.to_sql})", limit: 100)
- expect(run_query(query)).to eq([
- { "id" => 2, "year" => 2011, "month" => 0 },
- { "id" => 6, "year" => 2012, "month" => 0 }
- ])
+ expect(run_query(query)).to eq([
+ { "id" => 2, "year" => 2011, "month" => 0 },
+ { "id" => 6, "year" => 2012, "month" => 0 }
+ ])
+ end
end
- end
- context 'when the passed cursor values do not match with the order definition' do
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'year',
- column_expression: table['year'],
- order_expression: table['year'].asc,
- nullable: :not_nullable,
- distinct: false
- ),
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- column_expression: table['id'],
- order_expression: table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
- end
+ context 'when the passed cursor values do not match with the order definition' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'year',
+ column_expression: table['year'],
+ order_expression: table['year'].asc,
+ nullable: :not_nullable,
+ distinct: false
+ ),
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ column_expression: table['id'],
+ order_expression: table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- context 'when values are missing' do
- it 'raises error' do
- expect { order.build_where_values(id: 1) }.to raise_error(/Missing items: year/)
+ context 'when values are missing' do
+ it 'raises error' do
+ expect { order.build_where_values(id: 1) }.to raise_error(/Missing items: year/)
+ end
end
- end
- context 'when extra values are present' do
- it 'raises error' do
- expect { order.build_where_values(id: 1, year: 2, foo: 3) }.to raise_error(/Extra items: foo/)
+ context 'when extra values are present' do
+ it 'raises error' do
+ expect { order.build_where_values(id: 1, year: 2, foo: 3) }.to raise_error(/Extra items: foo/)
+ end
end
- end
- context 'when values are missing and extra values are present' do
- it 'raises error' do
- expect { order.build_where_values(year: 2, foo: 3) }.to raise_error(/Extra items: foo\. Missing items: id/)
+ context 'when values are missing and extra values are present' do
+ it 'raises error' do
+ expect { order.build_where_values(year: 2, foo: 3) }.to raise_error(/Extra items: foo\. Missing items: id/)
+ end
end
- end
- context 'when no values are passed' do
- it 'returns nil' do
- expect(order.build_where_values({})).to eq(nil)
+ context 'when no values are passed' do
+ it 'returns empty array' do
+ expect(order.build_where_values({})).to eq([])
+ end
end
end
- end
- context 'extract and apply cursor attributes' do
- let(:model) { Project.new(id: 100) }
- let(:scope) { Project.all }
+ context 'extract and apply cursor attributes' do
+ let(:model) { Project.new(id: 100) }
+ let(:scope) { Project.all }
- shared_examples 'cursor attribute examples' do
- describe '#cursor_attributes_for_node' do
- it { expect(order.cursor_attributes_for_node(model)).to eq({ id: '100' }.with_indifferent_access) }
- end
+ shared_examples 'cursor attribute examples' do
+ describe '#cursor_attributes_for_node' do
+ it { expect(order.cursor_attributes_for_node(model)).to eq({ id: '100' }.with_indifferent_access) }
+ end
+
+ describe '#apply_cursor_conditions' do
+ context 'when params with string keys are passed' do
+ subject(:sql) { order.apply_cursor_conditions(scope, { 'id' => '100' }).to_sql }
- describe '#apply_cursor_conditions' do
- context 'when params with string keys are passed' do
- subject(:sql) { order.apply_cursor_conditions(scope, { 'id' => '100' }).to_sql }
+ it { is_expected.to include('"projects"."id" < 100)') }
+ end
- it { is_expected.to include('"projects"."id" < 100)') }
+ context 'when params with symbol keys are passed' do
+ subject(:sql) { order.apply_cursor_conditions(scope, { id: '100' }).to_sql }
+
+ it { is_expected.to include('"projects"."id" < 100)') }
+ end
end
+ end
- context 'when params with symbol keys are passed' do
- subject(:sql) { order.apply_cursor_conditions(scope, { id: '100' }).to_sql }
+ context 'when string attribute name is given' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
- it { is_expected.to include('"projects"."id" < 100)') }
+ it_behaves_like 'cursor attribute examples'
+ end
+
+ context 'when symbol attribute name is given' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
end
+
+ it_behaves_like 'cursor attribute examples'
end
end
+ end
- context 'when string attribute name is given' do
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'id',
- order_expression: Project.arel_table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ describe 'UNION optimization' do
+ let_it_be(:five_months_ago) { 5.months.ago }
+
+ let_it_be(:user_1) { create(:user, created_at: five_months_ago) }
+ let_it_be(:user_2) { create(:user, created_at: five_months_ago) }
+ let_it_be(:user_3) { create(:user, created_at: 1.month.ago) }
+ let_it_be(:user_4) { create(:user, created_at: 2.months.ago) }
+
+ let(:expected_results) { [user_3, user_4, user_2, user_1] }
+ let(:scope) { User.order(created_at: :desc, id: :desc) }
+ let(:keyset_aware_scope) { Gitlab::Pagination::Keyset::SimpleOrderBuilder.build(scope).first }
+ let(:iterator_options) { { scope: keyset_aware_scope } }
+
+ subject(:items) do
+ [].tap do |collector|
+ Gitlab::Pagination::Keyset::Iterator.new(**iterator_options).each_batch(of: 2) do |models|
+ collector.concat(models)
+ end
end
+ end
- it_behaves_like 'cursor attribute examples'
+ context 'when UNION optimization is off' do
+ it 'returns items in the correct order' do
+ iterator_options[:use_union_optimization] = false
+
+ expect(items).to eq(expected_results)
+ end
end
- context 'when symbol attribute name is given' do
- let(:order) do
- Gitlab::Pagination::Keyset::Order.build([
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: :id,
- order_expression: Project.arel_table['id'].desc,
- nullable: :not_nullable,
- distinct: true
- )
- ])
+ context 'when UNION optimization is on' do
+ before do
+ iterator_options[:use_union_optimization] = true
end
- it_behaves_like 'cursor attribute examples'
+ it 'returns items in the correct order' do
+ expect(items).to eq(expected_results)
+ end
+
+ it 'calls Gitlab::SQL::Union' do
+ expect_next_instances_of(Gitlab::SQL::Union, 2) do |instance|
+ expect(instance.send(:remove_order)).to eq(false) # Do not remove order from the queries
+ expect(instance.send(:remove_duplicates)).to eq(false) # Do not deduplicate the results
+ end
+
+ items
+ end
+
+ it 'builds UNION query' do
+ cursor_attributes = { created_at: five_months_ago, id: user_2.id }
+ order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(keyset_aware_scope)
+
+ query = order.apply_cursor_conditions(scope, cursor_attributes, use_union_optimization: true).to_sql
+ expect(query).to include('UNION ALL')
+ end
end
end
end
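
The pagination loops above turn the last row of each page into a cursor and feed it back through where_values_with_or_query (previously build_where_values). For an order like year ASC, id DESC and the cursor { 'year' => 2011, 'id' => 8 } used in the slice example, the generated predicate is the usual keyset OR-chain, roughly as below; the exact SQL text produced by the library is not guaranteed to match character for character.

    # Conceptual expansion of the cursor condition only.
    cursor = { 'year' => 2011, 'id' => 8 }

    next_page_condition = <<~SQL
      (year > #{cursor['year']})
      OR (year = #{cursor['year']} AND id < #{cursor['id']})
    SQL
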
diff --git a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
new file mode 100644
index 00000000000..5af86cb2dc0
--- /dev/null
+++ b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
+ let(:ordered_scope) { described_class.build(scope).first }
+ let(:order_object) { Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(ordered_scope) }
+
+ subject(:sql_with_order) { ordered_scope.to_sql }
+
+ context 'when no order present' do
+ let(:scope) { Project.where(id: [1, 2, 3]) }
+
+ it 'orders by primary key' do
+ expect(sql_with_order).to end_with('ORDER BY "projects"."id" DESC')
+ end
+
+ it 'sets the column definition distinct and not nullable' do
+ column_definition = order_object.column_definitions.first
+
+ expect(column_definition).to be_not_nullable
+ expect(column_definition).to be_distinct
+ end
+ end
+
+ context 'when primary key order present' do
+ let(:scope) { Project.where(id: [1, 2, 3]).order(id: :asc) }
+
+ it 'orders by primary key without altering the direction' do
+ expect(sql_with_order).to end_with('ORDER BY "projects"."id" ASC')
+ end
+ end
+
+ context 'when ordered by other column' do
+ let(:scope) { Project.where(id: [1, 2, 3]).order(created_at: :asc) }
+
+ it 'adds extra primary key order as tie-breaker' do
+ expect(sql_with_order).to end_with('ORDER BY "projects"."created_at" ASC, "projects"."id" DESC')
+ end
+
+ it 'sets the column definition for created_at non-distinct and nullable' do
+ column_definition = order_object.column_definitions.first
+
+ expect(column_definition.attribute_name).to eq('created_at')
+ expect(column_definition.nullable?).to eq(true) # be_nullable calls non_null? method for some reason
+ expect(column_definition).not_to be_distinct
+ end
+ end
+
+ context 'when ordered by two columns where the last one is the tie breaker' do
+ let(:scope) { Project.where(id: [1, 2, 3]).order(created_at: :asc, id: :asc) }
+
+ it 'preserves the order' do
+ expect(sql_with_order).to end_with('ORDER BY "projects"."created_at" ASC, "projects"."id" ASC')
+ end
+ end
+
+ context 'when non-nullable column is given' do
+ let(:scope) { Project.where(id: [1, 2, 3]).order(namespace_id: :asc, id: :asc) }
+
+ it 'sets the column definition for namespace_id non-distinct and non-nullable' do
+ column_definition = order_object.column_definitions.first
+
+ expect(column_definition.attribute_name).to eq('namespace_id')
+ expect(column_definition).to be_not_nullable
+ expect(column_definition).not_to be_distinct
+ end
+ end
+
+ context 'when order cannot be built' do
+ subject(:success) { described_class.build(scope).last }
+
+ context 'when raw SQL order is given' do
+ let(:scope) { Project.order('id DESC') }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when NULLS LAST order is given' do
+ let(:scope) { Project.order(::Gitlab::Database.nulls_last_order('created_at', 'ASC')) }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when more than 2 columns are given for the order' do
+ let(:scope) { Project.order(created_at: :asc, updated_at: :desc, id: :asc) }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/performance_bar_spec.rb b/spec/lib/gitlab/performance_bar_spec.rb
index 12916c41f0f..b7564dc95f8 100644
--- a/spec/lib/gitlab/performance_bar_spec.rb
+++ b/spec/lib/gitlab/performance_bar_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::PerformanceBar do
it { expect(described_class.l1_cache_backend).to eq(Gitlab::ProcessMemoryCache.cache_backend) }
it { expect(described_class.l2_cache_backend).to eq(Rails.cache) }
- describe '.enabled_for_user?' do
+ describe '.allowed_for_user?' do
let(:user) { create(:user) }
before do
@@ -14,24 +14,24 @@ RSpec.describe Gitlab::PerformanceBar do
end
it 'returns false when given user is nil' do
- expect(described_class.enabled_for_user?(nil)).to be_falsy
+ expect(described_class.allowed_for_user?(nil)).to be_falsy
end
it 'returns true when given user is an admin' do
user = build_stubbed(:user, :admin)
- expect(described_class.enabled_for_user?(user)).to be_truthy
+ expect(described_class.allowed_for_user?(user)).to be_truthy
end
it 'returns false when allowed_group_id is nil' do
expect(described_class).to receive(:allowed_group_id).and_return(nil)
- expect(described_class.enabled_for_user?(user)).to be_falsy
+ expect(described_class.allowed_for_user?(user)).to be_falsy
end
context 'when allowed group ID does not exist' do
it 'returns false' do
- expect(described_class.enabled_for_user?(user)).to be_falsy
+ expect(described_class.allowed_for_user?(user)).to be_falsy
end
end
@@ -44,15 +44,15 @@ RSpec.describe Gitlab::PerformanceBar do
context 'when user is not a member of the allowed group' do
it 'returns false' do
- expect(described_class.enabled_for_user?(user)).to be_falsy
+ expect(described_class.allowed_for_user?(user)).to be_falsy
end
context 'caching of allowed user IDs' do
- subject { described_class.enabled_for_user?(user) }
+ subject { described_class.allowed_for_user?(user) }
before do
# Warm the caches
- described_class.enabled_for_user?(user)
+ described_class.allowed_for_user?(user)
end
it_behaves_like 'allowed user IDs are cached'
@@ -65,15 +65,15 @@ RSpec.describe Gitlab::PerformanceBar do
end
it 'returns true' do
- expect(described_class.enabled_for_user?(user)).to be_truthy
+ expect(described_class.allowed_for_user?(user)).to be_truthy
end
context 'caching of allowed user IDs' do
- subject { described_class.enabled_for_user?(user) }
+ subject { described_class.allowed_for_user?(user) }
before do
# Warm the caches
- described_class.enabled_for_user?(user)
+ described_class.allowed_for_user?(user)
end
it_behaves_like 'allowed user IDs are cached'
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::PerformanceBar do
end
it 'returns the nested group' do
- expect(described_class.enabled_for_user?(user)).to be_truthy
+ expect(described_class.allowed_for_user?(user)).to be_truthy
end
end
@@ -101,7 +101,7 @@ RSpec.describe Gitlab::PerformanceBar do
end
it 'returns false' do
- expect(described_class.enabled_for_user?(user)).to be_falsy
+ expect(described_class.allowed_for_user?(user)).to be_falsy
end
end
end
diff --git a/spec/lib/gitlab/prometheus/adapter_spec.rb b/spec/lib/gitlab/prometheus/adapter_spec.rb
index 9d4806ea73b..259202178a2 100644
--- a/spec/lib/gitlab/prometheus/adapter_spec.rb
+++ b/spec/lib/gitlab/prometheus/adapter_spec.rb
@@ -58,8 +58,8 @@ RSpec.describe Gitlab::Prometheus::Adapter do
context 'with cluster with prometheus integration' do
let!(:prometheus_integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
- it 'returns the integration instead' do
- expect(subject.prometheus_adapter).to eq(prometheus_integration)
+ it 'returns the application' do
+ expect(subject.prometheus_adapter).to eq(prometheus)
end
end
end
diff --git a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb b/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
index 3c7496cabd0..559557f9313 100644
--- a/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
+++ b/spec/lib/gitlab/prometheus/additional_metrics_parser_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::Prometheus::AdditionalMetricsParser do
end
before do
- allow(described_class).to receive(:load_yaml_file) { YAML.load(sample_yaml) }
+ allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(sample_yaml) }
end
it 'parses to two metric groups with 2 and 1 metric respectively' do
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Prometheus::AdditionalMetricsParser do
shared_examples 'required field' do |field_name|
context "when #{field_name} is nil" do
before do
- allow(described_class).to receive(:load_yaml_file) { YAML.load(field_missing) }
+ allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_missing) }
end
it 'throws parsing error' do
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::Prometheus::AdditionalMetricsParser do
context "when #{field_name} are not specified" do
before do
- allow(described_class).to receive(:load_yaml_file) { YAML.load(field_nil) }
+ allow(described_class).to receive(:load_yaml_file) { YAML.safe_load(field_nil) }
end
it 'throws parsing error' do
diff --git a/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
index 0b012bfd970..7e28649e634 100644
--- a/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
+++ b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
@@ -13,7 +13,9 @@ RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
shared_examples 'arg line with valid parameters' do
it 'return time and date array' do
- expect(subject.new(valid_arg).execute).to eq(expected_response)
+ freeze_time do
+ expect(subject.new(valid_arg).execute).to eq(expected_response)
+ end
end
end
@@ -53,7 +55,7 @@ RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
it_behaves_like 'arg line with valid parameters' do
let(:valid_arg) { '2m 3m 5m 1h' }
let(:time) { Gitlab::TimeTrackingFormatter.parse(valid_arg) }
- let(:date) { DateTime.now.to_date }
+ let(:date) { DateTime.current }
let(:expected_response) { [time, date] }
end
end
diff --git a/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb b/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
index 2cb31b00f39..bd167ee2e3e 100644
--- a/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
+++ b/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Gitlab::RackAttack::InstrumentedCacheStore do
begin
test_proc.call(subject)
- rescue => e
+ rescue StandardError => e
exception = e
end
ensure
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index f62a3c74005..28447d5c2a9 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -427,6 +427,19 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2fmy_package') }
end
+ describe '.terraform_module_package_name_regex' do
+ subject { described_class.terraform_module_package_name_regex }
+
+ it { is_expected.to match('my-module/my-system') }
+ it { is_expected.to match('my/module') }
+ it { is_expected.not_to match('my-module') }
+ it { is_expected.not_to match('My-Module') }
+ it { is_expected.not_to match('my_module') }
+ it { is_expected.not_to match('my.module') }
+ it { is_expected.not_to match('../../../my-module') }
+ it { is_expected.not_to match('%2e%2e%2fmy-module') }
+ end
+
describe '.pypi_version_regex' do
subject { described_class.pypi_version_regex }
@@ -628,6 +641,50 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('hé') }
end
+ describe '.helm_channel_regex' do
+ subject { described_class.helm_channel_regex }
+
+ it { is_expected.to match('release') }
+ it { is_expected.to match('my-repo') }
+ it { is_expected.to match('my-repo42') }
+
+ # Do not allow empty
+ it { is_expected.not_to match('') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+
+ describe '.helm_package_regex' do
+ subject { described_class.helm_package_regex }
+
+ it { is_expected.to match('release') }
+ it { is_expected.to match('my-repo') }
+ it { is_expected.to match('my-repo42') }
+
+ # Do not allow empty
+ it { is_expected.not_to match('') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+
+ it { is_expected.not_to match('my/../repo') }
+ it { is_expected.not_to match('me%2f%2e%2e%2f') }
+ end
+
+ describe '.helm_version_regex' do
+ subject { described_class.helm_version_regex }
+
+ it { is_expected.to match('v1.2.3') }
+ it { is_expected.to match('v1.2.3-beta') }
+ it { is_expected.to match('v1.2.3-alpha.3') }
+ it { is_expected.not_to match('v1') }
+ it { is_expected.not_to match('v1.2') }
+ it { is_expected.not_to match('v1./2.3') }
+ it { is_expected.not_to match('v../../../../../1.2.3') }
+ it { is_expected.not_to match('v%2e%2e%2f1.2.3') }
+ end
+
describe '.semver_regex' do
subject { described_class.semver_regex }
@@ -726,4 +783,134 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('v../../../../../1.2.3') }
it { is_expected.not_to match('v%2e%2e%2f1.2.3') }
end
+
+ describe 'Packages::API_PATH_REGEX' do
+ subject { described_class::Packages::API_PATH_REGEX }
+
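+ # Only package registry API paths (instance, group, and project level) should match; other API routes should not.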
+ it { is_expected.to match('/api/v4/group/12345/-/packages/composer/p/123456789') }
+ it { is_expected.to match('/api/v4/group/12345/-/packages/composer/p2/pkg_name') }
+ it { is_expected.to match('/api/v4/group/12345/-/packages/composer/packages') }
+ it { is_expected.to match('/api/v4/group/12345/-/packages/composer/pkg_name') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/maven/a/path/file.jar') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/nuget/index') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/nuget/metadata/pkg_name/1.3.4') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/nuget/metadata/pkg_name/index') }
+ it { is_expected.to match('/api/v4/groups/1234/-/packages/nuget/query') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/digest') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/download_urls') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/digest') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/download_urls') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/upload_urls') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/upload_urls') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/conans/search') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/export/file.name') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/export/file.name/authorize') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/package/pkg_ref/pkg_revision/file.name') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/package/pkg_ref/pkg_revision/file.name/authorize') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/ping') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/users/authenticate') }
+ it { is_expected.to match('/api/v4/packages/conan/v1/users/check_credentials') }
+ it { is_expected.to match('/api/v4/packages/maven/a/path/file.jar') }
+ it { is_expected.to match('/api/v4/packages/npm/-/package/pkg_name/dist-tags') }
+ it { is_expected.to match('/api/v4/packages/npm/-/package/pkg_name/dist-tags/tag') }
+ it { is_expected.to match('/api/v4/packages/npm/pkg_name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/composer') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/composer/archives/pkg_name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/digest') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/download_urls') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/digest') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/download_urls') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/packages/pkg_ref/upload_urls') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/pkg_name/1.2.3/username/stable/upload_urls') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/conans/search') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/export/file.name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/export/file.name/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/package/pkg_ref/pkg_revision/file.name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/files/pkg_name/1.2.3/username/stable/2.3/package/pkg_ref/pkg_revision/file.name/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/ping') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/users/authenticate') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/conan/v1/users/check_credentials') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/dists/stable/compon/binary-x64/Packages') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/dists/stable/InRelease') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/dists/stable/Release') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/dists/stable/Release.gpg') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/file.name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/file.name/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/debian/pool/compon/e/pkg/file.name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/generic/pkg_name/1.3.4/myfile.txt') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/generic/pkg_name/1.3.4/myfile.txt/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/go/my_module/@v/11.2.3.info') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/go/my_module/@v/11.2.3.mod') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/go/my_module/@v/11.2.3.zip') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/go/my_module/@v/list') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/maven/a/path/file.jar') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/maven/a/path/file.jar/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/npm/-/package/pkg_name/dist-tags') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/npm/-/package/pkg_name/dist-tags/tag') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/npm/pkg_name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/npm/pkg_name/-/tarball.tgz') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/download/pkg_name/1.3.4/pkg.npkg') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/download/pkg_name/index') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/index') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/metadata/pkg_name/1.3.4') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/metadata/pkg_name/index') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/nuget/query') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/pypi') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/pypi/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/pypi/files/1234567890/file.identifier') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/pypi/simple/pkg_name') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/api/v1/dependencies') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/api/v1/gems') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/api/v1/gems/authorize') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/gems/pkg') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/pkg') }
+ it { is_expected.to match('/api/v4/projects/1234/packages/rubygems/quick/Marshal.4.8/pkg') }
+ it { is_expected.not_to match('') }
+ it { is_expected.not_to match('foo') }
+ it { is_expected.not_to match('/api/v4') }
+ it { is_expected.not_to match('/api/v4/version') }
+ it { is_expected.not_to match('/api/v4/packages') }
+ it { is_expected.not_to match('/api/v4/packages/') }
+ it { is_expected.not_to match('/api/v4/group') }
+ it { is_expected.not_to match('/api/v4/group/12345') }
+ it { is_expected.not_to match('/api/v4/group/12345/-') }
+ it { is_expected.not_to match('/api/v4/group/12345/-/packages') }
+ it { is_expected.not_to match('/api/v4/group/12345/-/packages/') }
+ it { is_expected.not_to match('/api/v4/group/12345/-/packages/50') }
+ it { is_expected.not_to match('/api/v4/groups') }
+ it { is_expected.not_to match('/api/v4/groups/12345') }
+ it { is_expected.not_to match('/api/v4/groups/12345/-') }
+ it { is_expected.not_to match('/api/v4/groups/12345/-/packages') }
+ it { is_expected.not_to match('/api/v4/groups/12345/-/packages/') }
+ it { is_expected.not_to match('/api/v4/groups/12345/-/packages/50') }
+ it { is_expected.not_to match('/api/v4/groups/12345/packages') }
+ it { is_expected.not_to match('/api/v4/groups/12345/packages/') }
+ it { is_expected.not_to match('/api/v4/groups/12345/badges') }
+ it { is_expected.not_to match('/api/v4/groups/12345/issues') }
+ it { is_expected.not_to match('/api/v4/projects') }
+ it { is_expected.not_to match('/api/v4/projects/1234') }
+ it { is_expected.not_to match('/api/v4/projects/1234/packages') }
+ it { is_expected.not_to match('/api/v4/projects/1234/packages/') }
+ it { is_expected.not_to match('/api/v4/projects/1234/packages/50') }
+ it { is_expected.not_to match('/api/v4/projects/1234/packages/50/package_files') }
+ it { is_expected.not_to match('/api/v4/projects/1234/merge_requests') }
+ it { is_expected.not_to match('/api/v4/projects/1234/registry/repositories') }
+ it { is_expected.not_to match('/api/v4/projects/1234/issues') }
+ it { is_expected.not_to match('/api/v4/projects/1234/members') }
+ it { is_expected.not_to match('/api/v4/projects/1234/milestones') }
+
+ # Group-level Debian API endpoints are not matched because they do not use the expected prefix (groups/:id/-/packages/)
+ # TODO: Update the Debian group-level endpoint URLs and adjust these specs: https://gitlab.com/gitlab-org/gitlab/-/issues/326805
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/compon/binary-compo/Packages') }
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/InRelease') }
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/Release') }
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/dists/stable/Release.gpg') }
+ it { is_expected.not_to match('/api/v4/groups/1234/packages/debian/pool/compon/a/pkg/file.name') }
+ end
end
diff --git a/spec/lib/gitlab/relative_positioning/item_context_spec.rb b/spec/lib/gitlab/relative_positioning/item_context_spec.rb
index daea8d8470d..3a469e53cb1 100644
--- a/spec/lib/gitlab/relative_positioning/item_context_spec.rb
+++ b/spec/lib/gitlab/relative_positioning/item_context_spec.rb
@@ -212,4 +212,20 @@ RSpec.describe Gitlab::RelativePositioning::ItemContext do
end
end
end
+
+ describe '#at_position' do
+ let_it_be(:issue) { create_issue(500) }
+ let_it_be(:issue_2) { create_issue(510) }
+
+ let(:subject) { described_class.new(issue, range) }
+
+ it 'finds the item at the specified position' do
+ expect(subject.at_position(500)).to eq(described_class.new(issue, range))
+ expect(subject.at_position(510)).to eq(described_class.new(issue_2, range))
+ end
+
+ it 'raises InvalidPosition when the item cannot be found' do
+ expect { subject.at_position(501) }.to raise_error Gitlab::RelativePositioning::InvalidPosition
+ end
+ end
end
diff --git a/spec/lib/gitlab/repository_cache_spec.rb b/spec/lib/gitlab/repository_cache_spec.rb
index 80285a6c732..3277135246d 100644
--- a/spec/lib/gitlab/repository_cache_spec.rb
+++ b/spec/lib/gitlab/repository_cache_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RepositoryCache do
let_it_be(:project) { create(:project) }
+
let(:backend) { double('backend').as_null_object }
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
@@ -38,6 +39,7 @@ RSpec.describe Gitlab::RepositoryCache do
describe 'personal snippet repository' do
let_it_be(:personal_snippet) { create(:personal_snippet) }
+
let(:namespace) { repository.full_path }
it_behaves_like 'cache_key examples' do
diff --git a/spec/lib/gitlab/repository_hash_cache_spec.rb b/spec/lib/gitlab/repository_hash_cache_spec.rb
index 9b4ca3f9dca..6b52c315a70 100644
--- a/spec/lib/gitlab/repository_hash_cache_spec.rb
+++ b/spec/lib/gitlab/repository_hash_cache_spec.rb
@@ -4,6 +4,7 @@ require "spec_helper"
RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
+
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
let(:cache) { described_class.new(repository) }
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index eaecbb0233d..881591ae805 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
+
let(:repository) { project.repository }
let(:namespace) { "#{repository.full_path}:#{project.id}" }
let(:cache) { described_class.new(repository) }
@@ -34,6 +35,7 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
describe 'personal snippet repository' do
let_it_be(:personal_snippet) { create(:personal_snippet) }
+
let(:namespace) { repository.full_path }
it_behaves_like 'cache_key examples' do
diff --git a/spec/lib/gitlab/repository_size_checker_spec.rb b/spec/lib/gitlab/repository_size_checker_spec.rb
index 20c08da6c54..559f5fa66c6 100644
--- a/spec/lib/gitlab/repository_size_checker_spec.rb
+++ b/spec/lib/gitlab/repository_size_checker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RepositorySizeChecker do
let_it_be(:namespace) { nil }
+
let(:current_size) { 0 }
let(:limit) { 50 }
let(:enabled) { true }
diff --git a/spec/lib/gitlab/repository_size_error_message_spec.rb b/spec/lib/gitlab/repository_size_error_message_spec.rb
index 78504d201d4..633ec41ab00 100644
--- a/spec/lib/gitlab/repository_size_error_message_spec.rb
+++ b/spec/lib/gitlab/repository_size_error_message_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RepositorySizeErrorMessage do
let_it_be(:namespace) { build(:namespace) }
+
let(:checker) do
Gitlab::RepositorySizeChecker.new(
current_size_proc: -> { 15.megabytes },
diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb
index 1ec14092c63..0fcb7db7d5f 100644
--- a/spec/lib/gitlab/runtime_spec.rb
+++ b/spec/lib/gitlab/runtime_spec.rb
@@ -42,7 +42,19 @@ RSpec.describe Gitlab::Runtime do
end
end
- context "puma" do
+ # Puma has no cli_config method unless `puma/cli` is required
+ context "puma without cli_config" do
+ let(:puma_type) { double('::Puma') }
+
+ before do
+ stub_const('::Puma', puma_type)
+ stub_env('ACTION_CABLE_IN_APP', 'false')
+ end
+
+ it_behaves_like "valid runtime", :puma, 1
+ end
+
+ context "puma with cli_config" do
let(:puma_type) { double('::Puma') }
let(:max_workers) { 2 }
diff --git a/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb
index 75e9c8c100b..dfe9358f70b 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
[
{
name: 'a',
+ worker_name: 'WorkerA',
feature_category: :category_a,
has_external_dependencies: false,
urgency: :low,
@@ -19,6 +20,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
},
{
name: 'a:2',
+ worker_name: 'WorkerA2',
feature_category: :category_a,
has_external_dependencies: false,
urgency: :high,
@@ -27,6 +29,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
},
{
name: 'b',
+ worker_name: 'WorkerB',
feature_category: :category_b,
has_external_dependencies: true,
urgency: :high,
@@ -35,6 +38,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
},
{
name: 'c',
+ worker_name: 'WorkerC',
feature_category: :category_c,
has_external_dependencies: false,
urgency: :throttled,
@@ -46,56 +50,62 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
context 'with valid input' do
where(:query, :expected_metadatas) do
+ # worker_name
+ 'worker_name=WorkerA' | %w(WorkerA)
+ 'worker_name=WorkerA2' | %w(WorkerA2)
+ 'worker_name=WorkerB|worker_name=WorkerD' | %w(WorkerB)
+ 'worker_name!=WorkerA' | %w(WorkerA2 WorkerB WorkerC)
+
# feature_category
- 'feature_category=category_a' | %w(a a:2)
- 'feature_category=category_a,category_c' | %w(a a:2 c)
- 'feature_category=category_a|feature_category=category_c' | %w(a a:2 c)
- 'feature_category!=category_a' | %w(b c)
+ 'feature_category=category_a' | %w(WorkerA WorkerA2)
+ 'feature_category=category_a,category_c' | %w(WorkerA WorkerA2 WorkerC)
+ 'feature_category=category_a|feature_category=category_c' | %w(WorkerA WorkerA2 WorkerC)
+ 'feature_category!=category_a' | %w(WorkerB WorkerC)
# has_external_dependencies
- 'has_external_dependencies=true' | %w(b)
- 'has_external_dependencies=false' | %w(a a:2 c)
- 'has_external_dependencies=true,false' | %w(a a:2 b c)
- 'has_external_dependencies=true|has_external_dependencies=false' | %w(a a:2 b c)
- 'has_external_dependencies!=true' | %w(a a:2 c)
+ 'has_external_dependencies=true' | %w(WorkerB)
+ 'has_external_dependencies=false' | %w(WorkerA WorkerA2 WorkerC)
+ 'has_external_dependencies=true,false' | %w(WorkerA WorkerA2 WorkerB WorkerC)
+ 'has_external_dependencies=true|has_external_dependencies=false' | %w(WorkerA WorkerA2 WorkerB WorkerC)
+ 'has_external_dependencies!=true' | %w(WorkerA WorkerA2 WorkerC)
# urgency
- 'urgency=high' | %w(a:2 b)
- 'urgency=low' | %w(a)
- 'urgency=high,low,throttled' | %w(a a:2 b c)
- 'urgency=low|urgency=throttled' | %w(a c)
- 'urgency!=high' | %w(a c)
+ 'urgency=high' | %w(WorkerA2 WorkerB)
+ 'urgency=low' | %w(WorkerA)
+ 'urgency=high,low,throttled' | %w(WorkerA WorkerA2 WorkerB WorkerC)
+ 'urgency=low|urgency=throttled' | %w(WorkerA WorkerC)
+ 'urgency!=high' | %w(WorkerA WorkerC)
# name
- 'name=a' | %w(a)
- 'name=a,b' | %w(a b)
- 'name=a,a:2|name=b' | %w(a a:2 b)
- 'name!=a,a:2' | %w(b c)
+ 'name=a' | %w(WorkerA)
+ 'name=a,b' | %w(WorkerA WorkerB)
+ 'name=a,a:2|name=b' | %w(WorkerA WorkerA2 WorkerB)
+ 'name!=a,a:2' | %w(WorkerB WorkerC)
# resource_boundary
- 'resource_boundary=memory' | %w(b c)
- 'resource_boundary=memory,cpu' | %w(a b c)
- 'resource_boundary=memory|resource_boundary=cpu' | %w(a b c)
- 'resource_boundary!=memory,cpu' | %w(a:2)
+ 'resource_boundary=memory' | %w(WorkerB WorkerC)
+ 'resource_boundary=memory,cpu' | %w(WorkerA WorkerB WorkerC)
+ 'resource_boundary=memory|resource_boundary=cpu' | %w(WorkerA WorkerB WorkerC)
+ 'resource_boundary!=memory,cpu' | %w(WorkerA2)
# tags
- 'tags=no_disk_io' | %w(a b)
- 'tags=no_disk_io,git_access' | %w(a a:2 b)
- 'tags=no_disk_io|tags=git_access' | %w(a a:2 b)
- 'tags=no_disk_io&tags=git_access' | %w(a)
- 'tags!=no_disk_io' | %w(a:2 c)
- 'tags!=no_disk_io,git_access' | %w(c)
+ 'tags=no_disk_io' | %w(WorkerA WorkerB)
+ 'tags=no_disk_io,git_access' | %w(WorkerA WorkerA2 WorkerB)
+ 'tags=no_disk_io|tags=git_access' | %w(WorkerA WorkerA2 WorkerB)
+ 'tags=no_disk_io&tags=git_access' | %w(WorkerA)
+ 'tags!=no_disk_io' | %w(WorkerA2 WorkerC)
+ 'tags!=no_disk_io,git_access' | %w(WorkerC)
'tags=unknown_tag' | []
- 'tags!=no_disk_io' | %w(a:2 c)
- 'tags!=no_disk_io,git_access' | %w(c)
- 'tags!=unknown_tag' | %w(a a:2 b c)
+ 'tags!=no_disk_io' | %w(WorkerA2 WorkerC)
+ 'tags!=no_disk_io,git_access' | %w(WorkerC)
+ 'tags!=unknown_tag' | %w(WorkerA WorkerA2 WorkerB WorkerC)
# combinations
- 'feature_category=category_a&urgency=high' | %w(a:2)
- 'feature_category=category_a&urgency=high|feature_category=category_c' | %w(a:2 c)
+ 'feature_category=category_a&urgency=high' | %w(WorkerA2)
+ 'feature_category=category_a&urgency=high|feature_category=category_c' | %w(WorkerA2 WorkerC)
# Match all
- '*' | %w(a a:2 b c)
+ '*' | %w(WorkerA WorkerA2 WorkerB WorkerC)
end
with_them do
@@ -103,7 +113,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
matched_metadatas = worker_metadatas.select do |metadata|
described_class.new(query).match?(metadata)
end
- expect(matched_metadatas.map { |m| m[:name] }).to match_array(expected_metadatas)
+ expect(matched_metadatas.map { |m| m[:worker_name] }).to match_array(expected_metadatas)
end
end
end
@@ -113,7 +123,7 @@ RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
'feature_category="category_a"' | described_class::InvalidTerm
'feature_category=' | described_class::InvalidTerm
'feature_category~category_a' | described_class::InvalidTerm
- 'worker_name=a' | described_class::UnknownPredicate
+ 'invalid_term=a' | described_class::UnknownPredicate
end
with_them do
diff --git a/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
new file mode 100644
index 00000000000..687e35813b1
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_config/worker_router_spec.rb
@@ -0,0 +1,212 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe Gitlab::SidekiqConfig::WorkerRouter do
+ describe '.queue_name_from_worker_name' do
+ using RSpec::Parameterized::TableSyntax
+
+ def create_worker(name, namespace = nil)
+ Class.new.tap do |worker|
+ worker.define_singleton_method(:name) { name }
+ worker.define_singleton_method(:queue_namespace) { namespace }
+ end
+ end
+
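+ # Expected names: drop the "Gitlab::" prefix and the "Worker" suffix, underscore what remains, and prepend the queue namespace when one is given.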
+ where(:worker, :expected_name) do
+ create_worker('PagesWorker') | 'pages'
+ create_worker('PipelineNotificationWorker') | 'pipeline_notification'
+ create_worker('PostReceive') | 'post_receive'
+ create_worker('PostReceive', :git) | 'git:post_receive'
+ create_worker('PipelineHooksWorker', :pipeline_hooks) | 'pipeline_hooks:pipeline_hooks'
+ create_worker('Gitlab::JiraImport::AdvanceStageWorker') | 'jira_import_advance_stage'
+ create_worker('Gitlab::PhabricatorImport::ImportTasksWorker', :importer) | 'importer:phabricator_import_import_tasks'
+ end
+
+ with_them do
+ it 'generates a valid queue name from worker name' do
+ expect(described_class.queue_name_from_worker_name(worker)).to eql(expected_name)
+ end
+ end
+ end
+
+ shared_context 'router examples setup' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'Gitlab::Foo::BarWorker'
+ end
+
+ include ApplicationWorker
+
+ feature_category :feature_a
+ urgency :low
+ worker_resource_boundary :cpu
+ tags :expensive
+ end
+ end
+
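+ # Each example pairs a list of [matching query, queue] routing rules with the queue the worker above should land in; the first matching rule wins, and nil or empty queues fall back to the generated queue name.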
+ where(:routing_rules, :expected_queue) do
+ # Default, no configuration
+ [] | 'foo_bar'
+ # Does not match, fallback to the named queue
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=memory', 'queue_b'],
+ ['tags=cheap', 'queue_c']
+ ] | 'foo_bar'
+ # Match a nil queue, fallback to named queue
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=cpu', nil],
+ ['tags=cheap', 'queue_c']
+ ] | 'foo_bar'
+ # Match an empty string, fallback to named queue
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=cpu', ''],
+ ['tags=cheap', 'queue_c']
+ ] | 'foo_bar'
+ # Match the first rule
+ [
+ ['feature_category=feature_a|urgency=high', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=cheap', 'queue_c']
+ ] | 'queue_a'
+ # Match the first rule 2
+ [
+ ['feature_category=feature_b|urgency=low', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=cheap', 'queue_c']
+ ] | 'queue_a'
+ # Match the third rule
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=memory', 'queue_b'],
+ ['tags=expensive', 'queue_c']
+ ] | 'queue_c'
+ # Match all, first match wins
+ [
+ ['feature_category=feature_a|urgency=low', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=expensive', 'queue_c']
+ ] | 'queue_a'
+ # Match the same rule multiple times, the first match wins
+ [
+ ['feature_category=feature_a', 'queue_a'],
+ ['feature_category=feature_a', 'queue_b'],
+ ['feature_category=feature_a', 'queue_c']
+ ] | 'queue_a'
+ # Match wildcard
+ [
+ ['feature_category=feature_b|urgency=high', 'queue_a'],
+ ['resource_boundary=memory', 'queue_b'],
+ ['tags=cheap', 'queue_c'],
+ ['*', 'default']
+ ] | 'default'
+ # Match wildcard at the top of the chain. It makes the following rules useless
+ [
+ ['*', 'queue_foo'],
+ ['feature_category=feature_a|urgency=low', 'queue_a'],
+ ['resource_boundary=cpu', 'queue_b'],
+ ['tags=expensive', 'queue_c']
+ ] | 'queue_foo'
+ end
+ end
+
+ describe '.global' do
+ before do
+ described_class.remove_instance_variable(:@global_worker_router) if described_class.instance_variable_defined?(:@global_worker_router)
+ end
+
+ after do
+ described_class.remove_instance_variable(:@global_worker_router)
+ end
+
+ context 'valid routing rules' do
+ include_context 'router examples setup'
+
+ with_them do
+ before do
+ stub_config(sidekiq: { routing_rules: routing_rules })
+ end
+
+ it 'routes the worker to the correct queue' do
+ expect(described_class.global.route(worker)).to eql(expected_queue)
+ end
+ end
+ end
+
+ context 'invalid routing rules' do
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'Gitlab::Foo::BarWorker'
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ before do
+ stub_config(sidekiq: { routing_rules: routing_rules })
+ end
+
+ context 'invalid routing rules format' do
+ let(:routing_rules) { ['feature_category=a'] }
+
+ it 'captures the error and falls back to an empty route' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(be_a(described_class::InvalidRoutingRuleError))
+
+ expect(described_class.global.route(worker)).to eql('foo_bar')
+ end
+ end
+
+ context 'invalid predicate' do
+ let(:routing_rules) { [['invalid_term=a', 'queue_a']] }
+
+ it 'captures the error and falls back to an empty route' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(
+ be_a(Gitlab::SidekiqConfig::WorkerMatcher::UnknownPredicate)
+ )
+
+ expect(described_class.global.route(worker)).to eql('foo_bar')
+ end
+ end
+ end
+ end
+
+ describe '#route' do
+ context 'valid routing rules' do
+ include_context 'router examples setup'
+
+ with_them do
+ it 'routes the worker to the correct queue' do
+ router = described_class.new(routing_rules)
+
+ expect(router.route(worker)).to eql(expected_queue)
+ end
+ end
+ end
+
+ context 'invalid routing rules' do
+ it 'raises an exception' do
+ expect { described_class.new(nil) }.to raise_error(described_class::InvalidRoutingRuleError)
+ expect { described_class.new(['feature_category=a']) }.to raise_error(described_class::InvalidRoutingRuleError)
+ expect { described_class.new([['feature_category=a', 'queue_a', 'queue_b']]) }.to raise_error(described_class::InvalidRoutingRuleError)
+ expect do
+ described_class.new(
+ [
+ ['feature_category=a', 'queue_b'],
+ ['feature_category=b']
+ ]
+ )
+ end.to raise_error(described_class::InvalidRoutingRuleError)
+ expect { described_class.new([['invalid_term=a', 'queue_a']]) }.to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::UnknownPredicate)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_config/worker_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
index 05987f95b33..0c43c33ff8c 100644
--- a/spec/lib/gitlab/sidekiq_config/worker_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/worker_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
def create_worker(queue:, **attributes)
namespace = queue.include?(':') && queue.split(':').first
inner_worker = double(
+ name: attributes[:worker_name] || 'Foo::BarWorker',
queue: queue,
queue_namespace: namespace,
get_feature_category: attributes[:feature_category],
@@ -87,6 +88,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
describe 'YAML encoding' do
it 'encodes the worker in YAML as a hash of the queue' do
attributes_a = {
+ worker_name: 'WorkerA',
feature_category: :source_code_management,
has_external_dependencies: false,
urgency: :low,
@@ -97,6 +99,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do
}
attributes_b = {
+ worker_name: 'WorkerB',
feature_category: :not_owned,
has_external_dependencies: true,
urgency: :high,
diff --git a/spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb b/spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb
index 749c7af6f59..f93c0e28fc0 100644
--- a/spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb
+++ b/spec/lib/gitlab/sidekiq_daemon/monitor_spec.rb
@@ -225,7 +225,7 @@ RSpec.describe Gitlab::SidekiqDaemon::Monitor do
after do
thread.kill
- rescue
+ rescue StandardError
end
it 'does log cancellation message' do
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 537844df72f..731c509e221 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect do
call_subject(job, 'test_queue') do
raise ArgumentError, 'Something went wrong'
- rescue
+ rescue StandardError
raise Sidekiq::JobRetry::Skip
end
end.to raise_error(Sidekiq::JobRetry::Skip)
@@ -86,7 +86,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect do
call_subject(job, 'test_queue') do
raise ArgumentError, 'Something went wrong'
- rescue
+ rescue StandardError
raise Sidekiq::JobRetry::Handled
end
end.to raise_error(Sidekiq::JobRetry::Handled)
@@ -293,6 +293,16 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
end
+
+ context 'when instrumentation data is not loaded' do
+ before do
+ allow(logger).to receive(:info)
+ end
+
+ it 'does not raise exception' do
+ expect { subject.call(job.dup, 'test_queue') {} }.not_to raise_error
+ end
+ end
end
describe '#add_time_keys!' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb
index 75b1d9fd87e..7f11e8df97f 100644
--- a/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::ExceedLimitError do
it 'encapsulates worker info' do
exception = described_class.new(TestSizeLimiterWorker, 500, 300)
- expect(exception.message).to eql("TestSizeLimiterWorker job exceeds payload size limit (500/300)")
+ expect(exception.message).to eql("TestSizeLimiterWorker job exceeds payload size limit")
expect(exception.worker_class).to eql(TestSizeLimiterWorker)
expect(exception.size).to be(500)
expect(exception.size_limit).to be(300)
diff --git a/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
new file mode 100644
index 00000000000..b30143ed196
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_migrate_jobs_spec.rb
@@ -0,0 +1,215 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMigrateJobs, :clean_gitlab_redis_queues do
+ def clear_queues
+ Sidekiq::Queue.new('authorized_projects').clear
+ Sidekiq::Queue.new('post_receive').clear
+ Sidekiq::RetrySet.new.clear
+ Sidekiq::ScheduledSet.new.clear
+ end
+
+ around do |example|
+ clear_queues
+ Sidekiq::Testing.disable!(&example)
+ clear_queues
+ end
+
+ describe '#execute', :aggregate_failures do
+ shared_examples 'processing a set' do
+ let(:migrator) { described_class.new(set_name) }
+
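+ # Read the raw entries back from the Redis sorted set as [job hash, score] pairs; the score is the scheduled-at timestamp.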
+ let(:set_after) do
+ Sidekiq.redis { |c| c.zrange(set_name, 0, -1, with_scores: true) }
+ .map { |item, score| [Sidekiq.load_json(item), score] }
+ end
+
+ context 'when the set is empty' do
+ it 'returns the number of scanned and migrated jobs' do
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue')).to eq(scanned: 0, migrated: 0)
+ end
+ end
+
+ context 'when the set is not empty' do
+ it 'returns the number of scanned and migrated jobs' do
+ create_jobs
+
+ expect(migrator.execute({})).to eq(scanned: 4, migrated: 0)
+ end
+ end
+
+ context 'when there are no matching jobs' do
+ it 'does not change any queue names' do
+ create_jobs(include_post_receive: false)
+
+ expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 3, migrated: 0)
+
+ expect(set_after.length).to eq(3)
+ expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects',
+ 'class' => 'AuthorizedProjectsWorker'))
+ end
+ end
+
+ context 'when there are matching jobs' do
+ it 'migrates only the workers matching the given worker from the set' do
+ freeze_time do
+ create_jobs
+
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue')).to eq(scanned: 4, migrated: 3)
+
+ set_after.each.with_index do |(item, score), i|
+ if item['class'] == 'AuthorizedProjectsWorker'
+ expect(item).to include('queue' => 'new_queue', 'args' => [i])
+ else
+ expect(item).to include('queue' => 'post_receive', 'args' => [i])
+ end
+
+ expect(score).to eq(i.succ.hours.from_now.to_i)
+ end
+ end
+ end
+
+ it 'allows migrating multiple workers at once' do
+ freeze_time do
+ create_jobs
+
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'another_queue'))
+ .to eq(scanned: 4, migrated: 4)
+
+ set_after.each.with_index do |(item, score), i|
+ if item['class'] == 'AuthorizedProjectsWorker'
+ expect(item).to include('queue' => 'new_queue', 'args' => [i])
+ else
+ expect(item).to include('queue' => 'another_queue', 'args' => [i])
+ end
+
+ expect(score).to eq(i.succ.hours.from_now.to_i)
+ end
+ end
+ end
+
+ it 'allows migrating multiple workers to the same queue' do
+ freeze_time do
+ create_jobs
+
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'new_queue'))
+ .to eq(scanned: 4, migrated: 4)
+
+ set_after.each.with_index do |(item, score), i|
+ expect(item).to include('queue' => 'new_queue', 'args' => [i])
+ expect(score).to eq(i.succ.hours.from_now.to_i)
+ end
+ end
+ end
+
+ it 'does not try to migrate jobs that are removed from the set during the migration' do
+ freeze_time do
+ create_jobs
+
+ allow(migrator).to receive(:migrate_job).and_wrap_original do |meth, *args|
+ Sidekiq.redis { |c| c.zrem(set_name, args.first) }
+
+ meth.call(*args)
+ end
+
+ expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 0)
+
+ expect(set_after.length).to eq(3)
+ expect(set_after.map(&:first)).to all(include('queue' => 'authorized_projects'))
+ end
+ end
+
+ it 'does not try to migrate unmatched jobs that are added to the set during the migration' do
+ create_jobs
+
+ calls = 0
+
+ allow(migrator).to receive(:migrate_job).and_wrap_original do |meth, *args|
+ if calls == 0
+ travel_to(5.hours.from_now) { create_jobs(include_post_receive: false) }
+ end
+
+ calls += 1
+
+ meth.call(*args)
+ end
+
+ expect(migrator.execute('PostReceive' => 'new_queue')).to eq(scanned: 4, migrated: 1)
+
+ expect(set_after.group_by { |job| job.first['queue'] }.transform_values(&:count))
+ .to eq('authorized_projects' => 6, 'new_queue' => 1)
+ end
+
+ it 'iterates through the entire set of jobs' do
+ 50.times do |i|
+ travel_to(i.hours.from_now) { create_jobs }
+ end
+
+ expect(migrator.execute('NonExistentWorker' => 'new_queue')).to eq(scanned: 200, migrated: 0)
+
+ expect(set_after.length).to eq(200)
+ end
+
+ it 'logs output at the start, finish, and every LOG_FREQUENCY jobs' do
+ freeze_time do
+ create_jobs
+
+ stub_const("#{described_class}::LOG_FREQUENCY", 2)
+
+ logger = Logger.new(StringIO.new)
+ migrator = described_class.new(set_name, logger: logger)
+
+ expect(logger).to receive(:info).with(a_string_matching('Processing')).once.ordered
+ expect(logger).to receive(:info).with(a_string_matching('In progress')).once.ordered
+ expect(logger).to receive(:info).with(a_string_matching('Done')).once.ordered
+
+ expect(migrator.execute('AuthorizedProjectsWorker' => 'new_queue', 'PostReceive' => 'new_queue'))
+ .to eq(scanned: 4, migrated: 4)
+ end
+ end
+ end
+ end
+
+ context 'scheduled jobs' do
+ let(:set_name) { 'schedule' }
+
+ def create_jobs(include_post_receive: true)
+ AuthorizedProjectsWorker.perform_in(1.hour, 0)
+ AuthorizedProjectsWorker.perform_in(2.hours, 1)
+ PostReceive.perform_in(3.hours, 2) if include_post_receive
+ AuthorizedProjectsWorker.perform_in(4.hours, 3)
+ end
+
+ it_behaves_like 'processing a set'
+ end
+
+ context 'retried jobs' do
+ let(:set_name) { 'retry' }
+
+ # Try to mimic as closely as possible what Sidekiq will actually
+ # do to retry a job.
+ def retry_in(klass, time, args)
+ # In Sidekiq 6, this argument will become a JSON string
+ message = { 'class' => klass, 'args' => [args], 'retry' => true }
+
+ allow(klass).to receive(:sidekiq_retry_in_block).and_return(proc { time })
+
+ begin
+ Sidekiq::JobRetry.new.local(klass, message, klass.queue) { raise 'boom' }
+ rescue Sidekiq::JobRetry::Skip
+ # Sidekiq scheduled the retry
+ end
+ end
+
+ def create_jobs(include_post_receive: true)
+ retry_in(AuthorizedProjectsWorker, 1.hour, 0)
+ retry_in(AuthorizedProjectsWorker, 2.hours, 1)
+ retry_in(PostReceive, 3.hours, 2) if include_post_receive
+ retry_in(AuthorizedProjectsWorker, 4.hours, 3)
+ end
+
+ it_behaves_like 'processing a set'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
index 7b3440b40a7..7d36e67ddbf 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::SlashCommands::Presenters::IssueMove do
let_it_be(:other_project) { create(:project) }
let_it_be(:old_issue, reload: true) { create(:issue, project: project) }
- let(:new_issue) { Issues::MoveService.new(project, user).execute(old_issue, other_project) }
+ let(:new_issue) { Issues::MoveService.new(project: project, current_user: user).execute(old_issue, other_project) }
let(:attachment) { subject[:attachments].first }
subject { described_class.new(new_issue).present(old_issue) }
diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb
new file mode 100644
index 00000000000..491e5e9a662
--- /dev/null
+++ b/spec/lib/gitlab/spamcheck/client_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Spamcheck::Client do
+ include_context 'includes Spam constants'
+
+ let(:endpoint) { 'grpc://grpc.test.url' }
+ let_it_be(:user) { create(:user, organization: 'GitLab') }
+ let(:verdict_value) { nil }
+ let(:error_value) { "" }
+
+ let(:attribs_value) do
+ extra_attributes = Google::Protobuf::Map.new(:string, :string)
+ extra_attributes["monitorMode"] = "false"
+ extra_attributes
+ end
+
+ let_it_be(:issue) { create(:issue, description: 'Test issue description') }
+
+ let(:response) do
+ verdict = ::Spamcheck::SpamVerdict.new
+ verdict.verdict = verdict_value
+ verdict.error = error_value
+ verdict.extra_attributes = attribs_value
+ verdict
+ end
+
+ subject { described_class.new.issue_spam?(spam_issue: issue, user: user) }
+
+ before do
+ stub_application_setting(spam_check_endpoint_url: endpoint)
+ end
+
+ describe '#issue_spam?' do
+ before do
+ allow_next_instance_of(::Spamcheck::SpamcheckService::Stub) do |instance|
+ allow(instance).to receive(:check_for_spam_issue).and_return(response)
+ end
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
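+ # Map each gRPC verdict enum to the spam constant the client is expected to return.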
+ where(:verdict, :expected) do
+ ::Spamcheck::SpamVerdict::Verdict::ALLOW | Spam::SpamConstants::ALLOW
+ ::Spamcheck::SpamVerdict::Verdict::CONDITIONAL_ALLOW | Spam::SpamConstants::CONDITIONAL_ALLOW
+ ::Spamcheck::SpamVerdict::Verdict::DISALLOW | Spam::SpamConstants::DISALLOW
+ ::Spamcheck::SpamVerdict::Verdict::BLOCK | Spam::SpamConstants::BLOCK_USER
+ ::Spamcheck::SpamVerdict::Verdict::NOOP | Spam::SpamConstants::NOOP
+ end
+
+ with_them do
+ let(:verdict_value) { verdict }
+
+ it "returns expected spam constant" do
+ expect(subject).to eq([expected, { "monitorMode" => "false" }, ""])
+ end
+ end
+ end
+
+ describe "#build_issue_protobuf", :aggregate_failures do
+ it 'builds the expected protobuf object' do
+ cxt = { action: :create }
+ issue_pb = described_class.new.send(:build_issue_protobuf,
+ issue: issue, user: user,
+ context: cxt)
+ expect(issue_pb.title).to eq issue.title
+ expect(issue_pb.description).to eq issue.description
+ expect(issue_pb.user_in_project).to be false
+ expect(issue_pb.project.project_id).to eq issue.project_id
+ expect(issue_pb.created_at).to eq timestamp_to_protobuf_timestamp(issue.created_at)
+ expect(issue_pb.updated_at).to eq timestamp_to_protobuf_timestamp(issue.updated_at)
+ expect(issue_pb.action).to be ::Spamcheck::Action.lookup(::Spamcheck::Action::CREATE)
+ expect(issue_pb.user.username).to eq user.username
+ end
+ end
+
+ describe '#build_user_proto_buf', :aggregate_failures do
+ it 'builds the expected protobuf object' do
+ user_pb = described_class.new.send(:build_user_protobuf, user)
+ expect(user_pb.username).to eq user.username
+ expect(user_pb.org).to eq user.organization
+ expect(user_pb.created_at).to eq timestamp_to_protobuf_timestamp(user.created_at)
+ expect(user_pb.emails.count).to be 1
+ expect(user_pb.emails.first.email).to eq user.email
+ expect(user_pb.emails.first.verified).to eq user.confirmed?
+ end
+
+ context 'when user has multiple email addresses' do
+ let(:secondary_email) { create(:email, :confirmed, user: user) }
+
+ before do
+ user.emails << secondary_email
+ end
+
+ it 'adds emails to the user pb object' do
+ user_pb = described_class.new.send(:build_user_protobuf, user)
+ expect(user_pb.emails.count).to eq 2
+ expect(user_pb.emails.first.email).to eq user.email
+ expect(user_pb.emails.first.verified).to eq user.confirmed?
+ expect(user_pb.emails.last.email).to eq secondary_email.email
+ expect(user_pb.emails.last.verified).to eq secondary_email.confirmed?
+ end
+ end
+ end
+
+ describe "#build_project_protobuf", :aggregate_failures do
+ it 'builds the expected protobuf object' do
+ project_pb = described_class.new.send(:build_project_protobuf, issue)
+ expect(project_pb.project_id).to eq issue.project_id
+ expect(project_pb.project_path).to eq issue.project.full_path
+ end
+ end
+
+ private
+
+ def timestamp_to_protobuf_timestamp(timestamp)
+ Google::Protobuf::Timestamp.new(seconds: timestamp.to_time.to_i,
+ nanos: timestamp.to_time.nsec)
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
index 0b2055d3db5..8cd3feba339 100644
--- a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
@@ -54,13 +54,14 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
path,
'',
message: 'message',
- branch_name: 'master'
+ branch_name: ref
)
end
+ let(:ref) { 'main' }
let(:path) { 'README.md.erb' }
- it { is_expected.to include(is_supported_content: true) }
+ it { is_expected.to include(branch: ref, is_supported_content: true) }
end
context 'when file path is nested' do
@@ -69,7 +70,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
it { is_expected.to include(base_url: '/namespace/project/-/sse/master%2Flib%2FREADME.md') }
end
- context 'when branch is not master' do
+ context 'when branch is not master or main' do
let(:ref) { 'my-branch' }
it { is_expected.to include(is_supported_content: false) }
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index ad1affdac0b..ed551521b1d 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -2,43 +2,26 @@
require 'spec_helper'
-RSpec.describe ::Gitlab::SubscriptionPortal do
- unless Gitlab.jh?
- describe '.default_subscriptions_url' do
- subject { described_class.default_subscriptions_url }
-
- context 'on non test and non dev environments' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
- end
-
- it 'returns production subscriptions app URL' do
- is_expected.to eq('https://customers.gitlab.com')
- end
- end
-
- context 'on dev environment' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(true)
- end
-
- it 'returns staging subscriptions app url' do
- is_expected.to eq('https://customers.stg.gitlab.com')
- end
- end
+RSpec.describe ::Gitlab::SubscriptionPortal, skip: Gitlab.jh? do
+ using RSpec::Parameterized::TableSyntax
+
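+ # Staging customer-portal URLs are expected in test and development environments, production URLs otherwise.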
+ where(:method_name, :test, :development, :result) do
+ :default_subscriptions_url | false | false | 'https://customers.gitlab.com'
+ :default_subscriptions_url | false | true | 'https://customers.stg.gitlab.com'
+ :default_subscriptions_url | true | false | 'https://customers.stg.gitlab.com'
+ :payment_form_url | false | false | 'https://customers.gitlab.com/payment_forms/cc_validation'
+ :payment_form_url | false | true | 'https://customers.stg.gitlab.com/payment_forms/cc_validation'
+ :payment_form_url | true | false | 'https://customers.stg.gitlab.com/payment_forms/cc_validation'
+ end
- context 'on test environment' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(true)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
- end
+ with_them do
+ subject { described_class.method(method_name).call }
- it 'returns staging subscriptions app url' do
- is_expected.to eq('https://customers.stg.gitlab.com')
- end
- end
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(test)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(development)
end
+
+ it { is_expected.to eq(result) }
end
end
diff --git a/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb b/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb
deleted file mode 100644
index d1024019a9f..00000000000
--- a/spec/lib/gitlab/template/gitlab_ci_syntax_yml_template_spec.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Template::GitlabCiSyntaxYmlTemplate do
- subject { described_class }
-
- describe '#content' do
- it 'loads the full file' do
- template = subject.new(Rails.root.join('lib/gitlab/ci/syntax_templates/Artifacts example.gitlab-ci.yml'))
-
- expect(template.content).to start_with('#')
- end
- end
-
- it_behaves_like 'file template shared examples', 'Artifacts example', '.gitlab-ci.yml'
-end
diff --git a/spec/lib/gitlab/terraform_registry_token_spec.rb b/spec/lib/gitlab/terraform_registry_token_spec.rb
new file mode 100644
index 00000000000..49c1c07e942
--- /dev/null
+++ b/spec/lib/gitlab/terraform_registry_token_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::TerraformRegistryToken do
+ let_it_be(:user) { create(:user) }
+
+ describe '.from_token' do
+ let(:jwt_token) { described_class.from_token(token) }
+
+ subject { described_class.decode(jwt_token.encoded) }
+
+ context 'with a deploy token' do
+ let(:deploy_token) { create(:deploy_token, username: 'deployer') }
+ let(:token) { deploy_token }
+
+ it 'returns the correct token' do
+ expect(subject['token']).to eq jwt_token['token']
+ end
+ end
+
+ context 'with a job' do
+ let_it_be(:job) { create(:ci_build) }
+
+ let(:token) { job }
+
+ it 'returns the correct token' do
+ expect(subject['token']).to eq jwt_token['token']
+ end
+ end
+
+ context 'with a personal access token' do
+ let(:token) { create(:personal_access_token) }
+
+ it 'returns the correct token' do
+ expect(subject['token']).to eq jwt_token['token']
+ end
+ end
+ end
+
+ it_behaves_like 'a gitlab jwt token'
+end
diff --git a/spec/lib/gitlab/tracking/docs/helper_spec.rb b/spec/lib/gitlab/tracking/docs/helper_spec.rb
new file mode 100644
index 00000000000..5f7965502f1
--- /dev/null
+++ b/spec/lib/gitlab/tracking/docs/helper_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::Docs::Helper do
+ let_it_be(:klass) do
+ Class.new do
+ include Gitlab::Tracking::Docs::Helper
+ end
+ end
+
+ describe '#auto_generated_comment' do
+ it 'renders information about missing description' do
+      expect(klass.new.auto_generated_comment).to match(/This documentation is auto generated by a script/)
+ end
+ end
+
+ describe '#render_description' do
+ context 'description is empty' do
+ it 'renders information about missing description' do
+ object = double(description: '')
+
+ expect(klass.new.render_description(object)).to eq('Missing description')
+ end
+ end
+
+ context 'description is present' do
+      it 'renders the description' do
+ object = double(description: 'some description')
+
+ expect(klass.new.render_description(object)).to eq('some description')
+ end
+ end
+ end
+
+ describe '#render_event_taxonomy' do
+    it 'renders a table with the event taxonomy' do
+ attributes = {
+ category: 'epics',
+ action: 'promote',
+ label: nil,
+ property_description: 'String with issue id',
+ value_description: 'Integer issue id'
+ }
+ object = double(attributes: attributes)
+ event_taxonomy = <<~MD.chomp
+ | category | action | label | property | value |
+ |---|---|---|---|---|
+ | `epics` | `promote` | `` | `String with issue id` | `Integer issue id` |
+ MD
+
+ expect(klass.new.render_event_taxonomy(object)).to eq(event_taxonomy)
+ end
+ end
+
+ describe '#md_link_to' do
+    it 'renders a link in md format' do
+ expect(klass.new.md_link_to('zelda', 'link')).to eq('[zelda](link)')
+ end
+ end
+
+ describe '#render_owner' do
+    it 'renders information about the group owning the event' do
+ object = double(product_group: "group::product intelligence")
+
+ expect(klass.new.render_owner(object)).to eq("Owner: `group::product intelligence`")
+ end
+ end
+
+ describe '#render_tiers' do
+    it 'renders information about tiers' do
+ object = double(tiers: %w[bronze silver gold])
+
+ expect(klass.new.render_tiers(object)).to eq("Tiers: `bronze`, `silver`, `gold`")
+ end
+ end
+
+ describe '#render_yaml_definition_path' do
+    it 'renders the relative location of the yaml definition' do
+ object = double(yaml_path: 'config/events/button_click.yaml')
+
+ expect(klass.new.render_yaml_definition_path(object)).to eq("YAML definition: `config/events/button_click.yaml`")
+ end
+ end
+
+ describe '#backtick' do
+    it 'wraps the string in backtick chars' do
+ expect(klass.new.backtick('test')).to eql("`test`")
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking/docs/renderer_spec.rb b/spec/lib/gitlab/tracking/docs/renderer_spec.rb
new file mode 100644
index 00000000000..386aea6c23a
--- /dev/null
+++ b/spec/lib/gitlab/tracking/docs/renderer_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::Docs::Renderer do
+ describe 'contents' do
+ let(:dictionary_path) { described_class::DICTIONARY_PATH }
+ let(:items) { Gitlab::Tracking::EventDefinition.definitions.first(10).to_h }
+
+ it 'generates dictionary for given items' do
+ generated_dictionary = described_class.new(items).contents
+      table_of_contents_items = items.values.map { |item| "#{item.category} #{item.action}" }
+
+ generated_dictionary_keys = RDoc::Markdown
+ .parse(generated_dictionary)
+ .table_of_contents
+ .select { |metric_doc| metric_doc.level == 3 }
+ .map { |item| item.text.match(%r{<code>(.*)</code>})&.captures&.first }
+
+ expect(generated_dictionary_keys).to match_array(table_of_contents_items)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking/event_definition_spec.rb b/spec/lib/gitlab/tracking/event_definition_spec.rb
new file mode 100644
index 00000000000..51c62840819
--- /dev/null
+++ b/spec/lib/gitlab/tracking/event_definition_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Tracking::EventDefinition do
+ let(:attributes) do
+ {
+ description: 'Created issues',
+ category: 'issues',
+ action: 'create',
+ label_description: 'API',
+ property_description: 'The string "issue_id"',
+ value_description: 'ID of the issue',
+ extra_properties: { confidential: false },
+ product_category: 'collection',
+ product_stage: 'growth',
+ product_section: 'dev',
+ product_group: 'group::product analytics',
+ distribution: %w(ee ce),
+ tier: %w(free premium ultimate)
+ }
+ end
+
+ let(:path) { File.join('events', 'issues_create.yml') }
+ let(:definition) { described_class.new(path, attributes) }
+ let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
+
+ def write_metric(metric, path, content)
+ path = File.join(metric, path)
+ dir = File.dirname(path)
+ FileUtils.mkdir_p(dir)
+ File.write(path, content)
+ end
+
+ it 'has all definitions valid' do
+ expect { described_class.definitions }.not_to raise_error(Gitlab::Tracking::InvalidEventError)
+ end
+
+ describe '#validate' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:attribute, :value) do
+ :description | 1
+ :category | nil
+ :action | nil
+ :label_description | 1
+ :property_description | 1
+ :value_description | 1
+ :extra_properties | 'smth'
+ :product_category | 1
+ :product_stage | 1
+ :product_section | nil
+ :product_group | nil
+ :distributions | %[be eb]
+ :tiers | %[pro]
+ end
+
+ with_them do
+ before do
+ attributes[attribute] = value
+ end
+
+      it 'raises an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once).with(instance_of(Gitlab::Tracking::InvalidEventError))
+
+ described_class.new(path, attributes).validate!
+ end
+ end
+ end
+
+ describe '.definitions' do
+ let(:metric1) { Dir.mktmpdir('metric1') }
+ let(:metric2) { Dir.mktmpdir('metric2') }
+
+ before do
+ allow(described_class).to receive(:paths).and_return(
+ [
+ File.join(metric1, '**', '*.yml'),
+ File.join(metric2, '**', '*.yml')
+ ]
+ )
+ end
+
+ subject { described_class.definitions }
+
+ it 'has empty list when there are no definition files' do
+ is_expected.to be_empty
+ end
+
+ it 'has one metric when there is one file' do
+ write_metric(metric1, path, yaml_content)
+
+ is_expected.to be_one
+ end
+
+ after do
+ FileUtils.rm_rf(metric1)
+ FileUtils.rm_rf(metric2)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index dacd08cf12b..289818266bd 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -57,6 +57,22 @@ RSpec.describe Gitlab::Tracking::StandardContext do
expect(snowplow_context.to_json.dig(:data, :source)).to eq(described_class::GITLAB_RAILS_SOURCE)
end
+ context 'plan' do
+ context 'when namespace is not available' do
+ it 'is nil' do
+ expect(snowplow_context.to_json.dig(:data, :plan)).to be_nil
+ end
+ end
+
+ context 'when namespace is available' do
+ subject { described_class.new(namespace: create(:namespace)) }
+
+ it 'contains plan name' do
+ expect(snowplow_context.to_json.dig(:data, :plan)).to eq(Plan::DEFAULT)
+ end
+ end
+ end
+
context 'with extra data' do
subject { described_class.new(extra_key_1: 'extra value 1', extra_key_2: 'extra value 2') }
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 4d856205609..994316f38ee 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -36,6 +36,8 @@ RSpec.describe Gitlab::Tracking do
end
describe '.event' do
+ let(:namespace) { create(:namespace) }
+
shared_examples 'delegates to destination' do |klass|
before do
allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow).to receive(:event)
@@ -47,7 +49,6 @@ RSpec.describe Gitlab::Tracking do
project = double(:project)
user = double(:user)
- namespace = double(:namespace)
expect(Gitlab::Tracking::StandardContext)
.to receive(:new)
diff --git a/spec/lib/gitlab/tree_summary_spec.rb b/spec/lib/gitlab/tree_summary_spec.rb
index a86afa9cba5..3021d92244e 100644
--- a/spec/lib/gitlab/tree_summary_spec.rb
+++ b/spec/lib/gitlab/tree_summary_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::TreeSummary do
'long.txt',
'',
message: message,
- branch_name: project.default_branch_or_master
+ branch_name: project.default_branch
)
end
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index e99d720058a..92e51b8ea23 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -25,6 +25,12 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
let(:definition) { described_class.new(path, attributes) }
let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
+ around do |example|
+ described_class.instance_variable_set(:@definitions, nil)
+ example.run
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
def write_metric(metric, path, content)
path = File.join(metric, path)
dir = File.dirname(path)
@@ -32,6 +38,11 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
File.write(path, content)
end
+ after do
+ # Reset memoized `definitions` result
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
   it 'has all definitions valid' do
expect { described_class.definitions }.not_to raise_error(Gitlab::Usage::Metric::InvalidMetricError)
end
@@ -62,6 +73,9 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:distribution | 'test'
:tier | %w(test ee)
:name | 'count_<adjective_describing>_boards'
+
+ :instrumentation_class | 'Metric_Class'
+ :instrumentation_class | 'metricClass'
end
with_them do
@@ -184,8 +198,6 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
File.join(metric2, '**', '*.yml')
]
)
- # Reset memoized `definitions` result
- described_class.instance_variable_set(:@definitions, nil)
end
after do
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
index 7d8e3056384..0fb3a69df05 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
@@ -65,76 +65,35 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
end
context 'there are aggregated metrics defined' do
+ let(:aggregated_metrics) do
+ [
+ aggregated_metric(name: "gmau_1", source: datasource, time_frame: time_frame, operator: operator)
+ ]
+ end
+
+ let(:results) { { 'gmau_1' => 5 } }
+ let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
+
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:aggregated_metrics).and_return(aggregated_metrics)
end
end
- context 'with AND operator' do
- let(:aggregated_metrics) do
- params = { source: datasource, operator: "AND", time_frame: time_frame }
- [
- aggregated_metric(**params.merge(name: "gmau_1", events: %w[event3 event5])),
- aggregated_metric(**params.merge(name: "gmau_2"))
- ]
- end
-
- it 'returns the number of unique events recorded for every metric in aggregate', :aggregate_failures do
- results = {
- 'gmau_1' => 2,
- 'gmau_2' => 1
- }
- params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
-
- # gmau_1 data is as follow
- # |A| => 4
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(4)
- # |B| => 6
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event5')).and_return(6)
- # |A + B| => 8
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(8)
- # Exclusion inclusion principle formula to calculate intersection of 2 sets
- # |A & B| = (|A| + |B|) - |A + B| => (4 + 6) - 8 => 2
-
- # gmau_2 data is as follow:
- # |A| => 2
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event1')).and_return(2)
- # |B| => 3
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event2')).and_return(3)
- # |C| => 5
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(5)
-
- # |A + B| => 4 therefore |A & B| = (|A| + |B|) - |A + B| => 2 + 3 - 4 => 1
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2])).and_return(4)
- # |A + C| => 6 therefore |A & C| = (|A| + |C|) - |A + C| => 2 + 5 - 6 => 1
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event3])).and_return(6)
- # |B + C| => 7 therefore |B & C| = (|B| + |C|) - |B + C| => 3 + 5 - 7 => 1
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event2 event3])).and_return(7)
- # |A + B + C| => 8
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(8)
- # Exclusion inclusion principle formula to calculate intersection of 3 sets
- # |A & B & C| = (|A & B| + |A & C| + |B & C|) - (|A| + |B| + |C|) + |A + B + C|
- # (1 + 1 + 1) - (2 + 3 + 5) + 8 => 1
+ context 'with OR operator' do
+ let(:operator) { Gitlab::Usage::Metrics::Aggregates::UNION_OF_AGGREGATED_METRICS }
+      it 'returns the number of unique events that occurred for any metric in the aggregate', :aggregate_failures do
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
expect(aggregated_metrics_data).to eq(results)
end
end
- context 'with OR operator' do
- let(:aggregated_metrics) do
- [
- aggregated_metric(name: "gmau_1", source: datasource, time_frame: time_frame, operator: "OR")
- ]
- end
-
- it 'returns the number of unique events occurred for any metric in aggregate', :aggregate_failures do
- results = {
- 'gmau_1' => 5
- }
- params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
+ context 'with AND operator' do
+ let(:operator) { Gitlab::Usage::Metrics::Aggregates::INTERSECTION_OF_AGGREGATED_METRICS }
- expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
+      it 'returns the number of unique events that occurred for all of the metrics in the aggregate', :aggregate_failures do
+ expect(namespace::SOURCES[datasource]).to receive(:calculate_metrics_intersections).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(5)
expect(aggregated_metrics_data).to eq(results)
end
end
@@ -331,36 +290,6 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
it_behaves_like 'database_sourced_aggregated_metrics'
it_behaves_like 'redis_sourced_aggregated_metrics'
it_behaves_like 'db sourced aggregated metrics without database_sourced_aggregated_metrics feature'
-
- context 'metrics union calls' do
- it 'caches intermediate operations', :aggregate_failures do
- events = %w[event1 event2 event3 event5]
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:aggregated_metrics)
- .and_return([aggregated_metric(name: 'gmau_1', events: events, operator: "AND", time_frame: time_frame)])
- end
-
- params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
-
- events.each do |event|
- expect(sources::RedisHll).to receive(:calculate_metrics_union)
- .with(params.merge(metric_names: event))
- .once
- .and_return(0)
- end
-
- 2.upto(4) do |subset_size|
- events.combination(subset_size).each do |events|
- expect(sources::RedisHll).to receive(:calculate_metrics_union)
- .with(params.merge(metric_names: events))
- .once
- .and_return(0)
- end
- end
-
- aggregated_metrics_data
- end
- end
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb
new file mode 100644
index 00000000000..41cb445155e
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/calculations/intersection_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::Calculations::Intersection do
+ let_it_be(:recorded_at) { Time.current.to_i }
+ let_it_be(:start_date) { 4.weeks.ago.to_date }
+ let_it_be(:end_date) { Date.current }
+
+ shared_examples 'aggregated_metrics_data with source' do
+ context 'with AND operator' do
+ let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
+
+ context 'with even number of metrics' do
+ it 'calculates intersection correctly', :aggregate_failures do
+          # gmau_1 data is as follows
+ # |A| => 4
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(4)
+ # |B| => 6
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event5')).and_return(6)
+ # |A + B| => 8
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event3 event5])).and_return(8)
+ # Exclusion inclusion principle formula to calculate intersection of 2 sets
+ # |A & B| = (|A| + |B|) - |A + B| => (4 + 6) - 8 => 2
+ expect(source.calculate_metrics_intersections(metric_names: %w[event3 event5], start_date: start_date, end_date: end_date, recorded_at: recorded_at)).to eq(2)
+ end
+ end
+
+ context 'with odd number of metrics' do
+ it 'calculates intersection correctly', :aggregate_failures do
+          # gmau_2 data is as follows:
+ # |A| => 2
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event1')).and_return(2)
+ # |B| => 3
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event2')).and_return(3)
+ # |C| => 5
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: 'event3')).and_return(5)
+
+ # |A + B| => 4 therefore |A & B| = (|A| + |B|) - |A + B| => 2 + 3 - 4 => 1
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2])).and_return(4)
+ # |A + C| => 6 therefore |A & C| = (|A| + |C|) - |A + C| => 2 + 5 - 6 => 1
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event3])).and_return(6)
+ # |B + C| => 7 therefore |B & C| = (|B| + |C|) - |B + C| => 3 + 5 - 7 => 1
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event2 event3])).and_return(7)
+ # |A + B + C| => 8
+ expect(source).to receive(:calculate_metrics_union).with(params.merge(metric_names: %w[event1 event2 event3])).and_return(8)
+ # Exclusion inclusion principle formula to calculate intersection of 3 sets
+ # |A & B & C| = (|A & B| + |A & C| + |B & C|) - (|A| + |B| + |C|) + |A + B + C|
+ # (1 + 1 + 1) - (2 + 3 + 5) + 8 => 1
+ expect(source.calculate_metrics_intersections(metric_names: %w[event1 event2 event3], start_date: start_date, end_date: end_date, recorded_at: recorded_at)).to eq(1)
+ end
+ end
+ end
+ end
+
+ describe '.aggregated_metrics_data' do
+ let(:source) do
+ Class.new do
+ extend Gitlab::Usage::Metrics::Aggregates::Sources::Calculations::Intersection
+ end
+ end
+
+ it 'caches intermediate operations', :aggregate_failures do
+ events = %w[event1 event2 event3 event5]
+
+ params = { start_date: start_date, end_date: end_date, recorded_at: recorded_at }
+
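+      # Each single-event union and every multi-event subset union below is expected exactly once,
+      # so the intersection calculation must reuse memoized intermediate unions rather than recompute them.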
+ events.each do |event|
+ expect(source).to receive(:calculate_metrics_union)
+ .with(params.merge(metric_names: event))
+ .once
+ .and_return(0)
+ end
+
+ 2.upto(4) do |subset_size|
+ events.combination(subset_size).each do |events|
+ expect(source).to receive(:calculate_metrics_union)
+ .with(params.merge(metric_names: events))
+ .once
+ .and_return(0)
+ end
+ end
+
+ expect(source.calculate_metrics_intersections(metric_names: events, start_date: start_date, end_date: end_date, recorded_at: recorded_at)).to eq(0)
+ end
+
+ it_behaves_like 'aggregated_metrics_data with source'
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
index db878828cd6..1ae4c9414dd 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
@@ -12,11 +12,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let(:metric_2) { 'metric_2' }
let(:metric_names) { [metric_1, metric_2] }
- describe '.calculate_events_union' do
- subject(:calculate_metrics_union) do
- described_class.calculate_metrics_union(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
- end
-
+ describe 'metric calculations' do
before do
[
{
@@ -36,23 +32,55 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
end
end
- it 'returns the number of unique events in the union of all metrics' do
- expect(calculate_metrics_union.round(2)).to eq(3.12)
- end
+ describe '.calculate_events_union' do
+ subject(:calculate_metrics_union) do
+ described_class.calculate_metrics_union(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ end
+
+ it 'returns the number of unique events in the union of all metrics' do
+ expect(calculate_metrics_union.round(2)).to eq(3.12)
+ end
+
+ context 'when there is no aggregated data saved' do
+ let(:metric_names) { [metric_1, 'i do not have any records'] }
+
+ it 'raises error when union data is missing' do
+ expect { calculate_metrics_union }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
+ end
+ end
- context 'when there is no aggregated data saved' do
- let(:metric_names) { [metric_1, 'i do not have any records'] }
+ context 'when there is only one metric defined as aggregated' do
+ let(:metric_names) { [metric_1] }
- it 'raises error when union data is missing' do
- expect { calculate_metrics_union }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
+ it 'returns the number of unique events for that metric' do
+ expect(calculate_metrics_union.round(2)).to eq(2.08)
+ end
end
end
- context 'when there is only one metric defined as aggregated' do
- let(:metric_names) { [metric_1] }
+ describe '.calculate_metrics_intersections' do
+ subject(:calculate_metrics_intersections) do
+ described_class.calculate_metrics_intersections(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ end
+
+ it 'returns the number of common events in the intersection of all metrics' do
+ expect(calculate_metrics_intersections.round(2)).to eq(1.04)
+ end
+
+ context 'when there is no aggregated data saved' do
+ let(:metric_names) { [metric_1, 'i do not have any records'] }
- it 'returns the number of unique events for that metric' do
- expect(calculate_metrics_union.round(2)).to eq(2.08)
+ it 'raises error when union data is missing' do
+ expect { calculate_metrics_intersections }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
+ end
+ end
+
+ context 'when there is only one metric defined in aggregate' do
+ let(:metric_names) { [metric_1] }
+
+ it 'returns the number of common/unique events for the intersection of that metric' do
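+        # With a single metric, the intersection degenerates to that metric's own estimated count (the same ~2.08 as its union above).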
+ expect(calculate_metrics_intersections.round(2)).to eq(2.08)
+ end
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb
index af2de5ea343..83b155b41b1 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/redis_hll_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll do
- describe '.calculate_events_union' do
- let(:event_names) { %w[event_a event_b] }
- let(:start_date) { 7.days.ago }
- let(:end_date) { Date.current }
+ let_it_be(:event_names) { %w[event_a event_b] }
+ let_it_be(:start_date) { 7.days.ago }
+ let_it_be(:end_date) { Date.current }
+ let_it_be(:recorded_at) { Time.current }
+ describe '.calculate_events_union' do
subject(:calculate_metrics_union) do
described_class.calculate_metrics_union(metric_names: event_names, start_date: start_date, end_date: end_date, recorded_at: nil)
end
@@ -26,4 +27,30 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll do
expect { calculate_metrics_union }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
end
end
+
+ describe '.calculate_metrics_intersections' do
+ subject(:calculate_metrics_intersections) do
+ described_class.calculate_metrics_intersections(metric_names: event_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ end
+
+ it 'uses values returned by union to compute the intersection' do
+ event_names.each do |event|
+ expect(Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll).to receive(:calculate_metrics_union)
+ .with(metric_names: event, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ .and_return(5)
+ end
+
+ expect(Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll).to receive(:calculate_metrics_union)
+ .with(metric_names: event_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
+ .and_return(2)
+
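+      # Inclusion-exclusion: |A ∩ B| = |A| + |B| - |A ∪ B| = 5 + 5 - 2 = 8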
+ expect(calculate_metrics_intersections).to eq(8)
+ end
+
+    it 'raises an error when the union is not available' do
+ allow(Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll).to receive(:calculate_metrics_union).and_raise(Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable)
+
+ expect { calculate_metrics_intersections }.to raise_error(Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable)
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb
new file mode 100644
index 00000000000..52c1ccdcd47
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_boards_metric_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountBoardsMetric do
+ let_it_be(:board) { create(:board) }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb
new file mode 100644
index 00000000000..c3b59904f41
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_issues_metric_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountIssuesMetric do
+ let_it_be(:issue) { create(:issue) }
+
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb
new file mode 100644
index 00000000000..9f4686ab6cd
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_creating_issues_metric_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountUsersCreatingIssuesMetric do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:issues) { create_list(:issue, 2, author: author, created_at: 4.days.ago) }
+ let_it_be(:old_issue) { create(:issue, author: author, created_at: 2.months.ago) }
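+  # Only one distinct author exists: the 4-day-old issues fall inside the 28d window, while the 2-month-old issue only contributes to the all-time count.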
+
+ context 'with all time frame' do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'all', data_source: 'database' }, 1
+ end
+
+ context 'for 28d time frame' do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'database' }, 1
+ end
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb
new file mode 100644
index 00000000000..7adba825a13
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/count_users_using_approve_quick_action_metric_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountUsersUsingApproveQuickActionMetric, :clean_gitlab_redis_shared_state do
+ before do
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 1.week.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 1, time: 2.weeks.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.weeks.ago)
+ Gitlab::UsageDataCounters::HLLRedisCounter.track_event(:i_quickactions_approve, values: 2, time: 2.months.ago)
+ end
+
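+  # The 28d window covers the 1- and 2-week-old events (distinct users 1 and 2), the 7d window only the 1-week-old event (user 1); the 2-month-old event is outside both.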
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '28d', data_source: 'redis_hll' }, 2
+ it_behaves_like 'a correct instrumented metric value', { time_frame: '7d', data_source: 'redis_hll' }, 1
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb
new file mode 100644
index 00000000000..83e07200025
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/hostname_metric_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::HostnameMetric do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' }, Gitlab.config.gitlab.host
+end
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb
new file mode 100644
index 00000000000..212dd3dc851
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UuidMetric do
+ it_behaves_like 'a correct instrumented metric value', { time_frame: 'none' }, Gitlab::CurrentSettings.uuid
+end
diff --git a/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb b/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb
new file mode 100644
index 00000000000..91c27825cce
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::KeyPathProcessor do
+ describe '#unflatten_default_path' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:key_path, :value, :expected_hash) do
+ 'uuid' | nil | { uuid: nil }
+ 'uuid' | '1111' | { uuid: '1111' }
+ 'counts.issues' | nil | { counts: { issues: nil } }
+ 'counts.issues' | 100 | { counts: { issues: 100 } }
+ 'usage_activity_by_stage.verify.ci_builds' | 100 | { usage_activity_by_stage: { verify: { ci_builds: 100 } } }
+ end
+
+ with_them do
+ subject { described_class.process(key_path, value) }
+
+ it { is_expected.to eq(expected_hash) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 82db3d94493..5f66387c82b 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -28,14 +28,6 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
it 'does not track edit actions if author is not present' do
expect(track_action(author: nil)).to be_nil
end
-
- context 'when feature flag track_editor_edit_actions is disabled' do
- it 'does not track edit actions' do
- stub_feature_flags(track_editor_edit_actions: false)
-
- expect(track_action(author: user1)).to be_nil
- end
- end
end
context 'for web IDE edit actions' do
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 9fc28f6c4ec..4efacae0a48 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -45,6 +45,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'quickactions',
'pipeline_authoring',
'epics_usage',
+ 'epic_boards_usage',
'secure'
)
end
diff --git a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
index 8f9a3e0cd9e..e7edb8b9cf1 100644
--- a/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/kubernetes_agent_counter_spec.rb
@@ -3,21 +3,57 @@
require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::KubernetesAgentCounter do
- it_behaves_like 'a redis usage counter', 'Kubernetes Agent', :gitops_sync
+ described_class::KNOWN_EVENTS.each do |event|
+ it_behaves_like 'a redis usage counter', 'Kubernetes Agent', event
+ it_behaves_like 'a redis usage counter with totals', :kubernetes_agent, event => 1
+ end
+
+ describe '.increment_event_counts' do
+ let(:events) do
+ {
+ 'gitops_sync' => 1,
+ 'k8s_api_proxy_request' => 2
+ }
+ end
- it_behaves_like 'a redis usage counter with totals', :kubernetes_agent, gitops_sync: 1
+ subject { described_class.increment_event_counts(events) }
- describe '.increment_gitops_sync' do
- it 'increments the gtops_sync counter by the new increment amount' do
- described_class.increment_gitops_sync(7)
- described_class.increment_gitops_sync(2)
- described_class.increment_gitops_sync(0)
+ it 'increments the specified counters by the new increment amount' do
+ described_class.increment_event_counts(events)
+ described_class.increment_event_counts(events)
+ described_class.increment_event_counts(events)
- expect(described_class.totals).to eq(kubernetes_agent_gitops_sync: 9)
+ expect(described_class.totals).to eq(kubernetes_agent_gitops_sync: 3, kubernetes_agent_k8s_api_proxy_request: 6)
end
- it 'raises for negative numbers' do
- expect { described_class.increment_gitops_sync(-1) }.to raise_error(ArgumentError)
+ context 'event is unknown' do
+ let(:events) do
+ {
+ 'gitops_sync' => 1,
+ 'other_event' => 2
+ }
+ end
+
+ it 'raises an ArgumentError' do
+ expect(described_class).not_to receive(:increment_by)
+
+ expect { subject }.to raise_error(ArgumentError, 'unknown event other_event')
+ end
+ end
+
+ context 'increment is negative' do
+ let(:events) do
+ {
+ 'gitops_sync' => -1,
+ 'k8s_api_proxy_request' => 2
+ }
+ end
+
+ it 'raises an ArgumentError' do
+ expect(described_class).not_to receive(:increment_by)
+
+ expect { subject }.to raise_error(ArgumentError, 'gitops_sync count must be greater than or equal to zero')
+ end
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
index 1be2a83f98f..c484595ee71 100644
--- a/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/package_event_counter_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::UsageDataCounters::PackageEventCounter, :clean_gitlab_red
end
it 'includes the right events' do
- expect(described_class::KNOWN_EVENTS.size).to eq 48
+ expect(described_class::KNOWN_EVENTS.size).to eq 51
end
described_class::KNOWN_EVENTS.each do |event|
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
new file mode 100644
index 00000000000..18acd767c6d
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataMetrics do
+ describe '.uncached_data' do
+ subject { described_class.uncached_data }
+
+ around do |example|
+ described_class.instance_variable_set(:@definitions, nil)
+ example.run
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+    context 'with instrumentation_class' do
+ it 'includes top level keys' do
+ expect(subject).to include(:uuid)
+ expect(subject).to include(:hostname)
+ end
+
+      it 'includes counts keys for boards' do
+ expect(subject[:counts]).to include(:boards)
+ end
+
+      it 'includes i_quickactions_approve monthly and weekly keys' do
+ expect(subject[:redis_hll_counters][:quickactions]).to include(:i_quickactions_approve_monthly)
+ expect(subject[:redis_hll_counters][:quickactions]).to include(:i_quickactions_approve_weekly)
+ end
+
+      it 'includes counts keys for issues' do
+ expect(subject[:counts]).to include(:issues)
+ end
+
+ it 'includes usage_activity_by_stage keys' do
+ expect(subject[:usage_activity_by_stage][:plan]).to include(:issues)
+ end
+
+ it 'includes usage_activity_by_stage_monthly keys' do
+ expect(subject[:usage_activity_by_stage_monthly][:plan]).to include(:issues)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 718ab3b2d95..695859c8e6e 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -59,6 +59,14 @@ RSpec.describe Gitlab::UsageDataQueries do
end
end
+ describe '.histogram' do
+ it 'returns the histogram sql' do
+ expect(described_class.histogram(AlertManagement::HttpIntegration.active,
+ :project_id, buckets: 1..2, bucket_size: 101))
+ .to match(/^WITH "count_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
+ end
+ end
+
describe 'min/max methods' do
it 'returns nil' do
# user min/max
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 01701f7aebd..d4b6ac09261 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -172,6 +172,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
another_project = create(:project, :repository, creator: another_user)
create(:remote_mirror, project: another_project, enabled: false)
create(:snippet, author: user)
+ create(:suggestion, note: create(:note, project: project))
end
expect(described_class.usage_activity_by_stage_create({})).to include(
@@ -181,7 +182,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects_with_disable_overriding_approvers_per_merge_request: 2,
projects_without_disable_overriding_approvers_per_merge_request: 6,
remote_mirrors: 2,
- snippets: 2
+ snippets: 2,
+ suggestions: 2
)
expect(described_class.usage_activity_by_stage_create(described_class.last_28_days_time_period)).to include(
deploy_keys: 1,
@@ -190,7 +192,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
projects_with_disable_overriding_approvers_per_merge_request: 1,
projects_without_disable_overriding_approvers_per_merge_request: 3,
remote_mirrors: 1,
- snippets: 1
+ snippets: 1,
+ suggestions: 1
)
end
end
@@ -571,7 +574,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:projects_with_repositories_enabled]).to eq(3)
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
expect(count_data[:projects_with_tracing_enabled]).to eq(1)
- expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
expect(count_data[:projects_with_enabled_alert_integrations]).to eq(1)
expect(count_data[:projects_with_prometheus_alerts]).to eq(2)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
@@ -745,10 +747,34 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
+ describe '.runners_usage' do
+ before do
+ project = build(:project)
+ create_list(:ci_runner, 2, :instance_type, :online)
+ create(:ci_runner, :group, :online)
+ create(:ci_runner, :group, :inactive)
+ create_list(:ci_runner, 3, :project_type, :online, projects: [project])
+ end
+
+ subject { described_class.runners_usage }
+
+ it 'gathers runner usage counts correctly' do
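+      # 7 runners total: 2 online instance runners, 2 group runners (only the online one is active), and 3 online project runners.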
+ expect(subject[:ci_runners]).to eq(7)
+ expect(subject[:ci_runners_instance_type_active]).to eq(2)
+ expect(subject[:ci_runners_group_type_active]).to eq(1)
+ expect(subject[:ci_runners_project_type_active]).to eq(3)
+
+ expect(subject[:ci_runners_instance_type_active_online]).to eq(2)
+ expect(subject[:ci_runners_group_type_active_online]).to eq(1)
+ expect(subject[:ci_runners_project_type_active_online]).to eq(3)
+ end
+ end
+
describe '.usage_counters' do
subject { described_class.usage_counters }
it { is_expected.to include(:kubernetes_agent_gitops_sync) }
+ it { is_expected.to include(:kubernetes_agent_k8s_api_proxy_request) }
it { is_expected.to include(:static_site_editor_views) }
it { is_expected.to include(:package_events_i_package_pull_package) }
it { is_expected.to include(:package_events_i_package_delete_package_by_user) }
@@ -1158,8 +1184,17 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
describe ".system_usage_data_settings" do
+ let(:prometheus_client) { double(Gitlab::PrometheusClient) }
+
before do
allow(described_class).to receive(:operating_system).and_return('ubuntu-20.04')
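+      # The stub mimics Prometheus's instant-query result shape: a [unix timestamp, string value] pair per series.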
+ expect(prometheus_client).to receive(:query).with(/gitlab_usage_ping:gitaly_apdex:ratio_avg_over_time_5m/).and_return([
+ {
+ 'metric' => {},
+ 'value' => [1616016381.473, '0.95']
+ }
+ ])
+ expect(described_class).to receive(:with_prometheus_client).and_yield(prometheus_client)
end
subject { described_class.system_usage_data_settings }
@@ -1171,6 +1206,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
it 'populates operating system information' do
expect(subject[:settings][:operating_system]).to eq('ubuntu-20.04')
end
+
+ it 'gathers gitaly apdex', :aggregate_failures do
+ expect(subject[:settings][:gitaly_apdex]).to be_within(0.001).of(0.95)
+ end
end
end
@@ -1291,10 +1330,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
'p_analytics_repo' => 123,
'i_analytics_cohorts' => 123,
'i_analytics_dev_ops_score' => 123,
- 'i_analytics_dev_ops_adoption' => 123,
'i_analytics_instance_statistics' => 123,
'p_analytics_merge_request' => 123,
- 'g_analytics_merge_request' => 123,
+ 'i_analytics_dev_ops_adoption' => 123,
+ 'users_viewing_analytics_group_devops_adoption' => 123,
'analytics_unique_visits_for_any_target' => 543,
'analytics_unique_visits_for_any_target_monthly' => 987
}
@@ -1426,6 +1465,86 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
+ describe '.email_campaign_counts' do
+ subject { described_class.send(:email_campaign_counts) }
+
+ context 'when queries time out' do
+ before do
+ allow_any_instance_of(ActiveRecord::Relation)
+ .to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
+ end
+
+ it 'returns -1 for email campaign data' do
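+        # Every counter falls back to -1 because the stubbed count query raises ActiveRecord::StatementInvalid.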
+ expected_data = {
+ "in_product_marketing_email_create_0_sent" => -1,
+ "in_product_marketing_email_create_0_cta_clicked" => -1,
+ "in_product_marketing_email_create_1_sent" => -1,
+ "in_product_marketing_email_create_1_cta_clicked" => -1,
+ "in_product_marketing_email_create_2_sent" => -1,
+ "in_product_marketing_email_create_2_cta_clicked" => -1,
+ "in_product_marketing_email_verify_0_sent" => -1,
+ "in_product_marketing_email_verify_0_cta_clicked" => -1,
+ "in_product_marketing_email_verify_1_sent" => -1,
+ "in_product_marketing_email_verify_1_cta_clicked" => -1,
+ "in_product_marketing_email_verify_2_sent" => -1,
+ "in_product_marketing_email_verify_2_cta_clicked" => -1,
+ "in_product_marketing_email_trial_0_sent" => -1,
+ "in_product_marketing_email_trial_0_cta_clicked" => -1,
+ "in_product_marketing_email_trial_1_sent" => -1,
+ "in_product_marketing_email_trial_1_cta_clicked" => -1,
+ "in_product_marketing_email_trial_2_sent" => -1,
+ "in_product_marketing_email_trial_2_cta_clicked" => -1,
+ "in_product_marketing_email_team_0_sent" => -1,
+ "in_product_marketing_email_team_0_cta_clicked" => -1,
+ "in_product_marketing_email_team_1_sent" => -1,
+ "in_product_marketing_email_team_1_cta_clicked" => -1,
+ "in_product_marketing_email_team_2_sent" => -1,
+ "in_product_marketing_email_team_2_cta_clicked" => -1
+ }
+
+ expect(subject).to eq(expected_data)
+ end
+ end
+
+ context 'when there are entries' do
+ before do
+ create(:in_product_marketing_email, track: :create, series: 0, cta_clicked_at: Time.zone.now)
+ create(:in_product_marketing_email, track: :verify, series: 0)
+ end
+
+ it 'gathers email campaign data' do
+ expected_data = {
+ "in_product_marketing_email_create_0_sent" => 1,
+ "in_product_marketing_email_create_0_cta_clicked" => 1,
+ "in_product_marketing_email_create_1_sent" => 0,
+ "in_product_marketing_email_create_1_cta_clicked" => 0,
+ "in_product_marketing_email_create_2_sent" => 0,
+ "in_product_marketing_email_create_2_cta_clicked" => 0,
+ "in_product_marketing_email_verify_0_sent" => 1,
+ "in_product_marketing_email_verify_0_cta_clicked" => 0,
+ "in_product_marketing_email_verify_1_sent" => 0,
+ "in_product_marketing_email_verify_1_cta_clicked" => 0,
+ "in_product_marketing_email_verify_2_sent" => 0,
+ "in_product_marketing_email_verify_2_cta_clicked" => 0,
+ "in_product_marketing_email_trial_0_sent" => 0,
+ "in_product_marketing_email_trial_0_cta_clicked" => 0,
+ "in_product_marketing_email_trial_1_sent" => 0,
+ "in_product_marketing_email_trial_1_cta_clicked" => 0,
+ "in_product_marketing_email_trial_2_sent" => 0,
+ "in_product_marketing_email_trial_2_cta_clicked" => 0,
+ "in_product_marketing_email_team_0_sent" => 0,
+ "in_product_marketing_email_team_0_cta_clicked" => 0,
+ "in_product_marketing_email_team_1_sent" => 0,
+ "in_product_marketing_email_team_1_cta_clicked" => 0,
+ "in_product_marketing_email_team_2_sent" => 0,
+ "in_product_marketing_email_team_2_cta_clicked" => 0
+ }
+
+ expect(subject).to eq(expected_data)
+ end
+ end
+ end
+
describe '.snowplow_event_counts' do
let_it_be(:time_period) { { collector_tstamp: 8.days.ago..1.day.ago } }
diff --git a/spec/lib/learn_gitlab/onboarding_spec.rb b/spec/lib/learn_gitlab/onboarding_spec.rb
new file mode 100644
index 00000000000..6b4be65f3b2
--- /dev/null
+++ b/spec/lib/learn_gitlab/onboarding_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LearnGitlab::Onboarding do
+ describe '#completed_percentage' do
+ let(:completed_actions) { {} }
+ let(:onboarding_progress) { build(:onboarding_progress, namespace: namespace, **completed_actions) }
+ let(:namespace) { build(:namespace) }
+
+ let_it_be(:tracked_action_columns) do
+ tracked_actions = described_class::ACTION_ISSUE_IDS.keys + described_class::ACTION_DOC_URLS.keys
+ tracked_actions.map { |key| OnboardingProgress.column_name(key) }
+ end
+
+ before do
+ expect(OnboardingProgress).to receive(:find_by).with(namespace: namespace).and_return(onboarding_progress)
+ end
+
+ subject { described_class.new(namespace).completed_percentage }
+
+ context 'when no onboarding_progress exists' do
+ let(:onboarding_progress) { nil }
+
+ it { is_expected.to eq(0) }
+ end
+
+ context 'when no action has been completed' do
+ it { is_expected.to eq(0) }
+ end
+
+ context 'when one action has been completed' do
+ let(:completed_actions) { Hash[tracked_action_columns.first, Time.current] }
+
+ it { is_expected.to eq(11) }
+ end
+
+ context 'when all tracked actions have been completed' do
+ let(:completed_actions) do
+ tracked_action_columns.to_h { |action| [action, Time.current] }
+ end
+
+ it { is_expected.to eq(100) }
+ end
+ end
+end
diff --git a/spec/lib/learn_gitlab_spec.rb b/spec/lib/learn_gitlab/project_spec.rb
index abfd82999c3..523703761bf 100644
--- a/spec/lib/learn_gitlab_spec.rb
+++ b/spec/lib/learn_gitlab/project_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe LearnGitlab do
+RSpec.describe LearnGitlab::Project do
let_it_be(:current_user) { create(:user) }
- let_it_be(:learn_gitlab_project) { create(:project, name: LearnGitlab::PROJECT_NAME) }
- let_it_be(:learn_gitlab_board) { create(:board, project: learn_gitlab_project, name: LearnGitlab::BOARD_NAME) }
- let_it_be(:learn_gitlab_label) { create(:label, project: learn_gitlab_project, name: LearnGitlab::LABEL_NAME) }
+ let_it_be(:learn_gitlab_project) { create(:project, name: LearnGitlab::Project::PROJECT_NAME) }
+ let_it_be(:learn_gitlab_board) { create(:board, project: learn_gitlab_project, name: LearnGitlab::Project::BOARD_NAME) }
+ let_it_be(:learn_gitlab_label) { create(:label, project: learn_gitlab_project, name: LearnGitlab::Project::LABEL_NAME) }
before do
learn_gitlab_project.add_developer(current_user)
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 12c6cbe03b3..4b374452c0a 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -126,6 +126,16 @@ RSpec.describe ObjectStorage::DirectUpload do
expect(s3_config.keys).not_to include(%i(ServerSideEncryption SSEKMSKeyID))
end
+ context 'when no region is specified' do
+ before do
+ raw_config.delete(:region)
+ end
+
+ it 'defaults to us-east-1' do
+ expect(subject[:ObjectStorage][:S3Config][:Region]).to eq('us-east-1')
+ end
+ end
+
context 'when feature flag is disabled' do
before do
stub_feature_flags(use_workhorse_s3_client: false)
diff --git a/spec/lib/security/ci_configuration/sast_build_actions_spec.rb b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
index c8f9430eff9..6a1ea68fdd6 100644
--- a/spec/lib/security/ci_configuration/sast_build_actions_spec.rb
+++ b/spec/lib/security/ci_configuration/sast_build_action_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Security::CiConfiguration::SastBuildActions do
+RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:default_sast_values) do
{ 'global' =>
[
@@ -85,8 +85,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:action]).to eq('update')
- expect(result.first[:content]).to eq(sast_yaml_two_includes)
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(sast_yaml_two_includes)
end
end
@@ -96,12 +96,12 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:action]).to eq('update')
- expect(result.first[:content]).to eq(sast_yaml_two_includes)
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(sast_yaml_two_includes)
end
it 'reports defaults have been overwritten' do
- expect(result.first[:default_values_overwritten]).to eq(true)
+ expect(result[:default_values_overwritten]).to eq(true)
end
end
end
@@ -112,8 +112,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:action]).to eq('update')
- expect(result.first[:content]).to eq(sast_yaml_all_params)
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(sast_yaml_all_params)
end
end
@@ -124,11 +124,11 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set)
+ expect(result[:content]).to eq(sast_yaml_with_no_variables_set)
end
it 'reports defaults have not been overwritten' do
- expect(result.first[:default_values_overwritten]).to eq(false)
+ expect(result[:default_values_overwritten]).to eq(false)
end
context 'analyzer section' do
@@ -137,7 +137,7 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params_with_analyzer_info, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers)
+ expect(result[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers)
end
context 'analyzers are disabled' do
@@ -146,9 +146,9 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params_with_analyzer_info, gitlab_ci_content).generate }
it 'writes SAST_EXCLUDED_ANALYZERS' do
- stub_const('Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS', 'bandit, brakeman, flawfinder')
+ stub_const('Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS', 'bandit, brakeman, flawfinder')
- expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers)
+ expect(result[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers)
end
end
@@ -158,9 +158,9 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params_with_all_analyzers_enabled, gitlab_ci_content).generate }
it 'does not write SAST_DEFAULT_ANALYZERS or SAST_EXCLUDED_ANALYZERS' do
- stub_const('Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS', 'brakeman, flawfinder')
+ stub_const('Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS', 'brakeman, flawfinder')
- expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set)
+ expect(result[:content]).to eq(sast_yaml_with_no_variables_set)
end
end
end
@@ -186,8 +186,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:action]).to eq('update')
- expect(result.first[:content]).to eq(sast_yaml_updated_stage)
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(sast_yaml_updated_stage)
end
end
@@ -197,8 +197,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:action]).to eq('update')
- expect(result.first[:content]).to eq(sast_yaml_variable_section_added)
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(sast_yaml_variable_section_added)
end
end
@@ -208,8 +208,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:action]).to eq('update')
- expect(result.first[:content]).to eq(sast_yaml_sast_section_added)
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(sast_yaml_sast_section_added)
end
end
@@ -219,8 +219,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:action]).to eq('update')
- expect(result.first[:content]).to eq(sast_yaml_sast_variables_section_added)
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(sast_yaml_sast_variables_section_added)
end
end
@@ -289,7 +289,7 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:content]).to eq(sast_yaml_with_no_variables_set)
+ expect(result[:content]).to eq(sast_yaml_with_no_variables_set)
end
end
@@ -297,7 +297,7 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
- expect(result.first[:content]).to eq(sast_yaml_all_params)
+ expect(result[:content]).to eq(sast_yaml_all_params)
end
end
end
@@ -308,22 +308,22 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
before do
- allow_next_instance_of(described_class) do |sast_build_actions|
- allow(sast_build_actions).to receive(:auto_devops_stages).and_return(fast_auto_devops_stages)
+ allow_next_instance_of(described_class) do |sast_build_action|
+ allow(sast_build_action).to receive(:auto_devops_stages).and_return(fast_auto_devops_stages)
end
end
it 'generates the correct YML' do
- expect(result.first[:content]).to eq(auto_devops_with_custom_stage)
+ expect(result[:content]).to eq(auto_devops_with_custom_stage)
end
end
end
- describe 'Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS' do
- subject(:variable) {Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS}
+ describe 'Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS' do
+ subject(:variable) { Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS }
it 'is sorted alphabetically' do
- sorted_variable = Security::CiConfiguration::SastBuildActions::SAST_DEFAULT_ANALYZERS
+ sorted_variable = Security::CiConfiguration::SastBuildAction::SAST_DEFAULT_ANALYZERS
.split(',')
.map(&:strip)
.sort
@@ -342,7 +342,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def sast_yaml_with_no_variables_set_but_analyzers
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
@@ -360,7 +361,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def sast_yaml_with_no_variables_set
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
@@ -375,7 +377,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def sast_yaml_all_params
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
@@ -396,7 +399,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def auto_devops_with_custom_stage
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
@@ -430,7 +434,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def sast_yaml_two_includes
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
@@ -453,7 +458,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def sast_yaml_variable_section_added
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
@@ -474,7 +480,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def sast_yaml_sast_section_added
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
@@ -496,7 +503,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def sast_yaml_sast_variables_section_added
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
@@ -518,7 +526,8 @@ RSpec.describe Security::CiConfiguration::SastBuildActions do
def sast_yaml_updated_stage
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
- # See https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
stages:
diff --git a/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb b/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb
new file mode 100644
index 00000000000..31854fcf3a7
--- /dev/null
+++ b/spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb
@@ -0,0 +1,159 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Security::CiConfiguration::SecretDetectionBuildAction do
+ subject(:result) { described_class.new(auto_devops_enabled, gitlab_ci_content).generate }
+
+ let(:params) { {} }
+
+ context 'with existing .gitlab-ci.yml' do
+ let(:auto_devops_enabled) { false }
+
+ context 'secret_detection has not been included' do
+ let(:expected_yml) do
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ - security
+ variables:
+ RANDOM: make sure this persists
+ include:
+ - template: existing.yml
+ - template: Security/Secret-Detection.gitlab-ci.yml
+ CI_YML
+ end
+
+ context 'template includes are an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => [{ "template" => "existing.yml" }] }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
+ end
+
+ context 'template include is not an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test security),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => { "template" => "existing.yml" } }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
+ end
+ end
+
+ context 'secret_detection has been included' do
+ let(:expected_yml) do
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ stages:
+ - test
+ variables:
+ RANDOM: make sure this persists
+ include:
+ - template: Security/Secret-Detection.gitlab-ci.yml
+ CI_YML
+ end
+
+ context 'secret_detection template include are an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => [{ "template" => "Security/Secret-Detection.gitlab-ci.yml" }] }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
+ end
+
+ context 'secret_detection template include is not an array' do
+ let(:gitlab_ci_content) do
+ { "stages" => %w(test),
+ "variables" => { "RANDOM" => "make sure this persists" },
+ "include" => { "template" => "Security/Secret-Detection.gitlab-ci.yml" } }
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('update')
+ expect(result[:content]).to eq(expected_yml)
+ end
+ end
+ end
+ end
+
+ context 'with no .gitlab-ci.yml' do
+ let(:gitlab_ci_content) { nil }
+
+ context 'autodevops disabled' do
+ let(:auto_devops_enabled) { false }
+ let(:expected_yml) do
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ include:
+ - template: Security/Secret-Detection.gitlab-ci.yml
+ CI_YML
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('create')
+ expect(result[:content]).to eq(expected_yml)
+ end
+ end
+
+ context 'with autodevops enabled' do
+ let(:auto_devops_enabled) { true }
+ let(:expected_yml) do
+ <<-CI_YML.strip_heredoc
+ # You can override the included template(s) by including variable overrides
+ # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
+ # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
+ # Note that environment variables can be set in several places
+ # See https://docs.gitlab.com/ee/ci/variables/#priority-of-environment-variables
+ include:
+ - template: Auto-DevOps.gitlab-ci.yml
+ CI_YML
+ end
+
+ before do
+ allow_next_instance_of(described_class) do |secret_detection_build_action|
+ allow(secret_detection_build_action).to receive(:auto_devops_stages).and_return(fast_auto_devops_stages)
+ end
+ end
+
+ it 'generates the correct YML' do
+ expect(result[:action]).to eq('create')
+ expect(result[:content]).to eq(expected_yml)
+ end
+ end
+ end
+
+ # stubbing this method allows this spec file to use fast_spec_helper
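+ # (Presumably, reading the stages straight from the Auto-DevOps template file avoids booting the full Rails environment.)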
+ def fast_auto_devops_stages
+ auto_devops_template = YAML.safe_load(File.read('lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml'))
+ auto_devops_template['stages']
+ end
+end
diff --git a/spec/models/concerns/sidebars/container_with_html_options_spec.rb b/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
index cc83fc84113..7f834419866 100644
--- a/spec/models/concerns/sidebars/container_with_html_options_spec.rb
+++ b/spec/lib/sidebars/concerns/container_with_html_options_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe Sidebars::ContainerWithHtmlOptions do
+RSpec.describe Sidebars::Concerns::ContainerWithHtmlOptions do
subject do
Class.new do
- include Sidebars::ContainerWithHtmlOptions
+ include Sidebars::Concerns::ContainerWithHtmlOptions
def title
'Foo'
@@ -18,4 +18,10 @@ RSpec.describe Sidebars::ContainerWithHtmlOptions do
expect(subject.container_html_options).to eq(aria: { label: 'Foo' })
end
end
+
+ describe '#collapsed_container_html_options' do
+ it 'includes the aria-label attribute by default' do
+ expect(subject.collapsed_container_html_options).to eq(aria: { label: 'Foo' })
+ end
+ end
end
diff --git a/spec/lib/sidebars/menu_item_spec.rb b/spec/lib/sidebars/menu_item_spec.rb
new file mode 100644
index 00000000000..3adde64f550
--- /dev/null
+++ b/spec/lib/sidebars/menu_item_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::MenuItem do
+ let(:title) { 'foo' }
+ let(:html_options) { {} }
+ let(:menu_item) { described_class.new(title: title, active_routes: {}, link: '', container_html_options: html_options) }
+
+ it 'includes the aria-label attribute set to the title by default' do
+ expect(menu_item.container_html_options).to eq({ aria: { label: title } })
+ end
+
+ context 'when aria-label is overridden during initialization' do
+ let(:html_options) { { aria: { label: 'bar' } } }
+
+ it 'sets the aria-label to the new attribute' do
+ expect(menu_item.container_html_options).to eq html_options
+ end
+ end
+end
diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb
new file mode 100644
index 00000000000..7dcf1940442
--- /dev/null
+++ b/spec/lib/sidebars/menu_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Menu do
+ let(:menu) { described_class.new(context) }
+ let(:context) { Sidebars::Context.new(current_user: nil, container: nil) }
+ let(:nil_menu_item) { Sidebars::NilMenuItem.new(item_id: :foo) }
+
+ describe '#all_active_routes' do
+ it 'gathers all active routes of items and the current menu' do
+ menu.add_item(Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: { path: %w(bar test) }))
+ menu.add_item(Sidebars::MenuItem.new(title: 'foo2', link: 'foo2', active_routes: { controller: 'fooc' }))
+ menu.add_item(Sidebars::MenuItem.new(title: 'foo3', link: 'foo3', active_routes: { controller: 'barc' }))
+ menu.add_item(nil_menu_item)
+
+ allow(menu).to receive(:active_routes).and_return({ path: 'foo' })
+
+ expect(menu).to receive(:renderable_items).and_call_original
+ expect(menu.all_active_routes).to eq({ path: %w(foo bar test), controller: %w(fooc barc) })
+ end
+ end
+
+ describe '#render?' do
+ context 'when the menu has no items' do
+ it 'returns false' do
+ expect(menu.render?).to be false
+ end
+ end
+
+ context 'when the menu has items' do
+ it 'returns true' do
+ menu.add_item(Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}))
+
+ expect(menu.render?).to be true
+ end
+
+ context 'when menu items are NilMenuItem' do
+ it 'returns false' do
+ menu.add_item(nil_menu_item)
+
+ expect(menu.render?).to be false
+ end
+ end
+ end
+ end
+
+ describe '#has_items?' do
+ it 'returns true when there are regular menu items' do
+ menu.add_item(Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}))
+
+ expect(menu.has_items?).to be true
+ end
+
+ it 'returns true when there are nil menu items' do
+ menu.add_item(nil_menu_item)
+
+ expect(menu.has_items?).to be true
+ end
+ end
+
+ describe '#has_renderable_items?' do
+ it 'returns true when there are regular menu items' do
+ menu.add_item(Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}))
+
+ expect(menu.has_renderable_items?).to be true
+ end
+
+ it 'returns false when there are nil menu items' do
+ menu.add_item(nil_menu_item)
+
+ expect(menu.has_renderable_items?).to be false
+ end
+
+ it 'returns true when there are both regular and nil menu items' do
+ menu.add_item(Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}))
+ menu.add_item(nil_menu_item)
+
+ expect(menu.has_renderable_items?).to be true
+ end
+ end
+
+ describe '#renderable_items' do
+ it 'returns only regular menu items' do
+ item = Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {})
+ menu.add_item(item)
+ menu.add_item(nil_menu_item)
+
+ expect(menu.renderable_items.size).to eq 1
+ expect(menu.renderable_items.first).to eq item
+ end
+ end
+
+ describe '#insert_element_before' do
+ let(:item1) { Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}, item_id: :foo1) }
+ let(:item2) { Sidebars::MenuItem.new(title: 'foo2', link: 'foo2', active_routes: {}, item_id: :foo2) }
+ let(:item3) { Sidebars::MenuItem.new(title: 'foo3', link: 'foo3', active_routes: {}, item_id: :foo3) }
+ let(:list) { [item1, item2] }
+
+ it 'adds element before the specific element class' do
+ menu.insert_element_before(list, :foo2, item3)
+
+ expect(list).to eq [item1, item3, item2]
+ end
+
+ it 'does not add nil elements' do
+ menu.insert_element_before(list, :foo2, nil)
+
+ expect(list).to eq [item1, item2]
+ end
+
+ context 'when reference element does not exist' do
+ it 'adds the element to the top of the list' do
+ menu.insert_element_before(list, :non_existent, item3)
+
+ expect(list).to eq [item3, item1, item2]
+ end
+ end
+ end
+
+ describe '#insert_element_after' do
+ let(:item1) { Sidebars::MenuItem.new(title: 'foo1', link: 'foo1', active_routes: {}, item_id: :foo1) }
+ let(:item2) { Sidebars::MenuItem.new(title: 'foo2', link: 'foo2', active_routes: {}, item_id: :foo2) }
+ let(:item3) { Sidebars::MenuItem.new(title: 'foo3', link: 'foo3', active_routes: {}, item_id: :foo3) }
+ let(:list) { [item1, item2] }
+
+ it 'adds element after the specific element class' do
+ menu.insert_element_after(list, :foo1, item3)
+
+ expect(list).to eq [item1, item3, item2]
+ end
+
+ it 'does not add nil elements' do
+ menu.insert_element_after(list, :foo1, nil)
+
+ expect(list).to eq [item1, item2]
+ end
+
+ context 'when reference element does not exist' do
+ it 'adds the element to the end of the list' do
+ menu.insert_element_after(list, :non_existent, item3)
+
+ expect(list).to eq [item1, item2, item3]
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/panel_spec.rb b/spec/lib/sidebars/panel_spec.rb
new file mode 100644
index 00000000000..b70a79361d0
--- /dev/null
+++ b/spec/lib/sidebars/panel_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Panel do
+ let(:context) { Sidebars::Context.new(current_user: nil, container: nil) }
+ let(:panel) { Sidebars::Panel.new(context) }
+ let(:menu1) { Sidebars::Menu.new(context) }
+ let(:menu2) { Sidebars::Menu.new(context) }
+
+ describe '#renderable_menus' do
+ it 'returns only renderable menus' do
+ panel.add_menu(menu1)
+ panel.add_menu(menu2)
+
+ allow(menu1).to receive(:render?).and_return(true)
+ allow(menu2).to receive(:render?).and_return(false)
+
+ expect(panel.renderable_menus).to eq([menu1])
+ end
+ end
+
+ describe '#has_renderable_menus?' do
+ it 'returns false when no renderable menus' do
+ expect(panel.has_renderable_menus?).to be false
+ end
+
+ it 'returns true when there are renderable menus' do
+ allow(menu1).to receive(:render?).and_return(true)
+
+ panel.add_menu(menu1)
+
+ expect(panel.has_renderable_menus?).to be true
+ end
+ end
+
+ describe '#add_element' do
+ it 'adds the element to the last position of the list' do
+ list = [1, 2]
+
+ panel.add_element(list, 3)
+
+ expect(list).to eq([1, 2, 3])
+ end
+
+ it 'does not add nil elements' do
+ list = []
+
+ panel.add_element(list, nil)
+
+ expect(list).to be_empty
+ end
+ end
+
+ describe '#insert_element_before' do
+ let(:user) { build(:user) }
+ let(:list) { [1, user] }
+
+ it 'adds element before the specific element class' do
+ panel.insert_element_before(list, User, 2)
+
+ expect(list).to eq [1, 2, user]
+ end
+
+ it 'does not add nil elements' do
+ panel.insert_element_before(list, User, nil)
+
+ expect(list).to eq [1, user]
+ end
+
+ context 'when reference element does not exist' do
+ it 'adds the element to the top of the list' do
+ panel.insert_element_before(list, Project, 2)
+
+ expect(list).to eq [2, 1, user]
+ end
+ end
+ end
+
+ describe '#insert_element_after' do
+ let(:user) { build(:user) }
+ let(:list) { [1, user] }
+
+ it 'adds element after the specific element class' do
+ panel.insert_element_after(list, Integer, 2)
+
+ expect(list).to eq [1, 2, user]
+ end
+
+ it 'does not add nil elements' do
+ panel.insert_element_after(list, Integer, nil)
+
+ expect(list).to eq [1, user]
+ end
+
+ context 'when reference element does not exist' do
+ it 'adds the element to the end of the list' do
+ panel.insert_element_after(list, Project, 2)
+
+ expect(list).to eq [1, user, 2]
+ end
+ end
+ end
+
+ describe '#replace_element' do
+ let(:user) { build(:user) }
+ let(:list) { [1, user] }
+
+ it 'replaces an existing element in the list' do
+ panel.replace_element(list, Integer, 2)
+
+ expect(list).to eq [2, user]
+ end
+
+ it 'does not add nil elements' do
+ panel.replace_element(list, Integer, nil)
+
+ expect(list).to eq [1, user]
+ end
+
+ it 'does not add the element if the other element is not found' do
+ panel.replace_element(list, Project, 2)
+
+ expect(list).to eq [1, user]
+ end
+ end
+end
diff --git a/spec/models/sidebars/projects/context_spec.rb b/spec/lib/sidebars/projects/context_spec.rb
index 44578ae1583..44578ae1583 100644
--- a/spec/models/sidebars/projects/context_spec.rb
+++ b/spec/lib/sidebars/projects/context_spec.rb
diff --git a/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb b/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb
new file mode 100644
index 00000000000..ed94b81520e
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/analytics_menu_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, current_ref: project.repository.root_ref) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when user cannot read analytics' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to be false
+ end
+ end
+
+ context 'when user can read analytics' do
+ it 'returns true' do
+ expect(subject.render?).to be true
+ end
+
+ context 'when menu does not have any menu items' do
+ it 'returns false' do
+ allow(subject).to receive(:has_renderable_items?).and_return(false)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ context 'when menu has menu items' do
+ it 'returns true' do
+ expect(subject.render?).to be true
+ end
+ end
+ end
+ end
+
+ describe '#link' do
+ it 'returns link to the value stream page' do
+ expect(subject.link).to include('/-/value_stream_analytics')
+ end
+
+ context 'when Value Stream is not visible' do
+ it 'returns link to the first visible menu item' do
+ allow(subject).to receive(:cycle_analytics_menu_item).and_return(double(render?: false))
+
+ expect(subject.link).to eq subject.renderable_items.first.link
+ end
+ end
+ end
+
+ describe 'Menu items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ describe 'CI/CD' do
+ let(:item_id) { :ci_cd_analytics }
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the project repository is empty' do
+ before do
+ allow(project).to receive(:empty_repo?).and_return(true)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when builds access level is DISABLED' do
+ before do
+ project.project_feature.update!(builds_access_level: Featurable::DISABLED)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Repository' do
+ let(:item_id) { :repository_analytics }
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the project repository is empty' do
+ before do
+ allow(project).to receive(:empty_repo?).and_return(true)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Value Stream' do
+ let(:item_id) { :cycle_analytics }
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb b/spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb
new file mode 100644
index 00000000000..dee2716e4c2
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/ci_cd_menu_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::CiCdMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:can_view_pipeline_editor) { true }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, current_ref: 'master', can_view_pipeline_editor: can_view_pipeline_editor) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when user cannot read builds' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when user can read builds' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+ end
+
+ describe 'Menu items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ describe 'Pipelines Editor' do
+ let(:item_id) { :pipelines_editor }
+
+ context 'when user cannot view pipeline editor' do
+ let(:can_view_pipeline_editor) { false }
+
+ it 'does not include pipeline editor menu item' do
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when user can view pipeline editor' do
+ it 'includes pipeline editor menu item' do
+ is_expected.not_to be_nil
+ end
+ end
+ end
+
+ describe 'Artifacts' do
+ let(:item_id) { :artifacts }
+
+ context 'when feature flag :artifacts_management_page is disabled' do
+ it 'does not include artifacts menu item' do
+ stub_feature_flags(artifacts_management_page: false)
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when feature flag :artifacts_management_page is enabled' do
+ it 'includes artifacts menu item' do
+ stub_feature_flags(artifacts_management_page: true)
+
+ is_expected.not_to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb b/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb
new file mode 100644
index 00000000000..0ecb328efd1
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/confluence_menu_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::ConfluenceMenu do
+ let_it_be_with_refind(:project) { create(:project, has_external_wiki: true) }
+
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when Confluence integration is not present' do
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when Confluence integration is present' do
+ let!(:confluence) { create(:confluence_service, project: project, active: active) }
+
+ context 'when integration is disabled' do
+ let(:active) { false }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when integration is enabled' do
+ let(:active) { true }
+
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+
+ it 'does not contain any sub menu' do
+ expect(subject.has_items?).to be false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
new file mode 100644
index 00000000000..4a60dfde674
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/deployments_menu_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::DeploymentsMenu do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ describe '#render?' do
+ subject { described_class.new(context) }
+
+ context 'when menu does not have any menu items' do
+ it 'returns false' do
+ allow(subject).to receive(:has_renderable_items?).and_return(false)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ context 'when menu has menu items' do
+ it 'returns true' do
+ expect(subject.render?).to be true
+ end
+ end
+ end
+
+ describe 'Menu Items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ shared_examples 'access rights checks' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ shared_examples 'feature flag :sidebar_refactor disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'Feature Flags' do
+ let(:item_id) { :feature_flags }
+
+ it_behaves_like 'access rights checks'
+ it_behaves_like 'feature flag :sidebar_refactor disabled'
+ end
+
+ describe 'Environments' do
+ let(:item_id) { :environments }
+
+ it_behaves_like 'access rights checks'
+ it_behaves_like 'feature flag :sidebar_refactor disabled'
+ end
+
+ describe 'Releases' do
+ let(:item_id) { :releases }
+
+ it_behaves_like 'access rights checks'
+ it_behaves_like 'feature flag :sidebar_refactor disabled'
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb b/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb
new file mode 100644
index 00000000000..5d62eebca1c
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/external_issue_tracker_menu_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::ExternalIssueTrackerMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:jira_issues_integration_active) { false }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, jira_issues_integration: jira_issues_integration_active) }
+
+ subject { described_class.new(context) }
+
+ it 'does not contain any sub menu' do
+ expect(subject.has_items?).to be false
+ end
+
+ describe '#render?' do
+ before do
+ expect(subject).to receive(:external_issue_tracker).and_return(external_issue_tracker).at_least(1)
+ end
+
+ context 'when active external issue tracker' do
+ let(:external_issue_tracker) { build(:custom_issue_tracker_service, project: project) }
+
+ context 'is present' do
+ it 'returns true' do
+ expect(subject.render?).to be_truthy
+ end
+ end
+
+ context 'is not present' do
+ let(:external_issue_tracker) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to be_falsey
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/external_wiki_menu_spec.rb b/spec/lib/sidebars/projects/menus/external_wiki_menu_spec.rb
new file mode 100644
index 00000000000..19efd2bbd6b
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/external_wiki_menu_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::ExternalWikiMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ it 'does not contain any sub menu' do
+ expect(subject.has_items?).to be false
+ end
+
+ describe '#render?' do
+ before do
+ expect(subject).to receive(:external_wiki).and_return(external_wiki).at_least(1)
+ end
+
+ context 'when active external wiki' do
+ let(:external_wiki) { build(:external_wiki_service, project: project) }
+
+ context 'is present' do
+ it 'returns true' do
+ expect(subject.render?).to be_truthy
+ end
+ end
+
+ context 'is not present' do
+ let(:external_wiki) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to be_falsey
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/hidden_menu_spec.rb b/spec/lib/sidebars/projects/menus/hidden_menu_spec.rb
new file mode 100644
index 00000000000..44013898721
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/hidden_menu_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::HiddenMenu do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, current_ref: project.repository.root_ref) }
+
+ describe '#render?' do
+ subject { described_class.new(context) }
+
+ context 'when menu does not have any menu items' do
+ it 'returns false' do
+ allow(subject).to receive(:has_renderable_items?).and_return(false)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ context 'when menu has menu items' do
+ it 'returns true' do
+ expect(subject.render?).to be true
+ end
+ end
+ end
+
+ describe 'Menu items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ shared_examples 'access rights checks' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Activity' do
+ let(:item_id) { :activity }
+
+ context 'when user has access to the project' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user is not present' do
+ let(:user) { nil }
+
+ specify { is_expected.not_to be_nil }
+ end
+ end
+ end
+
+ describe 'Graph' do
+ let(:item_id) { :graph }
+
+ context 'when project repository is empty' do
+ before do
+ allow(project).to receive(:empty_repo?).and_return(true)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'New Issue' do
+ let(:item_id) { :new_issue }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Jobs' do
+ let(:item_id) { :jobs }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Commits' do
+ let(:item_id) { :commits }
+
+ context 'when project repository is empty' do
+ before do
+ allow(project).to receive(:empty_repo?).and_return(true)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Issue Boards' do
+ let(:item_id) { :issue_boards }
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/issues_menu_spec.rb b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
new file mode 100644
index 00000000000..ac62cd7594a
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/issues_menu_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::IssuesMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when user can read issues' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+
+ context 'when user cannot read issues' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+
+ describe '#has_pill?' do
+ context 'when issues feature is enabled' do
+ it 'returns true' do
+ expect(subject.has_pill?).to eq true
+ end
+ end
+
+ context 'when issues feature is disabled' do
+ it 'returns false' do
+ allow(project).to receive(:issues_enabled?).and_return(false)
+
+ expect(subject.has_pill?).to eq false
+ end
+ end
+ end
+
+ describe '#pill_count' do
+ it 'returns zero when there are no open issues' do
+ expect(subject.pill_count).to eq 0
+ end
+
+ it 'memoizes the query' do
+ subject.pill_count
+
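+ # ActiveRecord::QueryRecorder counts the SQL queries run inside the block; a memoized
+ # pill_count should issue no further queries on the second call, hence the zero expectation.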
+ control = ActiveRecord::QueryRecorder.new do
+ subject.pill_count
+ end
+
+ expect(control.count).to eq 0
+ end
+
+ context 'when there are open issues' do
+ it 'returns the number of open issues' do
+ create_list(:issue, 2, :opened, project: project)
+ create(:issue, :closed, project: project)
+
+ expect(subject.pill_count).to eq 2
+ end
+ end
+ end
+
+ describe 'Menu Items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ describe 'Labels' do
+ let(:item_id) { :labels }
+
+ specify { is_expected.to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ specify { is_expected.not_to be_nil }
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/labels_menu_spec.rb b/spec/lib/sidebars/projects/menus/labels_menu_spec.rb
new file mode 100644
index 00000000000..e1420f9e61b
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/labels_menu_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::LabelsMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ it 'does not contain any sub menu' do
+ expect(subject.has_items?).to eq false
+ end
+
+ describe '#render?' do
+ let(:issues_enabled) { true }
+
+ before do
+ allow(project).to receive(:issues_enabled?).and_return(issues_enabled)
+ end
+
+ context 'when feature flag :sidebar_refactor is enabled' do
+ let(:issues_enabled) { false }
+
+ it 'returns false' do
+ expect(subject.render?).to be_falsey
+ end
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ context 'when user can read labels' do
+ context 'when issues feature is enabled' do
+ it 'returns false' do
+ expect(subject.render?).to be_falsey
+ end
+ end
+
+ context 'when issues feature is disabled' do
+ let(:issues_enabled) { false }
+
+ it 'returns true' do
+ expect(subject.render?).to be_truthy
+ end
+ end
+ end
+
+ context 'when user cannot read labels' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to be_falsey
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb b/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb
new file mode 100644
index 00000000000..ef5ae550551
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/learn_gitlab_menu_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::LearnGitlabMenu do
+ let_it_be(:project) { build(:project) }
+ let_it_be(:experiment_enabled) { true }
+ let_it_be(:tracking_category) { 'Growth::Activation::Experiment::LearnGitLabB' }
+
+ let(:context) do
+ Sidebars::Projects::Context.new(
+ current_user: nil,
+ container: project,
+ learn_gitlab_experiment_enabled: experiment_enabled,
+ learn_gitlab_experiment_tracking_category: tracking_category
+ )
+ end
+
+ subject { described_class.new(context) }
+
+ it 'does not contain any sub menu' do
+ expect(subject.has_items?).to be false
+ end
+
+ describe '#nav_link_html_options' do
+ let_it_be(:data_tracking) do
+ {
+ class: 'home',
+ data: {
+ track_action: 'click_menu',
+ track_property: tracking_category,
+ track_label: 'learn_gitlab'
+ }
+ }
+ end
+
+ specify do
+ expect(subject.nav_link_html_options).to eq(data_tracking)
+ end
+ end
+
+ describe '#render?' do
+ context 'when learn gitlab experiment is enabled' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+
+ context 'when learn gitlab experiment is disabled' do
+ let(:experiment_enabled) { false }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+
+ describe '#has_pill?' do
+ context 'when learn gitlab experiment is enabled' do
+ it 'returns true' do
+ expect(subject.has_pill?).to eq true
+ end
+ end
+
+ context 'when learn gitlab experiment is disabled' do
+ let(:experiment_enabled) { false }
+
+ it 'returns false' do
+ expect(subject.has_pill?).to eq false
+ end
+ end
+ end
+
+ describe '#pill_count' do
+ before do
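+ # expect_next_instance_of stubs completed_percentage on the next LearnGitlab::Onboarding
+ # instance (presumably created by #pill_count) so the pill text is deterministic.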
+ expect_next_instance_of(LearnGitlab::Onboarding) do |onboarding|
+ expect(onboarding).to receive(:completed_percentage).and_return(20)
+ end
+ end
+
+ it 'returns pill count' do
+ expect(subject.pill_count).to eq '20%'
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/members_menu_spec.rb b/spec/lib/sidebars/projects/menus/members_menu_spec.rb
new file mode 100644
index 00000000000..dcc085c2957
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/members_menu_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::MembersMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+
+ context 'when user cannot access members' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb b/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb
new file mode 100644
index 00000000000..cef303fb068
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/merge_requests_menu_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::MergeRequestsMenu do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when repository is not present' do
+ let(:project) { build(:project) }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when repository is present' do
+ context 'when user can read merge requests' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+
+ context 'when user cannot read merge requests' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+ end
+
+ describe '#pill_count' do
+ it 'returns zero when there are no open merge requests' do
+ expect(subject.pill_count).to eq 0
+ end
+
+ it 'memoizes the query' do
+ subject.pill_count
+
+ control = ActiveRecord::QueryRecorder.new do
+ subject.pill_count
+ end
+
+ expect(control.count).to eq 0
+ end
+
+ context 'when there are open merge requests' do
+ it 'returns the number of open merge requests' do
+ create_list(:merge_request, 2, :unique_branches, source_project: project, author: user, state: :opened)
+ create(:merge_request, source_project: project, state: :merged)
+
+ expect(subject.pill_count).to eq 2
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
new file mode 100644
index 00000000000..93618fa3321
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/monitor_menu_spec.rb
@@ -0,0 +1,217 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::MonitorMenu do
+ let_it_be_with_refind(:project) { create(:project) }
+
+ let(:user) { project.owner }
+ let(:show_cluster_hint) { true }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, show_cluster_hint: show_cluster_hint) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when operations feature is disabled' do
+ it 'returns false' do
+ project.project_feature.update!(operations_access_level: Featurable::DISABLED)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ context 'when operations feature is enabled' do
+ context 'when menu does not have any renderable menu items' do
+ it 'returns false' do
+ allow(subject).to receive(:has_renderable_items?).and_return(false)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ context 'when menu has menu items' do
+ it 'returns true' do
+ expect(subject.render?).to be true
+ end
+ end
+ end
+ end
+
+ describe '#title' do
+ it 'returns "Monitor"' do
+ expect(subject.title).to eq 'Monitor'
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'returns "Operations"' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ expect(subject.title).to eq 'Operations'
+ end
+ end
+ end
+
+ describe '#extra_container_html_options' do
+ it 'returns "shortcuts-monitor"' do
+ expect(subject.extra_container_html_options).to eq(class: 'shortcuts-monitor')
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'returns "shortcuts-operations"' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ expect(subject.extra_container_html_options).to eq(class: 'shortcuts-operations')
+ end
+ end
+ end
+
+ describe '#link' do
+ context 'when metrics dashboard is visible' do
+ it 'returns link to the metrics dashboard page' do
+ expect(subject.link).to include('/-/environments/metrics')
+ end
+ end
+
+ context 'when metrics dashboard is not visible' do
+ it 'returns link to the feature flags page' do
+ project.project_feature.update!(operations_access_level: Featurable::DISABLED)
+
+ expect(subject.link).to include('/-/feature_flags')
+ end
+ end
+ end
+
+ context 'Menu items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ shared_examples 'access rights checks' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Metrics Dashboard' do
+ let(:item_id) { :metrics }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Logs' do
+ let(:item_id) { :logs }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Tracing' do
+ let(:item_id) { :tracing }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Error Tracking' do
+ let(:item_id) { :error_tracking }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Alert Management' do
+ let(:item_id) { :alert_management }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Incidents' do
+ let(:item_id) { :incidents }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Serverless' do
+ let(:item_id) { :serverless }
+
+ specify { is_expected.to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+
+ describe 'Terraform' do
+ let(:item_id) { :terraform }
+
+ specify { is_expected.to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+
+ describe 'Kubernetes' do
+ let(:item_id) { :kubernetes }
+
+ specify { is_expected.to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+
+ describe 'Environments' do
+ let(:item_id) { :environments }
+
+ specify { is_expected.to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+
+ describe 'Feature Flags' do
+ let(:item_id) { :feature_flags }
+
+ specify { is_expected.to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it_behaves_like 'access rights checks'
+ end
+ end
+
+ describe 'Product Analytics' do
+ let(:item_id) { :product_analytics }
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when feature flag :product_analytics is disabled' do
+ specify do
+ stub_feature_flags(product_analytics: false)
+
+ is_expected.to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
new file mode 100644
index 00000000000..731dd5eca23
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/packages_registries_menu_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::PackagesRegistriesMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when menu does not have any menu item to show' do
+ it 'returns false' do
+ allow(subject).to receive(:has_renderable_items?).and_return(false)
+
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when menu has menu items to show' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+ end
+
+ describe '#link' do
+ let(:registry_enabled) { true }
+ let(:packages_enabled) { true }
+
+ before do
+ stub_container_registry_config(enabled: registry_enabled)
+ stub_config(packages: { enabled: packages_enabled })
+ end
+
+ context 'when Packages Registry is visible' do
+ it 'menu link points to Packages Registry page' do
+ expect(subject.link).to eq described_class.new(context).renderable_items.find { |i| i.item_id == :packages_registry }.link
+ end
+ end
+
+ context 'when Packages Registry is not visible' do
+ let(:packages_enabled) { false }
+
+ it 'menu link points to Container Registry page' do
+ expect(subject.link).to eq described_class.new(context).renderable_items.find { |i| i.item_id == :container_registry }.link
+ end
+
+ context 'when Container Registry is not visible' do
+ let(:registry_enabled) { false }
+
+ it 'menu link points to Infrastructure Registry page' do
+ expect(subject.link).to eq described_class.new(context).renderable_items.find { |i| i.item_id == :infrastructure_registry }.link
+ end
+ end
+ end
+ end
+
+ describe 'Menu items' do
+ subject { described_class.new(context).renderable_items.find { |i| i.item_id == item_id } }
+
+ describe 'Packages Registry' do
+ let(:item_id) { :packages_registry }
+
+ context 'when user can read packages' do
+ context 'when config package setting is disabled' do
+ it 'the menu item is not added to list of menu items' do
+ stub_config(packages: { enabled: false })
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when config package setting is enabled' do
+ it 'the menu item is added to list of menu items' do
+ stub_config(packages: { enabled: true })
+
+ is_expected.not_to be_nil
+ end
+ end
+ end
+
+ context 'when user cannot read packages' do
+ let(:user) { nil }
+
+ it 'the menu item is not added to list of menu items' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe 'Container Registry' do
+ let(:item_id) { :container_registry }
+
+ context 'when user can read container images' do
+ context 'when config registry setting is disabled' do
+ it 'is not added to the list of menu items' do
+ stub_container_registry_config(enabled: false)
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when config registry setting is enabled' do
+ it 'is added to the list of menu items' do
+ stub_container_registry_config(enabled: true)
+
+ is_expected.not_to be_nil
+ end
+ end
+ end
+
+ context 'when user cannot read container images' do
+ let(:user) { nil }
+
+ it 'is not added to the list of menu items' do
+ is_expected.to be_nil
+ end
+ end
+ end
+
+ describe 'Infrastructure Registry' do
+ let(:item_id) { :infrastructure_registry }
+
+ context 'when feature flag :infrastructure_registry_page is enabled' do
+ it 'is added to the list of menu items' do
+ stub_feature_flags(infrastructure_registry_page: true)
+
+ is_expected.not_to be_nil
+ end
+ end
+
+ context 'when feature flag :infrastructure_registry_page is disabled' do
+ it 'is not added to the list of menu items' do
+ stub_feature_flags(infrastructure_registry_page: false)
+
+ is_expected.to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
new file mode 100644
index 00000000000..b50bf0f4bf1
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/project_information_menu_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::ProjectInformationMenu do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ describe 'Menu Items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ describe 'Releases' do
+ let(:item_id) { :releases }
+
+ specify { is_expected.to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ context 'when project repository is empty' do
+ it 'does not include releases menu item' do
+ allow(project).to receive(:empty_repo?).and_return(true)
+
+ is_expected.to be_nil
+ end
+ end
+
+ context 'when project repository is not empty' do
+ context 'when user can download code' do
+ specify { is_expected.not_to be_nil }
+ end
+
+ context 'when user cannot download code' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+
+ describe 'Labels' do
+ let(:item_id) { :labels }
+
+ specify { is_expected.not_to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Members' do
+ let(:item_id) { :members }
+
+ specify { is_expected.not_to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+end
diff --git a/spec/models/sidebars/projects/menus/repository/menu_spec.rb b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
index 04eb3357a6f..554a4e3f532 100644
--- a/spec/models/sidebars/projects/menus/repository/menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/repository_menu_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe Sidebars::Projects::Menus::Repository::Menu do
+RSpec.describe Sidebars::Projects::Menus::RepositoryMenu do
let_it_be(:project) { create(:project, :repository) }
let(:user) { project.owner }
- let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, current_ref: 'master') }
subject { described_class.new(context) }
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
new file mode 100644
index 00000000000..88f2df6cd84
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -0,0 +1,177 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ it 'returns false when menu does not have any menu items' do
+ allow(subject).to receive(:has_renderable_items?).and_return(false)
+
+ expect(subject.render?).to be false
+ end
+ end
+
+ describe 'Menu items' do
+ subject { described_class.new(context).renderable_items.find { |e| e.item_id == item_id } }
+
+ shared_examples 'access rights checks' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'General' do
+ let(:item_id) { :general }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Integrations' do
+ let(:item_id) { :integrations }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Webhooks' do
+ let(:item_id) { :webhooks }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Access Tokens' do
+ let(:item_id) { :access_tokens }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'Repository' do
+ let(:item_id) { :repository }
+
+ it_behaves_like 'access rights checks'
+ end
+
+ describe 'CI/CD' do
+ let(:item_id) { :ci_cd }
+
+ describe 'when project is archived' do
+ before do
+ allow(project).to receive(:archived?).and_return(true)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when project is not archived' do
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+
+ describe 'Monitor' do
+ let(:item_id) { :monitor }
+
+ describe 'when project is archived' do
+ before do
+ allow(project).to receive(:archived?).and_return(true)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when project is not archived' do
+ specify { is_expected.not_to be_nil }
+
+ specify { expect(subject.title).to eq 'Monitor' }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ specify { expect(subject.title).to eq 'Operations' }
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+
+ describe 'Pages' do
+ let(:item_id) { :pages }
+
+ before do
+ allow(project).to receive(:pages_available?).and_return(pages_enabled)
+ end
+
+ describe 'when pages are enabled' do
+ let(:pages_enabled) { true }
+
+ specify { is_expected.not_to be_nil }
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'when pages are not enabled' do
+ let(:pages_enabled) { false }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+
+ describe 'Packages & Registries' do
+ let(:item_id) { :packages_and_registries }
+
+ before do
+ stub_container_registry_config(enabled: container_enabled)
+ end
+
+ describe 'when config registry setting is disabled' do
+ let(:container_enabled) { false }
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when config registry setting is enabled' do
+ let(:container_enabled) { true }
+
+ specify { is_expected.not_to be_nil }
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ specify { is_expected.to be_nil }
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/snippets_menu_spec.rb b/spec/lib/sidebars/projects/menus/snippets_menu_spec.rb
new file mode 100644
index 00000000000..af219e4a742
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/snippets_menu_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::SnippetsMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when user cannot access snippets' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when user can access snippets' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/menus/wiki_menu_spec.rb b/spec/lib/sidebars/projects/menus/wiki_menu_spec.rb
new file mode 100644
index 00000000000..41447ee24a9
--- /dev/null
+++ b/spec/lib/sidebars/projects/menus/wiki_menu_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::WikiMenu do
+ let(:project) { build(:project) }
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ it 'does not contain any sub menu' do
+ expect(subject.has_items?).to be false
+ end
+
+ describe '#render?' do
+ context 'when user can access project wiki' do
+ it 'returns true' do
+ expect(subject.render?).to be true
+ end
+
+ context 'when user cannot access project wiki' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to be false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/projects/panel_spec.rb b/spec/lib/sidebars/projects/panel_spec.rb
new file mode 100644
index 00000000000..51d37bf69ea
--- /dev/null
+++ b/spec/lib/sidebars/projects/panel_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Panel do
+ let(:project) { build(:project) }
+ let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
+
+ subject { described_class.new(context) }
+
+ it 'has a scope menu' do
+ expect(subject.scope_menu).to be_a(Sidebars::Projects::Menus::ScopeMenu)
+ end
+
+ context 'Confluence menu item' do
+ subject { described_class.new(context).instance_variable_get(:@menus) }
+
+ context 'when integration is present and active' do
+ let_it_be(:confluence) { create(:confluence_service, active: true) }
+
+ let(:project) { confluence.project }
+
+ it 'contains Confluence menu item' do
+ expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ConfluenceMenu) }).not_to be_nil
+ end
+
+ it 'does not contain Wiki menu item' do
+ expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::WikiMenu) }).to be_nil
+ end
+ end
+
+ context 'when integration is not present' do
+ it 'does not contain Confluence menu item' do
+ expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::ConfluenceMenu) }).to be_nil
+ end
+
+ it 'contains Wiki menu item' do
+ expect(subject.index { |i| i.is_a?(Sidebars::Projects::Menus::WikiMenu) }).not_to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/version_check_spec.rb b/spec/lib/version_check_spec.rb
new file mode 100644
index 00000000000..23c381e241e
--- /dev/null
+++ b/spec/lib/version_check_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe VersionCheck do
+ describe '.url' do
+ it 'returns the correct URL' do
+ expect(described_class.url).to match(%r{\A#{Regexp.escape(described_class.host)}/check\.svg\?gitlab_info=\w+})
+ end
+ end
+end
diff --git a/spec/mailers/emails/in_product_marketing_spec.rb b/spec/mailers/emails/in_product_marketing_spec.rb
index 25735e64bdf..3d17e16ef48 100644
--- a/spec/mailers/emails/in_product_marketing_spec.rb
+++ b/spec/mailers/emails/in_product_marketing_spec.rb
@@ -5,7 +5,6 @@ require 'email_spec'
RSpec.describe Emails::InProductMarketing do
include EmailSpec::Matchers
- include InProductMarketingHelper
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -62,11 +61,13 @@ RSpec.describe Emails::InProductMarketing do
with_them do
it 'has the correct subject and content' do
+ message = Gitlab::Email::Message::InProductMarketing.for(track).new(group: group, series: series)
+
aggregate_failures do
- is_expected.to have_subject(subject_line(track, series))
- is_expected.to have_body_text(in_product_marketing_title(track, series))
- is_expected.to have_body_text(in_product_marketing_subtitle(track, series))
- is_expected.to have_body_text(in_product_marketing_cta_text(track, series))
+ is_expected.to have_subject(message.subject_line)
+ is_expected.to have_body_text(message.title)
+ is_expected.to have_body_text(message.subtitle)
+ is_expected.to have_body_text(CGI.unescapeHTML(message.cta_link))
end
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 94a081ae0c9..b073b647532 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe Notify do
it 'contains a link to issue author' do
is_expected.to have_body_text(issue.author_name)
- is_expected.to have_body_text 'created an issue'
+ is_expected.to have_body_text 'created an issue:'
is_expected.to have_link(issue.to_reference, href: project_issue_url(issue.project, issue))
end
@@ -356,11 +356,11 @@ RSpec.describe Notify do
it 'contains a link to merge request author' do
is_expected.to have_body_text merge_request.author_name
- is_expected.to have_body_text 'created a'
+ is_expected.to have_body_text 'created a merge request:'
end
it 'contains a link to the merge request url' do
- is_expected.to have_link('merge request', href: project_merge_request_url(merge_request.target_project, merge_request))
+ is_expected.to have_link(merge_request.to_reference, href: project_merge_request_url(merge_request.target_project, merge_request))
end
end
diff --git a/spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb b/spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..1588cec0258
--- /dev/null
+++ b/spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210423160427_schedule_drop_invalid_vulnerabilities.rb')
+
+RSpec.describe ScheduleDropInvalidVulnerabilities, :migration do
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let_it_be(:users) { table(:users) }
+ let_it_be(:user) { create_user! }
+ let_it_be(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
+
+ let_it_be(:scanners) { table(:vulnerability_scanners) }
+ let_it_be(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let_it_be(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+
+ let_it_be(:vulnerabilities) { table(:vulnerabilities) }
+ let_it_be(:vulnerability_with_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_without_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let_it_be(:primary_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v5',
+ external_id: 'uuid-v5',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'Identifier for UUIDv5')
+ end
+
+ let_it_be(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+ let_it_be(:finding) do
+ create_finding!(
+ vulnerability_id: vulnerability_with_finding.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: primary_identifier.id
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'schedules background migrations' do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION).to be_scheduled_migration(vulnerability_with_finding.id, vulnerability_with_finding.id)
+ expect(described_class::MIGRATION).to be_scheduled_migration(vulnerability_without_finding.id, vulnerability_without_finding.id)
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ vulnerabilities_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: Time.current
+ )
+ end
+end
diff --git a/spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb b/spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb
new file mode 100644
index 00000000000..3e57ffb4729
--- /dev/null
+++ b/spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20210430134202_copy_adoption_snapshot_namespace.rb')
+
+RSpec.describe CopyAdoptionSnapshotNamespace, :migration do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:segments_table) { table(:analytics_devops_adoption_segments) }
+ let(:snapshots_table) { table(:analytics_devops_adoption_snapshots) }
+
+ before do
+ namespaces_table.create!(id: 123, name: 'group1', path: 'group1')
+ namespaces_table.create!(id: 124, name: 'group2', path: 'group2')
+
+ segments_table.create!(id: 1, namespace_id: 123)
+ segments_table.create!(id: 2, namespace_id: 124)
+
+ create_snapshot(id: 1, segment_id: 1)
+ create_snapshot(id: 2, segment_id: 2)
+ create_snapshot(id: 3, segment_id: 2, namespace_id: 123)
+ end
+
+ it 'updates all snapshots without namespace set' do
+ migrate!
+
+ expect(snapshots_table.find(1).namespace_id).to eq 123
+ expect(snapshots_table.find(2).namespace_id).to eq 124
+ expect(snapshots_table.find(3).namespace_id).to eq 123
+ end
+
+ def create_snapshot(**additional_params)
+ defaults = {
+ recorded_at: Time.zone.now,
+ issue_opened: true,
+ merge_request_opened: true,
+ merge_request_approved: true,
+ runner_configured: true,
+ pipeline_succeeded: true,
+ deploy_succeeded: true,
+ security_scan_succeeded: true,
+ end_time: Time.zone.now.end_of_month
+ }
+
+ snapshots_table.create!(defaults.merge(additional_params))
+ end
+end
diff --git a/spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb b/spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb
new file mode 100644
index 00000000000..a37772db28c
--- /dev/null
+++ b/spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20210430135954_copy_adoption_segments_namespace.rb')
+
+RSpec.describe CopyAdoptionSegmentsNamespace, :migration do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:segments_table) { table(:analytics_devops_adoption_segments) }
+
+ before do
+ namespaces_table.create!(id: 123, name: 'group1', path: 'group1')
+ namespaces_table.create!(id: 124, name: 'group2', path: 'group2')
+
+ segments_table.create!(id: 1, namespace_id: 123, display_namespace_id: nil)
+ segments_table.create!(id: 2, namespace_id: 124, display_namespace_id: 123)
+ end
+
+ it 'updates all segments without display namespace' do
+ migrate!
+
+ expect(segments_table.find(1).display_namespace_id).to eq 123
+ expect(segments_table.find(2).display_namespace_id).to eq 123
+ end
+end
diff --git a/spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb b/spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb
new file mode 100644
index 00000000000..6e1cc63e42a
--- /dev/null
+++ b/spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'migrate', '20210503105845_add_project_value_stream_id_to_project_stages.rb')
+
+RSpec.describe AddProjectValueStreamIdToProjectStages, schema: 20210503105022 do
+ let(:stages) { table(:analytics_cycle_analytics_project_stages) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ let(:namespace) { table(:namespaces).create!(name: 'ns1', path: 'nsq1') }
+
+ before do
+ project = projects.create!(name: 'p1', namespace_id: namespace.id)
+
+ stages.create!(
+ project_id: project.id,
+ created_at: Time.now,
+ updated_at: Time.now,
+ start_event_identifier: 1,
+ end_event_identifier: 2,
+ name: 'stage 1'
+ )
+
+ stages.create!(
+ project_id: project.id,
+ created_at: Time.now,
+ updated_at: Time.now,
+ start_event_identifier: 3,
+ end_event_identifier: 4,
+ name: 'stage 2'
+ )
+ end
+
+ it 'deletes the existing rows' do
+ migrate!
+
+ expect(stages.count).to eq(0)
+ end
+end
diff --git a/spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb b/spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb
new file mode 100644
index 00000000000..6ffaa26f923
--- /dev/null
+++ b/spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210511142748_schedule_drop_invalid_vulnerabilities2.rb')
+
+RSpec.describe ScheduleDropInvalidVulnerabilities2, :migration do
+ let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
+
+ let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
+ let_it_be(:users) { table(:users) }
+ let_it_be(:user) { create_user! }
+ let_it_be(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
+
+ let_it_be(:scanners) { table(:vulnerability_scanners) }
+ let_it_be(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
+ let_it_be(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
+
+ let_it_be(:vulnerabilities) { table(:vulnerabilities) }
+ let_it_be(:vulnerability_with_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_without_finding) do
+ create_vulnerability!(
+ project_id: project.id,
+ author_id: user.id
+ )
+ end
+
+ let_it_be(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
+ let_it_be(:primary_identifier) do
+ vulnerability_identifiers.create!(
+ project_id: project.id,
+ external_type: 'uuid-v5',
+ external_id: 'uuid-v5',
+ fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
+ name: 'Identifier for UUIDv5')
+ end
+
+ let_it_be(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
+ let_it_be(:finding) do
+ create_finding!(
+ vulnerability_id: vulnerability_with_finding.id,
+ project_id: project.id,
+ scanner_id: scanner.id,
+ primary_identifier_id: primary_identifier.id
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ end
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ it 'schedules background migrations' do
+ migrate!
+
+ expect(background_migration_jobs.count).to eq(2)
+ expect(background_migration_jobs.first.arguments).to eq([vulnerability_with_finding.id, vulnerability_with_finding.id])
+ expect(background_migration_jobs.second.arguments).to eq([vulnerability_without_finding.id, vulnerability_without_finding.id])
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, vulnerability_with_finding.id, vulnerability_with_finding.id)
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, vulnerability_without_finding.id, vulnerability_without_finding.id)
+ end
+
+ private
+
+ def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type
+ )
+ end
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_finding!(
+ vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
+ name: "test", severity: 7, confidence: 7, report_type: 0,
+ project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
+ metadata_version: 'test', raw_metadata: 'test', uuid: 'test')
+ vulnerabilities_findings.create!(
+ vulnerability_id: vulnerability_id,
+ project_id: project_id,
+ name: name,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ project_fingerprint: project_fingerprint,
+ scanner_id: scanner_id,
+ primary_identifier_id: primary_identifier_id,
+ location_fingerprint: location_fingerprint,
+ metadata_version: metadata_version,
+ raw_metadata: raw_metadata,
+ uuid: uuid
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: Time.current
+ )
+ end
+end
diff --git a/spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb b/spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb
new file mode 100644
index 00000000000..574020e52d5
--- /dev/null
+++ b/spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe BackfillNugetTemporaryPackagesToProcessingStatus, :migration do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:packages) { table(:packages_packages) }
+
+ before do
+ namespace = namespaces.create!(id: 123, name: 'test_namespace', path: 'test_namespace')
+ project = projects.create!(id: 111, name: 'sample_project', path: 'sample_project', namespace_id: namespace.id)
+
+ packages.create!(name: 'NuGet.Temporary.Package', version: '0.1.1', package_type: 4, status: 0, project_id: project.id)
+ packages.create!(name: 'foo', version: '0.1.1', package_type: 4, status: 0, project_id: project.id)
+ packages.create!(name: 'NuGet.Temporary.Package', version: '0.1.1', package_type: 4, status: 2, project_id: project.id)
+ packages.create!(name: 'NuGet.Temporary.Package', version: '0.1.1', package_type: 1, status: 2, project_id: project.id)
+ packages.create!(name: 'NuGet.Temporary.Package', version: '0.1.1', package_type: 1, status: 0, project_id: project.id)
+ end
+
+ it 'updates the applicable packages to processing status', :aggregate_failures do
+ expect(packages.where(status: 0).count).to eq(3)
+ expect(packages.where(status: 2).count).to eq(2)
+ expect(packages.where(name: 'NuGet.Temporary.Package', package_type: 4, status: 0).count).to eq(1)
+
+ migrate!
+
+ expect(packages.where(status: 0).count).to eq(2)
+ expect(packages.where(status: 2).count).to eq(3)
+ expect(packages.where(name: 'NuGet.Temporary.Package', package_type: 4, status: 0).count).to eq(0)
+ end
+end
diff --git a/spec/migrations/change_web_hook_events_default_spec.rb b/spec/migrations/change_web_hook_events_default_spec.rb
new file mode 100644
index 00000000000..3b1a65ece17
--- /dev/null
+++ b/spec/migrations/change_web_hook_events_default_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20210420012444_change_web_hook_events_default.rb')
+
+RSpec.describe ChangeWebHookEventsDefault do
+ let(:web_hooks) { table(:web_hooks) }
+ let(:projects) { table(:projects) }
+ let(:groups) { table(:namespaces) }
+
+ let(:group) { groups.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(name: 'gitlab', path: 'gitlab', namespace_id: group.id) }
+ let(:hook) { web_hooks.create!(project_id: project.id, type: 'ProjectHook') }
+ let(:group_hook) { web_hooks.create!(group_id: group.id, type: 'GroupHook') }
+
+ before do
+ # Simulate the wrong schema
+ %w(push_events issues_events merge_requests_events tag_push_events).each do |column|
+ ActiveRecord::Base.connection.execute "ALTER TABLE web_hooks ALTER COLUMN #{column} DROP DEFAULT"
+ end
+ end
+
+ it 'sets default values' do
+ migrate!
+
+ expect(hook.push_events).to be true
+ expect(hook.issues_events).to be false
+ expect(hook.merge_requests_events).to be false
+ expect(hook.tag_push_events).to be false
+
+ expect(group_hook.push_events).to be true
+ expect(group_hook.issues_events).to be false
+ expect(group_hook.merge_requests_events).to be false
+ expect(group_hook.tag_push_events).to be false
+ end
+end
diff --git a/spec/migrations/cleanup_projects_with_missing_namespace_spec.rb b/spec/migrations/cleanup_projects_with_missing_namespace_spec.rb
index cef6e0f470f..a50e98faf48 100644
--- a/spec/migrations/cleanup_projects_with_missing_namespace_spec.rb
+++ b/spec/migrations/cleanup_projects_with_missing_namespace_spec.rb
@@ -95,12 +95,12 @@ RSpec.describe CleanupProjectsWithMissingNamespace, :migration, schema: SchemaVe
expect(
described_class::Group
.joins('INNER JOIN members ON namespaces.id = members.source_id')
- .where('namespaces.type = ?', 'Group')
- .where('members.type = ?', 'GroupMember')
- .where('members.source_type = ?', 'Namespace')
- .where('members.user_id = ?', ghost_user.id)
- .where('members.requested_at IS NULL')
- .where('members.access_level = ?', described_class::ACCESS_LEVEL_OWNER)
+ .where(namespaces: { type: 'Group' })
+ .where(members: { type: 'GroupMember' })
+ .where(members: { source_type: 'Namespace' })
+ .where(members: { user_id: ghost_user.id })
+ .where(members: { requested_at: nil })
+ .where(members: { access_level: described_class::ACCESS_LEVEL_OWNER })
.where(
described_class::Group
.arel_table[:name]
diff --git a/spec/migrations/generate_ci_jwt_signing_key_spec.rb b/spec/migrations/generate_ci_jwt_signing_key_spec.rb
index 4cfaa8701aa..249af3bcb50 100644
--- a/spec/migrations/generate_ci_jwt_signing_key_spec.rb
+++ b/spec/migrations/generate_ci_jwt_signing_key_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe GenerateCiJwtSigningKey do
attr_encrypted :ci_jwt_signing_key, {
mode: :per_attribute_iv,
- key: Rails.application.secrets.db_key_base[0..31],
+ key: Gitlab::Utils.ensure_utf8_size(Rails.application.secrets.db_key_base, bytes: 32.bytes),
algorithm: 'aes-256-gcm',
encode: true
}
diff --git a/spec/migrations/move_container_registry_enabled_to_project_features2_spec.rb b/spec/migrations/move_container_registry_enabled_to_project_features3_spec.rb
index 11d43a36bc9..4c50aa2dd10 100644
--- a/spec/migrations/move_container_registry_enabled_to_project_features2_spec.rb
+++ b/spec/migrations/move_container_registry_enabled_to_project_features3_spec.rb
@@ -1,11 +1,16 @@
# frozen_string_literal: true
require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20210401131948_move_container_registry_enabled_to_project_features2.rb')
+require Rails.root.join('db', 'post_migrate', '20210415155043_move_container_registry_enabled_to_project_features3.rb')
-RSpec.describe MoveContainerRegistryEnabledToProjectFeatures2, :migration do
+RSpec.describe MoveContainerRegistryEnabledToProjectFeatures3, :migration do
let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
+ let!(:background_jobs) do
+ table(:background_migration_jobs).create!(class_name: described_class::MIGRATION, arguments: [-1, -2])
+ table(:background_migration_jobs).create!(class_name: described_class::MIGRATION, arguments: [-3, -4])
+ end
+
let!(:projects) do
[
table(:projects).create!(namespace_id: namespace.id, name: 'project 1'),
@@ -28,11 +33,18 @@ RSpec.describe MoveContainerRegistryEnabledToProjectFeatures2, :migration do
end
it 'schedules jobs for ranges of projects' do
+ # old entries in background_migration_jobs should be deleted.
+ expect(table(:background_migration_jobs).count).to eq(2)
+ expect(table(:background_migration_jobs).first.arguments).to eq([-1, -2])
+ expect(table(:background_migration_jobs).second.arguments).to eq([-3, -4])
+
migrate!
# Since track_jobs is true, each job should have an entry in the background_migration_jobs
# table.
expect(table(:background_migration_jobs).count).to eq(2)
+ expect(table(:background_migration_jobs).first.arguments).to eq([projects[0].id, projects[2].id])
+ expect(table(:background_migration_jobs).second.arguments).to eq([projects[3].id, projects[3].id])
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(2.minutes, projects[0].id, projects[2].id)
diff --git a/spec/migrations/remove_hipchat_service_records_spec.rb b/spec/migrations/remove_hipchat_service_records_spec.rb
new file mode 100644
index 00000000000..bc76d7933d8
--- /dev/null
+++ b/spec/migrations/remove_hipchat_service_records_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210420103955_remove_hipchat_service_records.rb')
+
+RSpec.describe RemoveHipchatServiceRecords do
+ let(:services) { table(:services) }
+
+ before do
+ services.create!(type: 'HipchatService')
+ services.create!(type: 'SomeOtherType')
+ end
+
+ it 'removes services records of type HipchatService' do
+ expect(services.count).to eq(2)
+
+ migrate!
+
+ expect(services.count).to eq(1)
+ expect(services.first.type).to eq('SomeOtherType')
+ expect(services.where(type: 'HipchatService')).to be_empty
+ end
+end
diff --git a/spec/migrations/schedule_update_timelogs_project_id_spec.rb b/spec/migrations/schedule_update_timelogs_project_id_spec.rb
new file mode 100644
index 00000000000..e2972d2fd08
--- /dev/null
+++ b/spec/migrations/schedule_update_timelogs_project_id_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210427212034_schedule_update_timelogs_project_id.rb')
+
+RSpec.describe ScheduleUpdateTimelogsProjectId do
+ let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
+ let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
+ let!(:issue) { table(:issues).create!(project_id: project.id) }
+ let!(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') }
+ let!(:timelog1) { table(:timelogs).create!(issue_id: issue.id, time_spent: 60) }
+ let!(:timelog2) { table(:timelogs).create!(merge_request_id: merge_request.id, time_spent: 600) }
+ let!(:timelog3) { table(:timelogs).create!(merge_request_id: merge_request.id, time_spent: 60) }
+ let!(:timelog4) { table(:timelogs).create!(issue_id: issue.id, time_spent: 600) }
+
+ it 'correctly schedules background migrations' do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, timelog1.id, timelog2.id)
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, timelog3.id, timelog4.id)
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/update_invalid_web_hooks_spec.rb b/spec/migrations/update_invalid_web_hooks_spec.rb
new file mode 100644
index 00000000000..a65f82d7082
--- /dev/null
+++ b/spec/migrations/update_invalid_web_hooks_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe UpdateInvalidWebHooks do
+ let(:web_hooks) { table(:web_hooks) }
+ let(:groups) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ before do
+ group = groups.create!(name: 'gitlab', path: 'gitlab-org')
+ project = projects.create!(namespace_id: group.id)
+
+ web_hooks.create!(group_id: group.id, type: 'GroupHook')
+ web_hooks.create!(project_id: project.id, type: 'ProjectHook')
+ web_hooks.create!(group_id: group.id, project_id: project.id, type: 'ProjectHook')
+ end
+
+ it 'clears group_id when ProjectHook type and project_id are present', :aggregate_failures do
+ expect(web_hooks.where.not(group_id: nil).where.not(project_id: nil).count).to eq(1)
+
+ migrate!
+
+ expect(web_hooks.where.not(group_id: nil).where.not(project_id: nil).count).to eq(0)
+ expect(web_hooks.where(type: 'GroupHook').count).to eq(1)
+ expect(web_hooks.where(type: 'ProjectHook').count).to eq(2)
+ end
+end
diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
index fce31af619c..9efe90e7d41 100644
--- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Analytics::CycleAnalytics::ProjectStage do
end
it_behaves_like 'value stream analytics stage' do
+ let(:factory) { :cycle_analytics_project_stage }
let(:parent) { build(:project) }
let(:parent_name) { :project }
end
diff --git a/spec/models/analytics/cycle_analytics/project_value_stream_spec.rb b/spec/models/analytics/cycle_analytics/project_value_stream_spec.rb
new file mode 100644
index 00000000000..d84ecedc634
--- /dev/null
+++ b/spec/models/analytics/cycle_analytics/project_value_stream_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Analytics::CycleAnalytics::ProjectValueStream, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:stages) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(100) }
+
+ it 'validates uniqueness of name' do
+ project = create(:project)
+ create(:cycle_analytics_project_value_stream, name: 'test', project: project)
+
+ value_stream = build(:cycle_analytics_project_value_stream, name: 'test', project: project)
+
+ expect(value_stream).to be_invalid
+ expect(value_stream.errors.messages).to eq(name: [I18n.t('errors.messages.taken')])
+ end
+ end
+
+ it 'is not custom' do
+ expect(described_class.new).not_to be_custom
+ end
+
+ describe '.build_default_value_stream' do
+ it 'builds the default value stream' do
+ project = build(:project)
+
+ value_stream = described_class.build_default_value_stream(project)
+ expect(value_stream.name).to eq('default')
+ end
+ end
+end
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb
index 37eddf9a22a..2817e177d28 100644
--- a/spec/models/appearance_spec.rb
+++ b/spec/models/appearance_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Appearance do
create(:appearance)
new_row = build(:appearance)
- new_row.save
+ expect { new_row.save! }.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Only 1 appearances row can exist')
expect(new_row.valid?).to eq(false)
end
@@ -39,7 +39,7 @@ RSpec.describe Appearance do
end
it 'returns the path when the upload has been orphaned' do
- appearance.send(logo_type).upload.destroy
+ appearance.send(logo_type).upload.destroy!
appearance.reload
expect(appearance.send("#{logo_type}_path")).to eq(expected_path)
diff --git a/spec/models/application_record_spec.rb b/spec/models/application_record_spec.rb
index 7e6ac351e68..24de46cb536 100644
--- a/spec/models/application_record_spec.rb
+++ b/spec/models/application_record_spec.rb
@@ -13,20 +13,24 @@ RSpec.describe ApplicationRecord do
describe '.safe_ensure_unique' do
let(:model) { build(:suggestion) }
+ let_it_be(:note) { create(:diff_note_on_merge_request) }
+
let(:klass) { model.class }
before do
- allow(model).to receive(:save).and_raise(ActiveRecord::RecordNotUnique)
+ allow(model).to receive(:save!).and_raise(ActiveRecord::RecordNotUnique)
end
it 'returns false when ActiveRecord::RecordNotUnique is raised' do
- expect(model).to receive(:save).once
- expect(klass.safe_ensure_unique { model.save }).to be_falsey
+ expect(model).to receive(:save!).once
+ model.note_id = note.id
+ expect(klass.safe_ensure_unique { model.save! }).to be_falsey
end
it 'retries based on retry count specified' do
- expect(model).to receive(:save).exactly(3).times
- expect(klass.safe_ensure_unique(retries: 2) { model.save }).to be_falsey
+ expect(model).to receive(:save!).exactly(3).times
+ model.note_id = note.id
+ expect(klass.safe_ensure_unique(retries: 2) { model.save! }).to be_falsey
end
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 808932ce7e4..4b4e7820f7a 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -129,6 +129,11 @@ RSpec.describe ApplicationSetting do
it { is_expected.not_to allow_value(nil).for(:notes_create_limit_allowlist) }
it { is_expected.to allow_value([]).for(:notes_create_limit_allowlist) }
+ it { is_expected.to allow_value('all_tiers').for(:whats_new_variant) }
+ it { is_expected.to allow_value('current_tier').for(:whats_new_variant) }
+ it { is_expected.to allow_value('disabled').for(:whats_new_variant) }
+ it { is_expected.not_to allow_value(nil).for(:whats_new_variant) }
+
context 'help_page_documentation_base_url validations' do
it { is_expected.to allow_value(nil).for(:help_page_documentation_base_url) }
it { is_expected.to allow_value('https://docs.gitlab.com').for(:help_page_documentation_base_url) }
@@ -211,7 +216,8 @@ RSpec.describe ApplicationSetting do
setting.spam_check_endpoint_enabled = true
end
- it { is_expected.to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.to allow_value('grpc://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.not_to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value(nil).for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('').for(:spam_check_endpoint_url) }
@@ -222,7 +228,8 @@ RSpec.describe ApplicationSetting do
setting.spam_check_endpoint_enabled = false
end
- it { is_expected.to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.to allow_value('grpc://example.org/spam_check').for(:spam_check_endpoint_url) }
+ it { is_expected.not_to allow_value('https://example.org/spam_check').for(:spam_check_endpoint_url) }
it { is_expected.not_to allow_value('nonsense').for(:spam_check_endpoint_url) }
it { is_expected.to allow_value(nil).for(:spam_check_endpoint_url) }
it { is_expected.to allow_value('').for(:spam_check_endpoint_url) }
@@ -245,7 +252,9 @@ RSpec.describe ApplicationSetting do
context "when user accepted let's encrypt terms of service" do
before do
- setting.update(lets_encrypt_terms_of_service_accepted: true)
+ expect do
+ setting.update!(lets_encrypt_terms_of_service_accepted: true)
+ end.to raise_error(ActiveRecord::RecordInvalid, "Validation failed: Lets encrypt notification email can't be blank")
end
it { is_expected.not_to allow_value(nil).for(:lets_encrypt_notification_email) }
@@ -295,26 +304,30 @@ RSpec.describe ApplicationSetting do
describe 'default_artifacts_expire_in' do
it 'sets an error if it cannot parse' do
- setting.update(default_artifacts_expire_in: 'a')
+ expect do
+ setting.update!(default_artifacts_expire_in: 'a')
+ end.to raise_error(ActiveRecord::RecordInvalid, "Validation failed: Default artifacts expire in is not a correct duration")
expect_invalid
end
it 'sets an error if it is blank' do
- setting.update(default_artifacts_expire_in: ' ')
+ expect do
+ setting.update!(default_artifacts_expire_in: ' ')
+ end.to raise_error(ActiveRecord::RecordInvalid, "Validation failed: Default artifacts expire in can't be blank")
expect_invalid
end
it 'sets the value if it is valid' do
- setting.update(default_artifacts_expire_in: '30 days')
+ setting.update!(default_artifacts_expire_in: '30 days')
expect(setting).to be_valid
expect(setting.default_artifacts_expire_in).to eq('30 days')
end
it 'sets the value if it is 0' do
- setting.update(default_artifacts_expire_in: '0')
+ setting.update!(default_artifacts_expire_in: '0')
expect(setting).to be_valid
expect(setting.default_artifacts_expire_in).to eq('0')
@@ -393,18 +406,18 @@ RSpec.describe ApplicationSetting do
context 'auto_devops_domain setting' do
context 'when auto_devops_enabled? is true' do
before do
- setting.update(auto_devops_enabled: true)
+ setting.update!(auto_devops_enabled: true)
end
it 'can be blank' do
- setting.update(auto_devops_domain: '')
+ setting.update!(auto_devops_domain: '')
expect(setting).to be_valid
end
context 'with a valid value' do
before do
- setting.update(auto_devops_domain: 'domain.com')
+ setting.update!(auto_devops_domain: 'domain.com')
end
it 'is valid' do
@@ -414,7 +427,9 @@ RSpec.describe ApplicationSetting do
context 'with an invalid value' do
before do
- setting.update(auto_devops_domain: 'definitelynotahostname')
+ expect do
+ setting.update!(auto_devops_domain: 'definitelynotahostname')
+ end.to raise_error(ActiveRecord::RecordInvalid, "Validation failed: Auto devops domain is not a fully qualified domain name")
end
it 'is invalid' do
@@ -785,6 +800,10 @@ RSpec.describe ApplicationSetting do
throttle_authenticated_api_period_in_seconds
throttle_authenticated_web_requests_per_period
throttle_authenticated_web_period_in_seconds
+ throttle_unauthenticated_packages_api_requests_per_period
+ throttle_unauthenticated_packages_api_period_in_seconds
+ throttle_authenticated_packages_api_requests_per_period
+ throttle_authenticated_packages_api_period_in_seconds
]
end
diff --git a/spec/models/board_group_recent_visit_spec.rb b/spec/models/board_group_recent_visit_spec.rb
index c6fbd263072..d2d287d8e24 100644
--- a/spec/models/board_group_recent_visit_spec.rb
+++ b/spec/models/board_group_recent_visit_spec.rb
@@ -3,9 +3,8 @@
require 'spec_helper'
RSpec.describe BoardGroupRecentVisit do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:board) { create(:board, group: group) }
+ let_it_be(:board_parent) { create(:group) }
+ let_it_be(:board) { create(:board, group: board_parent) }
describe 'relationships' do
it { is_expected.to belong_to(:user) }
@@ -19,56 +18,9 @@ RSpec.describe BoardGroupRecentVisit do
it { is_expected.to validate_presence_of(:board) }
end
- describe '#visited' do
- it 'creates a visit if one does not exists' do
- expect { described_class.visited!(user, board) }.to change(described_class, :count).by(1)
- end
-
- shared_examples 'was visited previously' do
- let!(:visit) { create :board_group_recent_visit, group: board.group, board: board, user: user, updated_at: 7.days.ago }
-
- it 'updates the timestamp' do
- freeze_time do
- described_class.visited!(user, board)
-
- expect(described_class.count).to eq 1
- expect(described_class.first.updated_at).to be_like_time(Time.zone.now)
- end
- end
- end
-
- it_behaves_like 'was visited previously'
-
- context 'when we try to create a visit that is not unique' do
- before do
- expect(described_class).to receive(:find_or_create_by).and_raise(ActiveRecord::RecordNotUnique, 'record not unique')
- expect(described_class).to receive(:find_or_create_by).and_return(visit)
- end
-
- it_behaves_like 'was visited previously'
- end
- end
-
- describe '#latest' do
- def create_visit(time)
- create :board_group_recent_visit, group: group, user: user, updated_at: time
- end
-
- it 'returns the most recent visited' do
- create_visit(7.days.ago)
- create_visit(5.days.ago)
- recent = create_visit(1.day.ago)
-
- expect(described_class.latest(user, group)).to eq recent
- end
-
- it 'returns last 3 visited boards' do
- create_visit(7.days.ago)
- visit1 = create_visit(3.days.ago)
- visit2 = create_visit(2.days.ago)
- visit3 = create_visit(5.days.ago)
-
- expect(described_class.latest(user, group, count: 3)).to eq([visit2, visit1, visit3])
- end
+ it_behaves_like 'boards recent visit' do
+ let_it_be(:board_relation) { :board }
+ let_it_be(:board_parent_relation) { :group }
+ let_it_be(:visit_relation) { :board_group_recent_visit }
end
end
diff --git a/spec/models/board_project_recent_visit_spec.rb b/spec/models/board_project_recent_visit_spec.rb
index 145a4f5b1a7..262c3a8faaa 100644
--- a/spec/models/board_project_recent_visit_spec.rb
+++ b/spec/models/board_project_recent_visit_spec.rb
@@ -3,9 +3,8 @@
require 'spec_helper'
RSpec.describe BoardProjectRecentVisit do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:board) { create(:board, project: project) }
+ let_it_be(:board_parent) { create(:project) }
+ let_it_be(:board) { create(:board, project: board_parent) }
describe 'relationships' do
it { is_expected.to belong_to(:user) }
@@ -19,56 +18,9 @@ RSpec.describe BoardProjectRecentVisit do
it { is_expected.to validate_presence_of(:board) }
end
- describe '#visited' do
- it 'creates a visit if one does not exists' do
- expect { described_class.visited!(user, board) }.to change(described_class, :count).by(1)
- end
-
- shared_examples 'was visited previously' do
- let!(:visit) { create :board_project_recent_visit, project: board.project, board: board, user: user, updated_at: 7.days.ago }
-
- it 'updates the timestamp' do
- freeze_time do
- described_class.visited!(user, board)
-
- expect(described_class.count).to eq 1
- expect(described_class.first.updated_at).to be_like_time(Time.zone.now)
- end
- end
- end
-
- it_behaves_like 'was visited previously'
-
- context 'when we try to create a visit that is not unique' do
- before do
- expect(described_class).to receive(:find_or_create_by).and_raise(ActiveRecord::RecordNotUnique, 'record not unique')
- expect(described_class).to receive(:find_or_create_by).and_return(visit)
- end
-
- it_behaves_like 'was visited previously'
- end
- end
-
- describe '#latest' do
- def create_visit(time)
- create :board_project_recent_visit, project: project, user: user, updated_at: time
- end
-
- it 'returns the most recent visited' do
- create_visit(7.days.ago)
- create_visit(5.days.ago)
- recent = create_visit(1.day.ago)
-
- expect(described_class.latest(user, project)).to eq recent
- end
-
- it 'returns last 3 visited boards' do
- create_visit(7.days.ago)
- visit1 = create_visit(3.days.ago)
- visit2 = create_visit(2.days.ago)
- visit3 = create_visit(5.days.ago)
-
- expect(described_class.latest(user, project, count: 3)).to eq([visit2, visit1, visit3])
- end
+ it_behaves_like 'boards recent visit' do
+ let_it_be(:board_relation) { :board }
+ let_it_be(:board_parent_relation) { :project }
+ let_it_be(:visit_relation) { :board_project_recent_visit }
end
end
diff --git a/spec/models/board_spec.rb b/spec/models/board_spec.rb
index c8a9504d4fc..0b7c21fd0c3 100644
--- a/spec/models/board_spec.rb
+++ b/spec/models/board_spec.rb
@@ -42,4 +42,46 @@ RSpec.describe Board do
expect { project.boards.first_board.find(board_A.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ describe '#disabled_for?' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+
+ subject { board.disabled_for?(user) }
+
+ shared_examples 'board disabled_for?' do
+ context 'when current user cannot create non backlog issues' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when user can create backlog issues' do
+ before do
+ board.resource_parent.add_reporter(user)
+ end
+
+ it { is_expected.to eq(false) }
+
+ context 'when block_issue_repositioning is enabled' do
+ before do
+ stub_feature_flags(block_issue_repositioning: group)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
+ context 'for group board' do
+ let_it_be(:board) { create(:board, group: group) }
+
+ it_behaves_like 'board disabled_for?'
+ end
+
+ context 'for project board' do
+ let_it_be(:board) { create(:board, project: project) }
+
+ it_behaves_like 'board disabled_for?'
+ end
+ end
end
diff --git a/spec/models/broadcast_message_spec.rb b/spec/models/broadcast_message_spec.rb
index c4d17905637..d981189c6f1 100644
--- a/spec/models/broadcast_message_spec.rb
+++ b/spec/models/broadcast_message_spec.rb
@@ -120,6 +120,12 @@ RSpec.describe BroadcastMessage do
expect(subject.call('/users/name/issues').length).to eq(1)
end
+ it 'returns message if provided a path without a preceding slash' do
+ create(:broadcast_message, target_path: "/users/*/issues", broadcast_type: broadcast_type)
+
+ expect(subject.call('users/name/issues').length).to eq(1)
+ end
+
it 'returns the message for empty target path' do
create(:broadcast_message, target_path: "", broadcast_type: broadcast_type)
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 652ea431696..d1b7125a6e6 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -125,4 +125,13 @@ RSpec.describe BulkImports::Entity, type: :model do
end
end
end
+
+ describe '#encoded_source_full_path' do
+ it 'encodes entity source full path' do
+ expected = 'foo%2Fbar'
+ entity = build(:bulk_import_entity, source_full_path: 'foo/bar')
+
+ expect(entity.encoded_source_full_path).to eq(expected)
+ end
+ end
end
diff --git a/spec/models/bulk_imports/export_spec.rb b/spec/models/bulk_imports/export_spec.rb
new file mode 100644
index 00000000000..d85b77d599b
--- /dev/null
+++ b/spec/models/bulk_imports/export_spec.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Export, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to(:group) }
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_one(:upload) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:relation) }
+ it { is_expected.to validate_presence_of(:status) }
+
+ context 'when not associated with a group or project' do
+ it 'is invalid' do
+ export = build(:bulk_import_export, group: nil, project: nil)
+
+ expect(export).not_to be_valid
+ end
+ end
+
+ context 'when associated with a group' do
+ it 'is valid' do
+ export = build(:bulk_import_export, group: build(:group), project: nil)
+
+ expect(export).to be_valid
+ end
+ end
+
+ context 'when associated with a project' do
+ it 'is valid' do
+ export = build(:bulk_import_export, group: nil, project: build(:project))
+
+ expect(export).to be_valid
+ end
+ end
+
+ context 'when relation is invalid' do
+ it 'is invalid' do
+ export = build(:bulk_import_export, relation: 'unsupported')
+
+ expect(export).not_to be_valid
+ expect(export.errors).to include(:relation)
+ end
+ end
+ end
+
+ describe '#portable' do
+ context 'when associated with project' do
+ it 'returns project' do
+ export = create(:bulk_import_export, project: create(:project), group: nil)
+
+ expect(export.portable).to be_instance_of(Project)
+ end
+ end
+
+ context 'when associated with group' do
+ it 'returns group' do
+ export = create(:bulk_import_export)
+
+ expect(export.portable).to be_instance_of(Group)
+ end
+ end
+ end
+
+ describe '#config' do
+ context 'when associated with project' do
+ it 'returns project config' do
+ export = create(:bulk_import_export, project: create(:project), group: nil)
+
+ expect(export.config).to be_instance_of(BulkImports::FileTransfer::ProjectConfig)
+ end
+ end
+
+ context 'when associated with group' do
+ it 'returns group config' do
+ export = create(:bulk_import_export)
+
+ expect(export.config).to be_instance_of(BulkImports::FileTransfer::GroupConfig)
+ end
+ end
+ end
+end
diff --git a/spec/models/bulk_imports/export_upload_spec.rb b/spec/models/bulk_imports/export_upload_spec.rb
new file mode 100644
index 00000000000..641fa4a1b6c
--- /dev/null
+++ b/spec/models/bulk_imports/export_upload_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::ExportUpload do
+ subject { described_class.new(export: create(:bulk_import_export)) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:export) }
+ end
+
+ it 'stores export file' do
+ method = 'export_file'
+ filename = 'labels.ndjson.gz'
+
+ subject.public_send("#{method}=", fixture_file_upload("spec/fixtures/bulk_imports/#{filename}"))
+ subject.save!
+
+ url = "/uploads/-/system/bulk_imports/export_upload/export_file/#{subject.id}/#{filename}"
+
+ expect(subject.public_send(method).url).to eq(url)
+ end
+end
diff --git a/spec/models/bulk_imports/file_transfer/group_config_spec.rb b/spec/models/bulk_imports/file_transfer/group_config_spec.rb
new file mode 100644
index 00000000000..21da71de3c7
--- /dev/null
+++ b/spec/models/bulk_imports/file_transfer/group_config_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::FileTransfer::GroupConfig do
+ let_it_be(:exportable) { create(:group) }
+ let_it_be(:hex) { '123' }
+
+ before do
+ allow(SecureRandom).to receive(:hex).and_return(hex)
+ end
+
+ subject { described_class.new(exportable) }
+
+ describe '#exportable_tree' do
+ it 'returns exportable tree' do
+ expect_next_instance_of(::Gitlab::ImportExport::AttributesFinder) do |finder|
+ expect(finder).to receive(:find_root).with(:group).and_call_original
+ end
+
+ expect(subject.portable_tree).not_to be_empty
+ end
+ end
+
+ describe '#export_path' do
+ it 'returns correct export path' do
+ expect(::Gitlab::ImportExport).to receive(:storage_path).and_return('storage_path')
+
+ expect(subject.export_path).to eq("storage_path/#{exportable.full_path}/#{hex}")
+ end
+ end
+
+ describe '#exportable_relations' do
+ it 'returns a list of top level exportable relations' do
+ expect(subject.portable_relations).to include('milestones', 'badges', 'boards', 'labels')
+ end
+ end
+end
diff --git a/spec/models/bulk_imports/file_transfer/project_config_spec.rb b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
new file mode 100644
index 00000000000..021f96ac2a3
--- /dev/null
+++ b/spec/models/bulk_imports/file_transfer/project_config_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::FileTransfer::ProjectConfig do
+ let_it_be(:exportable) { create(:project) }
+ let_it_be(:hex) { '123' }
+
+ before do
+ allow(SecureRandom).to receive(:hex).and_return(hex)
+ end
+
+ subject { described_class.new(exportable) }
+
+ describe '#exportable_tree' do
+ it 'returns exportable tree' do
+ expect_next_instance_of(::Gitlab::ImportExport::AttributesFinder) do |finder|
+ expect(finder).to receive(:find_root).with(:project).and_call_original
+ end
+
+ expect(subject.portable_tree).not_to be_empty
+ end
+ end
+
+ describe '#export_path' do
+ it 'returns correct export path' do
+ expect(::Gitlab::ImportExport).to receive(:storage_path).and_return('storage_path')
+
+ expect(subject.export_path).to eq("storage_path/#{exportable.disk_path}/#{hex}")
+ end
+ end
+
+ describe '#exportable_relations' do
+ it 'returns a list of top level exportable relations' do
+ expect(subject.portable_relations).to include('issues', 'labels', 'milestones', 'merge_requests')
+ end
+ end
+end
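Both FileTransfer config specs expect export_path to join the Import/Export storage path, the portable's relative path (full_path for groups, disk_path for projects), and a random hex suffix. A hedged sketch of that construction, using an illustrative helper name rather than GitLab's source:

```ruby
require 'securerandom'

# Illustrative only: build an export path of the shape the specs assert,
# "<storage_path>/<relative path>/<random hex>".
def export_path_for(storage_path, relative_path)
  File.join(storage_path, relative_path, SecureRandom.hex)
end

export_path_for('storage_path', 'my-group/my-project')
# => e.g. "storage_path/my-group/my-project/3f9c2a..."
```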
diff --git a/spec/models/bulk_imports/file_transfer_spec.rb b/spec/models/bulk_imports/file_transfer_spec.rb
new file mode 100644
index 00000000000..5a2b303626c
--- /dev/null
+++ b/spec/models/bulk_imports/file_transfer_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::FileTransfer do
+ describe '.config_for' do
+ context 'when portable is group' do
+ it 'returns group config' do
+ expect(described_class.config_for(build(:group))).to be_instance_of(BulkImports::FileTransfer::GroupConfig)
+ end
+ end
+
+ context 'when portable is project' do
+ it 'returns project config' do
+ expect(described_class.config_for(build(:project))).to be_instance_of(BulkImports::FileTransfer::ProjectConfig)
+ end
+ end
+
+ context 'when portable is unsupported' do
+ it 'raises an error' do
+ expect { described_class.config_for(nil) }.to raise_error(BulkImports::FileTransfer::UnsupportedObjectType)
+ end
+ end
+ end
+end
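The .config_for examples exercise a type-based dispatch: a group portable yields a GroupConfig, a project yields a ProjectConfig, and anything else raises UnsupportedObjectType. A self-contained sketch of that pattern, with local stand-in classes instead of GitLab's models:

```ruby
# Stand-in classes so the sketch runs on its own; not GitLab's code.
class Group; end
class Project; end
GroupConfig   = Struct.new(:portable)
ProjectConfig = Struct.new(:portable)
UnsupportedObjectType = Class.new(StandardError)

# Dispatch on the portable's type, mirroring what the spec asserts.
def config_for(portable)
  case portable
  when Group   then GroupConfig.new(portable)
  when Project then ProjectConfig.new(portable)
  else raise UnsupportedObjectType, "unsupported: #{portable.class}"
  end
end

config_for(Group.new) # => #<struct GroupConfig ...>
config_for(nil)       # raises UnsupportedObjectType
```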
diff --git a/spec/models/bulk_imports/stage_spec.rb b/spec/models/bulk_imports/stage_spec.rb
deleted file mode 100644
index 7765fd4c5c4..00000000000
--- a/spec/models/bulk_imports/stage_spec.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Stage do
- let(:pipelines) do
- if Gitlab.ee?
- [
- [0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
- [1, BulkImports::Groups::Pipelines::MembersPipeline],
- [1, BulkImports::Groups::Pipelines::LabelsPipeline],
- [1, BulkImports::Groups::Pipelines::MilestonesPipeline],
- [1, BulkImports::Groups::Pipelines::BadgesPipeline],
- [1, 'BulkImports::Groups::Pipelines::IterationsPipeline'.constantize],
- [2, 'BulkImports::Groups::Pipelines::EpicsPipeline'.constantize],
- [3, 'BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline'.constantize],
- [3, 'BulkImports::Groups::Pipelines::EpicEventsPipeline'.constantize],
- [4, BulkImports::Groups::Pipelines::EntityFinisher]
- ]
- else
- [
- [0, BulkImports::Groups::Pipelines::GroupPipeline],
- [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
- [1, BulkImports::Groups::Pipelines::MembersPipeline],
- [1, BulkImports::Groups::Pipelines::LabelsPipeline],
- [1, BulkImports::Groups::Pipelines::MilestonesPipeline],
- [1, BulkImports::Groups::Pipelines::BadgesPipeline],
- [2, BulkImports::Groups::Pipelines::EntityFinisher]
- ]
- end
- end
-
- describe '.pipelines' do
- it 'list all the pipelines with their stage number, ordered by stage' do
- expect(described_class.pipelines).to match_array(pipelines)
- end
- end
-
- describe '.pipeline_exists?' do
- it 'returns true when the given pipeline name exists in the pipelines list' do
- expect(described_class.pipeline_exists?(BulkImports::Groups::Pipelines::GroupPipeline)).to eq(true)
- expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::GroupPipeline')).to eq(true)
- end
-
- it 'returns false when the given pipeline name exists in the pipelines list' do
- expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::InexistentPipeline')).to eq(false)
- end
- end
-end
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 623e55aad21..4d77bd53158 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -6,11 +6,11 @@ RSpec.describe ChatName do
let_it_be(:chat_name) { create(:chat_name) }
subject { chat_name }
- it { is_expected.to belong_to(:service) }
+ it { is_expected.to belong_to(:integration) }
it { is_expected.to belong_to(:user) }
it { is_expected.to validate_presence_of(:user) }
- it { is_expected.to validate_presence_of(:service) }
+ it { is_expected.to validate_presence_of(:integration) }
it { is_expected.to validate_presence_of(:team_id) }
it { is_expected.to validate_presence_of(:chat_id) }
@@ -18,7 +18,7 @@ RSpec.describe ChatName do
it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:service_id, :team_id) }
it 'is removed when the project is deleted' do
- expect { subject.reload.service.project.delete }.to change { ChatName.count }.by(-1)
+ expect { subject.reload.integration.project.delete }.to change { ChatName.count }.by(-1)
expect(ChatName.where(id: subject.id)).not_to exist
end
diff --git a/spec/models/ci/build_dependencies_spec.rb b/spec/models/ci/build_dependencies_spec.rb
index e343ec0e698..d00d88ae397 100644
--- a/spec/models/ci/build_dependencies_spec.rb
+++ b/spec/models/ci/build_dependencies_spec.rb
@@ -18,12 +18,8 @@ RSpec.describe Ci::BuildDependencies do
let!(:rubocop_test) { create(:ci_build, pipeline: pipeline, name: 'rubocop', stage_idx: 1, stage: 'test') }
let!(:staging) { create(:ci_build, pipeline: pipeline, name: 'staging', stage_idx: 2, stage: 'deploy') }
- before do
- stub_feature_flags(ci_validate_build_dependencies_override: false)
- end
-
- describe '#local' do
- subject { described_class.new(job).local }
+ context 'for local dependencies' do
+ subject { described_class.new(job).all }
describe 'jobs from previous stages' do
context 'when job is in the first stage' do
@@ -52,7 +48,7 @@ RSpec.describe Ci::BuildDependencies do
project.add_developer(user)
end
- let(:retried_job) { Ci::Build.retry(rspec_test, user) }
+ let!(:retried_job) { Ci::Build.retry(rspec_test, user) }
it 'contains the retried job instead of the original one' do
is_expected.to contain_exactly(build, retried_job, rubocop_test)
@@ -150,7 +146,7 @@ RSpec.describe Ci::BuildDependencies do
end
end
- describe '#cross_pipeline' do
+ context 'for cross_pipeline dependencies' do
let!(:job) do
create(:ci_build,
pipeline: pipeline,
@@ -160,7 +156,7 @@ RSpec.describe Ci::BuildDependencies do
subject { described_class.new(job) }
- let(:cross_pipeline_deps) { subject.cross_pipeline }
+ let(:cross_pipeline_deps) { subject.all }
context 'when dependency specifications are valid' do
context 'when pipeline exists in the hierarchy' do
@@ -378,14 +374,6 @@ RSpec.describe Ci::BuildDependencies do
end
it { is_expected.to eq(false) }
-
- context 'when ci_validate_build_dependencies_override feature flag is enabled' do
- before do
- stub_feature_flags(ci_validate_build_dependencies_override: job.project)
- end
-
- it { is_expected.to eq(true) }
- end
end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 339dffa507f..66d2f5f4ee9 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1132,7 +1132,7 @@ RSpec.describe Ci::Build do
it "executes UPDATE query" do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expect(recorded.log.select { |l| l.match?(/UPDATE.*ci_builds/) }.count).to eq(1)
+ expect(recorded.log.count { |l| l.match?(/UPDATE.*ci_builds/) }).to eq(1)
end
end
@@ -1140,7 +1140,7 @@ RSpec.describe Ci::Build do
it 'does not execute UPDATE query' do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expect(recorded.log.select { |l| l.match?(/UPDATE.*ci_builds/) }.count).to eq(0)
+ expect(recorded.log.count { |l| l.match?(/UPDATE.*ci_builds/) }).to eq(0)
end
end
end
@@ -1205,7 +1205,7 @@ RSpec.describe Ci::Build do
before do
allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
+ allow(Deployments::HooksWorker).to receive(:perform_async)
end
it 'has deployments record with created status' do
@@ -1241,7 +1241,7 @@ RSpec.describe Ci::Build do
before do
allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
- allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
+ allow(Deployments::HooksWorker).to receive(:perform_async)
end
it_behaves_like 'avoid deadlock'
@@ -3631,46 +3631,29 @@ RSpec.describe Ci::Build do
end
let!(:job) { create(:ci_build, :pending, pipeline: pipeline, stage_idx: 1, options: options) }
+ let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
- context 'when validates for dependencies is enabled' do
- before do
- stub_feature_flags(ci_validate_build_dependencies_override: false)
- end
-
- let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
-
- context 'when "dependencies" keyword is not defined' do
- let(:options) { {} }
-
- it { expect(job).to have_valid_build_dependencies }
- end
-
- context 'when "dependencies" keyword is empty' do
- let(:options) { { dependencies: [] } }
+ context 'when "dependencies" keyword is not defined' do
+ let(:options) { {} }
- it { expect(job).to have_valid_build_dependencies }
- end
+ it { expect(job).to have_valid_build_dependencies }
+ end
- context 'when "dependencies" keyword is specified' do
- let(:options) { { dependencies: ['test'] } }
+ context 'when "dependencies" keyword is empty' do
+ let(:options) { { dependencies: [] } }
- it_behaves_like 'validation is active'
- end
+ it { expect(job).to have_valid_build_dependencies }
end
- context 'when validates for dependencies is disabled' do
+ context 'when "dependencies" keyword is specified' do
let(:options) { { dependencies: ['test'] } }
- before do
- stub_feature_flags(ci_validate_build_dependencies_override: true)
- end
-
- it_behaves_like 'validation is not active'
+ it_behaves_like 'validation is active'
end
end
describe 'state transition when build fails' do
- let(:service) { ::MergeRequests::AddTodoWhenBuildFailsService.new(project, user) }
+ let(:service) { ::MergeRequests::AddTodoWhenBuildFailsService.new(project: project, current_user: user) }
before do
allow(::MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).and_return(service)
@@ -4679,25 +4662,30 @@ RSpec.describe Ci::Build do
end
describe '#execute_hooks' do
+ before do
+ build.clear_memoization(:build_data)
+ end
+
context 'with project hooks' do
+ let(:build_data) { double(:BuildData, dup: double(:DupedData)) }
+
before do
create(:project_hook, project: project, job_events: true)
end
- it 'execute hooks' do
- expect_any_instance_of(ProjectHook).to receive(:async_execute)
+ it 'calls project.execute_hooks(build_data, :job_hooks)' do
+ expect(::Gitlab::DataBuilder::Build)
+ .to receive(:build).with(build).and_return(build_data)
+ expect(build.project)
+ .to receive(:execute_hooks).with(build_data.dup, :job_hooks)
build.execute_hooks
end
end
- context 'without relevant project hooks' do
- before do
- create(:project_hook, project: project, job_events: false)
- end
-
- it 'does not execute a hook' do
- expect_any_instance_of(ProjectHook).not_to receive(:async_execute)
+ context 'without project hooks' do
+ it 'does not call project.execute_hooks' do
+ expect(build.project).not_to receive(:execute_hooks)
build.execute_hooks
end
@@ -4708,8 +4696,10 @@ RSpec.describe Ci::Build do
create(:service, active: true, job_events: true, project: project)
end
- it 'execute services' do
- expect_any_instance_of(Service).to receive(:async_execute)
+ it 'executes services' do
+ allow_next_found_instance_of(Integration) do |integration|
+ expect(integration).to receive(:async_execute)
+ end
build.execute_hooks
end
@@ -4720,8 +4710,10 @@ RSpec.describe Ci::Build do
create(:service, active: true, job_events: false, project: project)
end
- it 'execute services' do
- expect_any_instance_of(Service).not_to receive(:async_execute)
+ it 'does not execute services' do
+ allow_next_found_instance_of(Integration) do |integration|
+ expect(integration).not_to receive(:async_execute)
+ end
build.execute_hooks
end
diff --git a/spec/models/ci/commit_with_pipeline_spec.rb b/spec/models/ci/commit_with_pipeline_spec.rb
index 4dd288bde62..320143535e2 100644
--- a/spec/models/ci/commit_with_pipeline_spec.rb
+++ b/spec/models/ci/commit_with_pipeline_spec.rb
@@ -26,15 +26,47 @@ RSpec.describe Ci::CommitWithPipeline do
end
end
+ describe '#lazy_latest_pipeline' do
+ let(:commit_1) do
+ described_class.new(Commit.new(RepoHelpers.sample_commit, project))
+ end
+
+ let(:commit_2) do
+ described_class.new(Commit.new(RepoHelpers.another_sample_commit, project))
+ end
+
+ let!(:commits) { [commit_1, commit_2] }
+
+ it 'executes only 1 SQL query' do
+ recorder = ActiveRecord::QueryRecorder.new do
+ # Running this first ensures we don't run one query for every
+ # commit.
+ commits.each(&:lazy_latest_pipeline)
+
+ # This forces the execution of the SQL queries necessary to load the
+ # data.
+ commits.each { |c| c.latest_pipeline.try(:id) }
+ end
+
+ expect(recorder.count).to eq(1)
+ end
+ end
+
describe '#latest_pipeline' do
let(:pipeline) { double }
shared_examples_for 'fetching latest pipeline' do |ref|
it 'returns the latest pipeline for the project' do
- expect(commit)
- .to receive(:latest_pipeline_for_project)
- .with(ref, project)
- .and_return(pipeline)
+ if ref
+ expect(commit)
+ .to receive(:latest_pipeline_for_project)
+ .with(ref, project)
+ .and_return(pipeline)
+ else
+ expect(commit)
+ .to receive(:lazy_latest_pipeline)
+ .and_return(pipeline)
+ end
expect(result).to eq(pipeline)
end
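The #lazy_latest_pipeline example asserts that resolving the latest pipeline for several commits collapses into a single SQL query. GitLab commonly implements this kind of lazy resolution with the batch-loader gem; the sketch below shows that general pattern under that assumption, with a stand-in Pipeline model rather than Ci::Pipeline:

```ruby
require 'batch_loader' # gem "batch-loader"

# Illustrative pattern: each commit requests its pipeline lazily; the batch
# block runs once for all collected SHAs, so N commits cost one query.
def lazy_pipeline_for(sha)
  BatchLoader.for(sha).batch do |shas, loader|
    # `Pipeline` is a stand-in ActiveRecord model, not GitLab's Ci::Pipeline.
    Pipeline.where(sha: shas).group_by(&:sha).each do |pipeline_sha, pipelines|
      loader.call(pipeline_sha, pipelines.max_by(&:id))
    end
  end
end
```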
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index cdb123573f1..3c4769764d5 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -602,6 +602,34 @@ RSpec.describe Ci::JobArtifact do
end
end
+ context 'FastDestroyAll' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:job) { create(:ci_build, pipeline: pipeline, project: project) }
+
+ let!(:job_artifact) { create(:ci_job_artifact, :archive, job: job) }
+ let(:subjects) { pipeline.job_artifacts }
+
+ describe '.use_fast_destroy' do
+ it 'performs cascading delete with fast_destroy_all' do
+ expect(Ci::DeletedObject.count).to eq(0)
+ expect(subjects.count).to be > 0
+
+ expect { pipeline.destroy! }.not_to raise_error
+
+ expect(subjects.count).to eq(0)
+ expect(Ci::DeletedObject.count).to be > 0
+ end
+
+ it 'updates project statistics' do
+ expect(ProjectStatistics).to receive(:increment_statistic).once
+ .with(project, :build_artifacts_size, -job_artifact.file.size)
+
+ pipeline.destroy!
+ end
+ end
+ end
+
def file_type_limit_failure_message(type, limit_name)
<<~MSG
The artifact type `#{type}` is missing its counterpart plan limit which is expected to be named `#{limit_name}`.
diff --git a/spec/models/ci/pipeline_artifact_spec.rb b/spec/models/ci/pipeline_artifact_spec.rb
index 3fe09f05cab..f65483d2290 100644
--- a/spec/models/ci/pipeline_artifact_spec.rb
+++ b/spec/models/ci/pipeline_artifact_spec.rb
@@ -50,6 +50,30 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
end
end
+ describe 'scopes' do
+ describe '.unlocked' do
+ subject(:pipeline_artifacts) { described_class.unlocked }
+
+ context 'when pipeline is locked' do
+ it 'returns an empty collection' do
+ expect(pipeline_artifacts).to be_empty
+ end
+ end
+
+ context 'when pipeline is unlocked' do
+ before do
+ create(:ci_pipeline_artifact, :with_coverage_report)
+ end
+
+ it 'returns unlocked artifacts' do
+ codequality_report = create(:ci_pipeline_artifact, :with_codequality_mr_diff_report, :unlocked)
+
+ expect(pipeline_artifacts).to eq([codequality_report])
+ end
+ end
+ end
+ end
+
describe 'file is being stored' do
subject { create(:ci_pipeline_artifact, :with_coverage_report) }
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 3e5fbbfe823..d5560edbbfd 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -126,16 +126,6 @@ RSpec.describe Ci::PipelineSchedule do
end
end
- context 'when pipeline schedule runs every minute' do
- let(:pipeline_schedule) { create(:ci_pipeline_schedule, :every_minute) }
-
- it "updates next_run_at to the sidekiq worker's execution time" do
- travel_to(Time.zone.parse("2019-06-01 12:18:00+0000")) do
- expect(pipeline_schedule.next_run_at).to eq(cron_worker_next_run_at)
- end
- end
- end
-
context 'when there are two different pipeline schedules in different time zones' do
let(:pipeline_schedule_1) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'Eastern Time (US & Canada)') }
let(:pipeline_schedule_2) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
@@ -144,24 +134,6 @@ RSpec.describe Ci::PipelineSchedule do
expect(pipeline_schedule_1.next_run_at).not_to eq(pipeline_schedule_2.next_run_at)
end
end
-
- context 'when there are two different pipeline schedules in the same time zones' do
- let(:pipeline_schedule_1) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
- let(:pipeline_schedule_2) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
-
- it 'sets the sames next_run_at' do
- expect(pipeline_schedule_1.next_run_at).to eq(pipeline_schedule_2.next_run_at)
- end
- end
-
- context 'when updates cron of exsisted pipeline schedule' do
- let(:new_cron) { '0 0 1 1 *' }
-
- it 'updates next_run_at automatically' do
- expect { pipeline_schedule.update!(cron: new_cron) }
- .to change { pipeline_schedule.next_run_at }
- end
- end
end
describe '#schedule_next_run!' do
@@ -178,7 +150,7 @@ RSpec.describe Ci::PipelineSchedule do
context 'when record is invalid' do
before do
- allow(pipeline_schedule).to receive(:save!) { raise ActiveRecord::RecordInvalid.new(pipeline_schedule) }
+ allow(pipeline_schedule).to receive(:save!) { raise ActiveRecord::RecordInvalid, pipeline_schedule }
end
it 'nullifies the next run at' do
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index b7f5811e945..b9457055a18 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -68,14 +68,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '#downloadable_artifacts' do
- let(:build) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:downloadable_artifact) { create(:ci_job_artifact, :codequality, job: build) }
+ let_it_be(:expired_artifact) { create(:ci_job_artifact, :junit, :expired, job: build) }
+ let_it_be(:undownloadable_artifact) { create(:ci_job_artifact, :trace, job: build) }
+
+ context 'when artifacts are locked' do
+ it 'returns downloadable artifacts including locked artifacts' do
+ expect(pipeline.downloadable_artifacts).to contain_exactly(downloadable_artifact, expired_artifact)
+ end
+ end
- it 'returns downloadable artifacts that have not expired' do
- downloadable_artifact = create(:ci_job_artifact, :codequality, job: build)
- _expired_artifact = create(:ci_job_artifact, :junit, :expired, job: build)
- _undownloadable_artifact = create(:ci_job_artifact, :trace, job: build)
+ context 'when artifacts are unlocked' do
+ it 'returns only downloadable artifacts that have not expired' do
+ expired_artifact.job.pipeline.unlocked!
- expect(pipeline.downloadable_artifacts).to contain_exactly(downloadable_artifact)
+ expect(pipeline.reload.downloadable_artifacts).to contain_exactly(downloadable_artifact)
+ end
end
end
end
@@ -1939,6 +1948,30 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(pipeline.modified_paths).to match(merge_request.modified_paths)
end
end
+
+ context 'when source is an external pull request' do
+ let(:pipeline) do
+ create(:ci_pipeline, source: :external_pull_request_event, external_pull_request: external_pull_request)
+ end
+
+ let(:external_pull_request) do
+ create(:external_pull_request, project: project, target_sha: '281d3a7', source_sha: '498214d')
+ end
+
+ it 'returns external pull request modified paths' do
+ expect(pipeline.modified_paths).to match(external_pull_request.modified_paths)
+ end
+
+ context 'when the FF ci_modified_paths_of_external_prs is disabled' do
+ before do
+ stub_feature_flags(ci_modified_paths_of_external_prs: false)
+ end
+
+ it 'returns nil' do
+ expect(pipeline.modified_paths).to be_nil
+ end
+ end
+ end
end
describe '#all_worktree_paths' do
@@ -3201,18 +3234,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(pipeline.messages.map(&:content)).to contain_exactly('The error message')
end
-
- context 'when feature flag ci_store_pipeline_messages is disabled' do
- before do
- stub_feature_flags(ci_store_pipeline_messages: false)
- end
-
- it 'does not add pipeline error message' do
- pipeline.add_error_message('The error message')
-
- expect(pipeline.messages).to be_empty
- end
- end
end
describe '#has_yaml_errors?' do
@@ -4303,26 +4324,80 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- describe 'reset_ancestor_bridges!' do
- let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+ describe '#reset_source_bridge!' do
+ let(:pipeline) { create(:ci_pipeline, :created, project: project) }
+
+ subject(:reset_bridge) { pipeline.reset_source_bridge!(project.owner) }
+
+ # This whole block will be removed by https://gitlab.com/gitlab-org/gitlab/-/issues/329194
+ # It contains some duplicate checks.
+ context 'when the FF ci_reset_bridge_with_subsequent_jobs is disabled' do
+ before do
+ stub_feature_flags(ci_reset_bridge_with_subsequent_jobs: false)
+ end
+
+ context 'when the pipeline is a child pipeline and the bridge is depended' do
+ let!(:parent_pipeline) { create(:ci_pipeline) }
+ let!(:bridge) { create_bridge(parent_pipeline, pipeline, true) }
+
+ it 'marks source bridge as pending' do
+ reset_bridge
+
+ expect(bridge.reload).to be_pending
+ end
+
+ context 'when the parent pipeline has subsequent jobs after the bridge' do
+ let!(:after_bridge_job) { create(:ci_build, :skipped, pipeline: parent_pipeline, stage_idx: bridge.stage_idx + 1) }
+
+ it 'does not touch subsequent jobs of the bridge' do
+ reset_bridge
+
+ expect(after_bridge_job.reload).to be_skipped
+ end
+ end
+
+ context 'when the parent pipeline has a dependent upstream pipeline' do
+ let!(:upstream_bridge) do
+ create_bridge(create(:ci_pipeline, project: create(:project)), parent_pipeline, true)
+ end
+
+ it 'marks all source bridges as pending' do
+ reset_bridge
+
+ expect(bridge.reload).to be_pending
+ expect(upstream_bridge.reload).to be_pending
+ end
+ end
+ end
+ end
context 'when the pipeline is a child pipeline and the bridge is depended' do
let!(:parent_pipeline) { create(:ci_pipeline) }
let!(:bridge) { create_bridge(parent_pipeline, pipeline, true) }
it 'marks source bridge as pending' do
- pipeline.reset_ancestor_bridges!
+ reset_bridge
expect(bridge.reload).to be_pending
end
+ context 'when the parent pipeline has subsequent jobs after the bridge' do
+ let!(:after_bridge_job) { create(:ci_build, :skipped, pipeline: parent_pipeline, stage_idx: bridge.stage_idx + 1) }
+
+ it 'marks subsequent jobs of the bridge as processable' do
+ reset_bridge
+
+ expect(after_bridge_job.reload).to be_created
+ end
+ end
+
context 'when the parent pipeline has a dependent upstream pipeline' do
let!(:upstream_bridge) do
create_bridge(create(:ci_pipeline, project: create(:project)), parent_pipeline, true)
end
it 'marks all source bridges as pending' do
- pipeline.reset_ancestor_bridges!
+ reset_bridge
expect(bridge.reload).to be_pending
expect(upstream_bridge.reload).to be_pending
@@ -4335,7 +4410,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let!(:bridge) { create_bridge(parent_pipeline, pipeline, false) }
it 'does not touch source bridge' do
- pipeline.reset_ancestor_bridges!
+ reset_bridge
expect(bridge.reload).to be_success
end
@@ -4346,7 +4421,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
it 'does not touch any source bridge' do
- pipeline.reset_ancestor_bridges!
+ reset_bridge
expect(bridge.reload).to be_success
expect(upstream_bridge.reload).to be_success
diff --git a/spec/models/ci/runner_namespace_spec.rb b/spec/models/ci/runner_namespace_spec.rb
new file mode 100644
index 00000000000..41d805adb9f
--- /dev/null
+++ b/spec/models/ci/runner_namespace_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RunnerNamespace do
+ it_behaves_like 'includes Limitable concern' do
+ subject { build(:ci_runner_namespace, group: create(:group, :nested), runner: create(:ci_runner, :group)) }
+ end
+end
diff --git a/spec/models/ci/runner_project_spec.rb b/spec/models/ci/runner_project_spec.rb
new file mode 100644
index 00000000000..13369dba2cf
--- /dev/null
+++ b/spec/models/ci/runner_project_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RunnerProject do
+ it_behaves_like 'includes Limitable concern' do
+ subject { build(:ci_runner_project, project: create(:project), runner: create(:ci_runner, :project)) }
+ end
+end
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index e46d9189c86..5e0fcb4882f 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -286,6 +286,18 @@ RSpec.describe Ci::Stage, :models do
end
end
+ context 'when stage has statuses with nil idx' do
+ before do
+ create(:ci_build, :running, stage_id: stage.id, stage_idx: nil)
+ create(:ci_build, :running, stage_id: stage.id, stage_idx: 10)
+ create(:ci_build, :running, stage_id: stage.id, stage_idx: nil)
+ end
+
+ it 'sets position to a non-nil value' do
+ expect { stage.update_legacy_status }.to change { stage.reload.position }.from(nil).to(10)
+ end
+ end
+
context 'when stage does not have statuses' do
it 'fallbacks to zero' do
expect(stage.reload.position).to be_nil
diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb
index a85a72eba0b..ea7a55480a8 100644
--- a/spec/models/clusters/agent_spec.rb
+++ b/spec/models/clusters/agent_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Clusters::Agent do
it { is_expected.to belong_to(:created_by_user).class_name('User').optional }
it { is_expected.to belong_to(:project).class_name('::Project') }
it { is_expected.to have_many(:agent_tokens).class_name('Clusters::AgentToken') }
+ it { is_expected.to have_many(:last_used_agent_tokens).class_name('Clusters::AgentToken') }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_length_of(:name).is_at_most(63) }
diff --git a/spec/models/clusters/agent_token_spec.rb b/spec/models/clusters/agent_token_spec.rb
index 680b351d24a..bde4798abec 100644
--- a/spec/models/clusters/agent_token_spec.rb
+++ b/spec/models/clusters/agent_token_spec.rb
@@ -9,6 +9,19 @@ RSpec.describe Clusters::AgentToken do
it { is_expected.to validate_length_of(:name).is_at_most(255) }
it { is_expected.to validate_presence_of(:name) }
+ describe 'scopes' do
+ describe '.order_last_used_at_desc' do
+ let_it_be(:token_1) { create(:cluster_agent_token, last_used_at: 7.days.ago) }
+ let_it_be(:token_2) { create(:cluster_agent_token, last_used_at: nil) }
+ let_it_be(:token_3) { create(:cluster_agent_token, last_used_at: 2.days.ago) }
+
+ it 'sorts by last_used_at descending, with null values last' do
+ expect(described_class.order_last_used_at_desc)
+ .to eq([token_3, token_1, token_2])
+ end
+ end
+ end
+
describe '#token' do
it 'is generated on save' do
agent_token = build(:cluster_agent_token, token_encrypted: nil)
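The .order_last_used_at_desc examples expect NULL last_used_at values to sort after real timestamps. PostgreSQL's default for DESC ordering is NULLS FIRST, so a scope has to ask for NULLS LAST explicitly; an illustrative version, not GitLab's source:

```ruby
require 'active_record'

class AgentToken < ActiveRecord::Base
  # Illustrative scope only. Requesting NULLS LAST yields the ordering the
  # spec asserts: token_3 (2 days ago), token_1 (7 days ago), token_2 (nil).
  scope :order_last_used_at_desc, -> { order(Arel.sql('last_used_at DESC NULLS LAST')) }
end
```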
diff --git a/spec/models/clusters/applications/elastic_stack_spec.rb b/spec/models/clusters/applications/elastic_stack_spec.rb
index 74cacd486b0..af2802d5e47 100644
--- a/spec/models/clusters/applications/elastic_stack_spec.rb
+++ b/spec/models/clusters/applications/elastic_stack_spec.rb
@@ -10,6 +10,41 @@ RSpec.describe Clusters::Applications::ElasticStack do
include_examples 'cluster application version specs', :clusters_applications_elastic_stack
include_examples 'cluster application helm specs', :clusters_applications_elastic_stack
+ describe 'cluster.integration_elastic_stack state synchronization' do
+ let!(:application) { create(:clusters_applications_elastic_stack) }
+ let(:cluster) { application.cluster }
+ let(:integration) { cluster.integration_elastic_stack }
+
+ describe 'after_destroy' do
+ it 'disables the corresponding integration' do
+ application.destroy!
+
+ expect(integration).not_to be_enabled
+ end
+ end
+
+ describe 'on install' do
+ it 'enables the corresponding integration' do
+ application.make_scheduled!
+ application.make_installing!
+ application.make_installed!
+
+ expect(integration).to be_enabled
+ end
+ end
+
+ describe 'on uninstall' do
+ it 'disables the corresponding integration' do
+ application.make_scheduled!
+ application.make_installing!
+ application.make_installed!
+ application.make_externally_uninstalled!
+
+ expect(integration).not_to be_enabled
+ end
+ end
+ end
+
describe '#install_command' do
let!(:elastic_stack) { create(:clusters_applications_elastic_stack) }
@@ -138,78 +173,5 @@ RSpec.describe Clusters::Applications::ElasticStack do
end
end
- describe '#elasticsearch_client' do
- context 'cluster is nil' do
- it 'returns nil' do
- expect(subject.cluster).to be_nil
- expect(subject.elasticsearch_client).to be_nil
- end
- end
-
- context "cluster doesn't have kubeclient" do
- let(:cluster) { create(:cluster) }
-
- subject { create(:clusters_applications_elastic_stack, cluster: cluster) }
-
- it 'returns nil' do
- expect(subject.elasticsearch_client).to be_nil
- end
- end
-
- context 'cluster has kubeclient' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:kubernetes_url) { subject.cluster.platform_kubernetes.api_url }
- let(:kube_client) { subject.cluster.kubeclient.core_client }
-
- subject { create(:clusters_applications_elastic_stack, cluster: cluster) }
-
- before do
- subject.cluster.platform_kubernetes.namespace = 'a-namespace'
- stub_kubeclient_discover(cluster.platform_kubernetes.api_url)
-
- create(:cluster_kubernetes_namespace,
- cluster: cluster,
- cluster_project: cluster.cluster_project,
- project: cluster.cluster_project.project)
- end
-
- it 'creates proxy elasticsearch_client' do
- expect(subject.elasticsearch_client).to be_instance_of(Elasticsearch::Transport::Client)
- end
-
- it 'copies proxy_url, options and headers from kube client to elasticsearch_client' do
- expect(Elasticsearch::Client)
- .to(receive(:new))
- .with(url: a_valid_url)
- .and_call_original
-
- client = subject.elasticsearch_client
- faraday_connection = client.transport.connections.first.connection
-
- expect(faraday_connection.headers["Authorization"]).to eq(kube_client.headers[:Authorization])
- expect(faraday_connection.ssl.cert_store).to be_instance_of(OpenSSL::X509::Store)
- expect(faraday_connection.ssl.verify).to eq(1)
- expect(faraday_connection.options.timeout).to be_nil
- end
-
- context 'when cluster is not reachable' do
- before do
- allow(kube_client).to receive(:proxy_url).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
- end
-
- it 'returns nil' do
- expect(subject.elasticsearch_client).to be_nil
- end
- end
-
- context 'when timeout is provided' do
- it 'sets timeout in elasticsearch_client' do
- client = subject.elasticsearch_client(timeout: 123)
- faraday_connection = client.transport.connections.first.connection
-
- expect(faraday_connection.options.timeout).to eq(123)
- end
- end
- end
- end
+ it_behaves_like 'cluster-based #elasticsearch_client', :clusters_applications_elastic_stack
end
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 5a0ccabd467..549a273e2d7 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -13,16 +13,13 @@ RSpec.describe Clusters::Applications::Prometheus do
include_examples 'cluster application initial status specs'
describe 'after_destroy' do
- context 'cluster type is project' do
- let(:cluster) { create(:cluster, :with_installed_helm) }
- let(:application) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
+ let(:cluster) { create(:cluster, :with_installed_helm) }
+ let(:application) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
- it 'deactivates prometheus_service after destroy' do
- expect(Clusters::Applications::DeactivateServiceWorker)
- .to receive(:perform_async).with(cluster.id, 'prometheus')
+ it 'disables the corresponding integration' do
+ application.destroy!
- application.destroy!
- end
+ expect(cluster.integration_prometheus).not_to be_enabled
end
end
@@ -31,11 +28,10 @@ RSpec.describe Clusters::Applications::Prometheus do
let(:cluster) { create(:cluster, :with_installed_helm) }
let(:application) { create(:clusters_applications_prometheus, :installing, cluster: cluster) }
- it 'schedules post installation job' do
- expect(Clusters::Applications::ActivateServiceWorker)
- .to receive(:perform_async).with(cluster.id, 'prometheus')
-
+ it 'enables the corresponding integration' do
application.make_installed
+
+ expect(cluster.integration_prometheus).to be_enabled
end
end
@@ -44,11 +40,10 @@ RSpec.describe Clusters::Applications::Prometheus do
let(:cluster) { create(:cluster, :with_installed_helm) }
let(:application) { create(:clusters_applications_prometheus, :installing, cluster: cluster) }
- it 'schedules post installation job' do
- expect(Clusters::Applications::ActivateServiceWorker)
- .to receive(:perform_async).with(cluster.id, 'prometheus')
-
+ it 'enables the corresponding integration' do
application.make_externally_installed!
+
+ expect(cluster.integration_prometheus).to be_enabled
end
end
@@ -65,6 +60,26 @@ RSpec.describe Clusters::Applications::Prometheus do
end
end
+ describe '#managed_prometheus?' do
+ subject { prometheus.managed_prometheus? }
+
+ let(:prometheus) { build(:clusters_applications_prometheus) }
+
+ it { is_expected.to be_truthy }
+
+ context 'externally installed' do
+ let(:prometheus) { build(:clusters_applications_prometheus, :externally_installed) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'uninstalled' do
+ let(:prometheus) { build(:clusters_applications_prometheus, :uninstalled) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
describe '#can_uninstall?' do
let(:prometheus) { create(:clusters_applications_prometheus) }
@@ -318,42 +333,10 @@ RSpec.describe Clusters::Applications::Prometheus do
describe 'alert manager token' do
subject { create(:clusters_applications_prometheus) }
- context 'when not set' do
- it 'is empty by default' do
- expect(subject.alert_manager_token).to be_nil
- expect(subject.encrypted_alert_manager_token).to be_nil
- expect(subject.encrypted_alert_manager_token_iv).to be_nil
- end
-
- describe '#generate_alert_manager_token!' do
- it 'generates a token' do
- subject.generate_alert_manager_token!
-
- expect(subject.alert_manager_token).to match(/\A\h{32}\z/)
- end
- end
- end
-
- context 'when set' do
- let(:token) { SecureRandom.hex }
-
- before do
- subject.update!(alert_manager_token: token)
- end
-
- it 'reads the token' do
- expect(subject.alert_manager_token).to eq(token)
- expect(subject.encrypted_alert_manager_token).not_to be_nil
- expect(subject.encrypted_alert_manager_token_iv).not_to be_nil
- end
-
- describe '#generate_alert_manager_token!' do
- it 'does not re-generate the token' do
- subject.generate_alert_manager_token!
-
- expect(subject.alert_manager_token).to eq(token)
- end
- end
+ it 'is autogenerated on creation' do
+ expect(subject.alert_manager_token).to match(/\A\h{32}\z/)
+ expect(subject.encrypted_alert_manager_token).not_to be_nil
+ expect(subject.encrypted_alert_manager_token_iv).not_to be_nil
end
end
end
diff --git a/spec/models/clusters/integrations/elastic_stack_spec.rb b/spec/models/clusters/integrations/elastic_stack_spec.rb
new file mode 100644
index 00000000000..be4d59b52a2
--- /dev/null
+++ b/spec/models/clusters/integrations/elastic_stack_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Integrations::ElasticStack do
+ include KubernetesHelpers
+ include StubRequests
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:cluster).class_name('Clusters::Cluster') }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:cluster) }
+ it { is_expected.not_to allow_value(nil).for(:enabled) }
+ end
+
+ it_behaves_like 'cluster-based #elasticsearch_client', :clusters_integrations_elastic_stack
+end
diff --git a/spec/models/clusters/integrations/prometheus_spec.rb b/spec/models/clusters/integrations/prometheus_spec.rb
index a7be1673ce2..680786189ad 100644
--- a/spec/models/clusters/integrations/prometheus_spec.rb
+++ b/spec/models/clusters/integrations/prometheus_spec.rb
@@ -15,6 +15,62 @@ RSpec.describe Clusters::Integrations::Prometheus do
it { is_expected.not_to allow_value(nil).for(:enabled) }
end
+ describe 'after_destroy' do
+ subject(:integration) { create(:clusters_integrations_prometheus, cluster: cluster, enabled: true) }
+
+ let(:cluster) { create(:cluster, :with_installed_helm) }
+
+ it 'deactivates prometheus_service' do
+ expect(Clusters::Applications::DeactivateServiceWorker)
+ .to receive(:perform_async).with(cluster.id, 'prometheus')
+
+ integration.destroy!
+ end
+ end
+
+ describe 'after_save' do
+ subject(:integration) { create(:clusters_integrations_prometheus, cluster: cluster, enabled: enabled) }
+
+ let(:cluster) { create(:cluster, :with_installed_helm) }
+ let(:enabled) { true }
+
+ context 'when no change to enabled status' do
+ it 'does not touch project services' do
+ integration # ensure integration exists before we set the expectations
+
+ expect(Clusters::Applications::DeactivateServiceWorker)
+ .not_to receive(:perform_async)
+
+ expect(Clusters::Applications::ActivateServiceWorker)
+ .not_to receive(:perform_async)
+
+ integration.update!(enabled: enabled)
+ end
+ end
+
+ context 'when enabling' do
+ let(:enabled) { false }
+
+ it 'activates prometheus_service' do
+ expect(Clusters::Applications::ActivateServiceWorker)
+ .to receive(:perform_async).with(cluster.id, 'prometheus')
+
+ integration.update!(enabled: true)
+ end
+ end
+
+ context 'when disabling' do
+ let(:enabled) { true }
+
+ it 'deactivates prometheus_service' do
+ expect(Clusters::Applications::DeactivateServiceWorker)
+ .to receive(:perform_async).with(cluster.id, 'prometheus')
+
+ integration.update!(enabled: false)
+ end
+ end
+ end
+
describe '#prometheus_client' do
include_examples '#prometheus_client shared' do
let(:factory) { :clusters_integrations_prometheus }
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index e64dee2d26f..feb2f3630c1 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -259,6 +259,40 @@ RSpec.describe CommitStatus do
end
end
+ describe '#queued_duration' do
+ subject { commit_status.queued_duration }
+
+ around do |example|
+ travel_to(Time.current) { example.run }
+ end
+
+ context 'when created, then enqueued, then started' do
+ before do
+ commit_status.queued_at = 30.seconds.ago
+ commit_status.started_at = 25.seconds.ago
+ end
+
+ it { is_expected.to eq(5.0) }
+ end
+
+ context 'when created but not yet enqueued' do
+ before do
+ commit_status.queued_at = nil
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when enqueued, but not started' do
+ before do
+ commit_status.queued_at = Time.current - 1.minute
+ commit_status.started_at = nil
+ end
+
+ it { is_expected.to eq(1.minute) }
+ end
+ end
+
describe '.latest' do
subject { described_class.latest.order(:id) }
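The #queued_duration examples pin down the arithmetic: nil when the job was never queued, started_at minus queued_at once it has started, and the time spent queued so far when it has not (queued 30s ago and started 25s ago gives 5.0; queued a minute ago and unstarted gives 1 minute). A minimal sketch under those assumptions, not the actual CommitStatus implementation:

```ruby
# Illustrative calculation matching the spec's expectations.
def queued_duration(queued_at:, started_at:, now: Time.now)
  return nil unless queued_at

  (started_at || now) - queued_at
end

now = Time.now
queued_duration(queued_at: now - 30, started_at: now - 25, now: now) # => 5.0
queued_duration(queued_at: now - 60, started_at: nil, now: now)      # => 60.0
queued_duration(queued_at: nil, started_at: nil, now: now)           # => nil
```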
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index e40b0cf11ff..ca6df506ee8 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe BulkInsertSafe do
before(:all) do
ActiveRecord::Schema.define do
+ create_table :bulk_insert_parent_items, force: true do |t|
+ t.string :name, null: false
+ end
+
create_table :bulk_insert_items, force: true do |t|
t.string :name, null: true
t.integer :enum_value, null: false
@@ -12,6 +16,7 @@ RSpec.describe BulkInsertSafe do
t.string :encrypted_secret_value_iv, null: false
t.binary :sha_value, null: false, limit: 20
t.jsonb :jsonb_value, null: false
+ t.belongs_to :bulk_insert_parent_item, foreign_key: true, null: true
t.index :name, unique: true
end
@@ -21,9 +26,23 @@ RSpec.describe BulkInsertSafe do
after(:all) do
ActiveRecord::Schema.define do
drop_table :bulk_insert_items, force: true
+ drop_table :bulk_insert_parent_items, force: true
end
end
+ BulkInsertParentItem = Class.new(ActiveRecord::Base) do
+ self.table_name = :bulk_insert_parent_items
+ self.inheritance_column = :_type_disabled
+
+ def self.name
+ table_name.singularize.camelcase
+ end
+ end
+
+ let_it_be(:bulk_insert_parent_item) do
+ BulkInsertParentItem.create!(name: 'parent')
+ end
+
let_it_be(:bulk_insert_item_class) do
Class.new(ActiveRecord::Base) do
self.table_name = 'bulk_insert_items'
@@ -33,6 +52,8 @@ RSpec.describe BulkInsertSafe do
validates :name, :enum_value, :secret_value, :sha_value, :jsonb_value, presence: true
+ belongs_to :bulk_insert_parent_item
+
sha_attribute :sha_value
enum enum_value: { case_1: 1 }
@@ -51,8 +72,8 @@ RSpec.describe BulkInsertSafe do
'BulkInsertItem'
end
- def self.valid_list(count)
- Array.new(count) { |n| new(name: "item-#{n}", secret_value: 'my-secret') }
+ def self.valid_list(count, bulk_insert_parent_item: nil)
+ Array.new(count) { |n| new(name: "item-#{n}", secret_value: 'my-secret', bulk_insert_parent_item: bulk_insert_parent_item) }
end
def self.invalid_list(count)
@@ -117,6 +138,14 @@ RSpec.describe BulkInsertSafe do
bulk_insert_item_class.bulk_insert!(items, batch_size: 5)
end
+ it 'inserts items with belongs_to association' do
+ items = bulk_insert_item_class.valid_list(10, bulk_insert_parent_item: bulk_insert_parent_item)
+
+ bulk_insert_item_class.bulk_insert!(items, batch_size: 5)
+
+ expect(bulk_insert_item_class.last(items.size).map(&:bulk_insert_parent_item)).to eq([bulk_insert_parent_item] * 10)
+ end
+
it 'items can be properly fetched from database' do
items = bulk_insert_item_class.valid_list(10)
@@ -129,8 +158,7 @@ RSpec.describe BulkInsertSafe do
it 'rolls back the transaction when any item is invalid' do
# second batch is bad
- all_items = bulk_insert_item_class.valid_list(10) +
- bulk_insert_item_class.invalid_list(10)
+ all_items = bulk_insert_item_class.valid_list(10) + bulk_insert_item_class.invalid_list(10)
expect do
bulk_insert_item_class.bulk_insert!(all_items, batch_size: 2) rescue nil
diff --git a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb b/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
index ddff9ce32b4..02cd8557231 100644
--- a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
+++ b/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
@@ -142,7 +142,7 @@ RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
end
it 'does not allow the local value to be saved' do
- subgroup_settings.delayed_project_removal = nil
+ subgroup_settings.delayed_project_removal = false
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
@@ -164,6 +164,19 @@ RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
end
end
+ describe '#delayed_project_removal=' do
+ before do
+ subgroup_settings.update!(delayed_project_removal: nil)
+ group_settings.update!(delayed_project_removal: true)
+ end
+
+ it 'does not save the value locally when it matches the cascaded value' do
+ subgroup_settings.update!(delayed_project_removal: true)
+
+ expect(subgroup_settings.read_attribute(:delayed_project_removal)).to eq(nil)
+ end
+ end
+
describe '#delayed_project_removal_locked?' do
shared_examples 'not locked' do
it 'is not locked by an ancestor' do
@@ -189,6 +202,20 @@ RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
it_behaves_like 'not locked'
end
+ context 'when attribute is locked by self' do
+ before do
+ subgroup_settings.update!(lock_delayed_project_removal: true)
+ end
+
+ it 'is not locked by default' do
+ expect(subgroup_settings.delayed_project_removal_locked?).to eq(false)
+ end
+
+ it 'is locked when including self' do
+ expect(subgroup_settings.delayed_project_removal_locked?(include_self: true)).to eq(true)
+ end
+ end
+
context 'when parent does not lock the attribute' do
it_behaves_like 'not locked'
end
@@ -277,6 +304,13 @@ RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
expect { subgroup_settings.save! }
.to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be nil when locking the attribute/)
end
+
+ it 'copies the cascaded value when locking the attribute if the local value is nil', :aggregate_failures do
+ subgroup_settings.delayed_project_removal = nil
+ subgroup_settings.lock_delayed_project_removal = true
+
+ expect(subgroup_settings.read_attribute(:delayed_project_removal)).to eq(false)
+ end
end
context 'when application settings locks the attribute' do
diff --git a/spec/models/concerns/chronic_duration_attribute_spec.rb b/spec/models/concerns/chronic_duration_attribute_spec.rb
index e6dbf403b63..00e28e19bd5 100644
--- a/spec/models/concerns/chronic_duration_attribute_spec.rb
+++ b/spec/models/concerns/chronic_duration_attribute_spec.rb
@@ -56,8 +56,7 @@ RSpec.shared_examples 'ChronicDurationAttribute writer' do
subject.send("#{virtual_field}=", '-10m')
expect(subject.valid?).to be_falsey
- expect(subject.errors&.messages)
- .to include(base: ['Maximum job timeout has a value which could not be accepted'])
+ expect(subject.errors.added?(:base, 'Maximum job timeout has a value which could not be accepted')).to be true
end
end
diff --git a/spec/models/concerns/ci/maskable_spec.rb b/spec/models/concerns/ci/maskable_spec.rb
index 840a08b6060..2b13fc21fe8 100644
--- a/spec/models/concerns/ci/maskable_spec.rb
+++ b/spec/models/concerns/ci/maskable_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Ci::Maskable do
end
it 'matches valid strings' do
- expect(subject.match?('Hello+World_123/@:-.')).to eq(true)
+ expect(subject.match?('Hello+World_123/@:-~.')).to eq(true)
end
end
diff --git a/spec/models/concerns/cron_schedulable_spec.rb b/spec/models/concerns/cron_schedulable_spec.rb
new file mode 100644
index 00000000000..39c3d5e55d3
--- /dev/null
+++ b/spec/models/concerns/cron_schedulable_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CronSchedulable do
+ let(:ideal_next_run_at) { schedule.send(:ideal_next_run_from, Time.zone.now) }
+ let(:cron_worker_next_run_at) { schedule.send(:cron_worker_next_run_from, Time.zone.now) }
+
+ context 'for ci_pipeline_schedule' do
+ let(:schedule) { create(:ci_pipeline_schedule, :every_minute) }
+ let(:schedule_1) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
+ let(:schedule_2) { create(:ci_pipeline_schedule, :weekly, cron_timezone: 'UTC') }
+ let(:new_cron) { '0 0 1 1 *' }
+
+ it_behaves_like 'handles set_next_run_at'
+ end
+end
diff --git a/spec/models/concerns/has_integrations_spec.rb b/spec/models/concerns/has_integrations_spec.rb
new file mode 100644
index 00000000000..6e55a1c8b01
--- /dev/null
+++ b/spec/models/concerns/has_integrations_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe HasIntegrations do
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+ let_it_be(:project_4) { create(:project) }
+ let_it_be(:instance_integration) { create(:jira_service, :instance) }
+
+ before do
+ create(:jira_service, project: project_1, inherit_from_id: instance_integration.id)
+ create(:jira_service, project: project_2, inherit_from_id: nil)
+ create(:jira_service, group: create(:group), project: nil, inherit_from_id: nil)
+ create(:jira_service, project: project_3, inherit_from_id: nil)
+ create(:slack_service, project: project_4, inherit_from_id: nil)
+ end
+
+ describe '.with_custom_integration_for' do
+ it 'returns projects with custom integrations' do
+ # We use pagination to verify that the group is excluded from the query
+ expect(Project.with_custom_integration_for(instance_integration, 0, 2)).to contain_exactly(project_2, project_3)
+ expect(Project.with_custom_integration_for(instance_integration)).to contain_exactly(project_2, project_3)
+ end
+ end
+
+ describe '.without_integration' do
+ it 'returns projects without integration' do
+ expect(Project.without_integration(instance_integration)).to contain_exactly(project_4)
+ end
+ end
+end
diff --git a/spec/models/concerns/has_timelogs_report_spec.rb b/spec/models/concerns/has_timelogs_report_spec.rb
index f694fc350ee..f0dca47fae1 100644
--- a/spec/models/concerns/has_timelogs_report_spec.rb
+++ b/spec/models/concerns/has_timelogs_report_spec.rb
@@ -3,16 +3,20 @@
require 'spec_helper'
RSpec.describe HasTimelogsReport do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
let(:group) { create(:group) }
- let(:issue) { create(:issue, project: create(:project, :public, group: group)) }
+ let(:project) { create(:project, :public, group: group) }
+ let(:issue1) { create(:issue, project: project) }
+ let(:merge_request1) { create(:merge_request, source_project: project) }
describe '#timelogs' do
- let!(:timelog1) { create_timelog(15.days.ago) }
- let!(:timelog2) { create_timelog(10.days.ago) }
- let!(:timelog3) { create_timelog(5.days.ago) }
- let(:start_time) { 20.days.ago }
- let(:end_time) { 8.days.ago }
+ let_it_be(:start_time) { 20.days.ago }
+ let_it_be(:end_time) { 8.days.ago }
+
+ let!(:timelog1) { create_timelog(15.days.ago, issue: issue1) }
+ let!(:timelog2) { create_timelog(10.days.ago, merge_request: merge_request1) }
+ let!(:timelog3) { create_timelog(5.days.ago, issue: issue1) }
before do
group.add_developer(user)
@@ -45,7 +49,7 @@ RSpec.describe HasTimelogsReport do
end
end
- def create_timelog(time)
- create(:timelog, issue: issue, user: user, spent_at: time)
+ def create_timelog(time, issue: nil, merge_request: nil)
+ create(:timelog, issue: issue, merge_request: merge_request, user: user, spent_at: time)
end
end
diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb
index a7117af81a2..38766d8decd 100644
--- a/spec/models/concerns/noteable_spec.rb
+++ b/spec/models/concerns/noteable_spec.rb
@@ -288,7 +288,7 @@ RSpec.describe Noteable do
end
before do
- MergeRequests::MergeToRefService.new(merge_request.project, merge_request.author).execute(merge_request)
+ MergeRequests::MergeToRefService.new(project: merge_request.project, current_user: merge_request.author).execute(merge_request)
Discussions::CaptureDiffNotePositionsService.new(merge_request).execute
end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 6ab87053258..0a433a8cf4f 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe Routable do
end
end
-RSpec.describe Group, 'Routable' do
+RSpec.describe Group, 'Routable', :with_clean_rails_cache do
let_it_be_with_reload(:group) { create(:group, name: 'foo') }
let_it_be(:nested_group) { create(:group, parent: group) }
@@ -165,19 +165,63 @@ RSpec.describe Group, 'Routable' do
end
end
+ describe '#parent_loaded?' do
+ before do
+ group.parent = create(:group)
+ group.save!
+
+ group.reload
+ end
+
+ it 'is false when the parent is not loaded' do
+ expect(group.parent_loaded?).to be_falsey
+ end
+
+ it 'is true when the parent is loaded' do
+ group.parent
+
+ expect(group.parent_loaded?).to be_truthy
+ end
+ end
+
+ describe '#route_loaded?' do
+ it 'is false when the route is not loaded' do
+ expect(group.route_loaded?).to be_falsey
+ end
+
+ it 'is true when the route is loaded' do
+ group.route
+
+ expect(group.route_loaded?).to be_truthy
+ end
+ end
+
describe '#full_path' do
it { expect(group.full_path).to eq(group.path) }
it { expect(nested_group.full_path).to eq("#{group.full_path}/#{nested_group.path}") }
+
+ it 'hits the cache when not preloaded' do
+ forcibly_hit_cached_lookup(nested_group, :full_path)
+
+ expect(nested_group.full_path).to eq("#{group.full_path}/#{nested_group.path}")
+ end
end
describe '#full_name' do
it { expect(group.full_name).to eq(group.name) }
it { expect(nested_group.full_name).to eq("#{group.name} / #{nested_group.name}") }
+
+ it 'hits the cache when not preloaded' do
+ forcibly_hit_cached_lookup(nested_group, :full_name)
+
+ expect(nested_group.full_name).to eq("#{group.name} / #{nested_group.name}")
+ end
end
end
-RSpec.describe Project, 'Routable' do
- let_it_be(:project) { create(:project) }
+RSpec.describe Project, 'Routable', :with_clean_rails_cache do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, namespace: namespace) }
it_behaves_like '.find_by_full_path' do
let_it_be(:record) { project }
@@ -192,10 +236,30 @@ RSpec.describe Project, 'Routable' do
end
describe '#full_path' do
- it { expect(project.full_path).to eq "#{project.namespace.full_path}/#{project.path}" }
+ it { expect(project.full_path).to eq "#{namespace.full_path}/#{project.path}" }
+
+ it 'hits the cache when not preloaded' do
+ forcibly_hit_cached_lookup(project, :full_path)
+
+ expect(project.full_path).to eq("#{namespace.full_path}/#{project.path}")
+ end
end
describe '#full_name' do
- it { expect(project.full_name).to eq "#{project.namespace.human_name} / #{project.name}" }
+ it { expect(project.full_name).to eq "#{namespace.human_name} / #{project.name}" }
+
+ it 'hits the cache when not preloaded' do
+ forcibly_hit_cached_lookup(project, :full_name)
+
+ expect(project.full_name).to eq("#{namespace.human_name} / #{project.name}")
+ end
end
end
+
+def forcibly_hit_cached_lookup(record, method)
+ stub_feature_flags(cached_route_lookups: true)
+ expect(record).to receive(:persisted?).and_return(true)
+ expect(record).to receive(:route_loaded?).and_return(false)
+ expect(record).to receive(:parent_loaded?).and_return(false)
+ expect(Gitlab::Cache).to receive(:fetch_once).with([record.cache_key, method]).and_call_original
+end
diff --git a/spec/models/concerns/sidebars/positionable_list_spec.rb b/spec/models/concerns/sidebars/positionable_list_spec.rb
deleted file mode 100644
index 231aa5295dd..00000000000
--- a/spec/models/concerns/sidebars/positionable_list_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::PositionableList do
- subject do
- Class.new do
- include Sidebars::PositionableList
- end.new
- end
-
- describe '#add_element' do
- it 'adds the element to the last position of the list' do
- list = [1, 2]
-
- subject.add_element(list, 3)
-
- expect(list).to eq([1, 2, 3])
- end
- end
-
- describe '#insert_element_before' do
- let(:user) { build(:user) }
- let(:list) { [1, user] }
-
- it 'adds element before the specific element class' do
- subject.insert_element_before(list, User, 2)
-
- expect(list).to eq [1, 2, user]
- end
-
- context 'when reference element does not exist' do
- it 'adds the element to the top of the list' do
- subject.insert_element_before(list, Project, 2)
-
- expect(list).to eq [2, 1, user]
- end
- end
- end
-
- describe '#insert_element_after' do
- let(:user) { build(:user) }
- let(:list) { [1, user] }
-
- it 'adds element after the specific element class' do
- subject.insert_element_after(list, Integer, 2)
-
- expect(list).to eq [1, 2, user]
- end
-
- context 'when reference element does not exist' do
- it 'adds the element to the end of the list' do
- subject.insert_element_after(list, Project, 2)
-
- expect(list).to eq [1, user, 2]
- end
- end
- end
-end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 0ecefff3a97..abaae5b059a 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ContainerRepository do
+ using RSpec::Parameterized::TableSyntax
+
let(:group) { create(:group, name: 'group') }
let(:project) { create(:project, path: 'test', group: group) }
@@ -29,18 +31,6 @@ RSpec.describe ContainerRepository do
end
end
- describe '.exists_by_path?' do
- it 'returns true for known container repository paths' do
- path = ContainerRegistry::Path.new("#{project.full_path}/#{repository.name}")
- expect(described_class.exists_by_path?(path)).to be_truthy
- end
-
- it 'returns false for unknown container repository paths' do
- path = ContainerRegistry::Path.new('you/dont/know/me')
- expect(described_class.exists_by_path?(path)).to be_falsey
- end
- end
-
describe '#tag' do
it 'has a test tag' do
expect(repository.tag('test')).not_to be_nil
@@ -359,6 +349,17 @@ RSpec.describe ContainerRepository do
it { is_expected.to contain_exactly(repository) }
end
+ describe '.expiration_policy_started_at_nil_or_before' do
+ let_it_be(:repository1) { create(:container_repository, expiration_policy_started_at: nil) }
+ let_it_be(:repository2) { create(:container_repository, expiration_policy_started_at: 1.day.ago) }
+ let_it_be(:repository3) { create(:container_repository, expiration_policy_started_at: 2.hours.ago) }
+ let_it_be(:repository4) { create(:container_repository, expiration_policy_started_at: 1.week.ago) }
+
+ subject { described_class.expiration_policy_started_at_nil_or_before(3.hours.ago) }
+
+ it { is_expected.to contain_exactly(repository1, repository2, repository4) }
+ end
+
describe '.waiting_for_cleanup' do
let_it_be(:repository_cleanup_scheduled) { create(:container_repository, :cleanup_scheduled) }
let_it_be(:repository_cleanup_unfinished) { create(:container_repository, :cleanup_unfinished) }
@@ -368,4 +369,74 @@ RSpec.describe ContainerRepository do
it { is_expected.to contain_exactly(repository_cleanup_scheduled, repository_cleanup_unfinished) }
end
+
+ describe '.exists_by_path?' do
+ it 'returns true for known container repository paths' do
+ path = ContainerRegistry::Path.new("#{project.full_path}/#{repository.name}")
+ expect(described_class.exists_by_path?(path)).to be_truthy
+ end
+
+ it 'returns false for unknown container repository paths' do
+ path = ContainerRegistry::Path.new('you/dont/know/me')
+ expect(described_class.exists_by_path?(path)).to be_falsey
+ end
+ end
+
+ describe '.with_enabled_policy' do
+ let_it_be(:repository) { create(:container_repository) }
+ let_it_be(:repository2) { create(:container_repository) }
+
+ subject { described_class.with_enabled_policy }
+
+ before do
+ repository.project.container_expiration_policy.update!(enabled: true)
+ end
+
+ it { is_expected.to eq([repository]) }
+ end
+
+ context 'with repositories' do
+ let_it_be_with_reload(:repository) { create(:container_repository, :cleanup_unscheduled) }
+ let_it_be(:other_repository) { create(:container_repository, :cleanup_unscheduled) }
+
+ let(:policy) { repository.project.container_expiration_policy }
+
+ before do
+ ContainerExpirationPolicy.update_all(enabled: true)
+ end
+
+ describe '.requiring_cleanup' do
+ subject { described_class.requiring_cleanup }
+
+ context 'with next_run_at in the future' do
+ before do
+ policy.update_column(:next_run_at, 10.minutes.from_now)
+ end
+
+ it { is_expected.to eq([]) }
+ end
+
+ context 'with next_run_at in the past' do
+ before do
+ policy.update_column(:next_run_at, 10.minutes.ago)
+ end
+
+ it { is_expected.to eq([repository]) }
+ end
+ end
+
+ describe '.with_unfinished_cleanup' do
+ subject { described_class.with_unfinished_cleanup }
+
+ it { is_expected.to eq([]) }
+
+ context 'with an unfinished repository' do
+ before do
+ repository.cleanup_unfinished!
+ end
+
+ it { is_expected.to eq([repository]) }
+ end
+ end
+ end
end
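
The `.expiration_policy_started_at_nil_or_before` examples above fully pin down the expected behaviour: repositories whose `expiration_policy_started_at` is nil or strictly older than the supplied timestamp. As a hedged illustration only (not the model change shipped with this commit), a scope consistent with those examples could look like:

    # Illustrative sketch, not the actual model code: a scope that satisfies the
    # examples above (nil, or strictly earlier than the supplied timestamp).
    scope :expiration_policy_started_at_nil_or_before, ->(timestamp) do
      where('expiration_policy_started_at < ? OR expiration_policy_started_at IS NULL', timestamp)
    end
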
diff --git a/spec/models/context_commits_diff_spec.rb b/spec/models/context_commits_diff_spec.rb
new file mode 100644
index 00000000000..6e03ea2745e
--- /dev/null
+++ b/spec/models/context_commits_diff_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ContextCommitsDiff do
+ let_it_be(:sha1) { "33f3729a45c02fc67d00adb1b8bca394b0e761d9" }
+ let_it_be(:sha2) { "ae73cb07c9eeaf35924a10f713b364d32b2dd34f" }
+ let_it_be(:sha3) { "0b4bc9a49b562e85de7cc9e834518ea6828729b9" }
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:project) { merge_request.project }
+ let_it_be(:mrcc1) { create(:merge_request_context_commit, merge_request: merge_request, sha: sha1, committed_date: project.commit_by(oid: sha1).committed_date) }
+ let_it_be(:mrcc2) { create(:merge_request_context_commit, merge_request: merge_request, sha: sha2, committed_date: project.commit_by(oid: sha2).committed_date) }
+ let_it_be(:mrcc3) { create(:merge_request_context_commit, merge_request: merge_request, sha: sha3, committed_date: project.commit_by(oid: sha3).committed_date) }
+
+ subject { merge_request.context_commits_diff }
+
+ describe ".empty?" do
+ it 'checks if empty' do
+ expect(subject.empty?).to be(false)
+ end
+ end
+
+ describe '.commits_count' do
+ it 'reports commits count' do
+ expect(subject.commits_count).to be(3)
+ end
+ end
+
+ describe '.diffs' do
+ it 'returns instance of Gitlab::Diff::FileCollection::Compare' do
+ expect(subject.diffs).to be_a(Gitlab::Diff::FileCollection::Compare)
+ end
+
+ it 'returns all diffs between first and last commits' do
+ expect(subject.diffs.diff_files.size).to be(5)
+ end
+ end
+
+ describe '.raw_diffs' do
+ before do
+ allow(subject).to receive(:paths).and_return(["Gemfile.zip", "files/images/6049019_460s.jpg", "files/ruby/feature.rb"])
+ end
+
+ it 'returns instance of Gitlab::Git::DiffCollection' do
+ expect(subject.raw_diffs).to be_a(Gitlab::Git::DiffCollection)
+ end
+
+ it 'returns only diff for files changed in the context commits' do
+ expect(subject.raw_diffs.size).to be(3)
+ end
+ end
+
+ describe '.diff_refs' do
+ it 'returns correct sha' do
+ expect(subject.diff_refs.head_sha).to eq(sha3)
+ expect(subject.diff_refs.base_sha).to eq("913c66a37b4a45b9769037c55c2d238bd0942d2e")
+ end
+ end
+end
diff --git a/spec/models/custom_emoji_spec.rb b/spec/models/custom_emoji_spec.rb
index e34934d393a..4a8b671bab7 100644
--- a/spec/models/custom_emoji_spec.rb
+++ b/spec/models/custom_emoji_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe CustomEmoji do
new_emoji = build(:custom_emoji, name: old_emoji.name, namespace: old_emoji.namespace, group: group)
expect(new_emoji).not_to be_valid
- expect(new_emoji.errors.messages).to include(name: ["has already been taken"])
+ expect(new_emoji.errors.messages).to eq(creator: ["can't be blank"], name: ["has already been taken"])
end
it 'disallows non http and https file value' do
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index c9544569ad6..bcd237cbd38 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -72,6 +72,35 @@ RSpec.describe Deployment do
end
end
+ describe '.for_environment_name' do
+ subject { described_class.for_environment_name(project, environment_name) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:production) { create(:environment, :production, project: project) }
+ let_it_be(:staging) { create(:environment, :staging, project: project) }
+ let_it_be(:other_project) { create(:project, :repository) }
+ let_it_be(:other_production) { create(:environment, :production, project: other_project) }
+ let(:environment_name) { production.name }
+
+ context 'when deployment belongs to the environment' do
+ let!(:deployment) { create(:deployment, project: project, environment: production) }
+
+ it { is_expected.to eq([deployment]) }
+ end
+
+ context 'when deployment belongs to the same project but different environment name' do
+ let!(:deployment) { create(:deployment, project: project, environment: staging) }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when deployment belongs to the same environment name but different project' do
+ let!(:deployment) { create(:deployment, project: other_project, environment: other_production) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
describe '.success' do
subject { described_class.success }
@@ -107,11 +136,13 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::ExecuteHooksWorker asynchronously' do
- expect(Deployments::ExecuteHooksWorker)
- .to receive(:perform_async).with(deployment.id)
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
- deployment.run!
+ deployment.run!
+ end
end
it 'executes Deployments::DropOlderDeploymentsWorker asynchronously' do
@@ -141,11 +172,13 @@ RSpec.describe Deployment do
deployment.succeed!
end
- it 'executes Deployments::ExecuteHooksWorker asynchronously' do
- expect(Deployments::ExecuteHooksWorker)
- .to receive(:perform_async).with(deployment.id)
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
- deployment.succeed!
+ deployment.succeed!
+ end
end
end
@@ -168,11 +201,13 @@ RSpec.describe Deployment do
deployment.drop!
end
- it 'executes Deployments::ExecuteHooksWorker asynchronously' do
- expect(Deployments::ExecuteHooksWorker)
- .to receive(:perform_async).with(deployment.id)
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
- deployment.drop!
+ deployment.drop!
+ end
end
end
@@ -195,11 +230,13 @@ RSpec.describe Deployment do
deployment.cancel!
end
- it 'executes Deployments::ExecuteHooksWorker asynchronously' do
- expect(Deployments::ExecuteHooksWorker)
- .to receive(:perform_async).with(deployment.id)
+ it 'executes Deployments::HooksWorker asynchronously' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
- deployment.cancel!
+ deployment.cancel!
+ end
end
end
@@ -220,11 +257,13 @@ RSpec.describe Deployment do
deployment.skip!
end
- it 'does not execute Deployments::ExecuteHooksWorker' do
- expect(Deployments::ExecuteHooksWorker)
- .not_to receive(:perform_async).with(deployment.id)
+ it 'does not execute Deployments::HooksWorker' do
+ freeze_time do
+ expect(Deployments::HooksWorker)
+ .not_to receive(:perform_async).with(deployment_id: deployment.id, status_changed_at: Time.current)
- deployment.skip!
+ deployment.skip!
+ end
end
end
@@ -714,7 +753,7 @@ RSpec.describe Deployment do
it 'schedules workers when finishing a deploy' do
expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- expect(Deployments::ExecuteHooksWorker).to receive(:perform_async)
+ expect(Deployments::HooksWorker).to receive(:perform_async)
deploy.update_status('success')
end
diff --git a/spec/models/design_management/design_spec.rb b/spec/models/design_management/design_spec.rb
index 674d2fc420d..f2ce5e42eaf 100644
--- a/spec/models/design_management/design_spec.rb
+++ b/spec/models/design_management/design_spec.rb
@@ -512,7 +512,7 @@ RSpec.describe DesignManagement::Design do
end
describe '#to_reference' do
- let(:namespace) { build(:namespace, path: 'sample-namespace') }
+ let(:namespace) { build(:namespace, id: non_existing_record_id, path: 'sample-namespace') }
let(:project) { build(:project, name: 'sample-project', namespace: namespace) }
let(:group) { create(:group, name: 'Group', path: 'sample-group') }
let(:issue) { build(:issue, iid: 1, project: project) }
diff --git a/spec/models/email_spec.rb b/spec/models/email_spec.rb
index 62f2a53ab3c..cd0938682db 100644
--- a/spec/models/email_spec.rb
+++ b/spec/models/email_spec.rb
@@ -44,12 +44,11 @@ RSpec.describe Email do
end
end
- describe 'delegation' do
- let(:user) { create(:user) }
-
- it 'delegates to :user' do
- expect(build(:email, user: user).username).to eq user.username
- end
+ describe 'delegations' do
+ it { is_expected.to delegate_method(:can?).to(:user) }
+ it { is_expected.to delegate_method(:username).to(:user) }
+ it { is_expected.to delegate_method(:pending_invitations).to(:user) }
+ it { is_expected.to delegate_method(:accept_pending_invitations!).to(:user) }
end
describe 'Devise emails' do
diff --git a/spec/models/external_pull_request_spec.rb b/spec/models/external_pull_request_spec.rb
index e0822fc177a..bac2c369d7d 100644
--- a/spec/models/external_pull_request_spec.rb
+++ b/spec/models/external_pull_request_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe ExternalPullRequest do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:source_branch) { 'the-branch' }
let(:status) { :open }
@@ -217,4 +218,18 @@ RSpec.describe ExternalPullRequest do
expect(pull_request).not_to be_from_fork
end
end
+
+ describe '#modified_paths' do
+ let(:pull_request) do
+ build(:external_pull_request, project: project, target_sha: '281d3a7', source_sha: '498214d')
+ end
+
+ subject(:modified_paths) { pull_request.modified_paths }
+
+ it 'returns modified paths' do
+ expect(modified_paths).to eq ['bar/branch-test.txt',
+ 'files/js/commit.coffee',
+ 'with space/README.md']
+ end
+ end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 2f82d8a0bbe..5cc5c4d86d6 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Group do
it { is_expected.to have_many(:container_repositories) }
it { is_expected.to have_many(:milestones) }
it { is_expected.to have_many(:group_deploy_keys) }
- it { is_expected.to have_many(:services) }
+ it { is_expected.to have_many(:integrations) }
it { is_expected.to have_one(:dependency_proxy_setting) }
it { is_expected.to have_many(:dependency_proxy_blobs) }
it { is_expected.to have_many(:dependency_proxy_manifests) }
@@ -395,18 +395,94 @@ RSpec.describe Group do
end
end
- context 'assigning a new parent' do
- let!(:old_parent) { create(:group) }
- let!(:new_parent) { create(:group) }
+ context 'assign a new parent' do
let!(:group) { create(:group, parent: old_parent) }
+ let(:recorded_queries) { ActiveRecord::QueryRecorder.new }
+
+ subject do
+ recorded_queries.record do
+ group.update(parent: new_parent)
+ end
+ end
before do
- group.update(parent: new_parent)
+ subject
reload_models(old_parent, new_parent, group)
end
- it 'updates traversal_ids' do
- expect(group.traversal_ids).to eq [new_parent.id, group.id]
+ context 'within the same hierarchy' do
+ let!(:root) { create(:group).reload }
+ let!(:old_parent) { create(:group, parent: root) }
+ let!(:new_parent) { create(:group, parent: root) }
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [root.id, new_parent.id, group.id]
+ end
+
+ it_behaves_like 'hierarchy with traversal_ids'
+ it_behaves_like 'locked row' do
+ let(:row) { root }
+ end
+ end
+
+ context 'to another hierarchy' do
+ let!(:old_parent) { create(:group) }
+ let!(:new_parent) { create(:group) }
+ let!(:group) { create(:group, parent: old_parent) }
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [new_parent.id, group.id]
+ end
+
+ it_behaves_like 'locked rows' do
+ let(:rows) { [old_parent, new_parent] }
+ end
+
+ context 'old hierarchy' do
+ let(:root) { old_parent.root_ancestor }
+
+ it_behaves_like 'hierarchy with traversal_ids'
+ end
+
+ context 'new hierarchy' do
+ let(:root) { new_parent.root_ancestor }
+
+ it_behaves_like 'hierarchy with traversal_ids'
+ end
+ end
+
+ context 'from being a root ancestor' do
+ let!(:old_parent) { nil }
+ let!(:new_parent) { create(:group) }
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [new_parent.id, group.id]
+ end
+
+ it_behaves_like 'locked rows' do
+ let(:rows) { [group, new_parent] }
+ end
+
+ it_behaves_like 'hierarchy with traversal_ids' do
+ let(:root) { new_parent }
+ end
+ end
+
+ context 'to being a root ancestor' do
+ let!(:old_parent) { create(:group) }
+ let!(:new_parent) { nil }
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [group.id]
+ end
+
+ it_behaves_like 'locked rows' do
+ let(:rows) { [old_parent, group] }
+ end
+
+ it_behaves_like 'hierarchy with traversal_ids' do
+ let(:root) { group }
+ end
end
end
@@ -427,6 +503,58 @@ RSpec.describe Group do
end
end
+ context 'traversal queries' do
+ let_it_be(:group, reload: true) { create(:group, :nested) }
+
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ it_behaves_like 'namespace traversal'
+
+ describe '#self_and_descendants' do
+ it { expect(group.self_and_descendants.to_sql).not_to include 'traversal_ids @>' }
+ end
+
+ describe '#descendants' do
+ it { expect(group.descendants.to_sql).not_to include 'traversal_ids @>' }
+ end
+
+ describe '#ancestors' do
+ it { expect(group.ancestors.to_sql).not_to include 'traversal_ids <@' }
+ end
+ end
+
+ context 'linear' do
+ it_behaves_like 'namespace traversal'
+
+ describe '#self_and_descendants' do
+ it { expect(group.self_and_descendants.to_sql).to include 'traversal_ids @>' }
+ end
+
+ describe '#descendants' do
+ it { expect(group.descendants.to_sql).to include 'traversal_ids @>' }
+ end
+
+ describe '#ancestors' do
+ it { expect(group.ancestors.to_sql).to include "\"namespaces\".\"id\" = #{group.parent_id}" }
+
+ it 'hierarchy order' do
+ expect(group.ancestors(hierarchy_order: :asc).to_sql).to include 'ORDER BY "depth" ASC'
+ end
+
+ context 'ancestor linear queries feature flag disabled' do
+ before do
+ stub_feature_flags(use_traversal_ids_for_ancestors: false)
+ end
+
+ it { expect(group.ancestors.to_sql).not_to include 'traversal_ids <@' }
+ end
+ end
+ end
+ end
+
describe '.without_integration' do
let(:another_group) { create(:group) }
let(:instance_integration) { build(:jira_service, :instance) }
@@ -504,6 +632,16 @@ RSpec.describe Group do
it { is_expected.to match_array([private_group, internal_group]) }
end
+ describe 'with_onboarding_progress' do
+ subject { described_class.with_onboarding_progress }
+
+ it 'joins onboarding_progress' do
+ create(:onboarding_progress, namespace: group)
+
+ expect(subject).to eq([group])
+ end
+ end
+
describe 'for_authorized_group_members' do
let_it_be(:group_member1) { create(:group_member, source: private_group, user_id: user1.id, access_level: Gitlab::Access::OWNER) }
@@ -579,7 +717,9 @@ RSpec.describe Group do
it "is false if avatar is html page" do
group.update_attribute(:avatar, 'uploads/avatar.html')
- expect(group.avatar_type).to eq(["file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp"])
+ group.avatar_type
+
+ expect(group.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
end
end
@@ -953,140 +1093,167 @@ RSpec.describe Group do
it { expect(subject.parent).to be_kind_of(described_class) }
end
- describe '#max_member_access_for_user' do
- context 'group shared with another group' do
- let(:parent_group_user) { create(:user) }
- let(:group_user) { create(:user) }
- let(:child_group_user) { create(:user) }
-
- let_it_be(:group_parent) { create(:group, :private) }
- let_it_be(:group) { create(:group, :private, parent: group_parent) }
- let_it_be(:group_child) { create(:group, :private, parent: group) }
-
- let_it_be(:shared_group_parent) { create(:group, :private) }
- let_it_be(:shared_group) { create(:group, :private, parent: shared_group_parent) }
- let_it_be(:shared_group_child) { create(:group, :private, parent: shared_group) }
-
- before do
- group_parent.add_owner(parent_group_user)
- group.add_owner(group_user)
- group_child.add_owner(child_group_user)
-
- create(:group_group_link, { shared_with_group: group,
- shared_group: shared_group,
- group_access: GroupMember::DEVELOPER })
- end
+ context "with member access" do
+ let_it_be(:group_user) { create(:user) }
+ describe '#max_member_access_for_user' do
context 'with user in the group' do
- let(:user) { group_user }
+ before do
+ group.add_owner(group_user)
+ end
it 'returns correct access level' do
- expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::DEVELOPER)
- expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::DEVELOPER)
+ expect(group.max_member_access_for_user(group_user)).to eq(Gitlab::Access::OWNER)
end
+ end
- context 'with lower group access level than max access level for share' do
- let(:user) { create(:user) }
+ context 'when user is nil' do
+ it 'returns NO_ACCESS' do
+ expect(group.max_member_access_for_user(nil)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
- it 'returns correct access level' do
- group.add_reporter(user)
+ context 'evaluating admin access level' do
+ let_it_be(:admin) { create(:admin) }
- expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::REPORTER)
- expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::REPORTER)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'returns OWNER by default' do
+ expect(group.max_member_access_for_user(admin)).to eq(Gitlab::Access::OWNER)
end
end
- end
- context 'with user in the parent group' do
- let(:user) { parent_group_user }
+ context 'when admin mode is disabled' do
+ it 'returns NO_ACCESS' do
+ expect(group.max_member_access_for_user(admin)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
- it 'returns correct access level' do
- expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ it 'returns NO_ACCESS when only concrete membership should be considered' do
+ expect(group.max_member_access_for_user(admin, only_concrete_membership: true))
+ .to eq(Gitlab::Access::NO_ACCESS)
end
end
- context 'with user in the child group' do
- let(:user) { child_group_user }
+ context 'when max_access_for_group is set' do
+ let(:max_member_access) { 111 }
- it 'returns correct access level' do
- expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ before do
+ group_user.max_access_for_group[group.id] = max_member_access
end
- end
-
- context 'unrelated project owner' do
- let(:common_id) { [Project.maximum(:id).to_i, Namespace.maximum(:id).to_i].max + 999 }
- let!(:group) { create(:group, id: common_id) }
- let!(:unrelated_project) { create(:project, id: common_id) }
- let(:user) { unrelated_project.owner }
- it 'returns correct access level' do
- expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ it 'uses the cached value' do
+ expect(group.max_member_access_for_user(group_user)).to eq(max_member_access)
end
end
+ end
- context 'user without accepted access request' do
- let!(:user) { create(:user) }
+ describe '#max_member_access' do
+ context 'group shared with another group' do
+ let_it_be(:parent_group_user) { create(:user) }
+ let_it_be(:child_group_user) { create(:user) }
+
+ let_it_be(:group_parent) { create(:group, :private) }
+ let_it_be(:group) { create(:group, :private, parent: group_parent) }
+ let_it_be(:group_child) { create(:group, :private, parent: group) }
+
+ let_it_be(:shared_group_parent) { create(:group, :private) }
+ let_it_be(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+ let_it_be(:shared_group_child) { create(:group, :private, parent: shared_group) }
before do
- create(:group_member, :developer, :access_request, user: user, group: group)
+ group_parent.add_owner(parent_group_user)
+ group.add_owner(group_user)
+ group_child.add_owner(child_group_user)
+
+ create(:group_group_link, { shared_with_group: group,
+ shared_group: shared_group,
+ group_access: GroupMember::DEVELOPER })
end
- it 'returns correct access level' do
- expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
- expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
+ context 'with user in the group' do
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access(group_user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access(group_user)).to eq(Gitlab::Access::DEVELOPER)
+ expect(shared_group_child.max_member_access(group_user)).to eq(Gitlab::Access::DEVELOPER)
+ end
+
+ context 'with lower group access level than max access level for share' do
+ let(:user) { create(:user) }
+
+ it 'returns correct access level' do
+ group.add_reporter(user)
+
+ expect(shared_group_parent.max_member_access(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access(user)).to eq(Gitlab::Access::REPORTER)
+ expect(shared_group_child.max_member_access(user)).to eq(Gitlab::Access::REPORTER)
+ end
+ end
end
- end
- end
- context 'multiple groups shared with group' do
- let(:user) { create(:user) }
- let(:group) { create(:group, :private) }
- let(:shared_group_parent) { create(:group, :private) }
- let(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+ context 'with user in the parent group' do
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access(parent_group_user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access(parent_group_user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access(parent_group_user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
- before do
- group.add_owner(user)
+ context 'with user in the child group' do
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access(child_group_user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access(child_group_user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access(child_group_user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
- create(:group_group_link, { shared_with_group: group,
- shared_group: shared_group,
- group_access: GroupMember::DEVELOPER })
- create(:group_group_link, { shared_with_group: group,
- shared_group: shared_group_parent,
- group_access: GroupMember::MAINTAINER })
- end
+ context 'unrelated project owner' do
+ let(:common_id) { [Project.maximum(:id).to_i, Namespace.maximum(:id).to_i].max + 999 }
+ let!(:group) { create(:group, id: common_id) }
+ let!(:unrelated_project) { create(:project, id: common_id) }
+ let(:user) { unrelated_project.owner }
- it 'returns correct access level' do
- expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::MAINTAINER)
- end
- end
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access(user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
+ end
+
+ context 'user without accepted access request' do
+ let!(:user) { create(:user) }
- context 'evaluating admin access level' do
- let_it_be(:admin) { create(:admin) }
+ before do
+ create(:group_member, :developer, :access_request, user: user, group: group)
+ end
- context 'when admin mode is enabled', :enable_admin_mode do
- it 'returns OWNER by default' do
- expect(group.max_member_access_for_user(admin)).to eq(Gitlab::Access::OWNER)
+ it 'returns correct access level' do
+ expect(shared_group_parent.max_member_access(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group.max_member_access(user)).to eq(Gitlab::Access::NO_ACCESS)
+ expect(shared_group_child.max_member_access(user)).to eq(Gitlab::Access::NO_ACCESS)
+ end
end
end
- context 'when admin mode is disabled' do
- it 'returns NO_ACCESS' do
- expect(group.max_member_access_for_user(admin)).to eq(Gitlab::Access::NO_ACCESS)
+ context 'multiple groups shared with group' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group, :private) }
+ let(:shared_group_parent) { create(:group, :private) }
+ let(:shared_group) { create(:group, :private, parent: shared_group_parent) }
+
+ before do
+ group.add_owner(user)
+
+ create(:group_group_link, { shared_with_group: group,
+ shared_group: shared_group,
+ group_access: GroupMember::DEVELOPER })
+ create(:group_group_link, { shared_with_group: group,
+ shared_group: shared_group_parent,
+ group_access: GroupMember::MAINTAINER })
end
- end
- it 'returns NO_ACCESS when only concrete membership should be considered' do
- expect(group.max_member_access_for_user(admin, only_concrete_membership: true))
- .to eq(Gitlab::Access::NO_ACCESS)
+ it 'returns correct access level' do
+ expect(shared_group.max_member_access(user)).to eq(Gitlab::Access::MAINTAINER)
+ end
end
end
end
@@ -1118,7 +1285,7 @@ RSpec.describe Group do
end
end
- describe '#members_with_parents' do
+ shared_examples_for 'members_with_parents' do
let!(:group) { create(:group, :nested) }
let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
let!(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
@@ -1142,6 +1309,50 @@ RSpec.describe Group do
end
end
+ describe '#members_with_parents' do
+ it_behaves_like 'members_with_parents'
+ end
+
+ describe '#authorizable_members_with_parents' do
+ let(:group) { create(:group) }
+
+ it_behaves_like 'members_with_parents'
+
+ context 'members with associated user but also having invite_token' do
+ let!(:member) { create(:group_member, :developer, :invited, user: create(:user), group: group) }
+
+ it 'includes such members in the result' do
+ expect(group.authorizable_members_with_parents).to include(member)
+ end
+ end
+
+ context 'invited members' do
+ let!(:member) { create(:group_member, :developer, :invited, group: group) }
+
+ it 'does not include such members in the result' do
+ expect(group.authorizable_members_with_parents).not_to include(member)
+ end
+ end
+
+ context 'members from group shares' do
+ let(:shared_group) { group }
+ let(:shared_with_group) { create(:group) }
+
+ before do
+ create(:group_group_link, shared_group: shared_group, shared_with_group: shared_with_group)
+ end
+
+ context 'an invited member that is part of the shared_with_group' do
+ let!(:member) { create(:group_member, :developer, :invited, group: shared_with_group) }
+
+ it 'does not include such members in the result' do
+ expect(shared_group.authorizable_members_with_parents).not_to(
+ include(member))
+ end
+ end
+ end
+ end
+
describe '#members_from_self_and_ancestors_with_effective_access_level' do
let!(:group_parent) { create(:group, :private) }
let!(:group) { create(:group, :private, parent: group_parent) }
@@ -1769,13 +1980,35 @@ RSpec.describe Group do
allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
- it 'returns all variables belong to the group and parent groups' do
- expected_array1 = [protected_variable, ci_variable]
- expected_array2 = [variable_child, variable_child_2, variable_child_3]
- got_array = group_child_3.ci_variables_for('ref', project).to_a
+ context 'traversal queries' do
+ shared_examples 'correct ancestor order' do
+ it 'returns all variables belong to the group and parent groups' do
+ expected_array1 = [protected_variable, ci_variable]
+ expected_array2 = [variable_child, variable_child_2, variable_child_3]
+ got_array = group_child_3.ci_variables_for('ref', project).to_a
+
+ expect(got_array.shift(2)).to contain_exactly(*expected_array1)
+ expect(got_array).to eq(expected_array2)
+ end
+ end
+
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
- expect(got_array.shift(2)).to contain_exactly(*expected_array1)
- expect(got_array).to eq(expected_array2)
+ include_examples 'correct ancestor order'
+ end
+
+ context 'linear' do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+
+ group_child_3.reload # make sure traversal_ids are reloaded
+ end
+
+ include_examples 'correct ancestor order'
+ end
end
end
end
@@ -2012,22 +2245,31 @@ RSpec.describe Group do
end
describe '#access_request_approvers_to_be_notified' do
- it 'returns a maximum of ten, active, non_requested owners of the group in recent_sign_in descending order' do
- group = create(:group, :public)
+ let_it_be(:group) { create(:group, :public) }
+ it 'returns a maximum of ten owners of the group in recent_sign_in descending order' do
users = create_list(:user, 12, :with_sign_ins)
active_owners = users.map do |user|
create(:group_member, :owner, group: group, user: user)
end
- create(:group_member, :owner, :blocked, group: group)
- create(:group_member, :maintainer, group: group)
- create(:group_member, :access_request, :owner, group: group)
-
- active_owners_in_recent_sign_in_desc_order = group.members_and_requesters.where(id: active_owners).order_recent_sign_in.limit(10)
+ active_owners_in_recent_sign_in_desc_order = group.members_and_requesters
+ .id_in(active_owners)
+ .order_recent_sign_in.limit(10)
expect(group.access_request_approvers_to_be_notified).to eq(active_owners_in_recent_sign_in_desc_order)
end
+
+ it 'returns active, non_invited, non_requested owners of the group' do
+ owner = create(:group_member, :owner, source: group)
+
+ create(:group_member, :maintainer, group: group)
+ create(:group_member, :owner, :invited, group: group)
+ create(:group_member, :owner, :access_request, group: group)
+ create(:group_member, :owner, :blocked, group: group)
+
+ expect(group.access_request_approvers_to_be_notified.to_a).to eq([owner])
+ end
end
describe '.groups_including_descendants_by' do
@@ -2214,17 +2456,17 @@ RSpec.describe Group do
end
describe "#default_branch_name" do
- context "group.namespace_settings does not have a default branch name" do
+ context "when group.namespace_settings does not have a default branch name" do
it "returns nil" do
expect(group.default_branch_name).to be_nil
end
end
- context "group.namespace_settings has a default branch name" do
+ context "when group.namespace_settings has a default branch name" do
let(:example_branch_name) { "example_branch_name" }
before do
- expect(group.namespace_settings)
+ allow(group.namespace_settings)
.to receive(:default_branch_name)
.and_return(example_branch_name)
end
@@ -2361,4 +2603,20 @@ RSpec.describe Group do
it { is_expected.to eq(Set.new([child_1.id])) }
end
+
+ describe '#to_ability_name' do
+ it 'returns group' do
+ group = build(:group)
+
+ expect(group.to_ability_name).to eq('group')
+ end
+ end
+
+ describe '#activity_path' do
+ it 'returns the group activity_path' do
+ expected_path = "/groups/#{group.name}/-/activity"
+
+ expect(group.activity_path).to eq(expected_path)
+ end
+ end
end
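
The traversal examples above assert only on the shape of the generated SQL: linear queries should use PostgreSQL array containment on `traversal_ids`, recursive ones should not. As an illustration of that query shape (an assumption about intent, not the code under test, and assuming `traversal_ids` is an integer array column):

    # Illustrative sketch: self-and-descendants via array containment, the pattern
    # the 'linear' examples above look for ('traversal_ids @>').
    Group.where('traversal_ids @> ARRAY[?]', group.id)
    # A row matches when its traversal_ids array contains group.id, i.e. the group
    # itself plus every namespace nested beneath it.
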
diff --git a/spec/models/hooks/project_hook_spec.rb b/spec/models/hooks/project_hook_spec.rb
index 69fbc4c3b4f..88149465232 100644
--- a/spec/models/hooks/project_hook_spec.rb
+++ b/spec/models/hooks/project_hook_spec.rb
@@ -30,4 +30,13 @@ RSpec.describe ProjectHook do
expect(described_class.tag_push_hooks).to eq([hook])
end
end
+
+ describe '#rate_limit' do
+ let_it_be(:hook) { create(:project_hook) }
+ let_it_be(:plan_limits) { create(:plan_limits, :default_plan, web_hook_calls: 100) }
+
+ it 'returns the default limit' do
+ expect(hook.rate_limit).to be(100)
+ end
+ end
end
diff --git a/spec/models/hooks/service_hook_spec.rb b/spec/models/hooks/service_hook_spec.rb
index f7045d7ac5e..651716c3280 100644
--- a/spec/models/hooks/service_hook_spec.rb
+++ b/spec/models/hooks/service_hook_spec.rb
@@ -4,11 +4,11 @@ require 'spec_helper'
RSpec.describe ServiceHook do
describe 'associations' do
- it { is_expected.to belong_to :service }
+ it { is_expected.to belong_to :integration }
end
describe 'validations' do
- it { is_expected.to validate_presence_of(:service) }
+ it { is_expected.to validate_presence_of(:integration) }
end
describe 'execute' do
@@ -22,4 +22,12 @@ RSpec.describe ServiceHook do
hook.execute(data)
end
end
+
+ describe '#rate_limit' do
+ let(:hook) { build(:service_hook) }
+
+ it 'returns nil' do
+ expect(hook.rate_limit).to be_nil
+ end
+ end
end
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index 02e630cbf27..a72034f1ac5 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -169,4 +169,12 @@ RSpec.describe SystemHook do
hook.async_execute(data, hook_name)
end
end
+
+ describe '#rate_limit' do
+ let(:hook) { build(:system_hook) }
+
+ it 'returns nil' do
+ expect(hook.rate_limit).to be_nil
+ end
+ end
end
diff --git a/spec/models/hooks/web_hook_log_archived_spec.rb b/spec/models/hooks/web_hook_log_archived_spec.rb
new file mode 100644
index 00000000000..ac726dbaf4f
--- /dev/null
+++ b/spec/models/hooks/web_hook_log_archived_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHookLogArchived do
+ let(:source_table) { WebHookLog }
+ let(:destination_table) { described_class }
+
+ it 'has the same columns as the source table' do
+ column_names_from_source_table = column_names(source_table)
+ column_names_from_destination_table = column_names(destination_table)
+
+ expect(column_names_from_destination_table).to match_array(column_names_from_source_table)
+ end
+
+ it 'has the same null constraints as the source table' do
+ constraints_from_source_table = null_constraints(source_table)
+ constraints_from_destination_table = null_constraints(destination_table)
+
+ expect(constraints_from_destination_table.to_a).to match_array(constraints_from_source_table.to_a)
+ end
+
+ it 'inserts the same record as the one in the source table', :aggregate_failures do
+ expect { create(:web_hook_log) }.to change { destination_table.count }.by(1)
+
+ event_from_source_table = source_table.connection.select_one(
+ "SELECT * FROM #{source_table.table_name} ORDER BY created_at desc LIMIT 1"
+ )
+ event_from_destination_table = destination_table.connection.select_one(
+ "SELECT * FROM #{destination_table.table_name} ORDER BY created_at desc LIMIT 1"
+ )
+
+ expect(event_from_destination_table).to eq(event_from_source_table)
+ end
+
+ def column_names(table)
+ table.connection.select_all(<<~SQL)
+ SELECT c.column_name
+ FROM information_schema.columns c
+ WHERE c.table_name = '#{table.table_name}'
+ SQL
+ end
+
+ def null_constraints(table)
+ table.connection.select_all(<<~SQL)
+ SELECT c.column_name, c.is_nullable
+ FROM information_schema.columns c
+ WHERE c.table_name = '#{table.table_name}'
+ AND c.column_name != 'created_at'
+ SQL
+ end
+end
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 413e69fb071..b528dbedd2c 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -3,7 +3,15 @@
require 'spec_helper'
RSpec.describe WebHook do
- let(:hook) { build(:project_hook) }
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project) }
+
+ let(:hook) { build(:project_hook, project: project) }
+
+ around do |example|
+ freeze_time { example.run }
+ end
describe 'associations' do
it { is_expected.to have_many(:web_hook_logs) }
@@ -69,18 +77,30 @@ RSpec.describe WebHook do
let(:data) { { key: 'value' } }
let(:hook_name) { 'project hook' }
- before do
- expect(WebHookService).to receive(:new).with(hook, data, hook_name).and_call_original
+ it '#execute' do
+ expect_next(WebHookService).to receive(:execute)
+
+ hook.execute(data, hook_name)
end
- it '#execute' do
- expect_any_instance_of(WebHookService).to receive(:execute)
+ it 'does not execute non-executable hooks' do
+ hook.update!(disabled_until: 1.day.from_now)
+
+ expect(WebHookService).not_to receive(:new)
hook.execute(data, hook_name)
end
it '#async_execute' do
- expect_any_instance_of(WebHookService).to receive(:async_execute)
+ expect_next(WebHookService).to receive(:async_execute)
+
+ hook.async_execute(data, hook_name)
+ end
+
+ it 'does not async execute non-executable hooks' do
+ hook.update!(disabled_until: 1.day.from_now)
+
+ expect(WebHookService).not_to receive(:new)
hook.async_execute(data, hook_name)
end
@@ -94,4 +114,170 @@ RSpec.describe WebHook do
expect { web_hook.destroy! }.to change(web_hook.web_hook_logs, :count).by(-3)
end
end
+
+ describe '.executable' do
+ let(:not_executable) do
+ [
+ [0, Time.current],
+ [0, 1.minute.from_now],
+ [1, 1.minute.from_now],
+ [3, 1.minute.from_now],
+ [4, nil],
+ [4, 1.day.ago],
+ [4, 1.minute.from_now]
+ ].map do |(recent_failures, disabled_until)|
+ create(:project_hook, project: project, recent_failures: recent_failures, disabled_until: disabled_until)
+ end
+ end
+
+ let(:executables) do
+ [
+ [0, nil],
+ [0, 1.day.ago],
+ [1, nil],
+ [1, 1.day.ago],
+ [3, nil],
+ [3, 1.day.ago]
+ ].map do |(recent_failures, disabled_until)|
+ create(:project_hook, project: project, recent_failures: recent_failures, disabled_until: disabled_until)
+ end
+ end
+
+ it 'finds the correct set of project hooks' do
+ expect(described_class.where(project_id: project.id).executable).to match_array executables
+ end
+
+ context 'when the feature flag is not enabled' do
+ before do
+ stub_feature_flags(web_hooks_disable_failed: false)
+ end
+
+ it 'is the same as all' do
+ expect(described_class.where(project_id: project.id).executable).to match_array(executables + not_executable)
+ end
+ end
+ end
+
+ describe '#executable?' do
+ let(:web_hook) { create(:project_hook, project: project) }
+
+ where(:recent_failures, :not_until, :executable) do
+ [
+ [0, :not_set, true],
+ [0, :past, true],
+ [0, :future, false],
+ [0, :now, false],
+ [1, :not_set, true],
+ [1, :past, true],
+ [1, :future, false],
+ [3, :not_set, true],
+ [3, :past, true],
+ [3, :future, false],
+ [4, :not_set, false],
+ [4, :past, false],
+ [4, :future, false]
+ ]
+ end
+
+ with_them do
+ # Phasing means we cannot put these values in the where block,
+ # which is not subject to the frozen time context.
+ let(:disabled_until) do
+ case not_until
+ when :not_set
+ nil
+ when :past
+ 1.minute.ago
+ when :future
+ 1.minute.from_now
+ when :now
+ Time.current
+ end
+ end
+
+ before do
+ web_hook.update!(recent_failures: recent_failures, disabled_until: disabled_until)
+ end
+
+ it 'has the correct state' do
+ expect(web_hook.executable?).to eq(executable)
+ end
+
+ context 'when the feature flag is enabled for a project' do
+ before do
+ stub_feature_flags(web_hooks_disable_failed: project)
+ end
+
+ it 'has the expected value' do
+ expect(web_hook.executable?).to eq(executable)
+ end
+ end
+
+ context 'when the feature flag is not enabled' do
+ before do
+ stub_feature_flags(web_hooks_disable_failed: false)
+ end
+
+ it 'is executable' do
+ expect(web_hook).to be_executable
+ end
+ end
+ end
+ end
+
+ describe '#next_backoff' do
+ context 'when there was no last backoff' do
+ before do
+ hook.backoff_count = 0
+ end
+
+ it 'is 10 minutes' do
+ expect(hook.next_backoff).to eq(described_class::INITIAL_BACKOFF)
+ end
+ end
+
+ context 'when we have backed off once' do
+ before do
+ hook.backoff_count = 1
+ end
+
+ it 'is twice the initial value' do
+ expect(hook.next_backoff).to eq(20.minutes)
+ end
+ end
+
+ context 'when we have backed off 3 times' do
+ before do
+ hook.backoff_count = 3
+ end
+
+ it 'grows exponentially' do
+ expect(hook.next_backoff).to eq(80.minutes)
+ end
+ end
+
+ context 'when the previous backoff was large' do
+ before do
+ hook.backoff_count = 8 # last value before MAX_BACKOFF
+ end
+
+ it 'does not exceed the max backoff value' do
+ expect(hook.next_backoff).to eq(described_class::MAX_BACKOFF)
+ end
+ end
+ end
+
+ describe '#enable!' do
+ it 'makes a hook executable' do
+ hook.recent_failures = 1000
+
+ expect { hook.enable! }.to change(hook, :executable?).from(false).to(true)
+ end
+ end
+
+ describe '#disable!' do
+ it 'disables a hook' do
+ expect { hook.disable! }.to change(hook, :executable?).from(true).to(false)
+ end
+ end
end
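
The `#next_backoff` examples describe an exponential backoff: 10 minutes with no prior backoff, doubling per recorded backoff, and never exceeding a maximum. A minimal sketch consistent with those values follows; `INITIAL_BACKOFF` comes straight from the examples, while the concrete `MAX_BACKOFF` value here is an assumption (the spec only asserts that the cap is respected).

    # Illustrative sketch of the backoff progression the examples above encode.
    INITIAL_BACKOFF = 10.minutes # 'is 10 minutes' when backoff_count is 0
    MAX_BACKOFF     = 1.day      # assumed cap; only its existence is asserted above

    def next_backoff(backoff_count)
      [INITIAL_BACKOFF * (2**backoff_count), MAX_BACKOFF].min
    end

    next_backoff(0) # => 10 minutes
    next_backoff(1) # => 20 minutes (twice the initial value)
    next_backoff(3) # => 80 minutes (grows exponentially)
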
diff --git a/spec/models/instance_metadata/kas_spec.rb b/spec/models/instance_metadata/kas_spec.rb
new file mode 100644
index 00000000000..f8cc34fa8d3
--- /dev/null
+++ b/spec/models/instance_metadata/kas_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::InstanceMetadata::Kas do
+ it 'has InstanceMetadataPolicy as declarative policy' do
+ expect(described_class.declarative_policy_class).to eq("InstanceMetadataPolicy")
+ end
+
+ context 'when KAS is enabled' do
+ it 'has the correct properties' do
+ allow(Gitlab::Kas).to receive(:enabled?).and_return(true)
+
+ expect(subject).to have_attributes(
+ enabled: Gitlab::Kas.enabled?,
+ version: Gitlab::Kas.version,
+ external_url: Gitlab::Kas.external_url
+ )
+ end
+ end
+
+ context 'when KAS is disabled' do
+ it 'has the correct properties' do
+ allow(Gitlab::Kas).to receive(:enabled?).and_return(false)
+
+ expect(subject).to have_attributes(
+ enabled: Gitlab::Kas.enabled?,
+ version: nil,
+ external_url: nil
+ )
+ end
+ end
+end
diff --git a/spec/models/instance_metadata_spec.rb b/spec/models/instance_metadata_spec.rb
index 1835dc8a9af..e3a9167620b 100644
--- a/spec/models/instance_metadata_spec.rb
+++ b/spec/models/instance_metadata_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe InstanceMetadata do
it 'has the correct properties' do
expect(subject).to have_attributes(
version: Gitlab::VERSION,
- revision: Gitlab.revision
+ revision: Gitlab.revision,
+ kas: kind_of(::InstanceMetadata::Kas)
)
end
end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 781e2aece56..77b3778122a 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -3,31 +3,945 @@
require 'spec_helper'
RSpec.describe Integration do
- let_it_be(:project_1) { create(:project) }
- let_it_be(:project_2) { create(:project) }
- let_it_be(:project_3) { create(:project) }
- let_it_be(:project_4) { create(:project) }
- let_it_be(:instance_integration) { create(:jira_service, :instance) }
+ using RSpec::Parameterized::TableSyntax
- before do
- create(:jira_service, project: project_1, inherit_from_id: instance_integration.id)
- create(:jira_service, project: project_2, inherit_from_id: nil)
- create(:jira_service, group: create(:group), project: nil, inherit_from_id: nil)
- create(:jira_service, project: project_3, inherit_from_id: nil)
- create(:slack_service, project: project_4, inherit_from_id: nil)
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ describe "Associations" do
+ it { is_expected.to belong_to :project }
+ it { is_expected.to belong_to :group }
+ it { is_expected.to have_one :service_hook }
+ it { is_expected.to have_one :jira_tracker_data }
+ it { is_expected.to have_one :issue_tracker_data }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:type) }
+
+ where(:project_id, :group_id, :template, :instance, :valid) do
+ 1 | nil | false | false | true
+ nil | 1 | false | false | true
+ nil | nil | true | false | true
+ nil | nil | false | true | true
+ nil | nil | false | false | false
+ nil | nil | true | true | false
+ 1 | 1 | false | false | false
+ 1 | nil | true | false | false
+ 1 | nil | false | true | false
+ nil | 1 | true | false | false
+ nil | 1 | false | true | false
+ end
+
+ with_them do
+ it 'validates the service' do
+ expect(build(:service, project_id: project_id, group_id: group_id, template: template, instance: instance).valid?).to eq(valid)
+ end
+ end
+
+ context 'with existing services' do
+ before_all do
+ create(:service, :template)
+ create(:service, :instance)
+ create(:service, project: project)
+ create(:service, group: group, project: nil)
+ end
+
+ it 'allows only one service template per type' do
+ expect(build(:service, :template)).to be_invalid
+ end
+
+ it 'allows only one instance service per type' do
+ expect(build(:service, :instance)).to be_invalid
+ end
+
+ it 'allows only one project service per type' do
+ expect(build(:service, project: project)).to be_invalid
+ end
+
+ it 'allows only one group service per type' do
+ expect(build(:service, group: group, project: nil)).to be_invalid
+ end
+ end
+ end
+
+ describe 'Scopes' do
+ describe '.by_type' do
+ let!(:service1) { create(:jira_service) }
+ let!(:service2) { create(:jira_service) }
+ let!(:service3) { create(:redmine_service) }
+
+ subject { described_class.by_type(type) }
+
+ context 'when type is "JiraService"' do
+ let(:type) { 'JiraService' }
+
+ it { is_expected.to match_array([service1, service2]) }
+ end
+
+ context 'when type is "RedmineService"' do
+ let(:type) { 'RedmineService' }
+
+ it { is_expected.to match_array([service3]) }
+ end
+ end
+
+ describe '.for_group' do
+ let!(:service1) { create(:jira_service, project_id: nil, group_id: group.id) }
+ let!(:service2) { create(:jira_service) }
+
+ it 'returns the right group service' do
+ expect(described_class.for_group(group)).to match_array([service1])
+ end
+ end
+
+ describe '.confidential_note_hooks' do
+ it 'includes services where confidential_note_events is true' do
+ create(:service, active: true, confidential_note_events: true)
+
+ expect(described_class.confidential_note_hooks.count).to eq 1
+ end
+
+ it 'excludes services where confidential_note_events is false' do
+ create(:service, active: true, confidential_note_events: false)
+
+ expect(described_class.confidential_note_hooks.count).to eq 0
+ end
+ end
+
+ describe '.alert_hooks' do
+ it 'includes services where alert_events is true' do
+ create(:service, active: true, alert_events: true)
+
+ expect(described_class.alert_hooks.count).to eq 1
+ end
+
+ it 'excludes services where alert_events is false' do
+ create(:service, active: true, alert_events: false)
+
+ expect(described_class.alert_hooks.count).to eq 0
+ end
+ end
+ end
+
+ describe '#operating?' do
+ it 'is false when the service is not active' do
+ expect(build(:service).operating?).to eq(false)
+ end
+
+ it 'is false when the service is not persisted' do
+ expect(build(:service, active: true).operating?).to eq(false)
+ end
+
+ it 'is true when the service is active and persisted' do
+ expect(create(:service, active: true).operating?).to eq(true)
+ end
+ end
+
+ describe "Test Button" do
+ let(:service) { build(:service, project: project) }
+
+ describe '#can_test?' do
+ subject { service.can_test? }
+
+ context 'when repository is not empty' do
+ let(:project) { build(:project, :repository) }
+
+ it { is_expected.to be true }
+ end
+
+ context 'when repository is empty' do
+ let(:project) { build(:project) }
+
+ it { is_expected.to be true }
+ end
+
+ context 'when instance-level service' do
+ Integration.available_services_types.each do |service_type|
+ let(:service) do
+ service_type.constantize.new(instance: true)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when group-level service' do
+ Integration.available_services_types.each do |service_type|
+ let(:service) do
+ service_type.constantize.new(group_id: group.id)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ describe '#test' do
+ let(:data) { 'test' }
+
+ context 'when repository is not empty' do
+ let(:project) { build(:project, :repository) }
+
+ it 'test runs execute' do
+ expect(service).to receive(:execute).with(data)
+
+ service.test(data)
+ end
+ end
+
+ context 'when repository is empty' do
+ let(:project) { build(:project) }
+
+ it 'test runs execute' do
+ expect(service).to receive(:execute).with(data)
+
+ service.test(data)
+ end
+ end
+ end
+ end
+
+ describe '#project_level?' do
+ it 'is true when service has a project' do
+ expect(build(:service, project: project)).to be_project_level
+ end
+
+ it 'is false when service has no project' do
+ expect(build(:service, project: nil)).not_to be_project_level
+ end
+ end
+
+ describe '#group_level?' do
+ it 'is true when service has a group' do
+ expect(build(:service, group: group)).to be_group_level
+ end
+
+ it 'is false when service has no group' do
+ expect(build(:service, group: nil)).not_to be_group_level
+ end
+ end
+
+ describe '#instance_level?' do
+ it 'is true when service has instance-level integration' do
+ expect(build(:service, :instance)).to be_instance_level
+ end
+
+ it 'is false when service does not have instance-level integration' do
+ expect(build(:service, instance: false)).not_to be_instance_level
+ end
+ end
+
+ describe '.find_or_initialize_non_project_specific_integration' do
+ let!(:service1) { create(:jira_service, project_id: nil, group_id: group.id) }
+ let!(:service2) { create(:jira_service) }
+
+ it 'returns the right service' do
+ expect(Integration.find_or_initialize_non_project_specific_integration('jira', group_id: group)).to eq(service1)
+ end
+
+ it 'does not create a new service' do
+ expect { Integration.find_or_initialize_non_project_specific_integration('redmine', group_id: group) }.not_to change { Integration.count }
+ end
+ end
+
+ describe '.find_or_initialize_all_non_project_specific' do
+ shared_examples 'service instances' do
+ it 'returns the available service instances' do
+ expect(Integration.find_or_initialize_all_non_project_specific(Integration.for_instance).map(&:to_param)).to match_array(Integration.available_services_names(include_project_specific: false))
+ end
+
+ it 'does not create service instances' do
+ expect { Integration.find_or_initialize_all_non_project_specific(Integration.for_instance) }.not_to change { Integration.count }
+ end
+ end
+
+ it_behaves_like 'service instances'
+
+ context 'with all existing instances' do
+ before do
+ Integration.insert_all(
+ Integration.available_services_types(include_project_specific: false).map { |type| { instance: true, type: type } }
+ )
+ end
+
+ it_behaves_like 'service instances'
+
+ context 'with a previous existing service (MockCiService) and a new service (Asana)' do
+ before do
+ Integration.insert({ type: 'MockCiService', instance: true })
+ Integration.delete_by(type: 'AsanaService', instance: true)
+ end
+
+ it_behaves_like 'service instances'
+ end
+ end
+
+ context 'with a few existing instances' do
+ before do
+ create(:jira_service, :instance)
+ end
+
+ it_behaves_like 'service instances'
+ end
+ end
+
+ describe 'template' do
+ shared_examples 'retrieves service templates' do
+ it 'returns the available service templates' do
+ expect(Integration.find_or_create_templates.pluck(:type)).to match_array(Integration.available_services_types(include_project_specific: false))
+ end
+ end
+
+ describe '.find_or_create_templates' do
+ it 'creates service templates' do
+ expect { Integration.find_or_create_templates }.to change { Integration.count }.from(0).to(Integration.available_services_names(include_project_specific: false).size)
+ end
+
+ it_behaves_like 'retrieves service templates'
+
+ context 'with all existing templates' do
+ before do
+ Integration.insert_all(
+ Integration.available_services_types(include_project_specific: false).map { |type| { template: true, type: type } }
+ )
+ end
+
+ it 'does not create service templates' do
+ expect { Integration.find_or_create_templates }.not_to change { Integration.count }
+ end
+
+ it_behaves_like 'retrieves service templates'
+
+ context 'with a previous existing service (Previous) and a new service (Asana)' do
+ before do
+ Integration.insert({ type: 'PreviousService', template: true })
+ Integration.delete_by(type: 'AsanaService', template: true)
+ end
+
+ it_behaves_like 'retrieves service templates'
+ end
+ end
+
+ context 'with a few existing templates' do
+ before do
+ create(:jira_service, :template)
+ end
+
+ it 'creates the rest of the service templates' do
+ expect { Integration.find_or_create_templates }.to change { Integration.count }.from(1).to(Integration.available_services_names(include_project_specific: false).size)
+ end
+
+ it_behaves_like 'retrieves service templates'
+ end
+ end
+
+ describe '.build_from_integration' do
+ context 'when integration is invalid' do
+ let(:integration) do
+ build(:prometheus_service, :template, active: true, properties: {})
+ .tap { |integration| integration.save!(validate: false) }
+ end
+
+ it 'sets service to inactive' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
+
+ expect(service).to be_valid
+ expect(service.active).to be false
+ end
+ end
+
+ context 'when integration is an instance-level integration' do
+ let(:integration) { create(:jira_service, :instance) }
+
+ it 'sets inherit_from_id from integration' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
+
+ expect(service.inherit_from_id).to eq(integration.id)
+ end
+ end
+
+ context 'when integration is a group-level integration' do
+ let(:integration) { create(:jira_service, group: group, project: nil) }
+
+ it 'sets inherit_from_id from integration' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
+
+ expect(service.inherit_from_id).to eq(integration.id)
+ end
+ end
+
+ describe 'build issue tracker from an integration' do
+ let(:url) { 'http://jira.example.com' }
+ let(:api_url) { 'http://api-jira.example.com' }
+ let(:username) { 'jira-username' }
+ let(:password) { 'jira-password' }
+ let(:data_params) do
+ {
+ url: url, api_url: api_url,
+ username: username, password: password
+ }
+ end
+
+ shared_examples 'service creation from an integration' do
+ it 'creates a correct service for a project integration' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
+
+ expect(service).to be_active
+ expect(service.url).to eq(url)
+ expect(service.api_url).to eq(api_url)
+ expect(service.username).to eq(username)
+ expect(service.password).to eq(password)
+ expect(service.template).to eq(false)
+ expect(service.instance).to eq(false)
+ expect(service.project).to eq(project)
+ expect(service.group).to eq(nil)
+ end
+
+ it 'creates a correct service for a group integration' do
+ service = described_class.build_from_integration(integration, group_id: group.id)
+
+ expect(service).to be_active
+ expect(service.url).to eq(url)
+ expect(service.api_url).to eq(api_url)
+ expect(service.username).to eq(username)
+ expect(service.password).to eq(password)
+ expect(service.template).to eq(false)
+ expect(service.instance).to eq(false)
+ expect(service.project).to eq(nil)
+ expect(service.group).to eq(group)
+ end
+ end
+
+ # this will be removed as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
+ context 'when data are stored in properties' do
+ let(:properties) { data_params }
+ let!(:integration) do
+ create(:jira_service, :without_properties_callback, template: true, properties: properties.merge(additional: 'something'))
+ end
+
+ it_behaves_like 'service creation from an integration'
+ end
+
+ context 'when data are stored in separated fields' do
+ let(:integration) do
+ create(:jira_service, :template, data_params.merge(properties: {}))
+ end
+
+ it_behaves_like 'service creation from an integration'
+ end
+
+ context 'when data are stored in both properties and separated fields' do
+ let(:properties) { data_params }
+ let(:integration) do
+ create(:jira_service, :without_properties_callback, active: true, template: true, properties: properties).tap do |integration|
+ create(:jira_tracker_data, data_params.merge(integration: integration))
+ end
+ end
+
+ it_behaves_like 'service creation from an integration'
+ end
+ end
+ end
+
+ describe "for pushover service" do
+ let!(:service_template) do
+ PushoverService.create!(
+ template: true,
+ properties: {
+ device: 'MyDevice',
+ sound: 'mic',
+ priority: 4,
+ api_key: '123456789'
+ })
+ end
+
+ describe "is prefilled for a project's pushover service" do
+ it "has all fields prefilled" do
+ service = project.find_or_initialize_service('pushover')
+
+ expect(service.template).to eq(false)
+ expect(service.device).to eq('MyDevice')
+ expect(service.sound).to eq('mic')
+ expect(service.priority).to eq(4)
+ expect(service.api_key).to eq('123456789')
+ end
+ end
+ end
+ end
+
+ describe '.default_integration' do
+ context 'with an instance-level service' do
+ let_it_be(:instance_service) { create(:jira_service, :instance) }
+
+ it 'returns the instance service' do
+ expect(described_class.default_integration('JiraService', project)).to eq(instance_service)
+ end
+
+ it 'returns nil for nonexistent service type' do
+ expect(described_class.default_integration('HipchatService', project)).to eq(nil)
+ end
+
+ context 'with a group service' do
+ let_it_be(:group_service) { create(:jira_service, group_id: group.id, project_id: nil) }
+
+ it 'returns the group service for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(group_service)
+ end
+
+ it 'returns the instance service for a group' do
+ expect(described_class.default_integration('JiraService', group)).to eq(instance_service)
+ end
+
+ context 'with a subgroup' do
+ let_it_be(:subgroup) { create(:group, parent: group) }
+
+ let!(:project) { create(:project, group: subgroup) }
+
+ it 'returns the closest group service for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(group_service)
+ end
+
+ it 'returns the closest group service for a subgroup' do
+ expect(described_class.default_integration('JiraService', subgroup)).to eq(group_service)
+ end
+
+ context 'having a service with custom settings' do
+ let!(:subgroup_service) { create(:jira_service, group_id: subgroup.id, project_id: nil) }
+
+ it 'returns the closest group service for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(subgroup_service)
+ end
+ end
+
+ context 'having a service inheriting settings' do
+ let!(:subgroup_service) { create(:jira_service, group_id: subgroup.id, project_id: nil, inherit_from_id: group_service.id) }
+
+ it 'returns the closest group service which does not inherit from its parent for a project' do
+ expect(described_class.default_integration('JiraService', project)).to eq(group_service)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '.create_from_active_default_integrations' do
+ context 'with an active service template' do
+ let_it_be(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
+
+ it 'creates a service from the template' do
+ described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+
+ expect(project.reload.integrations.size).to eq(1)
+ expect(project.reload.integrations.first.api_url).to eq(template_integration.api_url)
+ expect(project.reload.integrations.first.inherit_from_id).to be_nil
+ end
+
+ context 'with an active instance-level integration' do
+ let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+
+ it 'creates a service from the instance-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+
+ expect(project.reload.integrations.size).to eq(1)
+ expect(project.reload.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(project.reload.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ end
+
+ context 'passing a group' do
+ it 'creates a service from the instance-level integration' do
+ described_class.create_from_active_default_integrations(group, :group_id)
+
+ expect(group.reload.integrations.size).to eq(1)
+ expect(group.reload.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(group.reload.integrations.first.inherit_from_id).to eq(instance_integration.id)
+ end
+ end
+
+ context 'with an active group-level integration' do
+ let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+
+ it 'creates a service from the group-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+
+ expect(project.reload.integrations.size).to eq(1)
+ expect(project.reload.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(project.reload.integrations.first.inherit_from_id).to eq(group_integration.id)
+ end
+
+ context 'passing a group' do
+ let!(:subgroup) { create(:group, parent: group) }
+
+ it 'creates a service from the group-level integration' do
+ described_class.create_from_active_default_integrations(subgroup, :group_id)
+
+ expect(subgroup.reload.integrations.size).to eq(1)
+ expect(subgroup.reload.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(subgroup.reload.integrations.first.inherit_from_id).to eq(group_integration.id)
+ end
+ end
+
+ context 'with an active subgroup-level integration' do
+ let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup) { create(:group, parent: group) }
+ let(:project) { create(:project, group: subgroup) }
+
+ it 'creates a service from the subgroup-level integration' do
+ described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
+
+ expect(project.reload.integrations.size).to eq(1)
+ expect(project.reload.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(project.reload.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
+
+ context 'passing a group' do
+ let!(:sub_subgroup) { create(:group, parent: subgroup) }
+
+ context 'traversal queries' do
+ shared_examples 'correct ancestor order' do
+ it 'creates a service from the subgroup-level integration' do
+ described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
+
+ sub_subgroup.reload
+
+ expect(sub_subgroup.integrations.size).to eq(1)
+ expect(sub_subgroup.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(sub_subgroup.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
+
+ context 'having a service inheriting settings' do
+ let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, inherit_from_id: group_integration.id, api_url: 'https://prometheus.subgroup.com/') }
+
+ it 'creates a service from the group-level integration' do
+ described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
+
+ sub_subgroup.reload
+
+ expect(sub_subgroup.integrations.size).to eq(1)
+ expect(sub_subgroup.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(sub_subgroup.integrations.first.inherit_from_id).to eq(group_integration.id)
+ end
+ end
+ end
+
+ context 'recursive' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ include_examples 'correct ancestor order'
+ end
+
+ context 'linear' do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+
+ sub_subgroup.reload # make sure traversal_ids are reloaded
+ end
+
+ include_examples 'correct ancestor order'
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '.inherited_descendants_from_self_or_ancestors_from' do
+ let_it_be(:subgroup1) { create(:group, parent: group) }
+ let_it_be(:subgroup2) { create(:group, parent: group) }
+ let_it_be(:project1) { create(:project, group: subgroup1) }
+ let_it_be(:project2) { create(:project, group: subgroup2) }
+ let_it_be(:group_integration) { create(:prometheus_service, group: group, project: nil) }
+ let_it_be(:subgroup_integration1) { create(:prometheus_service, group: subgroup1, project: nil, inherit_from_id: group_integration.id) }
+ let_it_be(:subgroup_integration2) { create(:prometheus_service, group: subgroup2, project: nil) }
+ let_it_be(:project_integration1) { create(:prometheus_service, group: nil, project: project1, inherit_from_id: group_integration.id) }
+ let_it_be(:project_integration2) { create(:prometheus_service, group: nil, project: project2, inherit_from_id: subgroup_integration2.id) }
+
+ it 'returns the groups and projects inheriting from integration ancestors', :aggregate_failures do
+ expect(described_class.inherited_descendants_from_self_or_ancestors_from(group_integration)).to eq([subgroup_integration1, project_integration1])
+ expect(described_class.inherited_descendants_from_self_or_ancestors_from(subgroup_integration2)).to eq([project_integration2])
+ end
end
- describe '.with_custom_integration_for' do
- it 'returns projects with custom integrations' do
- # We use pagination to verify that the group is excluded from the query
- expect(Project.with_custom_integration_for(instance_integration, 0, 2)).to contain_exactly(project_2, project_3)
- expect(Project.with_custom_integration_for(instance_integration)).to contain_exactly(project_2, project_3)
+ describe '.service_name_to_model' do
+ it 'returns the model for the given service name', :aggregate_failures do
+ expect(described_class.service_name_to_model('asana')).to eq(Integrations::Asana)
+ # TODO We can remove this test when all models have been namespaced:
+ # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60968#note_570994955
+ expect(described_class.service_name_to_model('youtrack')).to eq(YoutrackService)
end
+
+ it 'raises an error if service name is invalid' do
+ expect { described_class.service_name_to_model('foo') }.to raise_exception(NameError, /uninitialized constant FooService/)
+ end
+ end
+
+ describe "{property}_changed?" do
+ let(:service) do
+ Integrations::Bamboo.create!(
+ project: project,
+ properties: {
+ bamboo_url: 'http://gitlab.com',
+ username: 'mic',
+ password: "password"
+ }
+ )
+ end
+
+ it "returns false when the property has not been assigned a new value" do
+ service.username = "key_changed"
+ expect(service.bamboo_url_changed?).to be_falsy
+ end
+
+ it "returns true when the property has been assigned a different value" do
+ service.bamboo_url = "http://example.com"
+ expect(service.bamboo_url_changed?).to be_truthy
+ end
+
+ it "returns true when the property has been assigned a different value twice" do
+ service.bamboo_url = "http://example.com"
+ service.bamboo_url = "http://example.com"
+ expect(service.bamboo_url_changed?).to be_truthy
+ end
+
+ it "returns false when the property has been re-assigned the same value" do
+ service.bamboo_url = 'http://gitlab.com'
+ expect(service.bamboo_url_changed?).to be_falsy
+ end
+
+ it "returns false when the property has been assigned a new value then saved" do
+ service.bamboo_url = 'http://example.com'
+ service.save!
+ expect(service.bamboo_url_changed?).to be_falsy
+ end
+ end
+
+ describe "{property}_touched?" do
+ let(:service) do
+ Integrations::Bamboo.create!(
+ project: project,
+ properties: {
+ bamboo_url: 'http://gitlab.com',
+ username: 'mic',
+ password: "password"
+ }
+ )
+ end
+
+ it "returns false when the property has not been assigned a new value" do
+ service.username = "key_changed"
+ expect(service.bamboo_url_touched?).to be_falsy
+ end
+
+ it "returns true when the property has been assigned a different value" do
+ service.bamboo_url = "http://example.com"
+ expect(service.bamboo_url_touched?).to be_truthy
+ end
+
+ it "returns true when the property has been assigned a different value twice" do
+ service.bamboo_url = "http://example.com"
+ service.bamboo_url = "http://example.com"
+ expect(service.bamboo_url_touched?).to be_truthy
+ end
+
+ it "returns true when the property has been re-assigned the same value" do
+ service.bamboo_url = 'http://gitlab.com'
+ expect(service.bamboo_url_touched?).to be_truthy
+ end
+
+ it "returns false when the property has been assigned a new value then saved" do
+ service.bamboo_url = 'http://example.com'
+ service.save!
+ expect(service.bamboo_url_touched?).to be_falsy
+ end
+ end
+
+ describe "{property}_was" do
+ let(:service) do
+ Integrations::Bamboo.create!(
+ project: project,
+ properties: {
+ bamboo_url: 'http://gitlab.com',
+ username: 'mic',
+ password: "password"
+ }
+ )
+ end
+
+ it "returns nil when the property has not been assigned a new value" do
+ service.username = "key_changed"
+ expect(service.bamboo_url_was).to be_nil
+ end
+
+ it "returns the previous value when the property has been assigned a different value" do
+ service.bamboo_url = "http://example.com"
+ expect(service.bamboo_url_was).to eq('http://gitlab.com')
+ end
+
+ it "returns initial value when the property has been re-assigned the same value" do
+ service.bamboo_url = 'http://gitlab.com'
+ expect(service.bamboo_url_was).to eq('http://gitlab.com')
+ end
+
+ it "returns initial value when the property has been assigned multiple values" do
+ service.bamboo_url = "http://example.com"
+ service.bamboo_url = "http://example2.com"
+ expect(service.bamboo_url_was).to eq('http://gitlab.com')
+ end
+
+ it "returns nil when the property has been assigned a new value then saved" do
+ service.bamboo_url = 'http://example.com'
+ service.save!
+ expect(service.bamboo_url_was).to be_nil
+ end
+ end
+
+ describe 'initialize service with no properties' do
+ let(:service) do
+ BugzillaService.create!(
+ project: project,
+ project_url: 'http://gitlab.example.com'
+ )
+ end
+
+ it 'does not raise error' do
+ expect { service }.not_to raise_error
+ end
+
+ it 'sets data correctly' do
+ expect(service.data_fields.project_url).to eq('http://gitlab.example.com')
+ end
+ end
+
+ describe '#api_field_names' do
+ let(:fake_service) do
+ Class.new(Integration) do
+ def fields
+ [
+ { name: 'token' },
+ { name: 'api_token' },
+ { name: 'key' },
+ { name: 'api_key' },
+ { name: 'password' },
+ { name: 'password_field' },
+ { name: 'safe_field' }
+ ]
+ end
+ end
+ end
+
+ let(:service) do
+ fake_service.new(properties: [
+ { token: 'token-value' },
+ { api_token: 'api_token-value' },
+ { key: 'key-value' },
+ { api_key: 'api_key-value' },
+ { password: 'password-value' },
+ { password_field: 'password_field-value' },
+ { safe_field: 'safe_field-value' }
+ ])
+ end
+
+ it 'filters out sensitive fields' do
+ expect(service.api_field_names).to eq(['safe_field'])
+ end
+ end
+
+ context 'logging' do
+ let(:service) { build(:service, project: project) }
+ let(:test_message) { "test message" }
+ let(:arguments) do
+ {
+ service_class: service.class.name,
+ project_path: project.full_path,
+ project_id: project.id,
+ message: test_message,
+ additional_argument: 'some argument'
+ }
+ end
+
+ it 'logs info messages using json logger' do
+ expect(Gitlab::JsonLogger).to receive(:info).with(arguments)
+
+ service.log_info(test_message, additional_argument: 'some argument')
+ end
+
+ it 'logs error messages using json logger' do
+ expect(Gitlab::JsonLogger).to receive(:error).with(arguments)
+
+ service.log_error(test_message, additional_argument: 'some argument')
+ end
+
+ context 'when project is nil' do
+ let(:project) { nil }
+ let(:arguments) do
+ {
+ service_class: service.class.name,
+ project_path: nil,
+ project_id: nil,
+ message: test_message,
+ additional_argument: 'some argument'
+ }
+ end
+
+ it 'logs info messages using json logger' do
+ expect(Gitlab::JsonLogger).to receive(:info).with(arguments)
+
+ service.log_info(test_message, additional_argument: 'some argument')
+ end
+ end
+ end
+
+ describe '#external_wiki?' do
+ where(:type, :active, :result) do
+ 'ExternalWikiService' | true | true
+ 'ExternalWikiService' | false | false
+ 'SlackService' | true | false
+ end
+
+ with_them do
+ it 'returns the right result' do
+ expect(build(:service, type: type, active: active).external_wiki?).to eq(result)
+ end
+ end
+ end
+
+ describe '.available_services_names' do
+ it 'calls the right methods' do
+ expect(described_class).to receive(:services_names).and_call_original
+ expect(described_class).to receive(:dev_services_names).and_call_original
+ expect(described_class).to receive(:project_specific_services_names).and_call_original
+
+ described_class.available_services_names
+ end
+
+ it 'does not call project_specific_services_names with include_project_specific false' do
+ expect(described_class).to receive(:services_names).and_call_original
+ expect(described_class).to receive(:dev_services_names).and_call_original
+ expect(described_class).not_to receive(:project_specific_services_names)
+
+ described_class.available_services_names(include_project_specific: false)
+ end
+
+ it 'does not call dev_services_names with include_dev false' do
+ expect(described_class).to receive(:services_names).and_call_original
+ expect(described_class).not_to receive(:dev_services_names)
+ expect(described_class).to receive(:project_specific_services_names).and_call_original
+
+ described_class.available_services_names(include_dev: false)
+ end
+
+ it { expect(described_class.available_services_names).to include('jenkins') }
end
- describe '.without_integration' do
- it 'returns projects without integration' do
- expect(Project.without_integration(instance_integration)).to contain_exactly(project_4)
+ describe '.project_specific_services_names' do
+ it do
+ expect(described_class.project_specific_services_names)
+ .to include(*described_class::PROJECT_SPECIFIC_INTEGRATION_NAMES)
end
end
end
diff --git a/spec/models/project_services/asana_service_spec.rb b/spec/models/integrations/asana_spec.rb
index 7a6fe4b1537..4473478910a 100644
--- a/spec/models/project_services/asana_service_spec.rb
+++ b/spec/models/integrations/asana_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AsanaService do
+RSpec.describe Integrations::Asana do
describe 'Associations' do
it { is_expected.to belong_to :project }
it { is_expected.to have_one :service_hook }
@@ -54,7 +54,7 @@ RSpec.describe AsanaService do
d1 = double('Asana::Resources::Task')
expect(d1).to receive(:add_comment).with(text: expected_message)
- expect(Asana::Resources::Task).to receive(:find_by_id).with(anything, gid).once.and_return(d1)
+ expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, gid).once.and_return(d1)
@asana.execute(data)
end
@@ -64,7 +64,7 @@ RSpec.describe AsanaService do
d1 = double('Asana::Resources::Task')
expect(d1).to receive(:add_comment)
expect(d1).to receive(:update).with(completed: true)
- expect(Asana::Resources::Task).to receive(:find_by_id).with(anything, '456789').once.and_return(d1)
+ expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '456789').once.and_return(d1)
@asana.execute(data)
end
@@ -74,7 +74,7 @@ RSpec.describe AsanaService do
d1 = double('Asana::Resources::Task')
expect(d1).to receive(:add_comment)
expect(d1).to receive(:update).with(completed: true)
- expect(Asana::Resources::Task).to receive(:find_by_id).with(anything, '42').once.and_return(d1)
+ expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '42').once.and_return(d1)
@asana.execute(data)
end
@@ -88,25 +88,25 @@ RSpec.describe AsanaService do
d1 = double('Asana::Resources::Task')
expect(d1).to receive(:add_comment)
expect(d1).to receive(:update).with(completed: true)
- expect(Asana::Resources::Task).to receive(:find_by_id).with(anything, '123').once.and_return(d1)
+ expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '123').once.and_return(d1)
d2 = double('Asana::Resources::Task')
expect(d2).to receive(:add_comment)
expect(d2).to receive(:update).with(completed: true)
- expect(Asana::Resources::Task).to receive(:find_by_id).with(anything, '456').once.and_return(d2)
+ expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '456').once.and_return(d2)
d3 = double('Asana::Resources::Task')
expect(d3).to receive(:add_comment)
- expect(Asana::Resources::Task).to receive(:find_by_id).with(anything, '789').once.and_return(d3)
+ expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '789').once.and_return(d3)
d4 = double('Asana::Resources::Task')
expect(d4).to receive(:add_comment)
- expect(Asana::Resources::Task).to receive(:find_by_id).with(anything, '42').once.and_return(d4)
+ expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '42').once.and_return(d4)
d5 = double('Asana::Resources::Task')
expect(d5).to receive(:add_comment)
expect(d5).to receive(:update).with(completed: true)
- expect(Asana::Resources::Task).to receive(:find_by_id).with(anything, '12').once.and_return(d5)
+ expect(::Asana::Resources::Task).to receive(:find_by_id).with(anything, '12').once.and_return(d5)
@asana.execute(data)
end
diff --git a/spec/models/project_services/assembla_service_spec.rb b/spec/models/integrations/assembla_spec.rb
index 207add6f090..bf9033416e9 100644
--- a/spec/models/project_services/assembla_service_spec.rb
+++ b/spec/models/integrations/assembla_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AssemblaService do
+RSpec.describe Integrations::Assembla do
include StubRequests
describe "Associations" do
diff --git a/spec/models/project_services/bamboo_service_spec.rb b/spec/models/integrations/bamboo_spec.rb
index 45afbcca96d..0ba1595bbd8 100644
--- a/spec/models/project_services/bamboo_service_spec.rb
+++ b/spec/models/integrations/bamboo_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BambooService, :use_clean_rails_memory_store_caching do
+RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
include ReactiveCachingHelpers
include StubRequests
diff --git a/spec/models/project_services/campfire_service_spec.rb b/spec/models/integrations/campfire_spec.rb
index ea3990b339b..b23edf03e8a 100644
--- a/spec/models/project_services/campfire_service_spec.rb
+++ b/spec/models/integrations/campfire_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe CampfireService do
+RSpec.describe Integrations::Campfire do
include StubRequests
describe 'Associations' do
diff --git a/spec/models/project_services/chat_message/alert_message_spec.rb b/spec/models/integrations/chat_message/alert_message_spec.rb
index 4d400990789..9866b2d9185 100644
--- a/spec/models/project_services/chat_message/alert_message_spec.rb
+++ b/spec/models/integrations/chat_message/alert_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ChatMessage::AlertMessage do
+RSpec.describe Integrations::ChatMessage::AlertMessage do
subject { described_class.new(args) }
let_it_be(:start_time) { Time.current }
diff --git a/spec/models/project_services/chat_message/base_message_spec.rb b/spec/models/integrations/chat_message/base_message_spec.rb
index a7ddf230758..eada5d1031d 100644
--- a/spec/models/project_services/chat_message/base_message_spec.rb
+++ b/spec/models/integrations/chat_message/base_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ChatMessage::BaseMessage do
+RSpec.describe Integrations::ChatMessage::BaseMessage do
let(:base_message) { described_class.new(args) }
let(:args) { { project_url: 'https://gitlab-domain.com' } }
diff --git a/spec/models/project_services/chat_message/deployment_message_spec.rb b/spec/models/integrations/chat_message/deployment_message_spec.rb
index 6bdf2120b36..ff255af11a3 100644
--- a/spec/models/project_services/chat_message/deployment_message_spec.rb
+++ b/spec/models/integrations/chat_message/deployment_message_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
-RSpec.describe ChatMessage::DeploymentMessage do
+RSpec.describe Integrations::ChatMessage::DeploymentMessage do
describe '#pretext' do
it 'returns a message with the data returned by the deployment data builder' do
environment = create(:environment, name: "myenvironment")
project = create(:project, :repository)
commit = project.commit('HEAD')
deployment = create(:deployment, status: :success, environment: environment, project: project, sha: commit.sha)
- data = Gitlab::DataBuilder::Deployment.build(deployment)
+ data = Gitlab::DataBuilder::Deployment.build(deployment, Time.current)
message = described_class.new(data)
@@ -118,7 +118,7 @@ RSpec.describe ChatMessage::DeploymentMessage do
job_url = Gitlab::Routing.url_helpers.project_job_url(project, ci_build)
commit_url = Gitlab::UrlBuilder.build(deployment.commit)
user_url = Gitlab::Routing.url_helpers.user_url(user)
- data = Gitlab::DataBuilder::Deployment.build(deployment)
+ data = Gitlab::DataBuilder::Deployment.build(deployment, Time.current)
message = described_class.new(data)
diff --git a/spec/models/project_services/chat_message/issue_message_spec.rb b/spec/models/integrations/chat_message/issue_message_spec.rb
index 4701ef3e49e..31b80ad3169 100644
--- a/spec/models/project_services/chat_message/issue_message_spec.rb
+++ b/spec/models/integrations/chat_message/issue_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ChatMessage::IssueMessage do
+RSpec.describe Integrations::ChatMessage::IssueMessage do
subject { described_class.new(args) }
let(:args) do
diff --git a/spec/models/project_services/chat_message/merge_message_spec.rb b/spec/models/integrations/chat_message/merge_message_spec.rb
index 71cfe3ff45b..ed1ad6837e2 100644
--- a/spec/models/project_services/chat_message/merge_message_spec.rb
+++ b/spec/models/integrations/chat_message/merge_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ChatMessage::MergeMessage do
+RSpec.describe Integrations::ChatMessage::MergeMessage do
subject { described_class.new(args) }
let(:args) do
diff --git a/spec/models/project_services/chat_message/note_message_spec.rb b/spec/models/integrations/chat_message/note_message_spec.rb
index 6a741365d55..668c0da26ae 100644
--- a/spec/models/project_services/chat_message/note_message_spec.rb
+++ b/spec/models/integrations/chat_message/note_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ChatMessage::NoteMessage do
+RSpec.describe Integrations::ChatMessage::NoteMessage do
subject { described_class.new(args) }
let(:color) { '#345' }
diff --git a/spec/models/project_services/chat_message/pipeline_message_spec.rb b/spec/models/integrations/chat_message/pipeline_message_spec.rb
index 4eb2f57315b..a80d13d7f5d 100644
--- a/spec/models/project_services/chat_message/pipeline_message_spec.rb
+++ b/spec/models/integrations/chat_message/pipeline_message_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe ChatMessage::PipelineMessage do
+RSpec.describe Integrations::ChatMessage::PipelineMessage do
subject { described_class.new(args) }
let(:args) do
diff --git a/spec/models/project_services/chat_message/push_message_spec.rb b/spec/models/integrations/chat_message/push_message_spec.rb
index e3ba4c2aefe..167487449c3 100644
--- a/spec/models/project_services/chat_message/push_message_spec.rb
+++ b/spec/models/integrations/chat_message/push_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ChatMessage::PushMessage do
+RSpec.describe Integrations::ChatMessage::PushMessage do
subject { described_class.new(args) }
let(:args) do
diff --git a/spec/models/project_services/chat_message/wiki_page_message_spec.rb b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
index 04c9e5934be..e8672a0f9c8 100644
--- a/spec/models/project_services/chat_message/wiki_page_message_spec.rb
+++ b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ChatMessage::WikiPageMessage do
+RSpec.describe Integrations::ChatMessage::WikiPageMessage do
subject { described_class.new(args) }
let(:args) do
diff --git a/spec/models/project_services/confluence_service_spec.rb b/spec/models/integrations/confluence_spec.rb
index 6c7ba2c9f32..c217573f48d 100644
--- a/spec/models/project_services/confluence_service_spec.rb
+++ b/spec/models/integrations/confluence_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ConfluenceService do
+RSpec.describe Integrations::Confluence do
describe 'Associations' do
it { is_expected.to belong_to :project }
it { is_expected.to have_one :service_hook }
diff --git a/spec/models/project_services/datadog_service_spec.rb b/spec/models/integrations/datadog_spec.rb
index d15ea1f351b..165b21840e0 100644
--- a/spec/models/project_services/datadog_service_spec.rb
+++ b/spec/models/integrations/datadog_spec.rb
@@ -3,7 +3,7 @@ require 'securerandom'
require 'spec_helper'
-RSpec.describe DatadogService, :model do
+RSpec.describe Integrations::Datadog do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:build) { create(:ci_build, project: project) }
diff --git a/spec/models/project_services/emails_on_push_service_spec.rb b/spec/models/integrations/emails_on_push_spec.rb
index c5927503eec..ca060f4155e 100644
--- a/spec/models/project_services/emails_on_push_service_spec.rb
+++ b/spec/models/integrations/emails_on_push_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe EmailsOnPushService do
+RSpec.describe Integrations::EmailsOnPush do
let_it_be(:project) { create_default(:project).freeze }
describe 'Validations' do
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index 981245627af..390d1552c16 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -100,7 +100,8 @@ RSpec.describe InternalId do
context 'when executed outside of transaction' do
it 'increments counter with in_transaction: "false"' do
- expect(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+
expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
.with(operation: :generate, usage: 'issues', in_transaction: 'false').and_call_original
@@ -158,7 +159,8 @@ RSpec.describe InternalId do
let(:value) { 2 }
it 'increments counter with in_transaction: "false"' do
- expect(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+
expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
.with(operation: :reset, usage: 'issues', in_transaction: 'false').and_call_original
@@ -228,7 +230,8 @@ RSpec.describe InternalId do
context 'when executed outside of transaction' do
it 'increments counter with in_transaction: "false"' do
- expect(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+
expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
.with(operation: :track_greatest, usage: 'issues', in_transaction: 'false').and_call_original
diff --git a/spec/models/issue/metrics_spec.rb b/spec/models/issue/metrics_spec.rb
index 18b0a46c928..49c891c20da 100644
--- a/spec/models/issue/metrics_spec.rb
+++ b/spec/models/issue/metrics_spec.rb
@@ -80,5 +80,20 @@ RSpec.describe Issue::Metrics do
expect(metrics.first_added_to_board_at).to be_like_time(time)
end
end
+
+ describe "#record!" do
+ it "does not cause an N+1 query" do
+ label = create(:label)
+ subject.update!(label_ids: [label.id])
+
+ control_count = ActiveRecord::QueryRecorder.new { Issue::Metrics.find_by(issue: subject).record! }.count
+
+ additional_labels = create_list(:label, 4)
+
+ subject.update!(label_ids: additional_labels.map(&:id))
+
+ expect { Issue::Metrics.find_by(issue: subject).record! }.not_to exceed_query_limit(control_count)
+ end
+ end
end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 23caf3647c3..884c476932e 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -242,16 +242,36 @@ RSpec.describe Issue do
expect { issue.close }.to change { issue.state_id }.from(open_state).to(closed_state)
end
+
+ context 'when an argument is provided' do
+ context 'and the argument is a User' do
+ it 'changes closed_by to the given user' do
+ expect { issue.close(user) }.to change { issue.closed_by }.from(nil).to(user)
+ end
+ end
+
+ context 'and the argument is not a User' do
+ it 'does not change closed_by' do
+ expect { issue.close("test") }.not_to change { issue.closed_by }
+ end
+ end
+ end
+
+ context 'when an argument is not provided' do
+ it 'does not change closed_by' do
+ expect { issue.close }.not_to change { issue.closed_by }
+ end
+ end
end
describe '#reopen' do
let(:issue) { create(:issue, project: reusable_project, state: 'closed', closed_at: Time.current, closed_by: user) }
- it 'sets closed_at to nil when an issue is reopend' do
+ it 'sets closed_at to nil when an issue is reopened' do
expect { issue.reopen }.to change { issue.closed_at }.to(nil)
end
- it 'sets closed_by to nil when an issue is reopend' do
+ it 'sets closed_by to nil when an issue is reopened' do
expect { issue.reopen }.to change { issue.closed_by }.from(user).to(nil)
end
@@ -297,7 +317,7 @@ RSpec.describe Issue do
end
context 'when cross-project in different namespace' do
- let(:another_namespace) { build(:namespace, path: 'another-namespace') }
+ let(:another_namespace) { build(:namespace, id: non_existing_record_id, path: 'another-namespace') }
let(:another_namespace_project) { build(:project, path: 'another-project', namespace: another_namespace) }
it 'returns complete path to the issue' do
@@ -1121,11 +1141,37 @@ RSpec.describe Issue do
end
context "relative positioning" do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue1) { create(:issue, project: project, relative_position: nil) }
+ let_it_be(:issue2) { create(:issue, project: project, relative_position: nil) }
+
it_behaves_like "a class that supports relative positioning" do
let_it_be(:project) { reusable_project }
let(:factory) { :issue }
let(:default_params) { { project: project } }
end
+
+ it 'is not blocked for repositioning by default' do
+ expect(issue1.blocked_for_repositioning?).to eq(false)
+ end
+
+ context 'when block_issue_repositioning flag is enabled for group' do
+ before do
+ stub_feature_flags(block_issue_repositioning: group)
+ end
+
+ it 'is blocked for repositioning' do
+ expect(issue1.blocked_for_repositioning?).to eq(true)
+ end
+
+ it 'does not move issues with null position' do
+ payload = [issue1, issue2]
+
+ expect { described_class.move_nulls_to_end(payload) }.to raise_error(Gitlab::RelativePositioning::IssuePositioningDisabled)
+ expect { described_class.move_nulls_to_start(payload) }.to raise_error(Gitlab::RelativePositioning::IssuePositioningDisabled)
+ end
+ end
end
it_behaves_like 'versioned description'
diff --git a/spec/models/label_link_spec.rb b/spec/models/label_link_spec.rb
index a95481f3083..e5753b34e72 100644
--- a/spec/models/label_link_spec.rb
+++ b/spec/models/label_link_spec.rb
@@ -12,4 +12,28 @@ RSpec.describe LabelLink do
let(:valid_items_for_bulk_insertion) { build_list(:label_link, 10) }
let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
end
+
+ describe 'scopes' do
+ describe '.for_target' do
+ it 'returns the label links for a given target' do
+ label_link = create(:label_link, target: create(:merge_request))
+
+ create(:label_link, target: create(:issue))
+
+ expect(described_class.for_target(label_link.target_id, label_link.target_type))
+ .to contain_exactly(label_link)
+ end
+ end
+
+ describe '.with_remove_on_close_labels' do
+ it 'responds with label_links that can be removed when an issue is closed' do
+ issue = create(:issue)
+ removable_label = create(:label, project: issue.project, remove_on_close: true)
+ create(:label_link, target: issue)
+ removable_issue_label_link = create(:label_link, label: removable_label, target: issue)
+
+ expect(described_class.with_remove_on_close_labels).to contain_exactly(removable_issue_label_link)
+ end
+ end
+ end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 5f3a67b52ba..247be7654d8 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -143,16 +143,10 @@ RSpec.describe Member do
@blocked_maintainer = project.members.find_by(user_id: @blocked_maintainer_user.id, access_level: Gitlab::Access::MAINTAINER)
@blocked_developer = project.members.find_by(user_id: @blocked_developer_user.id, access_level: Gitlab::Access::DEVELOPER)
- @invited_member = create(:project_member, :developer,
- project: project,
- invite_token: '1234',
- invite_email: 'toto1@example.com')
+ @invited_member = create(:project_member, :invited, :developer, project: project)
accepted_invite_user = build(:user, state: :active)
- @accepted_invite_member = create(:project_member, :developer,
- project: project,
- invite_token: '1234',
- invite_email: 'toto2@example.com')
+ @accepted_invite_member = create(:project_member, :invited, :developer, project: project)
.tap { |u| u.accept_invite!(accepted_invite_user) }
requested_user = create(:user).tap { |u| project.request_access(u) }
@@ -325,12 +319,12 @@ RSpec.describe Member do
describe '.search_invite_email' do
it 'returns only members with the matching e-mail' do
- create(:group_member, :invited)
+ invited_member = create(:group_member, :invited, invite_email: 'invited@example.com')
- invited = described_class.search_invite_email(@invited_member.invite_email)
+ invited = described_class.search_invite_email(invited_member.invite_email)
expect(invited.count).to eq(1)
- expect(invited.first).to eq(@invited_member)
+ expect(invited.first).to eq(invited_member)
expect(described_class.search_invite_email('bad-email@example.com').count).to eq(0)
end
@@ -414,6 +408,44 @@ RSpec.describe Member do
it { is_expected.not_to include @member_with_minimal_access }
end
+ describe '.connected_to_user' do
+ subject { described_class.connected_to_user.to_a }
+
+ it { is_expected.to include @owner }
+ it { is_expected.to include @maintainer }
+ it { is_expected.to include @accepted_invite_member }
+ it { is_expected.to include @accepted_request_member }
+ it { is_expected.to include @blocked_maintainer }
+ it { is_expected.to include @blocked_developer }
+ it { is_expected.to include @requested_member }
+ it { is_expected.to include @member_with_minimal_access }
+ it { is_expected.not_to include @invited_member }
+ end
+
+ describe '.authorizable' do
+ subject { described_class.authorizable.to_a }
+
+ it 'includes the member who has an associated user record, '\
+ 'but who also has an invite_token' do
+ member = create(:project_member,
+ :developer,
+ :invited,
+ user: create(:user))
+
+ expect(subject).to include(member)
+ end
+
+ it { is_expected.to include @owner }
+ it { is_expected.to include @maintainer }
+ it { is_expected.to include @accepted_invite_member }
+ it { is_expected.to include @accepted_request_member }
+ it { is_expected.to include @blocked_maintainer }
+ it { is_expected.to include @blocked_developer }
+ it { is_expected.not_to include @invited_member }
+ it { is_expected.not_to include @requested_member }
+ it { is_expected.not_to include @member_with_minimal_access }
+ end
+
describe '.distinct_on_user_with_max_access_level' do
let_it_be(:other_group) { create(:group) }
let_it_be(:member_with_lower_access_level) { create(:group_member, :developer, group: other_group, user: @owner_user) }
@@ -884,7 +916,7 @@ RSpec.describe Member do
user = create(:user)
member = project.add_reporter(user)
- member.destroy
+ member.destroy!
expect(user.authorized_projects).not_to include(project)
end
@@ -901,7 +933,7 @@ RSpec.describe Member do
with_them do
describe 'create member' do
- let!(:source) { create(source_type) }
+ let!(:source) { create(source_type) } # rubocop:disable Rails/SaveBang
subject { create(member_type, :guest, user: user, source: source) }
@@ -913,20 +945,20 @@ RSpec.describe Member do
describe 'update member' do
context 'when access level was changed' do
- subject { member.update(access_level: Gitlab::Access::GUEST) }
+ subject { member.update!(access_level: Gitlab::Access::GUEST) }
include_examples 'update highest role with exclusive lease'
end
context 'when access level was not changed' do
- subject { member.update(notification_level: NotificationSetting.levels[:disabled]) }
+ subject { member.update!(notification_level: NotificationSetting.levels[:disabled]) }
include_examples 'does not update the highest role'
end
end
describe 'destroy member' do
- subject { member.destroy }
+ subject { member.destroy! }
include_examples 'update highest role with exclusive lease'
end
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 908bb9f91a3..3a2db5d8516 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -85,11 +85,11 @@ RSpec.describe GroupMember do
expect(user).to receive(:update_two_factor_requirement)
- group_member.save
+ group_member.save!
expect(user).to receive(:update_two_factor_requirement)
- group_member.destroy
+ group_member.destroy!
end
end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index ce3e86f964d..fa77e319c2c 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -49,13 +49,13 @@ RSpec.describe ProjectMember do
it "creates an expired event when left due to expiry" do
expired = create(:project_member, project: project, expires_at: 1.day.from_now)
- travel_to(2.days.from_now) { expired.destroy }
+ travel_to(2.days.from_now) { expired.destroy! }
expect(Event.recent.first).to be_expired_action
end
it "creates a left event when left due to leave" do
- maintainer.destroy
+ maintainer.destroy!
expect(Event.recent.first).to be_left_action
end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 5b11a7bf079..4075eb96fc2 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe MergeRequestDiff do
expect(subject.valid?).to be false
expect(subject.errors.count).to eq 3
- expect(subject.errors).to all(include('is not a valid SHA'))
+ expect(subject.errors.full_messages).to all(include('is not a valid SHA'))
end
it 'does not validate uniqueness by default' do
@@ -61,7 +61,7 @@ RSpec.describe MergeRequestDiff do
let_it_be(:merge_head) do
MergeRequests::MergeToRefService
- .new(merge_request.project, merge_request.author)
+ .new(project: merge_request.project, current_user: merge_request.author)
.execute(merge_request)
merge_request.create_merge_head_diff
@@ -485,27 +485,6 @@ RSpec.describe MergeRequestDiff do
'files/whitespace'
])
end
-
- context 'when sort_diffs feature flag is disabled' do
- before do
- stub_feature_flags(sort_diffs: false)
- end
-
- it 'does not sort diff files directory first' do
- expect(diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options).diff_file_paths).to eq([
- '.DS_Store',
- '.gitattributes',
- '.gitignore',
- '.gitmodules',
- 'CHANGELOG',
- 'README',
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/.DS_Store'
- ])
- end
- end
end
end
@@ -581,37 +560,6 @@ RSpec.describe MergeRequestDiff do
'gitlab-shell'
])
end
-
- context 'when sort_diffs feature flag is disabled' do
- before do
- stub_feature_flags(sort_diffs: false)
- end
-
- it 'does not sort diff files directory first' do
- expect(diff_with_commits.diffs(diff_options).diff_file_paths).to eq([
- '.DS_Store',
- '.gitattributes',
- '.gitignore',
- '.gitmodules',
- 'CHANGELOG',
- 'README',
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/.DS_Store',
- 'files/images/wm.svg',
- 'files/js/commit.coffee',
- 'files/lfs/lfs_object.iso',
- 'files/ruby/popen.rb',
- 'files/ruby/regex.rb',
- 'files/whitespace',
- 'foo/bar/.gitkeep',
- 'gitlab-grack',
- 'gitlab-shell',
- 'with space/README.md'
- ])
- end
- end
end
end
@@ -718,40 +666,6 @@ RSpec.describe MergeRequestDiff do
])
end
- context 'when sort_diffs feature flag is disabled' do
- before do
- stub_feature_flags(sort_diffs: false)
- end
-
- it 'persists diff files unsorted by directory first' do
- mr_diff = create(:merge_request).merge_request_diff
- diff_files_paths = mr_diff.merge_request_diff_files.map { |file| file.new_path.presence || file.old_path }
-
- expect(diff_files_paths).to eq([
- '.DS_Store',
- '.gitattributes',
- '.gitignore',
- '.gitmodules',
- 'CHANGELOG',
- 'README',
- 'bar/branch-test.txt',
- 'custom-highlighting/test.gitlab-custom',
- 'encoding/iso8859.txt',
- 'files/.DS_Store',
- 'files/images/wm.svg',
- 'files/js/commit.coffee',
- 'files/lfs/lfs_object.iso',
- 'files/ruby/popen.rb',
- 'files/ruby/regex.rb',
- 'files/whitespace',
- 'foo/bar/.gitkeep',
- 'gitlab-grack',
- 'gitlab-shell',
- 'with space/README.md'
- ])
- end
- end
-
it 'expands collapsed diffs before saving' do
mr_diff = create(:merge_request, source_branch: 'expand-collapse-lines', target_branch: 'master').merge_request_diff
diff_file = mr_diff.merge_request_diff_files.find_by(new_path: 'expand-collapse/file-5.txt')
@@ -1166,5 +1080,9 @@ RSpec.describe MergeRequestDiff do
it 'loads nothing if the merge request has no diff record' do
expect(described_class.latest_diff_for_merge_requests(merge_request_3)).to be_empty
end
+
+ it 'loads nothing if nil was passed as merge_request' do
+ expect(described_class.latest_diff_for_merge_requests(nil)).to be_empty
+ end
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 4b46c98117f..a77ca1e9a51 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2255,7 +2255,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
describe '#find_codequality_mr_diff_reports' do
let(:project) { create(:project, :repository) }
- let(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports, source_project: project) }
+ let(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports, source_project: project, id: 123456789) }
let(:pipeline) { merge_request.head_pipeline }
subject(:mr_diff_report) { merge_request.find_codequality_mr_diff_reports }
@@ -2628,7 +2628,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'when the MR has been merged' do
before do
MergeRequests::MergeService
- .new(subject.target_project, subject.author, { sha: subject.diff_head_sha })
+ .new(project: subject.target_project, current_user: subject.author, params: { sha: subject.diff_head_sha })
.execute(subject)
end
@@ -3876,6 +3876,20 @@ RSpec.describe MergeRequest, factory_default: :keep do
subject { merge_request.use_merge_base_pipeline_for_comparison?(service_class) }
+ context 'when service class is Ci::CompareMetricsReportsService' do
+ let(:service_class) { 'Ci::CompareMetricsReportsService' }
+
+ it { is_expected.to be_truthy }
+
+ context 'with the metrics report flag disabled' do
+ before do
+ stub_feature_flags(merge_base_pipeline_for_metrics_comparison: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
context 'when service class is Ci::CompareCodequalityReportsService' do
let(:service_class) { 'Ci::CompareCodequalityReportsService' }
@@ -4806,7 +4820,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'when merge_ref_sha is not present' do
let!(:result) do
MergeRequests::MergeToRefService
- .new(merge_request.project, merge_request.author)
+ .new(project: merge_request.project, current_user: merge_request.author)
.execute(merge_request)
end
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index e611484f5ee..20dee288052 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -293,21 +293,7 @@ RSpec.describe Milestone do
end
end
- context 'when `optimized_timebox_queries` feature flag is enabled' do
- before do
- stub_feature_flags(optimized_timebox_queries: true)
- end
-
- it_behaves_like '#for_projects_and_groups'
- end
-
- context 'when `optimized_timebox_queries` feature flag is disabled' do
- before do
- stub_feature_flags(optimized_timebox_queries: false)
- end
-
- it_behaves_like '#for_projects_and_groups'
- end
+ it_behaves_like '#for_projects_and_groups'
describe '.upcoming_ids' do
let(:group_1) { create(:group) }
diff --git a/spec/models/namespace/package_setting_spec.rb b/spec/models/namespace/package_setting_spec.rb
index 097cef8ef3b..4308c8c06bc 100644
--- a/spec/models/namespace/package_setting_spec.rb
+++ b/spec/models/namespace/package_setting_spec.rb
@@ -14,9 +14,12 @@ RSpec.describe Namespace::PackageSetting do
it { is_expected.to allow_value(true).for(:maven_duplicates_allowed) }
it { is_expected.to allow_value(false).for(:maven_duplicates_allowed) }
it { is_expected.not_to allow_value(nil).for(:maven_duplicates_allowed) }
+ it { is_expected.to allow_value(true).for(:generic_duplicates_allowed) }
+ it { is_expected.to allow_value(false).for(:generic_duplicates_allowed) }
+ it { is_expected.not_to allow_value(nil).for(:generic_duplicates_allowed) }
end
- describe '#maven_duplicate_exception_regex' do
+ describe 'regex values' do
let_it_be(:package_settings) { create(:namespace_package_setting) }
subject { package_settings }
@@ -24,12 +27,14 @@ RSpec.describe Namespace::PackageSetting do
valid_regexps = %w[SNAPSHOT .* v.+ v10.1.* (?:v.+|SNAPSHOT|TEMP)]
invalid_regexps = ['[', '(?:v.+|SNAPSHOT|TEMP']
- valid_regexps.each do |valid_regexp|
- it { is_expected.to allow_value(valid_regexp).for(:maven_duplicate_exception_regex) }
- end
+ [:maven_duplicate_exception_regex, :generic_duplicate_exception_regex].each do |attribute|
+ valid_regexps.each do |valid_regexp|
+ it { is_expected.to allow_value(valid_regexp).for(attribute) }
+ end
- invalid_regexps.each do |invalid_regexp|
- it { is_expected.not_to allow_value(invalid_regexp).for(:maven_duplicate_exception_regex) }
+ invalid_regexps.each do |invalid_regexp|
+ it { is_expected.not_to allow_value(invalid_regexp).for(attribute) }
+ end
end
end
end
@@ -41,8 +46,8 @@ RSpec.describe Namespace::PackageSetting do
context 'package types with package_settings' do
# As more package types gain settings they will be added to this list
- [:maven_package].each do |format|
- let_it_be(:package) { create(format) } # rubocop:disable Rails/SaveBang
+ [:maven_package, :generic_package].each do |format|
+ let_it_be(:package) { create(format, name: 'foo', version: 'beta') } # rubocop:disable Rails/SaveBang
let_it_be(:package_type) { package.package_type }
let_it_be(:package_setting) { package.project.namespace.package_settings }
@@ -50,6 +55,8 @@ RSpec.describe Namespace::PackageSetting do
true | '' | true
false | '' | false
false | '.*' | true
+ false | 'fo.*' | true
+ false | 'be.*' | true
end
with_them do
@@ -68,7 +75,7 @@ RSpec.describe Namespace::PackageSetting do
end
context 'package types without package_settings' do
- [:npm_package, :conan_package, :nuget_package, :pypi_package, :composer_package, :generic_package, :golang_package, :debian_package].each do |format|
+ [:npm_package, :conan_package, :nuget_package, :pypi_package, :composer_package, :golang_package, :debian_package].each do |format|
let_it_be(:package) { create(format) } # rubocop:disable Rails/SaveBang
let_it_be(:package_setting) { package.project.namespace.package_settings }
diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb
index b166d541171..2cd66f42458 100644
--- a/spec/models/namespace/traversal_hierarchy_spec.rb
+++ b/spec/models/namespace/traversal_hierarchy_spec.rb
@@ -43,16 +43,6 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
end
end
- shared_examples 'locked update query' do
- it 'locks query with FOR UPDATE' do
- qr = ActiveRecord::QueryRecorder.new do
- subject
- end
- expect(qr.count).to eq 1
- expect(qr.log.first).to match /FOR UPDATE/
- end
- end
-
describe '#incorrect_traversal_ids' do
let!(:hierarchy) { described_class.new(root) }
@@ -63,12 +53,6 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
end
it { is_expected.to match_array Namespace.all }
-
- context 'when lock is true' do
- subject { hierarchy.incorrect_traversal_ids(lock: true).load }
-
- it_behaves_like 'locked update query'
- end
end
describe '#sync_traversal_ids!' do
@@ -79,14 +63,18 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
it { expect(hierarchy.incorrect_traversal_ids).to be_empty }
it_behaves_like 'hierarchy with traversal_ids'
- it_behaves_like 'locked update query'
+ it_behaves_like 'locked row' do
+ let(:recorded_queries) { ActiveRecord::QueryRecorder.new }
+ let(:row) { root }
- context 'when deadlocked' do
before do
- connection_double = double(:connection)
+ recorded_queries.record { subject }
+ end
+ end
- allow(Namespace).to receive(:connection).and_return(connection_double)
- allow(connection_double).to receive(:exec_query) { raise ActiveRecord::Deadlocked.new }
+ context 'when deadlocked' do
+ before do
+ allow(root).to receive(:lock!) { raise ActiveRecord::Deadlocked }
end
it { expect { subject }.to raise_error(ActiveRecord::Deadlocked) }
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 96ecc9836d4..56afe49e15f 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -141,7 +141,7 @@ RSpec.describe Namespace do
end
it 'allows updating other attributes for existing record' do
- namespace = build(:namespace, path: 'j')
+ namespace = build(:namespace, path: 'j', owner: create(:user))
namespace.save(validate: false)
namespace.reload
@@ -212,6 +212,54 @@ RSpec.describe Namespace do
end
end
+ describe "after_commit :expire_child_caches" do
+ let(:namespace) { create(:group) }
+
+ it "expires the child caches when updated" do
+ child_1 = create(:group, parent: namespace, updated_at: 1.week.ago)
+ child_2 = create(:group, parent: namespace, updated_at: 1.day.ago)
+ grandchild = create(:group, parent: child_1, updated_at: 1.week.ago)
+ project_1 = create(:project, namespace: namespace, updated_at: 2.days.ago)
+ project_2 = create(:project, namespace: child_1, updated_at: 3.days.ago)
+ project_3 = create(:project, namespace: grandchild, updated_at: 4.years.ago)
+
+ freeze_time do
+ namespace.update!(path: "foo")
+
+ [namespace, child_1, child_2, grandchild, project_1, project_2, project_3].each do |record|
+ expect(record.reload.updated_at).to eq(Time.zone.now)
+ end
+ end
+ end
+
+ it "expires on name changes" do
+ expect(namespace).to receive(:expire_child_caches).once
+
+ namespace.update!(name: "Foo")
+ end
+
+ it "expires on path changes" do
+ expect(namespace).to receive(:expire_child_caches).once
+
+ namespace.update!(path: "bar")
+ end
+
+ it "expires on parent changes" do
+ expect(namespace).to receive(:expire_child_caches).once
+
+ namespace.update!(parent: create(:group))
+ end
+
+ it "doesn't expire on other field changes" do
+ expect(namespace).not_to receive(:expire_child_caches)
+
+ namespace.update!(
+ description: "Foo bar",
+ max_artifacts_size: 10
+ )
+ end
+ end
+
describe '#visibility_level_field' do
it { expect(namespace.visibility_level_field).to eq(:visibility_level) }
end
@@ -224,6 +272,41 @@ RSpec.describe Namespace do
it { expect(namespace.human_name).to eq(namespace.owner_name) }
end
+ describe '#any_project_has_container_registry_tags?' do
+ subject { namespace.any_project_has_container_registry_tags? }
+
+ let!(:project_without_registry) { create(:project, namespace: namespace) }
+
+ context 'without tags' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'with tags' do
+ before do
+ repositories = create_list(:container_repository, 3)
+ create(:project, namespace: namespace, container_repositories: repositories)
+
+ stub_container_registry_config(enabled: true)
+ end
+
+ it 'finds tags' do
+ stub_container_registry_tags(repository: :any, tags: ['tag'])
+
+ is_expected.to be_truthy
+ end
+
+ it 'does not cause N+1 queries when fetching registries' do
+ stub_container_registry_tags(repository: :any, tags: [])
+ control_count = ActiveRecord::QueryRecorder.new { namespace.any_project_has_container_registry_tags? }.count
+
+ other_repositories = create_list(:container_repository, 2)
+ create(:project, namespace: namespace, container_repositories: other_repositories)
+
+ expect { namespace.any_project_has_container_registry_tags? }.not_to exceed_query_limit(control_count + 1)
+ end
+ end
+ end
+
describe '#first_project_with_container_registry_tags' do
let(:container_repository) { create(:container_repository) }
let!(:project) { create(:project, namespace: namespace, container_repositories: [container_repository]) }
@@ -880,7 +963,7 @@ RSpec.describe Namespace do
end
describe '#use_traversal_ids?' do
- let_it_be(:namespace) { build(:namespace) }
+ let_it_be(:namespace, reload: true) { create(:namespace) }
subject { namespace.use_traversal_ids? }
@@ -901,30 +984,6 @@ RSpec.describe Namespace do
end
end
- context 'when use_traversal_ids feature flag is true' do
- it_behaves_like 'namespace traversal'
-
- describe '#self_and_descendants' do
- subject { namespace.self_and_descendants }
-
- it { expect(subject.to_sql).to include 'traversal_ids @>' }
- end
- end
-
- context 'when use_traversal_ids feature flag is false' do
- before do
- stub_feature_flags(use_traversal_ids: false)
- end
-
- it_behaves_like 'namespace traversal'
-
- describe '#self_and_descendants' do
- subject { namespace.self_and_descendants }
-
- it { expect(subject.to_sql).not_to include 'traversal_ids @>' }
- end
- end
-
describe '#users_with_descendants' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 992b2246f01..4eabc266b40 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -1384,6 +1384,16 @@ RSpec.describe Note do
expect(notes.second.id).to eq(note2.id)
end
end
+
+ describe '.with_suggestions' do
+ it 'returns only notes with suggestions' do
+ note_with_suggestion = create(:note, suggestions: [create(:suggestion)])
+ note_without_suggestion = create(:note)
+
+ expect(described_class.with_suggestions).to include(note_with_suggestion)
+ expect(described_class.with_suggestions).not_to include(note_without_suggestion)
+ end
+ end
end
describe '#noteable_assignee_or_author?' do
diff --git a/spec/models/packages/dependency_spec.rb b/spec/models/packages/dependency_spec.rb
index 4437cad46cd..1575dec98c9 100644
--- a/spec/models/packages/dependency_spec.rb
+++ b/spec/models/packages/dependency_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Packages::Dependency, type: :model do
let_it_be(:package_dependency1) { create(:packages_dependency, name: 'foo', version_pattern: '~1.0.0') }
let_it_be(:package_dependency2) { create(:packages_dependency, name: 'bar', version_pattern: '~2.5.0') }
let_it_be(:expected_ids) { [package_dependency1.id, package_dependency2.id] }
+
let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2) }
let(:chunk_size) { 50 }
let(:rows_limit) { 50 }
@@ -40,6 +41,7 @@ RSpec.describe Packages::Dependency, type: :model do
context 'with a name bigger than column size' do
let_it_be(:big_name) { 'a' * (Packages::Dependency::MAX_STRING_LENGTH + 1) }
+
let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2).merge(big_name => '~1.0.0') }
it { is_expected.to match_array(expected_ids) }
@@ -47,6 +49,7 @@ RSpec.describe Packages::Dependency, type: :model do
context 'with a version pattern bigger than column size' do
let_it_be(:big_version_pattern) { 'a' * (Packages::Dependency::MAX_STRING_LENGTH + 1) }
+
let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2).merge('test' => big_version_pattern) }
it { is_expected.to match_array(expected_ids) }
@@ -65,6 +68,7 @@ RSpec.describe Packages::Dependency, type: :model do
let_it_be(:package_dependency5) { create(:packages_dependency, name: 'foo5', version_pattern: '~1.5.5') }
let_it_be(:package_dependency6) { create(:packages_dependency, name: 'foo6', version_pattern: '~1.5.6') }
let_it_be(:package_dependency7) { create(:packages_dependency, name: 'foo7', version_pattern: '~1.5.7') }
+
let(:expected_ids) { [package_dependency1.id, package_dependency2.id, package_dependency3.id, package_dependency4.id, package_dependency5.id, package_dependency6.id, package_dependency7.id] }
let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2, package_dependency3, package_dependency4, package_dependency5, package_dependency6, package_dependency7) }
@@ -86,6 +90,7 @@ RSpec.describe Packages::Dependency, type: :model do
let_it_be(:package_dependency1) { create(:packages_dependency, name: 'foo', version_pattern: '~1.0.0') }
let_it_be(:package_dependency2) { create(:packages_dependency, name: 'bar', version_pattern: '~2.5.0') }
let_it_be(:expected_array) { [package_dependency1, package_dependency2] }
+
let(:names_and_version_patterns) { build_names_and_version_patterns(package_dependency1, package_dependency2) }
subject { Packages::Dependency.for_package_names_and_version_patterns(names_and_version_patterns) }
diff --git a/spec/models/packages/go/module_version_spec.rb b/spec/models/packages/go/module_version_spec.rb
index 7fa416d8537..cace2160878 100644
--- a/spec/models/packages/go/module_version_spec.rb
+++ b/spec/models/packages/go/module_version_spec.rb
@@ -32,16 +32,19 @@ RSpec.describe Packages::Go::ModuleVersion, type: :model do
describe '#name' do
context 'with ref and name specified' do
let_it_be(:version) { create :go_module_version, mod: mod, name: 'foobar', commit: project.repository.head_commit, ref: project.repository.find_tag('v1.0.0') }
+
it('returns that name') { expect(version.name).to eq('foobar') }
end
context 'with ref specified and name unspecified' do
let_it_be(:version) { create :go_module_version, mod: mod, commit: project.repository.head_commit, ref: project.repository.find_tag('v1.0.0') }
+
it('returns the name of the ref') { expect(version.name).to eq('v1.0.0') }
end
context 'with ref and name unspecified' do
let_it_be(:version) { create :go_module_version, mod: mod, commit: project.repository.head_commit }
+
it('returns nil') { expect(version.name).to eq(nil) }
end
end
@@ -49,11 +52,13 @@ RSpec.describe Packages::Go::ModuleVersion, type: :model do
describe '#gomod' do
context 'with go.mod missing' do
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.0' }
+
it('returns nil') { expect(version.gomod).to eq(nil) }
end
context 'with go.mod present' do
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.1' }
+
it('returns the contents of go.mod') { expect(version.gomod).to eq("module #{mod.name}\n") }
end
end
@@ -62,6 +67,7 @@ RSpec.describe Packages::Go::ModuleVersion, type: :model do
context 'with a root module' do
context 'with an empty module path' do
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.2' }
+
it_behaves_like '#files', 'all the files', 'README.md', 'go.mod', 'a.go', 'pkg/b.go'
end
end
@@ -69,12 +75,14 @@ RSpec.describe Packages::Go::ModuleVersion, type: :model do
context 'with a root module and a submodule' do
context 'with an empty module path' do
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.3' }
+
it_behaves_like '#files', 'files excluding the submodule', 'README.md', 'go.mod', 'a.go', 'pkg/b.go'
end
context 'with the submodule\'s path' do
let_it_be(:mod) { create :go_module, project: project, path: 'mod' }
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.3' }
+
it_behaves_like '#files', 'the submodule\'s files', 'mod/go.mod', 'mod/a.go'
end
end
@@ -84,6 +92,7 @@ RSpec.describe Packages::Go::ModuleVersion, type: :model do
context 'with a root module' do
context 'with an empty module path' do
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.2' }
+
it_behaves_like '#archive', 'all the files', 'README.md', 'go.mod', 'a.go', 'pkg/b.go'
end
end
@@ -91,12 +100,14 @@ RSpec.describe Packages::Go::ModuleVersion, type: :model do
context 'with a root module and a submodule' do
context 'with an empty module path' do
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.3' }
+
it_behaves_like '#archive', 'files excluding the submodule', 'README.md', 'go.mod', 'a.go', 'pkg/b.go'
end
context 'with the submodule\'s path' do
let_it_be(:mod) { create :go_module, project: project, path: 'mod' }
let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.3' }
+
it_behaves_like '#archive', 'the submodule\'s files', 'go.mod', 'a.go'
end
end
diff --git a/spec/models/packages/helm/file_metadatum_spec.rb b/spec/models/packages/helm/file_metadatum_spec.rb
new file mode 100644
index 00000000000..c7c17b157e4
--- /dev/null
+++ b/spec/models/packages/helm/file_metadatum_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Helm::FileMetadatum, type: :model do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:package_file) }
+ end
+
+ describe 'validations' do
+ describe '#package_file' do
+ it { is_expected.to validate_presence_of(:package_file) }
+ end
+
+ describe '#valid_helm_package_type' do
+ let_it_be_with_reload(:helm_package_file) { create(:helm_package_file) }
+
+ let(:helm_file_metadatum) { helm_package_file.helm_file_metadatum }
+
+ before do
+ helm_package_file.package.package_type = :pypi
+ end
+
+ it 'validates that the package type is helm' do
+ expect(helm_file_metadatum).not_to be_valid
+ expect(helm_file_metadatum.errors.to_a).to contain_exactly('Package file Package type must be Helm')
+ end
+ end
+
+ describe '#channel' do
+ it 'validates #channel', :aggregate_failures do
+ is_expected.to validate_presence_of(:channel)
+
+ is_expected.to allow_value('a' * 63).for(:channel)
+ is_expected.not_to allow_value('a' * 64).for(:channel)
+
+ is_expected.to allow_value('release').for(:channel)
+ is_expected.to allow_value('my-repo').for(:channel)
+ is_expected.to allow_value('my-repo42').for(:channel)
+
+ # Do not allow empty
+ is_expected.not_to allow_value('').for(:channel)
+
+ # Do not allow Unicode
+ is_expected.not_to allow_value('hé').for(:channel)
+ end
+ end
+
+ describe '#metadata' do
+ it 'validates #metadata', :aggregate_failures do
+ is_expected.not_to validate_presence_of(:metadata)
+ is_expected.to allow_value({ 'name': 'foo', 'version': 'v1.0', 'apiVersion': 'v2' }).for(:metadata)
+ is_expected.not_to allow_value({}).for(:metadata)
+ is_expected.not_to allow_value({ 'version': 'v1.0', 'apiVersion': 'v2' }).for(:metadata)
+ is_expected.not_to allow_value({ 'name': 'foo', 'apiVersion': 'v2' }).for(:metadata)
+ is_expected.not_to allow_value({ 'name': 'foo', 'version': 'v1.0' }).for(:metadata)
+ end
+ end
+ end
+end
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index 9cf998a0639..f8ddd59ddc8 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -2,12 +2,18 @@
require 'spec_helper'
RSpec.describe Packages::PackageFile, type: :model do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package_file1) { create(:package_file, :xml, file_name: 'FooBar') }
+ let_it_be(:package_file2) { create(:package_file, :xml, file_name: 'ThisIsATest') }
+ let_it_be(:debian_package) { create(:debian_package, project: project) }
+
describe 'relationships' do
it { is_expected.to belong_to(:package) }
it { is_expected.to have_one(:conan_file_metadatum) }
it { is_expected.to have_many(:package_file_build_infos).inverse_of(:package_file) }
it { is_expected.to have_many(:pipelines).through(:package_file_build_infos) }
it { is_expected.to have_one(:debian_file_metadatum).inverse_of(:package_file).class_name('Packages::Debian::FileMetadatum') }
+ it { is_expected.to have_one(:helm_file_metadatum).inverse_of(:package_file).class_name('Packages::Helm::FileMetadatum') }
end
describe 'validations' do
@@ -15,9 +21,6 @@ RSpec.describe Packages::PackageFile, type: :model do
end
context 'with package filenames' do
- let_it_be(:package_file1) { create(:package_file, :xml, file_name: 'FooBar') }
- let_it_be(:package_file2) { create(:package_file, :xml, file_name: 'ThisIsATest') }
-
describe '.with_file_name' do
let(:filename) { 'FooBar' }
@@ -51,6 +54,13 @@ RSpec.describe Packages::PackageFile, type: :model do
end
end
+ describe '.for_package_ids' do
+ it 'returns matching package files' do
+ expect(described_class.for_package_ids([package_file1.package.id, package_file2.package.id]))
+ .to contain_exactly(package_file1, package_file2)
+ end
+ end
+
describe '.with_conan_package_reference' do
let_it_be(:non_matching_package_file) { create(:package_file, :nuget) }
let_it_be(:metadatum) { create(:conan_file_metadatum, :package_file) }
@@ -63,7 +73,6 @@ RSpec.describe Packages::PackageFile, type: :model do
end
describe '.for_rubygem_with_file_name' do
- let_it_be(:project) { create(:project) }
let_it_be(:non_ruby_package) { create(:nuget_package, project: project, package_type: :nuget) }
let_it_be(:ruby_package) { create(:rubygems_package, project: project, package_type: :rubygems) }
let_it_be(:file_name) { 'other.gem' }
@@ -77,6 +86,36 @@ RSpec.describe Packages::PackageFile, type: :model do
end
end
+ context 'Debian scopes' do
+ let_it_be(:debian_changes) { debian_package.package_files.last }
+ let_it_be(:debian_deb) { create(:debian_package_file, package: debian_package) }
+ let_it_be(:debian_udeb) { create(:debian_package_file, :udeb, package: debian_package) }
+
+ let_it_be(:debian_contrib) do
+ create(:debian_package_file, package: debian_package).tap do |pf|
+ pf.debian_file_metadatum.update!(component: 'contrib')
+ end
+ end
+
+ let_it_be(:debian_mipsel) do
+ create(:debian_package_file, package: debian_package).tap do |pf|
+ pf.debian_file_metadatum.update!(architecture: 'mipsel')
+ end
+ end
+
+ describe '.with_debian_file_type' do
+ it { expect(described_class.with_debian_file_type(:changes)).to contain_exactly(debian_changes) }
+ end
+
+ describe '.with_debian_component_name' do
+ it { expect(described_class.with_debian_component_name('contrib')).to contain_exactly(debian_contrib) }
+ end
+
+ describe '.with_debian_architecture_name' do
+ it { expect(described_class.with_debian_architecture_name('mipsel')).to contain_exactly(debian_mipsel) }
+ end
+ end
+
describe '#update_file_store callback' do
let_it_be(:package_file) { build(:package_file, :nuget, size: nil) }
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index cf52749a186..52ef61e3d44 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -47,6 +47,7 @@ RSpec.describe Packages::Package, type: :model do
describe '.sort_by_attribute' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group, name: 'project A') }
+
let!(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
let!(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
@@ -113,18 +114,6 @@ RSpec.describe Packages::Package, type: :model do
expect(subject).to match_array([package1, package2])
end
-
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- end
-
- it 'returns package1 and package2' do
- expect(projects).to receive(:any?).and_call_original
-
- expect(subject).to match_array([package1, package2])
- end
- end
end
describe 'validations' do
@@ -184,6 +173,15 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.not_to allow_value('!!().for(:name)().for(:name)').for(:name) }
end
+ context 'helm package' do
+ subject { build(:helm_package) }
+
+ it { is_expected.to allow_value('prometheus').for(:name) }
+ it { is_expected.to allow_value('rook-ceph').for(:name) }
+ it { is_expected.not_to allow_value('a+b').for(:name) }
+ it { is_expected.not_to allow_value('Hé').for(:name) }
+ end
+
context 'nuget package' do
subject { build_stubbed(:nuget_package) }
@@ -210,6 +208,19 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.not_to allow_value("@scope%2e%2e%fpackage").for(:name) }
it { is_expected.not_to allow_value("@scope/sub/package").for(:name) }
end
+
+ context 'terraform module package' do
+ subject { build_stubbed(:terraform_module_package) }
+
+ it { is_expected.to allow_value('my-module/my-system').for(:name) }
+ it { is_expected.to allow_value('my/module').for(:name) }
+ it { is_expected.not_to allow_value('my-module').for(:name) }
+ it { is_expected.not_to allow_value('My-Module').for(:name) }
+ it { is_expected.not_to allow_value('my_module').for(:name) }
+ it { is_expected.not_to allow_value('my.module').for(:name) }
+ it { is_expected.not_to allow_value('../../../my-module').for(:name) }
+ it { is_expected.not_to allow_value('%2e%2e%2fmy-module').for(:name) }
+ end
end
describe '#version' do
@@ -387,7 +398,17 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.not_to allow_value(nil).for(:version) }
end
+ context 'helm package' do
+ subject { build_stubbed(:helm_package) }
+
+ it { is_expected.not_to allow_value(nil).for(:version) }
+ it { is_expected.not_to allow_value('').for(:version) }
+ it { is_expected.to allow_value('v1.2.3').for(:version) }
+ it { is_expected.not_to allow_value('1.2.3').for(:version) }
+ end
+
it_behaves_like 'validating version to be SemVer compliant for', :npm_package
+ it_behaves_like 'validating version to be SemVer compliant for', :terraform_module_package
context 'nuget package' do
it_behaves_like 'validating version to be SemVer compliant for', :nuget_package
@@ -485,6 +506,26 @@ RSpec.describe Packages::Package, type: :model do
end
end
+ describe '.with_package_type' do
+ let!(:package1) { create(:terraform_module_package) }
+ let!(:package2) { create(:npm_package) }
+ let(:package_type) { :terraform_module }
+
+ subject { described_class.with_package_type(package_type) }
+
+ it { is_expected.to eq([package1]) }
+ end
+
+ describe '.without_package_type' do
+ let!(:package1) { create(:npm_package) }
+ let!(:package2) { create(:terraform_module_package) }
+ let(:package_type) { :terraform_module }
+
+ subject { described_class.without_package_type(package_type) }
+
+ it { is_expected.to eq([package1]) }
+ end
+
context 'version scopes' do
let!(:package1) { create(:npm_package, version: '1.0.0') }
let!(:package2) { create(:npm_package, version: '1.0.1') }
@@ -565,22 +606,6 @@ RSpec.describe Packages::Package, type: :model do
end
end
- describe '.processed' do
- let!(:package1) { create(:nuget_package) }
- let!(:package2) { create(:npm_package) }
- let!(:package3) { create(:nuget_package) }
-
- subject { described_class.processed }
-
- it { is_expected.to match_array([package1, package2, package3]) }
-
- context 'with temporary packages' do
- let!(:package1) { create(:nuget_package, name: Packages::Nuget::TEMPORARY_PACKAGE_NAME) }
-
- it { is_expected.to match_array([package2, package3]) }
- end
- end
-
describe '.limit_recent' do
let!(:package1) { create(:nuget_package) }
let!(:package2) { create(:nuget_package) }
@@ -653,27 +678,37 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.to match_array([pypi_package]) }
end
- describe '.displayable' do
+ context 'status scopes' do
let_it_be(:hidden_package) { create(:maven_package, :hidden) }
let_it_be(:processing_package) { create(:maven_package, :processing) }
let_it_be(:error_package) { create(:maven_package, :error) }
- subject { described_class.displayable }
+ describe '.displayable' do
+ subject { described_class.displayable }
- it 'does not include non-displayable packages', :aggregate_failures do
- is_expected.to include(error_package)
- is_expected.not_to include(hidden_package)
- is_expected.not_to include(processing_package)
+ it 'does not include non-displayable packages', :aggregate_failures do
+ is_expected.to include(error_package)
+ is_expected.not_to include(hidden_package)
+ is_expected.not_to include(processing_package)
+ end
end
- end
- describe '.with_status' do
- let_it_be(:hidden_package) { create(:maven_package, :hidden) }
+ describe '.installable' do
+ subject { described_class.installable }
- subject { described_class.with_status(:hidden) }
+ it 'does not include non-installable packages', :aggregate_failures do
+ is_expected.not_to include(error_package)
+ is_expected.not_to include(hidden_package)
+ is_expected.not_to include(processing_package)
+ end
+ end
+
+ describe '.with_status' do
+ subject { described_class.with_status(:hidden) }
- it 'returns packages with specified status' do
- is_expected.to match_array([hidden_package])
+ it 'returns packages with specified status' do
+ is_expected.to match_array([hidden_package])
+ end
end
end
end
@@ -896,6 +931,7 @@ RSpec.describe Packages::Package, type: :model do
let_it_be(:package_name) { 'composer-package-name' }
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json } ) }
+
let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
before do
diff --git a/spec/models/packages/tag_spec.rb b/spec/models/packages/tag_spec.rb
index 18ec99c3d51..842ba7ad518 100644
--- a/spec/models/packages/tag_spec.rb
+++ b/spec/models/packages/tag_spec.rb
@@ -41,6 +41,7 @@ RSpec.describe Packages::Tag, type: :model do
let_it_be(:tag1) { create(:packages_tag, package: package, name: 'tag1') }
let_it_be(:tag2) { create(:packages_tag, package: package, name: 'tag2') }
let_it_be(:tag3) { create(:packages_tag, package: package, name: 'tag3') }
+
let(:name) { 'tag1' }
subject { described_class.with_name(name) }
diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb
index f2659771a49..735f2225c21 100644
--- a/spec/models/pages/lookup_path_spec.rb
+++ b/spec/models/pages/lookup_path_spec.rb
@@ -47,20 +47,13 @@ RSpec.describe Pages::LookupPath do
describe '#source' do
let(:source) { lookup_path.source }
- shared_examples 'uses disk storage' do
- it 'uses disk storage', :aggregate_failures do
- expect(source[:type]).to eq('file')
- expect(source[:path]).to eq(project.full_path + "/public/")
- end
+ it 'uses disk storage', :aggregate_failures do
+ expect(source[:type]).to eq('file')
+ expect(source[:path]).to eq(project.full_path + "/public/")
end
- include_examples 'uses disk storage'
-
- it 'return nil when legacy storage is disabled and there is no deployment' do
- stub_feature_flags(pages_serve_from_legacy_storage: false)
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(described_class::LegacyStorageDisabledError, project_id: project.id)
- .and_call_original
+ it 'returns nil when local storage is disabled and there is no deployment' do
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
expect(source).to eq(nil)
end
@@ -107,14 +100,6 @@ RSpec.describe Pages::LookupPath do
)
end
end
-
- context 'when pages_serve_with_zip_file_protocol feature flag is disabled' do
- before do
- stub_feature_flags(pages_serve_with_zip_file_protocol: false)
- end
-
- include_examples 'uses disk storage'
- end
end
context 'when deployment were created during migration' do
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index 4259c8b708b..b8c723b3847 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -210,6 +210,7 @@ RSpec.describe PlanLimits do
ci_active_jobs
storage_size_limit
daily_invites
+ web_hook_calls
] + disabled_max_artifact_size_columns
end
diff --git a/spec/models/project_auto_devops_spec.rb b/spec/models/project_auto_devops_spec.rb
index 8313879114f..d5f0b66b210 100644
--- a/spec/models/project_auto_devops_spec.rb
+++ b/spec/models/project_auto_devops_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe ProjectAutoDevops do
it 'does not create a gitlab deploy token' do
expect do
- auto_devops.save
+ auto_devops.save!
end.not_to change { DeployToken.count }
end
end
@@ -80,7 +80,7 @@ RSpec.describe ProjectAutoDevops do
it 'creates a gitlab deploy token' do
expect do
- auto_devops.save
+ auto_devops.save!
end.to change { DeployToken.count }.by(1)
end
end
@@ -90,7 +90,7 @@ RSpec.describe ProjectAutoDevops do
it 'creates a gitlab deploy token' do
expect do
- auto_devops.save
+ auto_devops.save!
end.to change { DeployToken.count }.by(1)
end
end
@@ -101,7 +101,7 @@ RSpec.describe ProjectAutoDevops do
it 'creates a deploy token' do
expect do
- auto_devops.save
+ auto_devops.save!
end.to change { DeployToken.count }.by(1)
end
end
@@ -114,7 +114,7 @@ RSpec.describe ProjectAutoDevops do
allow(Gitlab::CurrentSettings).to receive(:auto_devops_enabled?).and_return(true)
expect do
- auto_devops.save
+ auto_devops.save!
end.to change { DeployToken.count }.by(1)
end
end
@@ -125,7 +125,7 @@ RSpec.describe ProjectAutoDevops do
it 'does not create a deploy token' do
expect do
- auto_devops.save
+ auto_devops.save!
end.not_to change { DeployToken.count }
end
end
@@ -137,7 +137,7 @@ RSpec.describe ProjectAutoDevops do
it 'does not create a deploy token' do
expect do
- auto_devops.save
+ auto_devops.save!
end.not_to change { DeployToken.count }
end
end
@@ -149,7 +149,7 @@ RSpec.describe ProjectAutoDevops do
it 'does not create a deploy token' do
expect do
- auto_devops.save
+ auto_devops.save!
end.not_to change { DeployToken.count }
end
end
diff --git a/spec/models/project_feature_spec.rb b/spec/models/project_feature_spec.rb
index a56018f0fee..3fd7e57a5db 100644
--- a/spec/models/project_feature_spec.rb
+++ b/spec/models/project_feature_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe ProjectFeature do
context 'repository related features' do
before do
- project.project_feature.update(
+ project.project_feature.update!(
merge_requests_access_level: ProjectFeature::DISABLED,
builds_access_level: ProjectFeature::DISABLED,
repository_access_level: ProjectFeature::PRIVATE
diff --git a/spec/models/project_services/chat_notification_service_spec.rb b/spec/models/project_services/chat_notification_service_spec.rb
index 476d99364b6..62f97873a06 100644
--- a/spec/models/project_services/chat_notification_service_spec.rb
+++ b/spec/models/project_services/chat_notification_service_spec.rb
@@ -11,6 +11,10 @@ RSpec.describe ChatNotificationService do
it { is_expected.to validate_presence_of :webhook }
end
+ describe 'validations' do
+ it { is_expected.to validate_inclusion_of(:labels_to_be_notified_behavior).in_array(%w[match_any match_all]).allow_blank }
+ end
+
describe '#can_test?' do
context 'with empty repository' do
it 'returns true' do
@@ -32,8 +36,9 @@ RSpec.describe ChatNotificationService do
describe '#execute' do
subject(:chat_service) { described_class.new }
+ let_it_be(:project) { create(:project, :repository) }
+
let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
let(:webhook_url) { 'https://example.gitlab.com/' }
let(:data) { Gitlab::DataBuilder::Push.build_sample(subject.project, user) }
@@ -76,9 +81,12 @@ RSpec.describe ChatNotificationService do
end
context 'when the data object has a label' do
- let(:label) { create(:label, project: project, name: 'Bug')}
- let(:issue) { create(:labeled_issue, project: project, labels: [label]) }
- let(:note) { create(:note, noteable: issue, project: project)}
+ let_it_be(:label) { create(:label, name: 'Bug') }
+ let_it_be(:label_2) { create(:label, name: 'Community contribution') }
+ let_it_be(:label_3) { create(:label, name: 'Backend') }
+ let_it_be(:issue) { create(:labeled_issue, project: project, labels: [label, label_2, label_3]) }
+ let_it_be(:note) { create(:note, noteable: issue, project: project) }
+
let(:data) { Gitlab::DataBuilder::Note.build(note, user) }
it 'notifies the chat service' do
@@ -87,23 +95,139 @@ RSpec.describe ChatNotificationService do
chat_service.execute(data)
end
- context 'and the chat_service has a label filter that does not matches the label' do
- subject(:chat_service) { described_class.new(labels_to_be_notified: '~some random label') }
+ shared_examples 'notifies the chat service' do
+ specify do
+ expect(chat_service).to receive(:notify).with(any_args)
+
+ chat_service.execute(data)
+ end
+ end
- it 'does not notify the chat service' do
- expect(chat_service).not_to receive(:notify)
+ shared_examples 'does not notify the chat service' do
+ specify do
+ expect(chat_service).not_to receive(:notify).with(any_args)
chat_service.execute(data)
end
end
- context 'and the chat_service has a label filter that matches the label' do
- subject(:chat_service) { described_class.new(labels_to_be_notified: '~Backend, ~Bug') }
+ context 'when labels_to_be_notified_behavior is not defined' do
+ subject(:chat_service) { described_class.new(labels_to_be_notified: label_filter) }
- it 'notifies the chat service' do
- expect(chat_service).to receive(:notify).with(any_args)
+ context 'no matching labels' do
+ let(:label_filter) { '~some random label' }
- chat_service.execute(data)
+ it_behaves_like 'does not notify the chat service'
+ end
+
+ context 'only one label matches' do
+ let(:label_filter) { '~some random label, ~Bug' }
+
+ it_behaves_like 'notifies the chat service'
+ end
+ end
+
+ context 'when labels_to_be_notified_behavior is blank' do
+ subject(:chat_service) { described_class.new(labels_to_be_notified: label_filter, labels_to_be_notified_behavior: '') }
+
+ context 'no matching labels' do
+ let(:label_filter) { '~some random label' }
+
+ it_behaves_like 'does not notify the chat service'
+ end
+
+ context 'only one label matches' do
+ let(:label_filter) { '~some random label, ~Bug' }
+
+ it_behaves_like 'notifies the chat service'
+ end
+ end
+
+ context 'when labels_to_be_notified_behavior is match_any' do
+ subject(:chat_service) do
+ described_class.new(
+ labels_to_be_notified: label_filter,
+ labels_to_be_notified_behavior: 'match_any'
+ )
+ end
+
+ context 'no label filter' do
+ let(:label_filter) { nil }
+
+ it_behaves_like 'notifies the chat service'
+ end
+
+ context 'no matching labels' do
+ let(:label_filter) { '~some random label' }
+
+ it_behaves_like 'does not notify the chat service'
+ end
+
+ context 'only one label matches' do
+ let(:label_filter) { '~some random label, ~Bug' }
+
+ it_behaves_like 'notifies the chat service'
+ end
+ end
+
+ context 'when labels_to_be_notified_behavior is match_all' do
+ subject(:chat_service) do
+ described_class.new(
+ labels_to_be_notified: label_filter,
+ labels_to_be_notified_behavior: 'match_all'
+ )
+ end
+
+ context 'no label filter' do
+ let(:label_filter) { nil }
+
+ it_behaves_like 'notifies the chat service'
+ end
+
+ context 'no matching labels' do
+ let(:label_filter) { '~some random label' }
+
+ it_behaves_like 'does not notify the chat service'
+ end
+
+ context 'only one label matches' do
+ let(:label_filter) { '~some random label, ~Bug' }
+
+ it_behaves_like 'does not notify the chat service'
+ end
+
+ context 'labels match exactly' do
+ let(:label_filter) { '~Bug, ~Backend, ~Community contribution' }
+
+ it_behaves_like 'notifies the chat service'
+ end
+
+ context 'labels match but the object has more' do
+ let(:label_filter) { '~Bug, ~Backend' }
+
+ it_behaves_like 'notifies the chat service'
+ end
+
+ context 'labels are distributed across multiple objects' do
+ let(:label_filter) { '~Bug, ~Backend' }
+ let(:data) do
+ Gitlab::DataBuilder::Note.build(note, user).merge({
+ issue: {
+ labels: [
+ { title: 'Bug' }
+ ]
+ },
+ merge_request: {
+ labels: [
+ {
+ title: 'Backend'
+ }
+ ]
+ }
+ })
+ end
+
+ it_behaves_like 'does not notify the chat service'
end
end
end
diff --git a/spec/models/project_services/data_fields_spec.rb b/spec/models/project_services/data_fields_spec.rb
index 9a3042f9f8d..d3e6afe4978 100644
--- a/spec/models/project_services/data_fields_spec.rb
+++ b/spec/models/project_services/data_fields_spec.rb
@@ -138,8 +138,8 @@ RSpec.describe DataFields do
context 'when data are stored in both properties and data_fields' do
let(:service) do
- create(:jira_service, :without_properties_callback, active: false, properties: properties).tap do |service|
- create(:jira_tracker_data, properties.merge(service: service))
+ create(:jira_service, :without_properties_callback, active: false, properties: properties).tap do |integration|
+ create(:jira_tracker_data, properties.merge(integration: integration))
end
end
diff --git a/spec/models/project_services/hipchat_service_spec.rb b/spec/models/project_services/hipchat_service_spec.rb
index 82a4cde752b..42368c31ba0 100644
--- a/spec/models/project_services/hipchat_service_spec.rb
+++ b/spec/models/project_services/hipchat_service_spec.rb
@@ -2,91 +2,35 @@
require 'spec_helper'
+# HipchatService is partially removed and it will be removed completely
+# after the deletion of all the database records.
+# https://gitlab.com/gitlab-org/gitlab/-/issues/27954
RSpec.describe HipchatService do
- describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
+ let_it_be(:project) { create(:project) }
- describe 'Validations' do
- context 'when service is active' do
- before do
- subject.active = true
- end
+ subject(:service) { described_class.new(project: project) }
- it { is_expected.to validate_presence_of(:token) }
- end
+ it { is_expected.to be_valid }
- context 'when service is inactive' do
- before do
- subject.active = false
- end
+ describe '#to_param' do
+ subject { service.to_param }
- it { is_expected.not_to validate_presence_of(:token) }
- end
+ it { is_expected.to eq('hipchat') }
end
- describe "Execute" do
- let(:hipchat) { described_class.new }
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:api_url) { 'https://hipchat.example.com/v2/room/123456/notification?auth_token=verySecret' }
- let(:project_name) { project.full_name.gsub(/\s/, '') }
- let(:token) { 'verySecret' }
- let(:server_url) { 'https://hipchat.example.com'}
- let(:push_sample_data) do
- Gitlab::DataBuilder::Push.build_sample(project, user)
- end
-
- before do
- allow(hipchat).to receive_messages(
- project_id: project.id,
- project: project,
- room: 123456,
- server: server_url,
- token: token
- )
- WebMock.stub_request(:post, api_url)
- end
-
- it 'does nothing' do
- expect { hipchat.execute(push_sample_data) }.not_to raise_error
- end
+ describe '#supported_events' do
+ subject { service.supported_events }
- describe "#message_options" do
- it "is set to the defaults" do
- expect(hipchat.__send__(:message_options)).to eq({ notify: false, color: 'yellow' })
- end
-
- it "sets notify to true" do
- allow(hipchat).to receive(:notify).and_return('1')
-
- expect(hipchat.__send__(:message_options)).to eq({ notify: true, color: 'yellow' })
- end
-
- it "sets the color" do
- allow(hipchat).to receive(:color).and_return('red')
-
- expect(hipchat.__send__(:message_options)).to eq({ notify: false, color: 'red' })
- end
-
- context 'with a successful build' do
- it 'uses the green color' do
- data = { object_kind: 'pipeline',
- object_attributes: { status: 'success' } }
-
- expect(hipchat.__send__(:message_options, data)).to eq({ notify: false, color: 'green' })
- end
- end
+ it { is_expected.to be_empty }
+ end
- context 'with a failed build' do
- it 'uses the red color' do
- data = { object_kind: 'pipeline',
- object_attributes: { status: 'failed' } }
+ describe '#save' do
+ it 'prevents records from being created or updated' do
+ expect(service.save).to be_falsey
- expect(hipchat.__send__(:message_options, data)).to eq({ notify: false, color: 'red' })
- end
- end
+ expect(service.errors.full_messages).to include(
+ 'HipChat endpoint is deprecated and should not be created or modified.'
+ )
end
end
end
diff --git a/spec/models/project_services/issue_tracker_data_spec.rb b/spec/models/project_services/issue_tracker_data_spec.rb
index 3ddb7d9250f..a229285f09b 100644
--- a/spec/models/project_services/issue_tracker_data_spec.rb
+++ b/spec/models/project_services/issue_tracker_data_spec.rb
@@ -3,9 +3,7 @@
require 'spec_helper'
RSpec.describe IssueTrackerData do
- let(:service) { create(:custom_issue_tracker_service, active: false, properties: {}) }
-
- describe 'Associations' do
- it { is_expected.to belong_to :service }
+ describe 'associations' do
+ it { is_expected.to belong_to :integration }
end
end
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index b50fa1edbc3..73e91bf9ea8 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -433,8 +433,8 @@ RSpec.describe JiraService do
context 'when data are stored in both properties and separated fields' do
let(:properties) { data_params }
let(:service) do
- create(:jira_service, :without_properties_callback, active: false, properties: properties).tap do |service|
- create(:jira_tracker_data, data_params.merge(service: service))
+ create(:jira_service, :without_properties_callback, active: false, properties: properties).tap do |integration|
+ create(:jira_tracker_data, data_params.merge(integration: integration))
end
end
diff --git a/spec/models/project_services/jira_tracker_data_spec.rb b/spec/models/project_services/jira_tracker_data_spec.rb
index a698d3fce5f..72bdbe40a74 100644
--- a/spec/models/project_services/jira_tracker_data_spec.rb
+++ b/spec/models/project_services/jira_tracker_data_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe JiraTrackerData do
describe 'associations' do
- it { is_expected.to belong_to(:service) }
+ it { is_expected.to belong_to(:integration) }
end
describe 'deployment_type' do
diff --git a/spec/models/project_services/mattermost_slash_commands_service_spec.rb b/spec/models/project_services/mattermost_slash_commands_service_spec.rb
index 4fff3bc56cc..87befdd4303 100644
--- a/spec/models/project_services/mattermost_slash_commands_service_spec.rb
+++ b/spec/models/project_services/mattermost_slash_commands_service_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe MattermostSlashCommandsService do
end
it 'saves the service' do
- expect { subject }.to change { project.services.count }.by(1)
+ expect { subject }.to change { project.integrations.count }.by(1)
end
it 'saves the token' do
diff --git a/spec/models/project_services/microsoft_teams_service_spec.rb b/spec/models/project_services/microsoft_teams_service_spec.rb
index 53ab63ef030..5f3a94a5b99 100644
--- a/spec/models/project_services/microsoft_teams_service_spec.rb
+++ b/spec/models/project_services/microsoft_teams_service_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe MicrosoftTeamsService do
context 'with issue events' do
let(:opts) { { title: 'Awesome issue', description: 'please fix' } }
let(:issues_sample_data) do
- service = Issues::CreateService.new(project, user, opts)
+ service = Issues::CreateService.new(project: project, current_user: user, params: opts)
issue = service.execute
service.hook_data(issue, 'open')
end
@@ -96,7 +96,7 @@ RSpec.describe MicrosoftTeamsService do
end
let(:merge_sample_data) do
- service = MergeRequests::CreateService.new(project, user, opts)
+ service = MergeRequests::CreateService.new(project: project, current_user: user, params: opts)
merge_request = service.execute
service.hook_data(merge_request, 'open')
end
@@ -240,7 +240,7 @@ RSpec.describe MicrosoftTeamsService do
chat_service.execute(data)
- message = ChatMessage::PipelineMessage.new(data)
+ message = Integrations::ChatMessage::PipelineMessage.new(data)
expect(WebMock).to have_requested(:post, webhook_url)
.with(body: hash_including({ summary: message.summary }))
diff --git a/spec/models/project_services/open_project_tracker_data_spec.rb b/spec/models/project_services/open_project_tracker_data_spec.rb
index e6a3963ba87..1f7f01cfea4 100644
--- a/spec/models/project_services/open_project_tracker_data_spec.rb
+++ b/spec/models/project_services/open_project_tracker_data_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe OpenProjectTrackerData do
- describe 'Associations' do
- it { is_expected.to belong_to(:service) }
+ describe 'associations' do
+ it { is_expected.to belong_to(:integration) }
end
describe 'closed_status_id' do
diff --git a/spec/models/project_services/slack_service_spec.rb b/spec/models/project_services/slack_service_spec.rb
index 688a59fcf09..2e2c1c666d9 100644
--- a/spec/models/project_services/slack_service_spec.rb
+++ b/spec/models/project_services/slack_service_spec.rb
@@ -59,16 +59,22 @@ RSpec.describe SlackService do
context 'deployment notification' do
let_it_be(:deployment) { create(:deployment, user: user) }
- let(:data) { Gitlab::DataBuilder::Deployment.build(deployment) }
+ let(:data) { Gitlab::DataBuilder::Deployment.build(deployment, Time.current) }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_deployment_notification'
end
context 'wiki_page notification' do
- let_it_be(:wiki_page) { create(:wiki_page, wiki: project.wiki, message: 'user created page: Awesome wiki_page') }
+ let(:wiki_page) { create(:wiki_page, wiki: project.wiki, message: 'user created page: Awesome wiki_page') }
let(:data) { Gitlab::DataBuilder::WikiPage.build(wiki_page, user, 'create') }
+ before do
+ # Skip this method, which is not relevant to this test, to avoid having
+ # to update the project, which is frozen
+ allow(project.wiki).to receive(:after_wiki_activity)
+ end
+
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_wiki_page_notification'
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 12c17e699e3..c57c2792f87 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to belong_to(:creator).class_name('User') }
it { is_expected.to belong_to(:pool_repository) }
it { is_expected.to have_many(:users) }
- it { is_expected.to have_many(:services) }
+ it { is_expected.to have_many(:integrations) }
it { is_expected.to have_many(:events) }
it { is_expected.to have_many(:merge_requests) }
it { is_expected.to have_many(:merge_request_metrics).class_name('MergeRequest::Metrics') }
@@ -46,13 +46,13 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:asana_service) }
it { is_expected.to have_many(:boards) }
it { is_expected.to have_one(:campfire_service) }
+ it { is_expected.to have_one(:datadog_service) }
it { is_expected.to have_one(:discord_service) }
it { is_expected.to have_one(:drone_ci_service) }
it { is_expected.to have_one(:emails_on_push_service) }
it { is_expected.to have_one(:pipelines_email_service) }
it { is_expected.to have_one(:irker_service) }
it { is_expected.to have_one(:pivotaltracker_service) }
- it { is_expected.to have_one(:hipchat_service) }
it { is_expected.to have_one(:flowdock_service) }
it { is_expected.to have_one(:assembla_service) }
it { is_expected.to have_one(:slack_slash_commands_service) }
@@ -114,7 +114,8 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:lfs_file_locks) }
it { is_expected.to have_many(:project_deploy_tokens) }
it { is_expected.to have_many(:deploy_tokens).through(:project_deploy_tokens) }
- it { is_expected.to have_many(:cycle_analytics_stages) }
+ it { is_expected.to have_many(:cycle_analytics_stages).inverse_of(:project) }
+ it { is_expected.to have_many(:value_streams).inverse_of(:project) }
it { is_expected.to have_many(:external_pull_requests) }
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:source_pipelines) }
@@ -131,6 +132,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::ProjectDistribution').dependent(:destroy) }
it { is_expected.to have_many(:pipeline_artifacts) }
it { is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) }
+ it { is_expected.to have_many(:timelogs) }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -215,7 +217,7 @@ RSpec.describe Project, factory_default: :keep do
it 'does not raise an error' do
project = create(:project)
- expect { project.update(ci_cd_settings: nil) }.not_to raise_exception
+ expect { project.update!(ci_cd_settings: nil) }.not_to raise_exception
end
end
@@ -873,13 +875,13 @@ RSpec.describe Project, factory_default: :keep do
end
it 'returns the most recent timestamp' do
- project.update(updated_at: nil,
+ project.update!(updated_at: nil,
last_activity_at: timestamp,
last_repository_updated_at: timestamp - 1.hour)
expect(project.last_activity_date).to be_like_time(timestamp)
- project.update(updated_at: timestamp,
+ project.update!(updated_at: timestamp,
last_activity_at: timestamp - 1.hour,
last_repository_updated_at: nil)
@@ -1076,14 +1078,14 @@ RSpec.describe Project, factory_default: :keep do
it 'returns nil and does not query services when there is no external issue tracker' do
project = create(:project)
- expect(project).not_to receive(:services)
+ expect(project).not_to receive(:integrations)
expect(project.external_issue_tracker).to eq(nil)
end
it 'retrieves external_issue_tracker querying services and cache it when there is external issue tracker' do
project = create(:redmine_project)
- expect(project).to receive(:services).once.and_call_original
+ expect(project).to receive(:integrations).once.and_call_original
2.times { expect(project.external_issue_tracker).to be_a_kind_of(RedmineService) }
end
end
@@ -1116,7 +1118,7 @@ RSpec.describe Project, factory_default: :keep do
it 'becomes false when external issue tracker service is destroyed' do
expect do
- Service.find(service.id).delete
+ Integration.find(service.id).delete
end.to change { subject }.to(false)
end
@@ -1133,7 +1135,7 @@ RSpec.describe Project, factory_default: :keep do
it 'does not become false when external issue tracker service is destroyed' do
expect do
- Service.find(service.id).delete
+ Integration.find(service.id).delete
end.not_to change { subject }
end
@@ -1191,7 +1193,7 @@ RSpec.describe Project, factory_default: :keep do
it 'becomes false if the external wiki service is destroyed' do
expect do
- Service.find(service.id).delete
+ Integration.find(service.id).delete
end.to change { subject }.to(false)
end
@@ -1277,7 +1279,9 @@ RSpec.describe Project, factory_default: :keep do
it 'is false if avatar is html page' do
project.update_attribute(:avatar, 'uploads/avatar.html')
- expect(project.avatar_type).to eq(['file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp'])
+ project.avatar_type
+
+ expect(project.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
end
end
@@ -2670,7 +2674,7 @@ RSpec.describe Project, factory_default: :keep do
context 'with pending pipeline' do
it 'returns empty relation' do
- pipeline.update(status: 'pending')
+ pipeline.update!(status: 'pending')
pending_build = create_build(pipeline)
expect { project.latest_successful_build_for_ref!(pending_build.name) }
@@ -2813,11 +2817,11 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#remove_import_data' do
- let_it_be(:import_data) { ProjectImportData.new(data: { 'test' => 'some data' }) }
+ let(:import_data) { ProjectImportData.new(data: { 'test' => 'some data' }) }
context 'when jira import' do
- let_it_be(:project, reload: true) { create(:project, import_type: 'jira', import_data: import_data) }
- let_it_be(:jira_import) { create(:jira_import_state, project: project) }
+ let!(:project) { create(:project, import_type: 'jira', import_data: import_data) }
+ let!(:jira_import) { create(:jira_import_state, project: project) }
it 'does remove import data' do
expect(project.mirror?).to be false
@@ -2827,8 +2831,7 @@ RSpec.describe Project, factory_default: :keep do
end
context 'when neither a mirror nor a jira import' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, import_type: 'github', import_data: import_data) }
+ let!(:project) { create(:project, import_type: 'github', import_data: import_data) }
it 'removes import data' do
expect(project.mirror?).to be false
@@ -2864,7 +2867,7 @@ RSpec.describe Project, factory_default: :keep do
end
it 'returns false when remote mirror is disabled' do
- project.remote_mirrors.first.update(enabled: false)
+ project.remote_mirrors.first.update!(enabled: false)
is_expected.to be_falsy
end
@@ -2895,7 +2898,7 @@ RSpec.describe Project, factory_default: :keep do
end
it 'does not sync disabled remote mirrors' do
- project.remote_mirrors.first.update(enabled: false)
+ project.remote_mirrors.first.update!(enabled: false)
expect_any_instance_of(RemoteMirror).not_to receive(:sync)
@@ -2933,7 +2936,7 @@ RSpec.describe Project, factory_default: :keep do
it 'fails stuck remote mirrors' do
project = create(:project, :repository, :remote_mirror)
- project.remote_mirrors.first.update(
+ project.remote_mirrors.first.update!(
update_status: :started,
last_update_started_at: 2.days.ago
)
@@ -3191,7 +3194,7 @@ RSpec.describe Project, factory_default: :keep do
end
it 'returns the root of the fork network when the directs source was deleted' do
- forked_project.destroy
+ forked_project.destroy!
expect(second_fork.fork_source).to eq(project)
end
@@ -3435,7 +3438,7 @@ RSpec.describe Project, factory_default: :keep do
let(:environment) { 'foo%bar/test' }
it 'matches literally for _' do
- ci_variable.update(environment_scope: 'foo%bar/*')
+ ci_variable.environment_scope = 'foo%bar/*'
is_expected.to contain_exactly(ci_variable)
end
@@ -3676,7 +3679,7 @@ RSpec.describe Project, factory_default: :keep do
it "updates the namespace_id when changed" do
namespace = create(:namespace)
- project.update(namespace: namespace)
+ project.update!(namespace: namespace)
expect(project.statistics.namespace_id).to eq namespace.id
end
@@ -3969,14 +3972,14 @@ RSpec.describe Project, factory_default: :keep do
expect(project).to receive(:visibility_level_allowed_as_fork).and_call_original
expect(project).to receive(:visibility_level_allowed_by_group).and_call_original
- project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
end
it 'does not validate the visibility' do
expect(project).not_to receive(:visibility_level_allowed_as_fork).and_call_original
expect(project).not_to receive(:visibility_level_allowed_by_group).and_call_original
- project.update(updated_at: Time.current)
+ project.update!(updated_at: Time.current)
end
end
@@ -4060,7 +4063,7 @@ RSpec.describe Project, factory_default: :keep do
project_2 = create(:project, :public, :merge_requests_disabled)
project_3 = create(:project, :public, :issues_disabled)
project_4 = create(:project, :public)
- project_4.project_feature.update(issues_access_level: ProjectFeature::PRIVATE, merge_requests_access_level: ProjectFeature::PRIVATE )
+ project_4.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE, merge_requests_access_level: ProjectFeature::PRIVATE )
project_ids = described_class.ids_with_issuables_available_for(user).pluck(:id)
@@ -4103,7 +4106,7 @@ RSpec.describe Project, factory_default: :keep do
let(:project) { create(:project, :public) }
it 'returns projects with the project feature access level nil' do
- project.project_feature.update(merge_requests_access_level: nil)
+ project.project_feature.update!(merge_requests_access_level: nil)
is_expected.to include(project)
end
@@ -4391,7 +4394,7 @@ RSpec.describe Project, factory_default: :keep do
it 'is run when the project is destroyed' do
expect(project).to receive(:legacy_remove_pages).and_call_original
- expect { project.destroy }.not_to raise_error
+ expect { project.destroy! }.not_to raise_error
end
end
@@ -4921,7 +4924,7 @@ RSpec.describe Project, factory_default: :keep do
context 'when enabled on group' do
it 'has auto devops implicitly enabled' do
- project.update(namespace: create(:group, :auto_devops_enabled))
+ project.update!(namespace: create(:group, :auto_devops_enabled))
expect(project).to have_auto_devops_implicitly_enabled
end
@@ -4930,7 +4933,7 @@ RSpec.describe Project, factory_default: :keep do
context 'when enabled on parent group' do
it 'has auto devops implicitly enabled' do
subgroup = create(:group, parent: create(:group, :auto_devops_enabled))
- project.update(namespace: subgroup)
+ project.update!(namespace: subgroup)
expect(project).to have_auto_devops_implicitly_enabled
end
@@ -5404,7 +5407,7 @@ RSpec.describe Project, factory_default: :keep do
before do
create_list(:group_badge, 2, group: project_group)
- project_group.update(parent: parent_group)
+ project_group.update!(parent: parent_group)
end
it 'returns the project and the project nested groups badges' do
@@ -5799,16 +5802,16 @@ RSpec.describe Project, factory_default: :keep do
end
it 'avoids N+1 database queries with more available services' do
- allow(Service).to receive(:available_services_names).and_return(%w[pushover])
+ allow(Integration).to receive(:available_services_names).and_return(%w[pushover])
control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_services }
- allow(Service).to receive(:available_services_names).and_call_original
+ allow(Integration).to receive(:available_services_names).and_call_original
expect { subject.find_or_initialize_services }.not_to exceed_query_limit(control_count)
end
context 'with disabled services' do
before do
- allow(Service).to receive(:available_services_names).and_return(%w[prometheus pushover teamcity])
+ allow(Integration).to receive(:available_services_names).and_return(%w[prometheus pushover teamcity])
allow(subject).to receive(:disabled_services).and_return(%w[prometheus])
end
@@ -5843,11 +5846,11 @@ RSpec.describe Project, factory_default: :keep do
describe '#find_or_initialize_service' do
it 'avoids N+1 database queries' do
- allow(Service).to receive(:available_services_names).and_return(%w[prometheus pushover])
+ allow(Integration).to receive(:available_services_names).and_return(%w[prometheus pushover])
control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_service('prometheus') }.count
- allow(Service).to receive(:available_services_names).and_call_original
+ allow(Integration).to receive(:available_services_names).and_call_original
expect { subject.find_or_initialize_service('prometheus') }.not_to exceed_query_limit(control_count)
end
@@ -6301,23 +6304,31 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#access_request_approvers_to_be_notified' do
- it 'returns a maximum of ten, active, non_requested maintainers of the project in recent_sign_in descending order' do
- group = create(:group, :public)
- project = create(:project, group: group)
+ let_it_be(:project) { create(:project, group: create(:group, :public)) }
+ it 'returns a maximum of ten maintainers of the project in recent_sign_in descending order' do
users = create_list(:user, 12, :with_sign_ins)
active_maintainers = users.map do |user|
- create(:project_member, :maintainer, user: user)
+ create(:project_member, :maintainer, user: user, project: project)
end
- create(:project_member, :maintainer, :blocked, project: project)
- create(:project_member, :developer, project: project)
- create(:project_member, :access_request, :maintainer, project: project)
-
- active_maintainers_in_recent_sign_in_desc_order = project.members_and_requesters.where(id: active_maintainers).order_recent_sign_in.limit(10)
+ active_maintainers_in_recent_sign_in_desc_order = project.members_and_requesters
+ .id_in(active_maintainers)
+ .order_recent_sign_in.limit(10)
expect(project.access_request_approvers_to_be_notified).to eq(active_maintainers_in_recent_sign_in_desc_order)
end
+
+ it 'returns active, non_invited, non_requested maintainers of the project' do
+ maintainer = create(:project_member, :maintainer, source: project)
+
+ create(:project_member, :developer, project: project)
+ create(:project_member, :maintainer, :invited, project: project)
+ create(:project_member, :maintainer, :access_request, project: project)
+ create(:project_member, :maintainer, :blocked, project: project)
+
+ expect(project.access_request_approvers_to_be_notified.to_a).to eq([maintainer])
+ end
end
describe '#pages_lookup_path' do
@@ -6478,17 +6489,17 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe 'with services and chat names' do
+ describe 'with integrations and chat names' do
subject { create(:project) }
- let(:service) { create(:service, project: subject) }
+ let(:integration) { create(:service, project: subject) }
before do
- create_list(:chat_name, 5, service: service)
+ create_list(:chat_name, 5, integration: integration)
end
it 'removes chat names on removal' do
- expect { subject.destroy }.to change { ChatName.count }.by(-5)
+ expect { subject.destroy! }.to change { ChatName.count }.by(-5)
end
end
@@ -6823,6 +6834,26 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#parent_loaded?' do
+ let_it_be(:project) { create(:project) }
+
+ before do
+ project.namespace = create(:namespace)
+
+ project.reload
+ end
+
+ it 'is false when the parent is not loaded' do
+ expect(project.parent_loaded?).to be_falsey
+ end
+
+ it 'is true when the parent is loaded' do
+ project.parent
+
+ expect(project.parent_loaded?).to be_truthy
+ end
+ end
+
describe '#bots' do
subject { project.bots }
@@ -6889,6 +6920,105 @@ RSpec.describe Project, factory_default: :keep do
end
end
end
+
+ describe '#activity_path' do
+ it 'returns the project activity_path' do
+ expected_path = "/#{project.namespace.path}/#{project.name}/activity"
+
+ expect(project.activity_path).to eq(expected_path)
+ end
+ end
+ end
+
+ describe '#default_branch_or_main' do
+ let(:project) { create(:project, :repository) }
+
+ it 'returns default branch' do
+ expect(project.default_branch_or_main).to eq(project.default_branch)
+ end
+
+ context 'when default branch is nil' do
+ let(:project) { create(:project, :empty_repo) }
+
+ it 'returns Gitlab::DefaultBranch.value' do
+ expect(project.default_branch_or_main).to eq(Gitlab::DefaultBranch.value)
+ end
+ end
+ end
+
+ describe '#increment_statistic_value' do
+ let(:project) { build_stubbed(:project) }
+
+ subject(:increment) do
+ project.increment_statistic_value(:build_artifacts_size, -10)
+ end
+
+ it 'increments the value' do
+ expect(ProjectStatistics)
+ .to receive(:increment_statistic)
+ .with(project, :build_artifacts_size, -10)
+
+ increment
+ end
+
+ context 'when the project is scheduled for removal' do
+ let(:project) { build_stubbed(:project, pending_delete: true) }
+
+ it 'does not increment the value' do
+ expect(ProjectStatistics).not_to receive(:increment_statistic)
+
+ increment
+ end
+ end
+ end
+
+ describe 'topics' do
+ let_it_be(:project) { create(:project, tag_list: 'topic1, topic2, topic3') }
+
+ it 'topic_list returns correct string array' do
+ expect(project.topic_list).to match_array(%w[topic1 topic2 topic3])
+ end
+
+ it 'topics returns correct tag records' do
+ expect(project.topics.first.class.name).to eq('ActsAsTaggableOn::Tag')
+ expect(project.topics.map(&:name)).to match_array(%w[topic1 topic2 topic3])
+ end
+
+ context 'aliases' do
+ it 'tag_list returns correct string array' do
+ expect(project.tag_list).to match_array(%w[topic1 topic2 topic3])
+ end
+
+ it 'tags returns correct tag records' do
+ expect(project.tags.first.class.name).to eq('ActsAsTaggableOn::Tag')
+ expect(project.tags.map(&:name)).to match_array(%w[topic1 topic2 topic3])
+ end
+ end
+
+ context 'intermediate state during background migration' do
+ before do
+ project.taggings.first.update!(context: 'tags')
+ project.instance_variable_set("@tag_list", nil)
+ project.reload
+ end
+
+ it 'tag_list returns string array including old and new topics' do
+ expect(project.tag_list).to match_array(%w[topic1 topic2 topic3])
+ end
+
+ it 'tags returns old and new tag records' do
+ expect(project.tags.first.class.name).to eq('ActsAsTaggableOn::Tag')
+ expect(project.tags.map(&:name)).to match_array(%w[topic1 topic2 topic3])
+ expect(project.taggings.map(&:context)).to match_array(%w[tags topics topics])
+ end
+
+ it 'updating tag_list adds new topics and removes old topics' do
+ project.update!(tag_list: 'topic1, topic2, topic3, topic4')
+
+ expect(project.tags.map(&:name)).to match_array(%w[topic1 topic2 topic3 topic4])
+ expect(project.taggings.map(&:context)).to match_array(%w[topics topics topics topics])
+ end
+ end
end
def finish_job(export_job)
diff --git a/spec/models/project_team_spec.rb b/spec/models/project_team_spec.rb
index bbc056889d6..ce75e68de32 100644
--- a/spec/models/project_team_spec.rb
+++ b/spec/models/project_team_spec.rb
@@ -294,7 +294,7 @@ RSpec.describe ProjectTeam do
context 'when project is shared with group' do
before do
group = create(:group)
- project.project_group_links.create(
+ project.project_group_links.create!(
group: group,
group_access: Gitlab::Access::DEVELOPER)
@@ -309,7 +309,7 @@ RSpec.describe ProjectTeam do
context 'but share_with_group_lock is true' do
before do
- project.namespace.update(share_with_group_lock: true)
+ project.namespace.update!(share_with_group_lock: true)
end
it { expect(project.team.max_member_access(maintainer.id)).to eq(Gitlab::Access::NO_ACCESS) }
@@ -496,7 +496,7 @@ RSpec.describe ProjectTeam do
project.add_guest(promoted_guest)
project.add_guest(guest)
- project.project_group_links.create(
+ project.project_group_links.create!(
group: group,
group_access: Gitlab::Access::DEVELOPER
)
@@ -505,7 +505,7 @@ RSpec.describe ProjectTeam do
group.add_developer(group_developer)
group.add_developer(second_developer)
- project.project_group_links.create(
+ project.project_group_links.create!(
group: second_group,
group_access: Gitlab::Access::MAINTAINER
)
diff --git a/spec/models/release_highlight_spec.rb b/spec/models/release_highlight_spec.rb
index 673451b5e76..b4dff4c33ff 100644
--- a/spec/models/release_highlight_spec.rb
+++ b/spec/models/release_highlight_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
before do
allow(Dir).to receive(:glob).with(Rails.root.join('data', 'whats_new', '*.yml')).and_return(fixture_dir_glob)
+ Gitlab::CurrentSettings.update!(whats_new_variant: ApplicationSetting.whats_new_variants[:all_tiers])
end
after do
@@ -24,16 +25,16 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
subject { ReleaseHighlight.paginated(page: page) }
context 'when there is another page of results' do
- let(:page) { 2 }
+ let(:page) { 3 }
it 'responds with paginated results' do
expect(subject[:items].first['title']).to eq('bright')
- expect(subject[:next_page]).to eq(3)
+ expect(subject[:next_page]).to eq(4)
end
end
context 'when there is NOT another page of results' do
- let(:page) { 3 }
+ let(:page) { 4 }
it 'responds with paginated results and no next_page' do
expect(subject[:items].first['title']).to eq("It's gonna be a bright")
@@ -54,8 +55,8 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
subject { ReleaseHighlight.paginated }
it 'uses multiple levels of cache' do
- expect(Rails.cache).to receive(:fetch).with("release_highlight:items:page-1:#{Gitlab.revision}", { expires_in: described_class::CACHE_DURATION }).and_call_original
- expect(Rails.cache).to receive(:fetch).with("release_highlight:file_paths:#{Gitlab.revision}", { expires_in: described_class::CACHE_DURATION }).and_call_original
+ expect(Rails.cache).to receive(:fetch).with("release_highlight:all_tiers:items:page-1:#{Gitlab.revision}", { expires_in: described_class::CACHE_DURATION }).and_call_original
+ expect(Rails.cache).to receive(:fetch).with("release_highlight:all_tiers:file_paths:#{Gitlab.revision}", { expires_in: described_class::CACHE_DURATION }).and_call_original
subject
end
@@ -101,7 +102,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
subject { ReleaseHighlight.most_recent_item_count }
it 'uses process memory cache' do
- expect(Gitlab::ProcessMemoryCache.cache_backend).to receive(:fetch).with("release_highlight:recent_item_count:#{Gitlab.revision}", expires_in: described_class::CACHE_DURATION)
+ expect(Gitlab::ProcessMemoryCache.cache_backend).to receive(:fetch).with("release_highlight:all_tiers:recent_item_count:#{Gitlab.revision}", expires_in: described_class::CACHE_DURATION)
subject
end
@@ -127,7 +128,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
subject { ReleaseHighlight.most_recent_version_digest }
it 'uses process memory cache' do
- expect(Gitlab::ProcessMemoryCache.cache_backend).to receive(:fetch).with("release_highlight:most_recent_version_digest:#{Gitlab.revision}", expires_in: described_class::CACHE_DURATION)
+ expect(Gitlab::ProcessMemoryCache.cache_backend).to receive(:fetch).with("release_highlight:all_tiers:most_recent_version_digest:#{Gitlab.revision}", expires_in: described_class::CACHE_DURATION)
subject
end
@@ -148,6 +149,33 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
end
end
+ describe '.load_items' do
+ context 'whats new for all tiers' do
+ before do
+ Gitlab::CurrentSettings.update!(whats_new_variant: ApplicationSetting.whats_new_variants[:all_tiers])
+ end
+
+ it 'returns all items' do
+ items = described_class.load_items(page: 2)
+
+ expect(items.count).to eq(3)
+ end
+ end
+
+ context 'whats new for current tier only' do
+ before do
+ Gitlab::CurrentSettings.update!(whats_new_variant: ApplicationSetting.whats_new_variants[:current_tier])
+ end
+
+ it 'returns items with package=Free' do
+ items = described_class.load_items(page: 2)
+
+ expect(items.count).to eq(1)
+ expect(items.first['title']).to eq("View epics on a board")
+ end
+ end
+ end
+
describe 'QueryResult' do
subject { ReleaseHighlight::QueryResult.new(items: items, next_page: 2) }
@@ -157,4 +185,12 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
expect(subject.map(&:to_s)).to eq(items.map(&:to_s))
end
end
+
+ describe '.current_package' do
+ subject { described_class.current_package }
+
+ it 'returns Free' do
+ expect(subject).to eq('Free')
+ end
+ end
end
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 540a8068b20..b88813b3328 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Release do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
+
let(:release) { create(:release, project: project, author: user) }
it { expect(release).to be_valid }
@@ -37,6 +38,18 @@ RSpec.describe Release do
end
end
+ context 'when description of a release is longer than the limit' do
+ let(:description) { 'a' * (Gitlab::Database::MAX_TEXT_SIZE_LIMIT + 1) }
+ let(:release) { build(:release, project: project, description: description) }
+
+ it 'creates a validation error' do
+ release.validate
+
+ expect(release.errors.full_messages)
+ .to include("Description is too long (maximum is #{Gitlab::Database::MAX_TEXT_SIZE_LIMIT} characters)")
+ end
+ end
+
context 'when a release is tied to a milestone for another project' do
it 'creates a validation error' do
milestone = build(:milestone, project: create(:project))
@@ -53,7 +66,7 @@ RSpec.describe Release do
end
describe '#assets_count' do
- subject { release.assets_count }
+ subject { Release.find(release.id).assets_count }
it 'returns the number of sources' do
is_expected.to eq(Gitlab::Workhorse::ARCHIVE_FORMATS.count)
@@ -67,7 +80,7 @@ RSpec.describe Release do
end
it "excludes sources count when asked" do
- assets_count = release.assets_count(except: [:sources])
+ assets_count = Release.find(release.id).assets_count(except: [:sources])
expect(assets_count).to eq(1)
end
end
diff --git a/spec/models/releases/evidence_spec.rb b/spec/models/releases/evidence_spec.rb
index ca5d4b67b59..59133b2fa51 100644
--- a/spec/models/releases/evidence_spec.rb
+++ b/spec/models/releases/evidence_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Releases::Evidence do
let_it_be(:project) { create(:project) }
+
let(:release) { create(:release, project: project) }
describe 'associations' do
diff --git a/spec/models/releases/source_spec.rb b/spec/models/releases/source_spec.rb
index d10b2140550..227085951c0 100644
--- a/spec/models/releases/source_spec.rb
+++ b/spec/models/releases/source_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Releases::Source do
let_it_be(:project) { create(:project, :repository, name: 'finance-cal') }
+
let(:tag_name) { 'v1.0' }
describe '.all' do
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index a739f523008..7748846f6a5 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1006,19 +1006,58 @@ RSpec.describe Repository do
end
end
- context 'when specifying a path with wildcard' do
- let(:path) { 'files/*/*.png' }
+ context 'when specifying a wildcard path' do
+ let(:path) { '*.md' }
+
+ it 'returns files matching the path in the root folder' do
+ expect(result).to contain_exactly('CONTRIBUTING.md',
+ 'MAINTENANCE.md',
+ 'PROCESS.md',
+ 'README.md')
+ end
+ end
+
+ context 'when specifying a wildcard path for all' do
+ let(:path) { '**.md' }
+
+ it 'returns all matching files in all folders' do
+ expect(result).to contain_exactly('CONTRIBUTING.md',
+ 'MAINTENANCE.md',
+ 'PROCESS.md',
+ 'README.md',
+ 'files/markdown/ruby-style-guide.md',
+ 'with space/README.md')
+ end
+ end
+
+ context 'when specifying a path to subfolders using two asterisks and a slash' do
+ let(:path) { 'files/**/*.md' }
it 'returns all files matching the path' do
- expect(result).to contain_exactly('files/images/logo-black.png',
- 'files/images/logo-white.png')
+ expect(result).to contain_exactly('files/markdown/ruby-style-guide.md')
+ end
+ end
+
+ context 'when specifying a wildcard path to a subfolder with just two asterisks' do
+ let(:path) { 'files/**.md' }
+
+ it 'returns all files in the matching path' do
+ expect(result).to contain_exactly('files/markdown/ruby-style-guide.md')
+ end
+ end
+
+ context 'when specifying a wildcard path to a subfolder with one asterisk' do
+ let(:path) { 'files/*/*.md' }
+
+ it 'returns all files in the matching path' do
+ expect(result).to contain_exactly('files/markdown/ruby-style-guide.md')
end
end
- context 'when specifying an extension with wildcard' do
- let(:path) { '*.rb' }
+ context 'when specifying a wildcard path for an unknown number of subfolder levels' do
+ let(:path) { '**/*.rb' }
- it 'returns all files matching the extension' do
+ it 'returns all matched files in all subfolders' do
expect(result).to contain_exactly('encoding/russian.rb',
'files/ruby/popen.rb',
'files/ruby/regex.rb',
@@ -1026,6 +1065,14 @@ RSpec.describe Repository do
end
end
+ context 'when specifying a wildcard path to one level of subfolders' do
+ let(:path) { '*/*.rb' }
+
+ it 'returns all matched files in one subfolder' do
+ expect(result).to contain_exactly('encoding/russian.rb')
+ end
+ end
+
context 'when sending regexp' do
let(:path) { '.*\.rb' }
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
deleted file mode 100644
index d8eb4ebc432..00000000000
--- a/spec/models/service_spec.rb
+++ /dev/null
@@ -1,887 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Service do
- using RSpec::Parameterized::TableSyntax
-
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
-
- describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to belong_to :group }
- it { is_expected.to have_one :service_hook }
- it { is_expected.to have_one :jira_tracker_data }
- it { is_expected.to have_one :issue_tracker_data }
- end
-
- describe 'validations' do
- it { is_expected.to validate_presence_of(:type) }
-
- where(:project_id, :group_id, :template, :instance, :valid) do
- 1 | nil | false | false | true
- nil | 1 | false | false | true
- nil | nil | true | false | true
- nil | nil | false | true | true
- nil | nil | false | false | false
- nil | nil | true | true | false
- 1 | 1 | false | false | false
- 1 | nil | true | false | false
- 1 | nil | false | true | false
- nil | 1 | true | false | false
- nil | 1 | false | true | false
- end
-
- with_them do
- it 'validates the service' do
- expect(build(:service, project_id: project_id, group_id: group_id, template: template, instance: instance).valid?).to eq(valid)
- end
- end
-
- context 'with existing services' do
- before_all do
- create(:service, :template)
- create(:service, :instance)
- create(:service, project: project)
- create(:service, group: group, project: nil)
- end
-
- it 'allows only one service template per type' do
- expect(build(:service, :template)).to be_invalid
- end
-
- it 'allows only one instance service per type' do
- expect(build(:service, :instance)).to be_invalid
- end
-
- it 'allows only one project service per type' do
- expect(build(:service, project: project)).to be_invalid
- end
-
- it 'allows only one group service per type' do
- expect(build(:service, group: group, project: nil)).to be_invalid
- end
- end
- end
-
- describe 'Scopes' do
- describe '.by_type' do
- let!(:service1) { create(:jira_service) }
- let!(:service2) { create(:jira_service) }
- let!(:service3) { create(:redmine_service) }
-
- subject { described_class.by_type(type) }
-
- context 'when type is "JiraService"' do
- let(:type) { 'JiraService' }
-
- it { is_expected.to match_array([service1, service2]) }
- end
-
- context 'when type is "RedmineService"' do
- let(:type) { 'RedmineService' }
-
- it { is_expected.to match_array([service3]) }
- end
- end
-
- describe '.for_group' do
- let!(:service1) { create(:jira_service, project_id: nil, group_id: group.id) }
- let!(:service2) { create(:jira_service) }
-
- it 'returns the right group service' do
- expect(described_class.for_group(group)).to match_array([service1])
- end
- end
-
- describe '.confidential_note_hooks' do
- it 'includes services where confidential_note_events is true' do
- create(:service, active: true, confidential_note_events: true)
-
- expect(described_class.confidential_note_hooks.count).to eq 1
- end
-
- it 'excludes services where confidential_note_events is false' do
- create(:service, active: true, confidential_note_events: false)
-
- expect(described_class.confidential_note_hooks.count).to eq 0
- end
- end
-
- describe '.alert_hooks' do
- it 'includes services where alert_events is true' do
- create(:service, active: true, alert_events: true)
-
- expect(described_class.alert_hooks.count).to eq 1
- end
-
- it 'excludes services where alert_events is false' do
- create(:service, active: true, alert_events: false)
-
- expect(described_class.alert_hooks.count).to eq 0
- end
- end
- end
-
- describe '#operating?' do
- it 'is false when the service is not active' do
- expect(build(:service).operating?).to eq(false)
- end
-
- it 'is false when the service is not persisted' do
- expect(build(:service, active: true).operating?).to eq(false)
- end
-
- it 'is true when the service is active and persisted' do
- expect(create(:service, active: true).operating?).to eq(true)
- end
- end
-
- describe "Test Button" do
- let(:service) { build(:service, project: project) }
-
- describe '#can_test?' do
- subject { service.can_test? }
-
- context 'when repository is not empty' do
- let(:project) { build(:project, :repository) }
-
- it { is_expected.to be true }
- end
-
- context 'when repository is empty' do
- let(:project) { build(:project) }
-
- it { is_expected.to be true }
- end
-
- context 'when instance-level service' do
- Service.available_services_types.each do |service_type|
- let(:service) do
- service_type.constantize.new(instance: true)
- end
-
- it { is_expected.to be_falsey }
- end
- end
-
- context 'when group-level service' do
- Service.available_services_types.each do |service_type|
- let(:service) do
- service_type.constantize.new(group_id: group.id)
- end
-
- it { is_expected.to be_falsey }
- end
- end
- end
-
- describe '#test' do
- let(:data) { 'test' }
-
- context 'when repository is not empty' do
- let(:project) { build(:project, :repository) }
-
- it 'test runs execute' do
- expect(service).to receive(:execute).with(data)
-
- service.test(data)
- end
- end
-
- context 'when repository is empty' do
- let(:project) { build(:project) }
-
- it 'test runs execute' do
- expect(service).to receive(:execute).with(data)
-
- service.test(data)
- end
- end
- end
- end
-
- describe '#project_level?' do
- it 'is true when service has a project' do
- expect(build(:service, project: project)).to be_project_level
- end
-
- it 'is false when service has no project' do
- expect(build(:service, project: nil)).not_to be_project_level
- end
- end
-
- describe '.find_or_initialize_non_project_specific_integration' do
- let!(:service1) { create(:jira_service, project_id: nil, group_id: group.id) }
- let!(:service2) { create(:jira_service) }
-
- it 'returns the right service' do
- expect(Service.find_or_initialize_non_project_specific_integration('jira', group_id: group)).to eq(service1)
- end
-
- it 'does not create a new service' do
- expect { Service.find_or_initialize_non_project_specific_integration('redmine', group_id: group) }.not_to change { Service.count }
- end
- end
-
- describe '.find_or_initialize_all_non_project_specific' do
- shared_examples 'service instances' do
- it 'returns the available service instances' do
- expect(Service.find_or_initialize_all_non_project_specific(Service.for_instance).pluck(:type)).to match_array(Service.available_services_types(include_project_specific: false))
- end
-
- it 'does not create service instances' do
- expect { Service.find_or_initialize_all_non_project_specific(Service.for_instance) }.not_to change { Service.count }
- end
- end
-
- it_behaves_like 'service instances'
-
- context 'with all existing instances' do
- before do
- Service.insert_all(
- Service.available_services_types(include_project_specific: false).map { |type| { instance: true, type: type } }
- )
- end
-
- it_behaves_like 'service instances'
-
- context 'with a previous existing service (MockCiService) and a new service (Asana)' do
- before do
- Service.insert({ type: 'MockCiService', instance: true })
- Service.delete_by(type: 'AsanaService', instance: true)
- end
-
- it_behaves_like 'service instances'
- end
- end
-
- context 'with a few existing instances' do
- before do
- create(:jira_service, :instance)
- end
-
- it_behaves_like 'service instances'
- end
- end
-
- describe 'template' do
- shared_examples 'retrieves service templates' do
- it 'returns the available service templates' do
- expect(Service.find_or_create_templates.pluck(:type)).to match_array(Service.available_services_types(include_project_specific: false))
- end
- end
-
- describe '.find_or_create_templates' do
- it 'creates service templates' do
- expect { Service.find_or_create_templates }.to change { Service.count }.from(0).to(Service.available_services_names(include_project_specific: false).size)
- end
-
- it_behaves_like 'retrieves service templates'
-
- context 'with all existing templates' do
- before do
- Service.insert_all(
- Service.available_services_types(include_project_specific: false).map { |type| { template: true, type: type } }
- )
- end
-
- it 'does not create service templates' do
- expect { Service.find_or_create_templates }.not_to change { Service.count }
- end
-
- it_behaves_like 'retrieves service templates'
-
- context 'with a previous existing service (Previous) and a new service (Asana)' do
- before do
- Service.insert({ type: 'PreviousService', template: true })
- Service.delete_by(type: 'AsanaService', template: true)
- end
-
- it_behaves_like 'retrieves service templates'
- end
- end
-
- context 'with a few existing templates' do
- before do
- create(:jira_service, :template)
- end
-
- it 'creates the rest of the service templates' do
- expect { Service.find_or_create_templates }.to change { Service.count }.from(1).to(Service.available_services_names(include_project_specific: false).size)
- end
-
- it_behaves_like 'retrieves service templates'
- end
- end
-
- describe '.build_from_integration' do
- context 'when integration is invalid' do
- let(:integration) do
- build(:prometheus_service, :template, active: true, properties: {})
- .tap { |integration| integration.save(validate: false) }
- end
-
- it 'sets service to inactive' do
- service = described_class.build_from_integration(integration, project_id: project.id)
-
- expect(service).to be_valid
- expect(service.active).to be false
- end
- end
-
- context 'when integration is an instance-level integration' do
- let(:integration) { create(:jira_service, :instance) }
-
- it 'sets inherit_from_id from integration' do
- service = described_class.build_from_integration(integration, project_id: project.id)
-
- expect(service.inherit_from_id).to eq(integration.id)
- end
- end
-
- context 'when integration is a group-level integration' do
- let(:integration) { create(:jira_service, group: group, project: nil) }
-
- it 'sets inherit_from_id from integration' do
- service = described_class.build_from_integration(integration, project_id: project.id)
-
- expect(service.inherit_from_id).to eq(integration.id)
- end
- end
-
- describe 'build issue tracker from an integration' do
- let(:url) { 'http://jira.example.com' }
- let(:api_url) { 'http://api-jira.example.com' }
- let(:username) { 'jira-username' }
- let(:password) { 'jira-password' }
- let(:data_params) do
- {
- url: url, api_url: api_url,
- username: username, password: password
- }
- end
-
- shared_examples 'service creation from an integration' do
- it 'creates a correct service for a project integration' do
- service = described_class.build_from_integration(integration, project_id: project.id)
-
- expect(service).to be_active
- expect(service.url).to eq(url)
- expect(service.api_url).to eq(api_url)
- expect(service.username).to eq(username)
- expect(service.password).to eq(password)
- expect(service.template).to eq(false)
- expect(service.instance).to eq(false)
- expect(service.project).to eq(project)
- expect(service.group).to eq(nil)
- end
-
- it 'creates a correct service for a group integration' do
- service = described_class.build_from_integration(integration, group_id: group.id)
-
- expect(service).to be_active
- expect(service.url).to eq(url)
- expect(service.api_url).to eq(api_url)
- expect(service.username).to eq(username)
- expect(service.password).to eq(password)
- expect(service.template).to eq(false)
- expect(service.instance).to eq(false)
- expect(service.project).to eq(nil)
- expect(service.group).to eq(group)
- end
- end
-
- # this will be removed as part of https://gitlab.com/gitlab-org/gitlab/issues/29404
- context 'when data are stored in properties' do
- let(:properties) { data_params }
- let!(:integration) do
- create(:jira_service, :without_properties_callback, template: true, properties: properties.merge(additional: 'something'))
- end
-
- it_behaves_like 'service creation from an integration'
- end
-
- context 'when data are stored in separated fields' do
- let(:integration) do
- create(:jira_service, :template, data_params.merge(properties: {}))
- end
-
- it_behaves_like 'service creation from an integration'
- end
-
- context 'when data are stored in both properties and separated fields' do
- let(:properties) { data_params }
- let(:integration) do
- create(:jira_service, :without_properties_callback, active: true, template: true, properties: properties).tap do |service|
- create(:jira_tracker_data, data_params.merge(service: service))
- end
- end
-
- it_behaves_like 'service creation from an integration'
- end
- end
- end
-
- describe "for pushover service" do
- let!(:service_template) do
- PushoverService.create(
- template: true,
- properties: {
- device: 'MyDevice',
- sound: 'mic',
- priority: 4,
- api_key: '123456789'
- })
- end
-
- describe 'is prefilled for projects pushover service' do
- it "has all fields prefilled" do
- service = project.find_or_initialize_service('pushover')
-
- expect(service.template).to eq(false)
- expect(service.device).to eq('MyDevice')
- expect(service.sound).to eq('mic')
- expect(service.priority).to eq(4)
- expect(service.api_key).to eq('123456789')
- end
- end
- end
- end
-
- describe '.default_integration' do
- context 'with an instance-level service' do
- let_it_be(:instance_service) { create(:jira_service, :instance) }
-
- it 'returns the instance service' do
- expect(described_class.default_integration('JiraService', project)).to eq(instance_service)
- end
-
- it 'returns nil for nonexistent service type' do
- expect(described_class.default_integration('HipchatService', project)).to eq(nil)
- end
-
- context 'with a group service' do
- let_it_be(:group_service) { create(:jira_service, group_id: group.id, project_id: nil) }
-
- it 'returns the group service for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_service)
- end
-
- it 'returns the instance service for a group' do
- expect(described_class.default_integration('JiraService', group)).to eq(instance_service)
- end
-
- context 'with a subgroup' do
- let_it_be(:subgroup) { create(:group, parent: group) }
- let!(:project) { create(:project, group: subgroup) }
-
- it 'returns the closest group service for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_service)
- end
-
- it 'returns the closest group service for a subgroup' do
- expect(described_class.default_integration('JiraService', subgroup)).to eq(group_service)
- end
-
- context 'having a service with custom settings' do
- let!(:subgroup_service) { create(:jira_service, group_id: subgroup.id, project_id: nil) }
-
- it 'returns the closest group service for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(subgroup_service)
- end
- end
-
- context 'having a service inheriting settings' do
- let!(:subgroup_service) { create(:jira_service, group_id: subgroup.id, project_id: nil, inherit_from_id: group_service.id) }
-
- it 'returns the closest group service which does not inherit from its parent for a project' do
- expect(described_class.default_integration('JiraService', project)).to eq(group_service)
- end
- end
- end
- end
- end
- end
-
- describe '.create_from_active_default_integrations' do
- context 'with an active service template' do
- let_it_be(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
-
- it 'creates a service from the template' do
- described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
-
- expect(project.reload.services.size).to eq(1)
- expect(project.reload.services.first.api_url).to eq(template_integration.api_url)
- expect(project.reload.services.first.inherit_from_id).to be_nil
- end
-
- context 'with an active instance-level integration' do
- let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
-
- it 'creates a service from the instance-level integration' do
- described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
-
- expect(project.reload.services.size).to eq(1)
- expect(project.reload.services.first.api_url).to eq(instance_integration.api_url)
- expect(project.reload.services.first.inherit_from_id).to eq(instance_integration.id)
- end
-
- context 'passing a group' do
- it 'creates a service from the instance-level integration' do
- described_class.create_from_active_default_integrations(group, :group_id)
-
- expect(group.reload.services.size).to eq(1)
- expect(group.reload.services.first.api_url).to eq(instance_integration.api_url)
- expect(group.reload.services.first.inherit_from_id).to eq(instance_integration.id)
- end
- end
-
- context 'with an active group-level integration' do
- let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
-
- it 'creates a service from the group-level integration' do
- described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
-
- expect(project.reload.services.size).to eq(1)
- expect(project.reload.services.first.api_url).to eq(group_integration.api_url)
- expect(project.reload.services.first.inherit_from_id).to eq(group_integration.id)
- end
-
- context 'passing a group' do
- let!(:subgroup) { create(:group, parent: group) }
-
- it 'creates a service from the group-level integration' do
- described_class.create_from_active_default_integrations(subgroup, :group_id)
-
- expect(subgroup.reload.services.size).to eq(1)
- expect(subgroup.reload.services.first.api_url).to eq(group_integration.api_url)
- expect(subgroup.reload.services.first.inherit_from_id).to eq(group_integration.id)
- end
- end
-
- context 'with an active subgroup' do
- let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
- let!(:subgroup) { create(:group, parent: group) }
- let(:project) { create(:project, group: subgroup) }
-
- it 'creates a service from the subgroup-level integration' do
- described_class.create_from_active_default_integrations(project, :project_id, with_templates: true)
-
- expect(project.reload.services.size).to eq(1)
- expect(project.reload.services.first.api_url).to eq(subgroup_integration.api_url)
- expect(project.reload.services.first.inherit_from_id).to eq(subgroup_integration.id)
- end
-
- context 'passing a group' do
- let!(:sub_subgroup) { create(:group, parent: subgroup) }
-
- it 'creates a service from the subgroup-level integration' do
- described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
-
- expect(sub_subgroup.reload.services.size).to eq(1)
- expect(sub_subgroup.reload.services.first.api_url).to eq(subgroup_integration.api_url)
- expect(sub_subgroup.reload.services.first.inherit_from_id).to eq(subgroup_integration.id)
- end
-
- context 'having a service inheriting settings' do
- let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, inherit_from_id: group_integration.id, api_url: 'https://prometheus.subgroup.com/') }
-
- it 'creates a service from the group-level integration' do
- described_class.create_from_active_default_integrations(sub_subgroup, :group_id)
-
- expect(sub_subgroup.reload.services.size).to eq(1)
- expect(sub_subgroup.reload.services.first.api_url).to eq(group_integration.api_url)
- expect(sub_subgroup.reload.services.first.inherit_from_id).to eq(group_integration.id)
- end
- end
- end
- end
- end
- end
- end
- end
-
- describe '.inherited_descendants_from_self_or_ancestors_from' do
- let_it_be(:subgroup1) { create(:group, parent: group) }
- let_it_be(:subgroup2) { create(:group, parent: group) }
- let_it_be(:project1) { create(:project, group: subgroup1) }
- let_it_be(:project2) { create(:project, group: subgroup2) }
- let_it_be(:group_integration) { create(:prometheus_service, group: group, project: nil) }
- let_it_be(:subgroup_integration1) { create(:prometheus_service, group: subgroup1, project: nil, inherit_from_id: group_integration.id) }
- let_it_be(:subgroup_integration2) { create(:prometheus_service, group: subgroup2, project: nil) }
- let_it_be(:project_integration1) { create(:prometheus_service, group: nil, project: project1, inherit_from_id: group_integration.id) }
- let_it_be(:project_integration2) { create(:prometheus_service, group: nil, project: project2, inherit_from_id: subgroup_integration2.id) }
-
- it 'returns the groups and projects inheriting from integration ancestors', :aggregate_failures do
- expect(described_class.inherited_descendants_from_self_or_ancestors_from(group_integration)).to eq([subgroup_integration1, project_integration1])
- expect(described_class.inherited_descendants_from_self_or_ancestors_from(subgroup_integration2)).to eq([project_integration2])
- end
- end
-
- describe "{property}_changed?" do
- let(:service) do
- BambooService.create(
- project: project,
- properties: {
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: "password"
- }
- )
- end
-
- it "returns false when the property has not been assigned a new value" do
- service.username = "key_changed"
- expect(service.bamboo_url_changed?).to be_falsy
- end
-
- it "returns true when the property has been assigned a different value" do
- service.bamboo_url = "http://example.com"
- expect(service.bamboo_url_changed?).to be_truthy
- end
-
- it "returns true when the property has been assigned a different value twice" do
- service.bamboo_url = "http://example.com"
- service.bamboo_url = "http://example.com"
- expect(service.bamboo_url_changed?).to be_truthy
- end
-
- it "returns false when the property has been re-assigned the same value" do
- service.bamboo_url = 'http://gitlab.com'
- expect(service.bamboo_url_changed?).to be_falsy
- end
-
- it "returns false when the property has been assigned a new value then saved" do
- service.bamboo_url = 'http://example.com'
- service.save
- expect(service.bamboo_url_changed?).to be_falsy
- end
- end
-
- describe "{property}_touched?" do
- let(:service) do
- BambooService.create(
- project: project,
- properties: {
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: "password"
- }
- )
- end
-
- it "returns false when the property has not been assigned a new value" do
- service.username = "key_changed"
- expect(service.bamboo_url_touched?).to be_falsy
- end
-
- it "returns true when the property has been assigned a different value" do
- service.bamboo_url = "http://example.com"
- expect(service.bamboo_url_touched?).to be_truthy
- end
-
- it "returns true when the property has been assigned a different value twice" do
- service.bamboo_url = "http://example.com"
- service.bamboo_url = "http://example.com"
- expect(service.bamboo_url_touched?).to be_truthy
- end
-
- it "returns true when the property has been re-assigned the same value" do
- service.bamboo_url = 'http://gitlab.com'
- expect(service.bamboo_url_touched?).to be_truthy
- end
-
- it "returns false when the property has been assigned a new value then saved" do
- service.bamboo_url = 'http://example.com'
- service.save
- expect(service.bamboo_url_changed?).to be_falsy
- end
- end
-
- describe "{property}_was" do
- let(:service) do
- BambooService.create(
- project: project,
- properties: {
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: "password"
- }
- )
- end
-
- it "returns nil when the property has not been assigned a new value" do
- service.username = "key_changed"
- expect(service.bamboo_url_was).to be_nil
- end
-
- it "returns the previous value when the property has been assigned a different value" do
- service.bamboo_url = "http://example.com"
- expect(service.bamboo_url_was).to eq('http://gitlab.com')
- end
-
- it "returns initial value when the property has been re-assigned the same value" do
- service.bamboo_url = 'http://gitlab.com'
- expect(service.bamboo_url_was).to eq('http://gitlab.com')
- end
-
- it "returns initial value when the property has been assigned multiple values" do
- service.bamboo_url = "http://example.com"
- service.bamboo_url = "http://example2.com"
- expect(service.bamboo_url_was).to eq('http://gitlab.com')
- end
-
- it "returns nil when the property has been assigned a new value then saved" do
- service.bamboo_url = 'http://example.com'
- service.save
- expect(service.bamboo_url_was).to be_nil
- end
- end
-
- describe 'initialize service with no properties' do
- let(:service) do
- BugzillaService.create(
- project: project,
- project_url: 'http://gitlab.example.com'
- )
- end
-
- it 'does not raise error' do
- expect { service }.not_to raise_error
- end
-
- it 'sets data correctly' do
- expect(service.data_fields.project_url).to eq('http://gitlab.example.com')
- end
- end
-
- describe '#api_field_names' do
- let(:fake_service) do
- Class.new(Service) do
- def fields
- [
- { name: 'token' },
- { name: 'api_token' },
- { name: 'key' },
- { name: 'api_key' },
- { name: 'password' },
- { name: 'password_field' },
- { name: 'safe_field' }
- ]
- end
- end
- end
-
- let(:service) do
- fake_service.new(properties: [
- { token: 'token-value' },
- { api_token: 'api_token-value' },
- { key: 'key-value' },
- { api_key: 'api_key-value' },
- { password: 'password-value' },
- { password_field: 'password_field-value' },
- { safe_field: 'safe_field-value' }
- ])
- end
-
- it 'filters out sensitive fields' do
- expect(service.api_field_names).to eq(['safe_field'])
- end
- end
-
- context 'logging' do
- let(:service) { build(:service, project: project) }
- let(:test_message) { "test message" }
- let(:arguments) do
- {
- service_class: service.class.name,
- project_path: project.full_path,
- project_id: project.id,
- message: test_message,
- additional_argument: 'some argument'
- }
- end
-
- it 'logs info messages using json logger' do
- expect(Gitlab::JsonLogger).to receive(:info).with(arguments)
-
- service.log_info(test_message, additional_argument: 'some argument')
- end
-
- it 'logs error messages using json logger' do
- expect(Gitlab::JsonLogger).to receive(:error).with(arguments)
-
- service.log_error(test_message, additional_argument: 'some argument')
- end
-
- context 'when project is nil' do
- let(:project) { nil }
- let(:arguments) do
- {
- service_class: service.class.name,
- project_path: nil,
- project_id: nil,
- message: test_message,
- additional_argument: 'some argument'
- }
- end
-
- it 'logs info messages using json logger' do
- expect(Gitlab::JsonLogger).to receive(:info).with(arguments)
-
- service.log_info(test_message, additional_argument: 'some argument')
- end
- end
- end
-
- describe '#external_wiki?' do
- where(:type, :active, :result) do
- 'ExternalWikiService' | true | true
- 'ExternalWikiService' | false | false
- 'SlackService' | true | false
- end
-
- with_them do
- it 'returns the right result' do
- expect(build(:service, type: type, active: active).external_wiki?).to eq(result)
- end
- end
- end
-
- describe '.available_services_names' do
- it 'calls the right methods' do
- expect(described_class).to receive(:services_names).and_call_original
- expect(described_class).to receive(:dev_services_names).and_call_original
- expect(described_class).to receive(:project_specific_services_names).and_call_original
-
- described_class.available_services_names
- end
-
- it 'does not call project_specific_services_names with include_project_specific false' do
- expect(described_class).to receive(:services_names).and_call_original
- expect(described_class).to receive(:dev_services_names).and_call_original
- expect(described_class).not_to receive(:project_specific_services_names)
-
- described_class.available_services_names(include_project_specific: false)
- end
-
- it 'does not call dev_services_names with include_dev false' do
- expect(described_class).to receive(:services_names).and_call_original
- expect(described_class).not_to receive(:dev_services_names)
- expect(described_class).to receive(:project_specific_services_names).and_call_original
-
- described_class.available_services_names(include_dev: false)
- end
-
- it { expect(described_class.available_services_names).to include('jenkins') }
- end
-
- describe '.project_specific_services_names' do
- it do
- expect(described_class.project_specific_services_names)
- .to include(*described_class::PROJECT_SPECIFIC_SERVICE_NAMES)
- end
- end
-end
diff --git a/spec/models/sidebars/menu_spec.rb b/spec/models/sidebars/menu_spec.rb
deleted file mode 100644
index 320f5f1ad1e..00000000000
--- a/spec/models/sidebars/menu_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Menu do
- let(:menu) { described_class.new(context) }
- let(:context) { Sidebars::Context.new(current_user: nil, container: nil) }
-
- describe '#all_active_routes' do
- it 'gathers all active routes of items and the current menu' do
- menu_item1 = Sidebars::MenuItem.new(context)
- menu_item2 = Sidebars::MenuItem.new(context)
- menu_item3 = Sidebars::MenuItem.new(context)
- menu.add_item(menu_item1)
- menu.add_item(menu_item2)
- menu.add_item(menu_item3)
-
- allow(menu).to receive(:active_routes).and_return({ path: 'foo' })
- allow(menu_item1).to receive(:active_routes).and_return({ path: %w(bar test) })
- allow(menu_item2).to receive(:active_routes).and_return({ controller: 'fooc' })
- allow(menu_item3).to receive(:active_routes).and_return({ controller: 'barc' })
-
- expect(menu.all_active_routes).to eq({ path: %w(foo bar test), controller: %w(fooc barc) })
- end
-
- it 'does not include routes for non renderable items' do
- menu_item = Sidebars::MenuItem.new(context)
- menu.add_item(menu_item)
-
- allow(menu).to receive(:active_routes).and_return({ path: 'foo' })
- allow(menu_item).to receive(:render?).and_return(false)
- allow(menu_item).to receive(:active_routes).and_return({ controller: 'bar' })
-
- expect(menu.all_active_routes).to eq({ path: ['foo'] })
- end
- end
-
- describe '#render?' do
- context 'when the menus has no items' do
- it 'returns true' do
- expect(menu.render?).to be true
- end
- end
-
- context 'when the menu has items' do
- let(:menu_item) { Sidebars::MenuItem.new(context) }
-
- before do
- menu.add_item(menu_item)
- end
-
- context 'when items are not renderable' do
- it 'returns false' do
- allow(menu_item).to receive(:render?).and_return(false)
-
- expect(menu.render?).to be false
- end
- end
-
- context 'when there are renderable items' do
- it 'returns true' do
- expect(menu.render?).to be true
- end
- end
- end
- end
-end
diff --git a/spec/models/sidebars/panel_spec.rb b/spec/models/sidebars/panel_spec.rb
deleted file mode 100644
index 0e539460810..00000000000
--- a/spec/models/sidebars/panel_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Panel do
- let(:context) { Sidebars::Context.new(current_user: nil, container: nil) }
- let(:panel) { Sidebars::Panel.new(context) }
- let(:menu1) { Sidebars::Menu.new(context) }
- let(:menu2) { Sidebars::Menu.new(context) }
-
- describe '#renderable_menus' do
- it 'returns only renderable menus' do
- panel.add_menu(menu1)
- panel.add_menu(menu2)
-
- allow(menu1).to receive(:render?).and_return(true)
- allow(menu2).to receive(:render?).and_return(false)
-
- expect(panel.renderable_menus).to eq([menu1])
- end
- end
-
- describe '#has_renderable_menus?' do
- it 'returns false when no renderable menus' do
- expect(panel.has_renderable_menus?).to be false
- end
-
- it 'returns true when no renderable menus' do
- panel.add_menu(menu1)
-
- expect(panel.has_renderable_menus?).to be true
- end
- end
-end
diff --git a/spec/models/sidebars/projects/menus/learn_gitlab/menu_spec.rb b/spec/models/sidebars/projects/menus/learn_gitlab/menu_spec.rb
deleted file mode 100644
index bc1815558d3..00000000000
--- a/spec/models/sidebars/projects/menus/learn_gitlab/menu_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::LearnGitlab::Menu do
- let(:project) { build(:project) }
- let(:experiment_enabled) { true }
- let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project, learn_gitlab_experiment_enabled: experiment_enabled) }
-
- subject { described_class.new(context) }
-
- it 'does not contain any sub menu' do
- expect(subject.instance_variable_get(:@items)).to be_empty
- end
-
- describe '#render?' do
- context 'when learn gitlab experiment is enabled' do
- it 'returns true' do
- expect(subject.render?).to eq true
- end
- end
-
- context 'when learn gitlab experiment is disabled' do
- let(:experiment_enabled) { false }
-
- it 'returns false' do
- expect(subject.render?).to eq false
- end
- end
- end
-end
diff --git a/spec/models/sidebars/projects/menus/project_overview/menu_items/releases_spec.rb b/spec/models/sidebars/projects/menus/project_overview/menu_items/releases_spec.rb
deleted file mode 100644
index db124c2252e..00000000000
--- a/spec/models/sidebars/projects/menus/project_overview/menu_items/releases_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::ProjectOverview::MenuItems::Releases do
- let_it_be(:project) { create(:project, :repository) }
-
- let(:user) { project.owner }
- let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
-
- subject { described_class.new(context) }
-
- describe '#render?' do
- context 'when project repository is empty' do
- it 'returns false' do
- allow(project).to receive(:empty_repo?).and_return(true)
-
- expect(subject.render?).to eq false
- end
- end
-
- context 'when project repository is not empty' do
- context 'when user can read releases' do
- it 'returns true' do
- expect(subject.render?).to eq true
- end
- end
-
- context 'when user cannot read releases' do
- let(:user) { nil }
-
- it 'returns false' do
- expect(subject.render?).to eq false
- end
- end
- end
- end
-end
diff --git a/spec/models/sidebars/projects/menus/project_overview/menu_spec.rb b/spec/models/sidebars/projects/menus/project_overview/menu_spec.rb
deleted file mode 100644
index 105a28ce953..00000000000
--- a/spec/models/sidebars/projects/menus/project_overview/menu_spec.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Menus::ProjectOverview::Menu do
- let(:project) { build(:project) }
- let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
-
- subject { described_class.new(context) }
-
- it 'has the required items' do
- items = subject.instance_variable_get(:@items)
-
- expect(items[0]).to be_a(Sidebars::Projects::Menus::ProjectOverview::MenuItems::Details)
- expect(items[1]).to be_a(Sidebars::Projects::Menus::ProjectOverview::MenuItems::Activity)
- expect(items[2]).to be_a(Sidebars::Projects::Menus::ProjectOverview::MenuItems::Releases)
- end
-end
diff --git a/spec/models/sidebars/projects/panel_spec.rb b/spec/models/sidebars/projects/panel_spec.rb
deleted file mode 100644
index bad9b17bc83..00000000000
--- a/spec/models/sidebars/projects/panel_spec.rb
+++ /dev/null
@@ -1,14 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Sidebars::Projects::Panel do
- let(:project) { build(:project) }
- let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
-
- subject { described_class.new(context) }
-
- it 'has a scope menu' do
- expect(subject.scope_menu).to be_a(Sidebars::Projects::Menus::Scope::Menu)
- end
-end
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index 09f9cf8e222..41991821922 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -826,18 +826,6 @@ RSpec.describe Snippet do
allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return(default_branch)
end
- context 'when default branch in settings is "master"' do
- let(:default_branch) { 'master' }
-
- it 'does nothing' do
- expect(File.read(head_path).squish).to eq 'ref: refs/heads/master'
-
- expect(snippet.repository.raw_repository).not_to receive(:write_ref)
-
- subject
- end
- end
-
context 'when default branch in settings is different from "master"' do
let(:default_branch) { 'main' }
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index 6a252b444f9..c3432907112 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Timelog do
- subject { build(:timelog) }
+ subject { create(:timelog) }
- let(:issue) { create(:issue) }
- let(:merge_request) { create(:merge_request) }
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:merge_request) { create(:merge_request) }
+ it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:issue).touch(true) }
it { is_expected.to belong_to(:merge_request).touch(true) }
@@ -16,6 +17,8 @@ RSpec.describe Timelog do
it { is_expected.to validate_presence_of(:time_spent) }
it { is_expected.to validate_presence_of(:user) }
+ it { expect(subject.project_id).not_to be_nil }
+
describe 'Issuable validation' do
it 'is invalid if issue_id and merge_request_id are missing' do
subject.attributes = { issue: nil, merge_request: nil }
@@ -51,27 +54,34 @@ RSpec.describe Timelog do
end
describe 'scopes' do
- describe 'for_issues_in_group' do
- it 'return timelogs created for group issues' do
- group = create(:group)
- subgroup = create(:group, parent: group)
-
- create(:issue_timelog)
- timelog1 = create(:issue_timelog, issue: create(:issue, project: create(:project, group: group)))
- timelog2 = create(:issue_timelog, issue: create(:issue, project: create(:project, group: subgroup)))
-
- expect(described_class.for_issues_in_group(group)).to contain_exactly(timelog1, timelog2)
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_project) { create(:project, :empty_repo, group: group) }
+ let_it_be(:group_issue) { create(:issue, project: group_project) }
+ let_it_be(:group_merge_request) { create(:merge_request, source_project: group_project) }
+
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subgroup_project) { create(:project, :empty_repo, group: subgroup) }
+ let_it_be(:subgroup_issue) { create(:issue, project: subgroup_project) }
+ let_it_be(:subgroup_merge_request) { create(:merge_request, source_project: subgroup_project) }
+
+ let_it_be(:timelog) { create(:issue_timelog, spent_at: 65.days.ago) }
+ let_it_be(:timelog1) { create(:issue_timelog, spent_at: 15.days.ago, issue: group_issue) }
+ let_it_be(:timelog2) { create(:issue_timelog, spent_at: 5.days.ago, issue: subgroup_issue) }
+ let_it_be(:timelog3) { create(:merge_request_timelog, spent_at: 65.days.ago) }
+ let_it_be(:timelog4) { create(:merge_request_timelog, spent_at: 15.days.ago, merge_request: group_merge_request) }
+ let_it_be(:timelog5) { create(:merge_request_timelog, spent_at: 5.days.ago, merge_request: subgroup_merge_request) }
+
+ describe 'in_group' do
+      it 'returns timelogs created for group issues and merge requests' do
+ expect(described_class.in_group(group)).to contain_exactly(timelog1, timelog2, timelog4, timelog5)
end
end
describe 'between_times' do
it 'returns collection of timelogs within given times' do
- create(:issue_timelog, spent_at: 65.days.ago)
- timelog1 = create(:issue_timelog, spent_at: 15.days.ago)
- timelog2 = create(:issue_timelog, spent_at: 5.days.ago)
- timelogs = described_class.between_times(20.days.ago, 1.day.ago)
+ timelogs = described_class.between_times(20.days.ago, 10.days.ago)
- expect(timelogs).to contain_exactly(timelog1, timelog2)
+ expect(timelogs).to contain_exactly(timelog1, timelog4)
end
end
end
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index c4146b347d7..caa0a886abf 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -452,11 +452,15 @@ RSpec.describe Todo do
end
end
- describe '.pluck_user_id' do
- subject { described_class.pluck_user_id }
+ describe '.distinct_user_ids' do
+ subject { described_class.distinct_user_ids }
- let_it_be(:todo) { create(:todo) }
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+    let_it_be(:todo1) { create(:todo, user: user1) }
+    let_it_be(:todo2) { create(:todo, user: user1) }
+    let_it_be(:todo3) { create(:todo, user: user2) }
- it { is_expected.to eq([todo.user_id]) }
+ it { is_expected.to contain_exactly(user1.id, user2.id) }
end
end
diff --git a/spec/models/user_preference_spec.rb b/spec/models/user_preference_spec.rb
index 27ddaea763d..5806f123871 100644
--- a/spec/models/user_preference_spec.rb
+++ b/spec/models/user_preference_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe UserPreference do
describe 'sort_by preferences' do
shared_examples_for 'a sort_by preference' do
it 'allows nil sort fields' do
- user_preference.update(attribute => nil)
+ user_preference.update!(attribute => nil)
expect(user_preference).to be_valid
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 3abf2a651a0..cb34917f073 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -20,6 +20,10 @@ RSpec.describe User do
it { is_expected.to include_module(AsyncDeviseEmail) }
end
+ describe 'constants' do
+ it { expect(described_class::COUNT_CACHE_VALIDITY_PERIOD).to be_a(Integer) }
+ end
+
describe 'delegations' do
it { is_expected.to delegate_method(:path).to(:namespace).with_prefix }
@@ -79,6 +83,7 @@ RSpec.describe User do
it { is_expected.to have_one(:user_detail) }
it { is_expected.to have_one(:atlassian_identity) }
it { is_expected.to have_one(:user_highest_role) }
+ it { is_expected.to have_one(:credit_card_validation) }
it { is_expected.to have_many(:snippets).dependent(:destroy) }
it { is_expected.to have_many(:members) }
it { is_expected.to have_many(:project_members) }
@@ -132,7 +137,7 @@ RSpec.describe User do
it 'creates `user_detail` when `bio` is first updated' do
user = create(:user)
- expect { user.update(bio: 'my bio') }.to change { user.user_detail.persisted? }.from(false).to(true)
+ expect { user.update!(bio: 'my bio') }.to change { user.user_detail.persisted? }.from(false).to(true)
end
end
@@ -655,9 +660,10 @@ RSpec.describe User do
it 'does not accept not verified emails' do
email = create(:email)
user = email.user
- user.update(notification_email: email.email)
+ user.notification_email = email.email
expect(user).to be_invalid
+ expect(user.errors[:notification_email]).to include('is not an email you own')
end
end
@@ -665,7 +671,7 @@ RSpec.describe User do
it 'accepts verified emails' do
email = create(:email, :confirmed, email: 'test@test.com')
user = email.user
- user.update(public_email: email.email)
+ user.notification_email = email.email
expect(user).to be_valid
end
@@ -673,9 +679,10 @@ RSpec.describe User do
it 'does not accept not verified emails' do
email = create(:email)
user = email.user
- user.update(public_email: email.email)
+ user.public_email = email.email
expect(user).to be_invalid
+ expect(user.errors[:public_email]).to include('is not an email you own')
end
end
@@ -721,6 +728,7 @@ RSpec.describe User do
let_it_be(:blocked_user) { create(:user, :blocked) }
let_it_be(:ldap_blocked_user) { create(:omniauth_user, :ldap_blocked) }
let_it_be(:blocked_pending_approval_user) { create(:user, :blocked_pending_approval) }
+ let_it_be(:banned_user) { create(:user, :banned) }
describe '.blocked' do
subject { described_class.blocked }
@@ -731,7 +739,7 @@ RSpec.describe User do
ldap_blocked_user
)
- expect(subject).not_to include(active_user, blocked_pending_approval_user)
+ expect(subject).not_to include(active_user, blocked_pending_approval_user, banned_user)
end
end
@@ -742,6 +750,14 @@ RSpec.describe User do
expect(subject).to contain_exactly(blocked_pending_approval_user)
end
end
+
+ describe '.banned' do
+ subject { described_class.banned }
+
+ it 'returns only banned users' do
+ expect(subject).to contain_exactly(banned_user)
+ end
+ end
end
describe ".with_two_factor" do
@@ -1056,6 +1072,21 @@ RSpec.describe User do
.to contain_exactly(user)
end
end
+
+ describe '.for_todos' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+
+ let_it_be(:todo1) { create(:todo, target: issue, author: user1, user: user1) }
+ let_it_be(:todo2) { create(:todo, target: issue, author: user1, user: user1) }
+ let_it_be(:todo3) { create(:todo, target: issue, author: user2, user: user2) }
+
+ it 'returns users for the given todos' do
+ expect(described_class.for_todos(issue.todos))
+ .to contain_exactly(user1, user2)
+ end
+ end
end
describe "Respond to" do
@@ -1274,7 +1305,7 @@ RSpec.describe User do
let(:secondary) { create(:email, :confirmed, email: 'secondary@example.com', user: user) }
before do
- user.emails.create(email_attrs)
+ user.emails.create!(email_attrs)
user.tap { |u| u.update!(notification_email: email_attrs[:email]) }.reload
end
@@ -1366,6 +1397,26 @@ RSpec.describe User do
end
end
+ describe '#credit_card_validated_at' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when credit_card_validation does not exist' do
+ it 'returns nil' do
+ expect(user.credit_card_validated_at).to be nil
+ end
+ end
+
+ context 'when credit_card_validation exists' do
+ it 'returns the credit card validated time' do
+ credit_card_validated_time = Time.current - 1.day
+
+ create(:credit_card_validation, credit_card_validated_at: credit_card_validated_time, user: user)
+
+ expect(user.credit_card_validated_at).to eq(credit_card_validated_time)
+ end
+ end
+ end
+
describe '#update_tracked_fields!', :clean_gitlab_redis_shared_state do
let(:request) { OpenStruct.new(remote_ip: "127.0.0.1") }
let(:user) { create(:user) }
@@ -1428,7 +1479,7 @@ RSpec.describe User do
let!(:accessible_deploy_keys_project) { create(:deploy_keys_project, project: project) }
before do
- public_deploy_keys_project.deploy_key.update(public: true)
+ public_deploy_keys_project.deploy_key.update!(public: true)
project.add_developer(user)
end
@@ -1518,13 +1569,13 @@ RSpec.describe User do
it 'receives callback when external changes' do
expect(user).to receive(:ensure_user_rights_and_limits)
- user.update(external: false)
+ user.update!(external: false)
end
it 'ensures correct rights and limits for user' do
stub_config_setting(default_can_create_group: true)
- expect { user.update(external: false) }.to change { user.can_create_group }.to(true)
+ expect { user.update!(external: false) }.to change { user.can_create_group }.to(true)
.and change { user.projects_limit }.to(Gitlab::CurrentSettings.default_projects_limit)
end
end
@@ -1535,11 +1586,11 @@ RSpec.describe User do
it 'receives callback when external changes' do
expect(user).to receive(:ensure_user_rights_and_limits)
- user.update(external: true)
+ user.update!(external: true)
end
it 'ensures correct rights and limits for user' do
- expect { user.update(external: true) }.to change { user.can_create_group }.to(false)
+ expect { user.update!(external: true) }.to change { user.can_create_group }.to(false)
.and change { user.projects_limit }.to(0)
end
end
@@ -1892,6 +1943,12 @@ RSpec.describe User do
expect(described_class.filter_items('blocked')).to include user
end
+ it 'filters by banned' do
+ expect(described_class).to receive(:banned).and_return([user])
+
+ expect(described_class.filter_items('banned')).to include user
+ end
+
it 'filters by blocked pending approval' do
expect(described_class).to receive(:blocked_pending_approval).and_return([user])
@@ -2435,7 +2492,7 @@ RSpec.describe User do
end
context 'with a redirect route matching the given path' do
- let!(:redirect_route) { user.namespace.redirect_routes.create(path: 'foo') }
+ let!(:redirect_route) { user.namespace.redirect_routes.create!(path: 'foo') }
context 'without the follow_redirects option' do
it 'returns nil' do
@@ -2511,8 +2568,9 @@ RSpec.describe User do
it 'is false if avatar is html page' do
user.update_attribute(:avatar, 'uploads/avatar.html')
+ user.avatar_type
- expect(user.avatar_type).to eq(['file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp'])
+ expect(user.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
end
end
@@ -2535,7 +2593,7 @@ RSpec.describe User do
expect(Gitlab::AvatarCache).to receive(:delete_by_email).with(*user.verified_emails)
- user.update(avatar: fixture_file_upload('spec/fixtures/dk.png'))
+ user.update!(avatar: fixture_file_upload('spec/fixtures/dk.png'))
end
end
@@ -2849,12 +2907,12 @@ RSpec.describe User do
expect(user.starred?(project1)).to be_truthy
expect(user.starred?(project2)).to be_truthy
- star1.destroy
+ star1.destroy!
expect(user.starred?(project1)).to be_falsey
expect(user.starred?(project2)).to be_truthy
- star2.destroy
+ star2.destroy!
expect(user.starred?(project1)).to be_falsey
expect(user.starred?(project2)).to be_falsey
@@ -3404,7 +3462,7 @@ RSpec.describe User do
expect(user.authorized_projects).to include(project)
- member.destroy
+ member.destroy!
expect(user.authorized_projects).not_to include(project)
end
@@ -3429,7 +3487,7 @@ RSpec.describe User do
expect(user2.authorized_projects).to include(project)
- project.destroy
+ project.destroy!
expect(user2.authorized_projects).not_to include(project)
end
@@ -3443,7 +3501,7 @@ RSpec.describe User do
expect(user.authorized_projects).to include(project)
- group.destroy
+ group.destroy!
expect(user.authorized_projects).not_to include(project)
end
@@ -4200,16 +4258,45 @@ RSpec.describe User do
end
describe '#invalidate_issue_cache_counts' do
- let(:user) { build_stubbed(:user) }
+ let_it_be(:user) { create(:user) }
- it 'invalidates cache for issue counter' do
- cache_mock = double
+ subject do
+ user.invalidate_issue_cache_counts
+ user.save!
+ end
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_issues_count'])
+ shared_examples 'invalidates the cached value' do
+ it 'invalidates cache for issue counter' do
+ expect(Rails.cache).to receive(:delete).with(['users', user.id, 'assigned_open_issues_count'])
- allow(Rails).to receive(:cache).and_return(cache_mock)
+ subject
+ end
+ end
- user.invalidate_issue_cache_counts
+ it_behaves_like 'invalidates the cached value'
+
+ context 'if feature flag assigned_open_issues_cache is enabled' do
+ it 'calls the recalculate worker' do
+ expect(Users::UpdateOpenIssueCountWorker).to receive(:perform_async).with(user.id)
+
+ subject
+ end
+
+ it_behaves_like 'invalidates the cached value'
+ end
+
+ context 'if feature flag assigned_open_issues_cache is disabled' do
+ before do
+ stub_feature_flags(assigned_open_issues_cache: false)
+ end
+
+ it 'does not call the recalculate worker' do
+ expect(Users::UpdateOpenIssueCountWorker).not_to receive(:perform_async).with(user.id)
+
+ subject
+ end
+
+ it_behaves_like 'invalidates the cached value'
end
end
@@ -4414,9 +4501,10 @@ RSpec.describe User do
end
it 'adds the namespace errors to the user' do
- user.update(username: new_username)
+ user.username = new_username
- expect(user.errors.full_messages.first).to eq('A user, alias, or group already exists with that username.')
+ expect(user).to be_invalid
+ expect(user.errors[:base]).to include('A user, alias, or group already exists with that username.')
end
end
end
@@ -5238,6 +5326,26 @@ RSpec.describe User do
end
end
+ describe 'user credit card validation' do
+ context 'when user is initialized' do
+ let(:user) { build(:user) }
+
+ it { expect(user.credit_card_validation).not_to be_present }
+ end
+
+    context 'when a user is created without credit card validation' do
+ let(:user) { create(:user) }
+
+ it { expect(user.credit_card_validation).not_to be_present }
+ end
+
+ context 'when user credit card validation exists' do
+ let(:user) { create(:user, :with_credit_card_validation) }
+
+ it { expect(user.credit_card_validation).to be_persisted }
+ end
+ end
+
describe 'user detail' do
context 'when user is initialized' do
let(:user) { build(:user) }
@@ -5307,21 +5415,21 @@ RSpec.describe User do
with_them do
context 'when state was changed' do
- subject { user.update(attributes) }
+ subject { user.update!(attributes) }
include_examples 'update highest role with exclusive lease'
end
end
context 'when state was not changed' do
- subject { user.update(email: 'newmail@example.com') }
+ subject { user.update!(email: 'newmail@example.com') }
include_examples 'does not update the highest role'
end
end
describe 'destroy user' do
- subject { user.destroy }
+ subject { user.destroy! }
include_examples 'does not update the highest role'
end
@@ -5343,7 +5451,7 @@ RSpec.describe User do
context 'when user is a ghost user' do
before do
- user.update(user_type: :ghost)
+ user.update!(user_type: :ghost)
end
it { is_expected.to be false }
@@ -5361,7 +5469,7 @@ RSpec.describe User do
with_them do
before do
- user.update(user_type: user_type)
+ user.update!(user_type: user_type)
end
it { is_expected.to be expected_result }
@@ -5384,7 +5492,7 @@ RSpec.describe User do
context 'when user is an internal user' do
before do
- user.update(user_type: :ghost)
+ user.update!(user_type: :ghost)
end
it { is_expected.to be :forbidden }
@@ -5418,7 +5526,7 @@ RSpec.describe User do
context 'when user is an internal user' do
before do
- user.update(user_type: 'alert_bot')
+ user.update!(user_type: 'alert_bot')
end
it_behaves_like 'does not require password to be present'
@@ -5426,7 +5534,7 @@ RSpec.describe User do
context 'when user is a project bot user' do
before do
- user.update(user_type: 'project_bot')
+ user.update!(user_type: 'project_bot')
end
it_behaves_like 'does not require password to be present'
@@ -5600,4 +5708,47 @@ RSpec.describe User do
end
end
end
+
+ describe '.dormant' do
+ it 'returns dormant users' do
+ freeze_time do
+ not_that_long_ago = (described_class::MINIMUM_INACTIVE_DAYS - 1).days.ago.to_date
+ too_long_ago = described_class::MINIMUM_INACTIVE_DAYS.days.ago.to_date
+
+ create(:user, :deactivated, last_activity_on: too_long_ago)
+
+ User::INTERNAL_USER_TYPES.map do |user_type|
+ create(:user, state: :active, user_type: user_type, last_activity_on: too_long_ago)
+ end
+
+ create(:user, last_activity_on: not_that_long_ago)
+
+ dormant_user = create(:user, last_activity_on: too_long_ago)
+
+ expect(described_class.dormant).to contain_exactly(dormant_user)
+ end
+ end
+ end
+
+ describe '.with_no_activity' do
+ it 'returns users with no activity' do
+ freeze_time do
+ not_that_long_ago = (described_class::MINIMUM_INACTIVE_DAYS - 1).days.ago.to_date
+ too_long_ago = described_class::MINIMUM_INACTIVE_DAYS.days.ago.to_date
+
+ create(:user, :deactivated, last_activity_on: nil)
+
+ User::INTERNAL_USER_TYPES.map do |user_type|
+ create(:user, state: :active, user_type: user_type, last_activity_on: nil)
+ end
+
+ create(:user, last_activity_on: not_that_long_ago)
+ create(:user, last_activity_on: too_long_ago)
+
+ user_with_no_activity = create(:user, last_activity_on: nil)
+
+ expect(described_class.with_no_activity).to contain_exactly(user_with_no_activity)
+ end
+ end
+ end
end
diff --git a/spec/models/user_status_spec.rb b/spec/models/user_status_spec.rb
index 51dd91149cc..87d1fa14aca 100644
--- a/spec/models/user_status_spec.rb
+++ b/spec/models/user_status_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe UserStatus do
it 'is expected to be deleted when the user is deleted' do
status = create(:user_status)
- expect { status.user.destroy }.to change { described_class.count }.from(1).to(0)
+ expect { status.user.destroy! }.to change { described_class.count }.from(1).to(0)
end
describe '#clear_status_after=' do
diff --git a/spec/models/users/credit_card_validation_spec.rb b/spec/models/users/credit_card_validation_spec.rb
new file mode 100644
index 00000000000..fb9f6e35038
--- /dev/null
+++ b/spec/models/users/credit_card_validation_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::CreditCardValidation do
+ it { is_expected.to belong_to(:user) }
+end
diff --git a/spec/models/wiki_page/meta_spec.rb b/spec/models/wiki_page/meta_spec.rb
index 24906d4fb79..37a282657d9 100644
--- a/spec/models/wiki_page/meta_spec.rb
+++ b/spec/models/wiki_page/meta_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe WikiPage::Meta do
subject { described_class.find(meta.id) }
let_it_be(:meta) do
- described_class.create(title: generate(:wiki_page_title), project: project)
+ described_class.create!(title: generate(:wiki_page_title), project: project)
end
context 'there are no slugs' do
@@ -124,6 +124,7 @@ RSpec.describe WikiPage::Meta do
context 'the slug is already in the DB (but not canonical)' do
let_it_be(:slug_record) { create(:wiki_page_slug, wiki_page_meta: meta) }
+
let(:slug) { slug_record.slug }
let(:query_limit) { 4 }
@@ -132,6 +133,7 @@ RSpec.describe WikiPage::Meta do
context 'the slug is already in the DB (and canonical)' do
let_it_be(:slug_record) { create(:wiki_page_slug, :canonical, wiki_page_meta: meta) }
+
let(:slug) { slug_record.slug }
let(:query_limit) { 4 }
@@ -181,7 +183,7 @@ RSpec.describe WikiPage::Meta do
# an old slug that = canonical_slug
different_slug = generate(:sluggified_title)
create(:wiki_page_meta, project: project, canonical_slug: different_slug)
- .slugs.create(slug: wiki_page.slug)
+ .slugs.create!(slug: wiki_page.slug)
end
shared_examples 'metadata examples' do
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index be94eca550c..579a9e664cf 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -620,16 +620,12 @@ RSpec.describe WikiPage do
end
describe "#versions" do
- include_context 'subject is persisted page'
+ let(:subject) { create_wiki_page }
it "returns an array of all commits for the page" do
- 3.times { |i| subject.update(content: "content #{i}") }
-
- expect(subject.versions.count).to eq(4)
- end
-
- it 'returns instances of WikiPageVersion' do
- expect(subject.versions).to all( be_a(Gitlab::Git::WikiPageVersion) )
+ expect do
+ 3.times { |i| subject.update(content: "content #{i}") }
+ end.to change { subject.versions.count }.by(3)
end
end
@@ -640,6 +636,7 @@ RSpec.describe WikiPage do
let_it_be(:existing_page) { create_wiki_page(title: 'test page') }
let_it_be(:directory_page) { create_wiki_page(title: 'parent directory/child page') }
let_it_be(:page_with_special_characters) { create_wiki_page(title: 'test+page') }
+
let(:untitled_page) { described_class.new(wiki) }
where(:page, :title, :changed) do
@@ -776,8 +773,11 @@ RSpec.describe WikiPage do
end
describe '#historical?' do
- include_context 'subject is persisted page'
+ let!(:container) { create(:project) }
+
+ subject { create_wiki_page }
+ let(:wiki) { subject.wiki }
let(:old_version) { subject.versions.last.id }
let(:old_page) { wiki.find_page(subject.title, old_version) }
let(:latest_version) { subject.versions.first.id }
diff --git a/spec/policies/service_policy_spec.rb b/spec/policies/integration_policy_spec.rb
index 84c74ca7e31..d490045c1e1 100644
--- a/spec/policies/service_policy_spec.rb
+++ b/spec/policies/integration_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ServicePolicy, :models do
+RSpec.describe IntegrationPolicy, :models do
let_it_be(:user) { create(:user) }
let(:project) { integration.project }
diff --git a/spec/policies/project_member_policy_spec.rb b/spec/policies/project_member_policy_spec.rb
index ab8f8b83e7f..aebbe685bb3 100644
--- a/spec/policies/project_member_policy_spec.rb
+++ b/spec/policies/project_member_policy_spec.rb
@@ -16,12 +16,22 @@ RSpec.describe ProjectMemberPolicy do
context 'with regular member' do
let(:member_user) { create(:user) }
+ it { is_expected.to be_allowed(:read_project) }
it { is_expected.to be_allowed(:update_project_member) }
it { is_expected.to be_allowed(:destroy_project_member) }
it { is_expected.not_to be_allowed(:destroy_project_bot_member) }
end
+ context 'when user is project owner' do
+ let(:member_user) { project.owner }
+ let(:member) { project.members.find_by!(user: member_user) }
+
+ it { is_expected.to be_allowed(:read_project) }
+ it { is_expected.to be_disallowed(:update_project_member) }
+ it { is_expected.to be_disallowed(:destroy_project_member) }
+ end
+
context 'with a bot member' do
let(:member_user) { create(:user, :project_bot) }
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index f2c941080b5..46da42a4787 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe ProjectPolicy do
end
it 'does not include the issues permissions' do
- expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue
+ expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue, :create_incident
end
it 'disables boards and lists permissions' do
@@ -72,7 +72,7 @@ RSpec.describe ProjectPolicy do
it 'does not include the issues permissions' do
create(:jira_service, project: project)
- expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue
+ expect_disallowed :read_issue, :read_issue_iid, :create_issue, :update_issue, :admin_issue, :create_incident
end
end
end
@@ -393,6 +393,34 @@ RSpec.describe ProjectPolicy do
end
end
+ describe 'read_storage_disk_path' do
+ context 'when no user' do
+ let(:current_user) { anonymous }
+
+ it { expect_disallowed(:read_storage_disk_path) }
+ end
+
+ context 'admin' do
+ let(:current_user) { admin }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { expect_allowed(:read_storage_disk_path) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { expect_disallowed(:read_storage_disk_path) }
+ end
+ end
+
+ %w(guest reporter developer maintainer owner).each do |role|
+ context role do
+ let(:current_user) { send(role) }
+
+ it { expect_disallowed(:read_storage_disk_path) }
+ end
+ end
+ end
+
context 'alert bot' do
let(:current_user) { User.alert_bot }
@@ -892,6 +920,8 @@ RSpec.describe ProjectPolicy do
end
describe 'design permissions' do
+ include DesignManagementTestHelpers
+
let(:current_user) { guest }
let(:design_permissions) do
@@ -899,12 +929,14 @@ RSpec.describe ProjectPolicy do
end
context 'when design management is not available' do
+ before do
+ enable_design_management(false)
+ end
+
it { is_expected.not_to be_allowed(*design_permissions) }
end
context 'when design management is available' do
- include DesignManagementTestHelpers
-
before do
enable_design_management
end
diff --git a/spec/presenters/alert_management/alert_presenter_spec.rb b/spec/presenters/alert_management/alert_presenter_spec.rb
index 243301502ce..21c0cb3fead 100644
--- a/spec/presenters/alert_management/alert_presenter_spec.rb
+++ b/spec/presenters/alert_management/alert_presenter_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe AlertManagement::AlertPresenter do
end
let_it_be(:alert) { create(:alert_management_alert, project: project, payload: payload) }
+
let(:alert_url) { "http://localhost/#{project.full_path}/-/alert_management/#{alert.iid}/details" }
subject(:presenter) { described_class.new(alert) }
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index 47402fea2b5..38bdf3b9364 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -2,52 +2,96 @@
require 'spec_helper'
-RSpec.describe BlobPresenter, :seed_helper do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
-
- let(:git_blob) do
- Gitlab::Git::Blob.find(
- repository,
- 'fa1b1e6c004a68b7d8763b86455da9e6b23e36d6',
- 'files/ruby/regex.rb'
- )
+RSpec.describe BlobPresenter do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { project.owner }
+
+ let(:repository) { project.repository }
+ let(:blob) { repository.blob_at('HEAD', 'files/ruby/regex.rb') }
+
+ subject(:presenter) { described_class.new(blob, current_user: user) }
+
+ describe '#web_url' do
+ it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
+ end
+
+ describe '#web_path' do
+ it { expect(presenter.web_path).to eq("/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
end
- let(:blob) { Blob.new(git_blob) }
+ describe '#edit_blob_path' do
+ it { expect(presenter.edit_blob_path).to eq("/#{project.full_path}/-/edit/#{blob.commit_id}/#{blob.path}") }
+ end
- describe '.web_url' do
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
- let(:blob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.tree.blobs.first, repository) }
+ describe '#raw_path' do
+ it { expect(presenter.raw_path).to eq("/#{project.full_path}/-/raw/#{blob.commit_id}/#{blob.path}") }
+ end
- subject { described_class.new(blob) }
+ describe '#replace_path' do
+ it { expect(presenter.replace_path).to eq("/#{project.full_path}/-/create/#{blob.commit_id}/#{blob.path}") }
+ end
- it { expect(subject.web_url).to eq("http://localhost/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
+ describe '#ide_edit_path' do
+ it { expect(presenter.ide_edit_path).to eq("/-/ide/project/#{project.full_path}/edit/HEAD/-/files/ruby/regex.rb") }
end
- describe '#web_path' do
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
- let(:blob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.tree.blobs.first, repository) }
+ describe '#fork_and_edit_path' do
+ it 'generates expected URI + query' do
+ uri = URI.parse(presenter.fork_and_edit_path)
+ query = Rack::Utils.parse_query(uri.query)
+
+ expect(uri.path).to eq("/#{project.full_path}/-/forks")
+ expect(query).to include('continue[to]' => presenter.edit_blob_path, 'namespace_key' => user.namespace_id.to_s)
+ end
- subject { described_class.new(blob) }
+ context 'current_user is nil' do
+ let(:user) { nil }
- it { expect(subject.web_path).to eq("/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
+ it { expect(presenter.fork_and_edit_path).to be_nil }
+ end
+ end
+
+ describe '#ide_fork_and_edit_path' do
+ it 'generates expected URI + query' do
+ uri = URI.parse(presenter.ide_fork_and_edit_path)
+ query = Rack::Utils.parse_query(uri.query)
+
+ expect(uri.path).to eq("/#{project.full_path}/-/forks")
+ expect(query).to include('continue[to]' => presenter.ide_edit_path, 'namespace_key' => user.namespace_id.to_s)
+ end
+
+ context 'current_user is nil' do
+ let(:user) { nil }
+
+ it { expect(presenter.ide_fork_and_edit_path).to be_nil }
+ end
+ end
+
+ context 'given a Gitlab::Graphql::Representation::TreeEntry' do
+ let(:blob) { Gitlab::Graphql::Representation::TreeEntry.new(super(), repository) }
+
+ describe '#web_url' do
+ it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
+ end
+
+ describe '#web_path' do
+ it { expect(presenter.web_path).to eq("/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
+ end
end
describe '#highlight' do
- subject { described_class.new(blob) }
+ let(:git_blob) { blob.__getobj__ }
it 'returns highlighted content' do
expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: nil)
- subject.highlight
+ presenter.highlight
end
it 'returns plain content when :plain is true' do
expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: true, language: nil)
- subject.highlight(plain: true)
+ presenter.highlight(plain: true)
end
context '"to" param is present' do
@@ -60,7 +104,7 @@ RSpec.describe BlobPresenter, :seed_helper do
it 'returns limited highlighted content' do
expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', "line one\n", plain: nil, language: nil)
- subject.highlight(to: 1)
+ presenter.highlight(to: 1)
end
end
@@ -72,7 +116,46 @@ RSpec.describe BlobPresenter, :seed_helper do
it 'passes language to inner call' do
expect(Gitlab::Highlight).to receive(:highlight).with('files/ruby/regex.rb', git_blob.data, plain: nil, language: 'ruby')
- subject.highlight
+ presenter.highlight
+ end
+ end
+ end
+
+ describe '#plain_data' do
+ let(:blob) { repository.blob_at('HEAD', file) }
+
+ subject { described_class.new(blob).plain_data }
+
+ context 'when blob is binary' do
+ let(:file) { 'files/images/logo-black.png' }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when blob is markup' do
+ let(:file) { 'README.md' }
+
+ it 'returns plain content' do
+ expect(subject).to include('<span id="LC1" class="line" lang="markdown">')
+ end
+ end
+
+ context 'when blob has syntax' do
+ let(:file) { 'files/ruby/regex.rb' }
+
+ it 'returns highlighted syntax content' do
+ expect(subject)
+ .to include '<span id="LC1" class="line" lang="ruby"><span class="k">module</span> <span class="nn">Gitlab</span>'
+ end
+ end
+
+ context 'when blob has plain data' do
+ let(:file) { 'LICENSE' }
+
+ it 'returns plain text highlighted content' do
+ expect(subject).to include('<span id="LC1" class="line" lang="plaintext">The MIT License (MIT)</span>')
end
end
end
diff --git a/spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb b/spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb
index 06d5422eed3..94a743d4d89 100644
--- a/spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_artifacts/code_quality_mr_diff_presenter_spec.rb
@@ -4,11 +4,12 @@ require 'spec_helper'
RSpec.describe Ci::PipelineArtifacts::CodeQualityMrDiffPresenter do
let(:pipeline_artifact) { create(:ci_pipeline_artifact, :with_codequality_mr_diff_report) }
+ let(:merge_request) { double(id: 123456789, new_paths: filenames) }
subject(:presenter) { described_class.new(pipeline_artifact) }
describe '#for_files' do
- subject(:quality_data) { presenter.for_files(filenames) }
+ subject(:quality_data) { presenter.for_files(merge_request) }
context 'when code quality has data' do
context 'when filenames is empty' do
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index 5cb9d340e06..2d3c0d85eda 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Ci::PipelinePresenter do
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:project) { create(:project, :test_repo) }
let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
+
let(:current_user) { user }
subject(:presenter) do
@@ -246,6 +247,7 @@ RSpec.describe Ci::PipelinePresenter do
context 'permissions' do
let_it_be_with_refind(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project) }
+
let(:pipeline) { merge_request.all_pipelines.take }
shared_examples 'private merge requests' do
diff --git a/spec/presenters/group_member_presenter_spec.rb b/spec/presenters/group_member_presenter_spec.rb
index 6bd3005fbb6..352f81356e0 100644
--- a/spec/presenters/group_member_presenter_spec.rb
+++ b/spec/presenters/group_member_presenter_spec.rb
@@ -142,7 +142,7 @@ RSpec.describe GroupMemberPresenter do
let(:expected_roles) { { 'Developer' => 30, 'Maintainer' => 40, 'Owner' => 50, 'Reporter' => 20 } }
before do
- entity.parent = group
+ entity.update!(parent: group)
end
end
end
diff --git a/spec/presenters/label_presenter_spec.rb b/spec/presenters/label_presenter_spec.rb
index 44c68a6102f..bab0d9a1065 100644
--- a/spec/presenters/label_presenter_spec.rb
+++ b/spec/presenters/label_presenter_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe LabelPresenter do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
+
let(:label) { build_stubbed(:label, project: project).present(issuable_subject: project) }
let(:group_label) { build_stubbed(:group_label, group: group).present(issuable_subject: project) }
diff --git a/spec/presenters/packages/composer/packages_presenter_spec.rb b/spec/presenters/packages/composer/packages_presenter_spec.rb
index c1d8c9816a6..1f638e5b935 100644
--- a/spec/presenters/packages/composer/packages_presenter_spec.rb
+++ b/spec/presenters/packages/composer/packages_presenter_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe ::Packages::Composer::PackagesPresenter do
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+
let!(:package1) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
let!(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
diff --git a/spec/presenters/packages/conan/package_presenter_spec.rb b/spec/presenters/packages/conan/package_presenter_spec.rb
index dad9460c8eb..6d82c5ef547 100644
--- a/spec/presenters/packages/conan/package_presenter_spec.rb
+++ b/spec/presenters/packages/conan/package_presenter_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe ::Packages::Conan::PackagePresenter do
let_it_be(:package) { create(:conan_package) }
let_it_be(:project) { package.project }
let_it_be(:conan_package_reference) { '123456789'}
+
let(:params) { { package_scope: :instance } }
shared_examples 'no existing package' do
diff --git a/spec/presenters/packages/detail/package_presenter_spec.rb b/spec/presenters/packages/detail/package_presenter_spec.rb
index 4c3e0228583..d8f1c98e762 100644
--- a/spec/presenters/packages/detail/package_presenter_spec.rb
+++ b/spec/presenters/packages/detail/package_presenter_spec.rb
@@ -50,6 +50,7 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
name: package.name,
package_files: expected_package_files,
package_type: package.package_type,
+ status: package.status,
project_id: package.project_id,
tags: package.tags.as_json,
updated_at: package.updated_at,
@@ -125,6 +126,7 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
context 'with nuget_metadatum' do
let_it_be(:package) { create(:nuget_package, project: project) }
let_it_be(:nuget_metadatum) { create(:nuget_metadatum, package: package) }
+
let(:expected_package_details) { super().merge(nuget_metadatum: nuget_metadatum) }
it 'returns nuget_metadatum' do
diff --git a/spec/presenters/packages/npm/package_presenter_spec.rb b/spec/presenters/packages/npm/package_presenter_spec.rb
index 0e8cda5bafd..e524edaadc6 100644
--- a/spec/presenters/packages/npm/package_presenter_spec.rb
+++ b/spec/presenters/packages/npm/package_presenter_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ::Packages::Npm::PackagePresenter do
let_it_be(:project) { create(:project) }
let_it_be(:package_name) { "@#{project.root_namespace.path}/test" }
+
let!(:package1) { create(:npm_package, version: '1.0.4', project: project, name: package_name) }
let!(:package2) { create(:npm_package, version: '1.0.6', project: project, name: package_name) }
let!(:latest_package) { create(:npm_package, version: '1.0.11', project: project, name: package_name) }
@@ -16,8 +17,8 @@ RSpec.describe ::Packages::Npm::PackagePresenter do
context 'for packages without dependencies' do
it { is_expected.to be_a(Hash) }
- it { expect(subject[package1.version]).to match_schema('public_api/v4/packages/npm_package_version') }
- it { expect(subject[package2.version]).to match_schema('public_api/v4/packages/npm_package_version') }
+ it { expect(subject[package1.version].with_indifferent_access).to match_schema('public_api/v4/packages/npm_package_version') }
+ it { expect(subject[package2.version].with_indifferent_access).to match_schema('public_api/v4/packages/npm_package_version') }
described_class::NPM_VALID_DEPENDENCY_TYPES.each do |dependency_type|
it { expect(subject.dig(package1.version, dependency_type)).to be nil }
@@ -31,8 +32,8 @@ RSpec.describe ::Packages::Npm::PackagePresenter do
end
it { is_expected.to be_a(Hash) }
- it { expect(subject[package1.version]).to match_schema('public_api/v4/packages/npm_package_version') }
- it { expect(subject[package2.version]).to match_schema('public_api/v4/packages/npm_package_version') }
+ it { expect(subject[package1.version].with_indifferent_access).to match_schema('public_api/v4/packages/npm_package_version') }
+ it { expect(subject[package2.version].with_indifferent_access).to match_schema('public_api/v4/packages/npm_package_version') }
described_class::NPM_VALID_DEPENDENCY_TYPES.each do |dependency_type|
it { expect(subject.dig(package1.version, dependency_type.to_s)).to be_any }
end
diff --git a/spec/presenters/packages/nuget/search_results_presenter_spec.rb b/spec/presenters/packages/nuget/search_results_presenter_spec.rb
index fed20c8e39d..39ec7251dfd 100644
--- a/spec/presenters/packages/nuget/search_results_presenter_spec.rb
+++ b/spec/presenters/packages/nuget/search_results_presenter_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Packages::Nuget::SearchResultsPresenter do
let_it_be(:packages_c) { create_list(:nuget_package, 5, project: project, name: 'DummyPackageC') }
let_it_be(:search_results) { OpenStruct.new(total_count: 3, results: [package_a, packages_b, packages_c].flatten) }
let_it_be(:presenter) { described_class.new(search_results) }
+
let(:total_count) { presenter.total_count }
let(:data) { presenter.data }
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index a9a5ecb3299..c834e183e5e 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -72,6 +72,7 @@ RSpec.describe ProjectPresenter do
context 'when repository is not empty' do
let_it_be(:project) { create(:project, :public, :repository) }
+
let(:release) { create(:release, project: project, author: user) }
it 'returns files and readme if user has repository access' do
@@ -271,6 +272,7 @@ RSpec.describe ProjectPresenter do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:release) { create(:release, project: project, author: user) }
+
let(:presenter) { described_class.new(project, current_user: user) }
describe '#files_anchor_data' do
@@ -792,6 +794,12 @@ RSpec.describe ProjectPresenter do
end
end
+ describe '#add_code_quality_ci_yml_path' do
+ subject { presenter.add_code_quality_ci_yml_path }
+
+ it { is_expected.to match(/code_quality_walkthrough=true.*template=Code-Quality/) }
+ end
+
describe 'empty_repo_upload_experiment?' do
subject { presenter.empty_repo_upload_experiment? }
diff --git a/spec/presenters/prometheus_alert_presenter_spec.rb b/spec/presenters/prometheus_alert_presenter_spec.rb
index b9f18e2be28..739bbaeb98f 100644
--- a/spec/presenters/prometheus_alert_presenter_spec.rb
+++ b/spec/presenters/prometheus_alert_presenter_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe PrometheusAlertPresenter do
subject { presenter.humanized_text }
let_it_be(:prometheus_metric) { create(:prometheus_metric, project: project) }
+
let(:prometheus_alert) { create(:prometheus_alert, operator: operator, project: project, environment: environment, prometheus_metric: prometheus_metric) }
let(:operator) { :gt }
diff --git a/spec/presenters/release_presenter_spec.rb b/spec/presenters/release_presenter_spec.rb
index 4bf12183eff..97c05a1b7a2 100644
--- a/spec/presenters/release_presenter_spec.rb
+++ b/spec/presenters/release_presenter_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ReleasePresenter do
include Gitlab::Routing.url_helpers
let_it_be(:project) { create(:project, :repository) }
+
let(:developer) { create(:user) }
let(:guest) { create(:user) }
let(:user) { developer }
diff --git a/spec/presenters/service_hook_presenter_spec.rb b/spec/presenters/service_hook_presenter_spec.rb
index adef34a882b..bc6e505d9e1 100644
--- a/spec/presenters/service_hook_presenter_spec.rb
+++ b/spec/presenters/service_hook_presenter_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe ServiceHookPresenter do
let(:web_hook_log) { create(:web_hook_log, web_hook: service_hook) }
- let(:service_hook) { create(:service_hook, service: service) }
+ let(:service_hook) { create(:service_hook, integration: service) }
let(:service) { create(:drone_ci_service, project: project) }
let(:project) { create(:project) }
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index 83fe37effc0..42eca6b5a49 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -80,45 +80,6 @@ RSpec.describe SnippetBlobPresenter do
end
end
- describe '#plain_data' do
- let(:blob) { blob_at(file) }
-
- subject { described_class.new(blob).plain_data }
-
- context 'when blob is binary' do
- let(:file) { 'files/images/logo-black.png' }
-
- it 'returns nil' do
- expect(subject).to be_nil
- end
- end
-
- context 'when blob is markup' do
- let(:file) { 'README.md' }
-
- it 'returns plain content' do
- expect(subject).to include('<span id="LC1" class="line" lang="markdown">')
- end
- end
-
- context 'when blob has syntax' do
- let(:file) { 'files/ruby/regex.rb' }
-
- it 'returns highlighted syntax content' do
- expect(subject)
- .to include '<span id="LC1" class="line" lang="ruby"><span class="k">module</span> <span class="nn">Gitlab</span>'
- end
- end
-
- context 'when blob has plain data' do
- let(:file) { 'LICENSE' }
-
- it 'returns plain text highlighted content' do
- expect(subject).to include('<span id="LC1" class="line" lang="plaintext">The MIT License (MIT)</span>')
- end
- end
- end
-
describe 'route helpers' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
diff --git a/spec/presenters/terraform/modules_presenter_spec.rb b/spec/presenters/terraform/modules_presenter_spec.rb
new file mode 100644
index 00000000000..f5ad53e16ca
--- /dev/null
+++ b/spec/presenters/terraform/modules_presenter_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Terraform::ModulesPresenter do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:module_system) { 'my-system' }
+ let_it_be(:package_name) { "my-module/#{module_system}" }
+ let_it_be(:package1) { create(:terraform_module_package, version: '1.0.1', project: project, name: package_name) }
+ let_it_be(:package2) { create(:terraform_module_package, version: '1.0.10', project: project, name: package_name) }
+
+ let(:packages) { project.packages.terraform_module.with_name(package_name) }
+ let(:presenter) { described_class.new(packages, module_system) }
+
+ describe '#modules' do
+ subject { presenter.modules }
+
+ it { is_expected.to be_an(Array) }
+ it { expect(subject.first).to be_a(Hash) }
+ it { expect(subject).to match_schema('public_api/v4/packages/terraform/modules/v1/modules') }
+ end
+end
diff --git a/spec/presenters/user_presenter_spec.rb b/spec/presenters/user_presenter_spec.rb
index fdc20216a02..9c51c0b0078 100644
--- a/spec/presenters/user_presenter_spec.rb
+++ b/spec/presenters/user_presenter_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe UserPresenter do
let_it_be(:user) { create(:user) }
+
subject(:presenter) { described_class.new(user) }
describe '#web_path' do
diff --git a/spec/presenters/web_hook_log_presenter_spec.rb b/spec/presenters/web_hook_log_presenter_spec.rb
index 68c8c6e2a1b..ec930be266d 100644
--- a/spec/presenters/web_hook_log_presenter_spec.rb
+++ b/spec/presenters/web_hook_log_presenter_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe WebHookLogPresenter do
end
context 'service hook' do
- let(:web_hook) { create(:service_hook, service: service) }
+ let(:web_hook) { create(:service_hook, integration: service) }
let(:service) { create(:drone_ci_service, project: project) }
it { is_expected.to eq(project_service_hook_log_path(project, service, web_hook_log)) }
@@ -38,7 +38,7 @@ RSpec.describe WebHookLogPresenter do
end
context 'service hook' do
- let(:web_hook) { create(:service_hook, service: service) }
+ let(:web_hook) { create(:service_hook, integration: service) }
let(:service) { create(:drone_ci_service, project: project) }
it { is_expected.to eq(retry_project_service_hook_log_path(project, service, web_hook_log)) }
diff --git a/spec/requests/api/api_guard/admin_mode_middleware_spec.rb b/spec/requests/api/api_guard/admin_mode_middleware_spec.rb
index 63bcec4b52a..ba7a01a2cd9 100644
--- a/spec/requests/api/api_guard/admin_mode_middleware_spec.rb
+++ b/spec/requests/api/api_guard/admin_mode_middleware_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe API::APIGuard::AdminModeMiddleware, :request_store do
let(:app) do
Class.new(API::API) do
get 'willfail' do
- raise StandardError.new('oh noes!')
+ raise StandardError, 'oh noes!'
end
end
end
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index b3e425630e5..46430e55ff2 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -170,20 +170,6 @@ RSpec.describe API::API do
expect(response.media_type).to eq('application/json')
expect(response.body).to include('{"id":')
end
-
- context 'when api_always_use_application_json is disabled' do
- before do
- stub_feature_flags(api_always_use_application_json: false)
- end
-
- it 'returns text/plain' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('text/plain')
- expect(response.body).to include('#<API::Entities::User:')
- end
- end
end
end
end
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 5298f93886d..a38ba782c44 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe API::Branches do
stub_feature_flags(branch_list_keyset_pagination: false)
end
- describe "GET /projects/:id/repository/branches" do
+ describe "GET /projects/:id/repository/branches", :use_clean_rails_redis_caching do
let(:route) { "/projects/#{project_id}/repository/branches" }
shared_examples_for 'repository branches' do
@@ -53,7 +53,7 @@ RSpec.describe API::Branches do
end
it 'determines only a limited number of merged branch names' do
- expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).and_call_original
+ expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).at_least(:once).and_call_original
get api(route, current_user), params: base_params.merge(per_page: 2)
@@ -111,7 +111,7 @@ RSpec.describe API::Branches do
end
it 'determines only a limited number of merged branch names' do
- expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).and_call_original
+ expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).at_least(:once).and_call_original
get api(route, current_user), params: base_params.merge(per_page: 2)
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index d0c2b383013..092cd00630e 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -362,6 +362,25 @@ RSpec.describe API::Ci::Pipelines do
it do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
+
+ expect(json_response).to all match a_hash_including(
+ 'duration' => be_nil,
+ 'queued_duration' => (be >= 0.0)
+ )
+ end
+ end
+
+ context 'when filtering to only running jobs' do
+ let(:query) { { 'scope' => 'running' } }
+
+ it do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+
+ expect(json_response).to all match a_hash_including(
+ 'duration' => (be >= 0.0),
+ 'queued_duration' => (be >= 0.0)
+ )
end
end
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index cf0d8a632f1..63da3340a45 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -378,7 +378,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
{
"name" => "release",
"script" =>
- ["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\""],
+ ["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\" --assets-link \"{\\\"url\\\":\\\"https://example.com/assets/1\\\",\\\"name\\\":\\\"asset1\\\"}\""],
"timeout" => 3600,
"when" => "on_success",
"allow_failure" => false
@@ -502,8 +502,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect { request_job }.to exceed_all_query_limit(1).for_model(::Ci::JobArtifact)
end
- it 'queries the ci_builds table more than five times' do
- expect { request_job }.to exceed_all_query_limit(5).for_model(::Ci::Build)
+ it 'queries the ci_builds table more than three times' do
+ expect { request_job }.to exceed_all_query_limit(3).for_model(::Ci::Build)
end
end
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index 7984b1d4ca8..b38630183f4 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -91,6 +91,21 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
it_behaves_like 'not executing any extra queries for the application context' do
let(:subject_proc) { proc { request } }
end
+
+ context 'when it exceeds the application limits' do
+ before do
+ create(:ci_runner, runner_type: :project_type, projects: [project])
+ create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
+ end
+
+ it 'does not create runner' do
+ request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('runner_projects.base' => ['Maximum number of ci registered project runners (1) exceeded'])
+ expect(project.runners.reload.size).to eq(1)
+ end
+ end
end
context 'when group token is used' do
@@ -117,6 +132,21 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
it_behaves_like 'not executing any extra queries for the application context' do
let(:subject_proc) { proc { request } }
end
+
+ context 'when it exceeds the application limits' do
+ before do
+ create(:ci_runner, runner_type: :group_type, groups: [group])
+ create(:plan_limits, :default_plan, ci_registered_group_runners: 1)
+ end
+
+ it 'does not create runner' do
+ request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('runner_namespaces.base' => ['Maximum number of ci registered group runners (1) exceeded'])
+ expect(group.runners.reload.size).to eq(1)
+ end
+ end
end
end
diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb
index 670456e5dba..1727bc830fc 100644
--- a/spec/requests/api/ci/runners_spec.rb
+++ b/spec/requests/api/ci/runners_spec.rb
@@ -999,6 +999,19 @@ RSpec.describe API::Ci::Runners do
end.to change { project.runners.count }.by(+1)
expect(response).to have_gitlab_http_status(:created)
end
+
+ context 'when it exceeds the application limits' do
+ before do
+ create(:plan_limits, :default_plan, ci_registered_project_runners: 1)
+ end
+
+ it 'does not enable specific runner' do
+ expect do
+ post api("/projects/#{project.id}/runners", admin), params: { runner_id: new_project_runner.id }
+ end.not_to change { project.runners.count }
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
end
    it 'enables an instance type runner' do
diff --git a/spec/requests/api/debian_group_packages_spec.rb b/spec/requests/api/debian_group_packages_spec.rb
index 9d63d675a02..42c6c987872 100644
--- a/spec/requests/api/debian_group_packages_spec.rb
+++ b/spec/requests/api/debian_group_packages_spec.rb
@@ -5,35 +5,35 @@ RSpec.describe API::DebianGroupPackages do
include HttpBasicAuthHelpers
include WorkhorseHelpers
- include_context 'Debian repository shared context', :group do
- describe 'GET groups/:id/packages/debian/dists/*distribution/Release.gpg' do
- let(:url) { "/groups/#{group.id}/packages/debian/dists/#{distribution}/Release.gpg" }
+ include_context 'Debian repository shared context', :group, false do
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release.gpg' do
+ let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution}/Release.gpg" }
- it_behaves_like 'Debian group repository GET endpoint', :not_found, nil
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :not_found
end
- describe 'GET groups/:id/packages/debian/dists/*distribution/Release' do
- let(:url) { "/groups/#{group.id}/packages/debian/dists/#{distribution}/Release" }
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release' do
+ let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution}/Release" }
- it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO Release'
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, 'TODO Release'
end
- describe 'GET groups/:id/packages/debian/dists/*distribution/InRelease' do
- let(:url) { "/groups/#{group.id}/packages/debian/dists/#{distribution}/InRelease" }
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/InRelease' do
+ let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution}/InRelease" }
- it_behaves_like 'Debian group repository GET endpoint', :not_found, nil
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :not_found
end
- describe 'GET groups/:id/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
- let(:url) { "/groups/#{group.id}/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
+ let(:url) { "/groups/#{container.id}/-/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
- it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO Packages'
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, 'TODO Packages'
end
- describe 'GET groups/:id/packages/debian/pool/:component/:letter/:source_package/:file_name' do
- let(:url) { "/groups/#{group.id}/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+ describe 'GET groups/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
+ let(:url) { "/groups/#{container.id}/-/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
- it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO File'
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, 'TODO File'
end
end
end
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
index 4941f2a77f4..f400b6e928c 100644
--- a/spec/requests/api/debian_project_packages_spec.rb
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -5,49 +5,49 @@ RSpec.describe API::DebianProjectPackages do
include HttpBasicAuthHelpers
include WorkhorseHelpers
- include_context 'Debian repository shared context', :project do
+ include_context 'Debian repository shared context', :project, true do
describe 'GET projects/:id/packages/debian/dists/*distribution/Release.gpg' do
- let(:url) { "/projects/#{project.id}/packages/debian/dists/#{distribution}/Release.gpg" }
+ let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution}/Release.gpg" }
- it_behaves_like 'Debian project repository GET endpoint', :not_found, nil
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :not_found
end
describe 'GET projects/:id/packages/debian/dists/*distribution/Release' do
- let(:url) { "/projects/#{project.id}/packages/debian/dists/#{distribution}/Release" }
+ let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution}/Release" }
- it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO Release'
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, 'TODO Release'
end
describe 'GET projects/:id/packages/debian/dists/*distribution/InRelease' do
- let(:url) { "/projects/#{project.id}/packages/debian/dists/#{distribution}/InRelease" }
+ let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution}/InRelease" }
- it_behaves_like 'Debian project repository GET endpoint', :not_found, nil
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :not_found
end
describe 'GET projects/:id/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
- let(:url) { "/projects/#{project.id}/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
+ let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
- it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO Packages'
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, 'TODO Packages'
end
describe 'GET projects/:id/packages/debian/pool/:component/:letter/:source_package/:file_name' do
- let(:url) { "/projects/#{project.id}/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+ let(:url) { "/projects/#{container.id}/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
- it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO File'
+ it_behaves_like 'Debian repository read endpoint', 'GET request', :success, 'TODO File'
end
describe 'PUT projects/:id/packages/debian/:file_name' do
let(:method) { :put }
- let(:url) { "/projects/#{project.id}/packages/debian/#{file_name}" }
+ let(:url) { "/projects/#{container.id}/packages/debian/#{file_name}" }
- it_behaves_like 'Debian project repository PUT endpoint', :created, nil
+ it_behaves_like 'Debian repository write endpoint', 'upload request', :created
end
describe 'PUT projects/:id/packages/debian/:file_name/authorize' do
let(:method) { :put }
- let(:url) { "/projects/#{project.id}/packages/debian/#{file_name}/authorize" }
+ let(:url) { "/projects/#{container.id}/packages/debian/#{file_name}/authorize" }
- it_behaves_like 'Debian project repository PUT endpoint', :created, nil, is_authorize: true
+ it_behaves_like 'Debian repository write endpoint', 'upload authorize request', :created
end
end
end
diff --git a/spec/requests/api/deploy_tokens_spec.rb b/spec/requests/api/deploy_tokens_spec.rb
index 7a31ff725c8..e8426270622 100644
--- a/spec/requests/api/deploy_tokens_spec.rb
+++ b/spec/requests/api/deploy_tokens_spec.rb
@@ -8,7 +8,11 @@ RSpec.describe API::DeployTokens do
let_it_be(:project) { create(:project, creator_id: creator.id) }
let_it_be(:group) { create(:group) }
let!(:deploy_token) { create(:deploy_token, projects: [project]) }
+ let!(:revoked_deploy_token) { create(:deploy_token, projects: [project], revoked: true) }
+ let!(:expired_deploy_token) { create(:deploy_token, projects: [project], expires_at: '1988-01-11T04:33:04-0600') }
let!(:group_deploy_token) { create(:deploy_token, :group, groups: [group]) }
+ let!(:revoked_group_deploy_token) { create(:deploy_token, :group, groups: [group], revoked: true) }
+ let!(:expired_group_deploy_token) { create(:deploy_token, :group, groups: [group], expires_at: '1988-01-11T04:33:04-0600') }
describe 'GET /deploy_tokens' do
subject do
@@ -36,8 +40,31 @@ RSpec.describe API::DeployTokens do
it 'returns all deploy tokens' do
subject
+ token_ids = json_response.map { |token| token['id'] }
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/deploy_tokens')
+ expect(token_ids).to match_array([
+ deploy_token.id,
+ revoked_deploy_token.id,
+ expired_deploy_token.id,
+ group_deploy_token.id,
+ revoked_group_deploy_token.id,
+ expired_group_deploy_token.id
+ ])
+ end
+
+ context 'and active=true' do
+ it 'only returns active deploy tokens' do
+ get api('/deploy_tokens?active=true', user)
+
+ token_ids = json_response.map { |token| token['id'] }
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(token_ids).to match_array([
+ deploy_token.id,
+ group_deploy_token.id
+ ])
+ end
end
end
end
@@ -82,7 +109,22 @@ RSpec.describe API::DeployTokens do
subject
token_ids = json_response.map { |token| token['id'] }
- expect(token_ids).not_to include(other_deploy_token.id)
+ expect(token_ids).to match_array([
+ deploy_token.id,
+ expired_deploy_token.id,
+ revoked_deploy_token.id
+ ])
+ end
+
+ context 'and active=true' do
+ it 'only returns active deploy tokens for the project' do
+ get api("/projects/#{project.id}/deploy_tokens?active=true", user)
+
+ token_ids = json_response.map { |token| token['id'] }
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(token_ids).to match_array([deploy_token.id])
+ end
end
end
end
@@ -119,8 +161,10 @@ RSpec.describe API::DeployTokens do
it 'returns all deploy tokens for the group' do
subject
+ token_ids = json_response.map { |token| token['id'] }
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/deploy_tokens')
+ expect(token_ids.length).to be(3)
end
it 'does not return deploy tokens for other groups' do
@@ -129,6 +173,17 @@ RSpec.describe API::DeployTokens do
token_ids = json_response.map { |token| token['id'] }
expect(token_ids).not_to include(other_deploy_token.id)
end
+
+ context 'and active=true' do
+ it 'only returns active deploy tokens for the group' do
+ get api("/groups/#{group.id}/deploy_tokens?active=true", user)
+
+ token_ids = json_response.map { |token| token['id'] }
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(token_ids).to eql([group_deploy_token.id])
+ end
+ end
end
end
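
The `active=true` behaviour asserted above is an ordinary query parameter on the REST endpoints; a minimal standalone sketch of calling it (assuming a reachable instance at `GITLAB_URL` and an admin token in `GITLAB_TOKEN`, neither of which appears in these specs):

# Hypothetical client-side sketch, separate from the spec suite: list only
# active instance-wide deploy tokens via GET /api/v4/deploy_tokens?active=true.
require 'net/http'
require 'json'
require 'uri'

uri = URI("#{ENV.fetch('GITLAB_URL')}/api/v4/deploy_tokens?active=true")
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN') # admin-scoped token assumed

response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end

JSON.parse(response.body).each do |token|
  puts "#{token['id']} #{token['name']} expires_at=#{token['expires_at']}"
end
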
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index c89c59a2151..bbfe37cb70b 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -12,9 +12,11 @@ RSpec.describe API::Deployments do
describe 'GET /projects/:id/deployments' do
let_it_be(:project) { create(:project, :repository) }
- let_it_be(:deployment_1) { create(:deployment, :success, project: project, iid: 11, ref: 'master', created_at: Time.now, updated_at: Time.now) }
- let_it_be(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'master', created_at: 1.day.ago, updated_at: 2.hours.ago) }
- let_it_be(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'master', created_at: 2.days.ago, updated_at: 1.hour.ago) }
+ let_it_be(:production) { create(:environment, :production, project: project) }
+ let_it_be(:staging) { create(:environment, :staging, project: project) }
+ let_it_be(:deployment_1) { create(:deployment, :success, project: project, environment: production, ref: 'master', created_at: Time.now, updated_at: Time.now) }
+ let_it_be(:deployment_2) { create(:deployment, :success, project: project, environment: staging, ref: 'master', created_at: 1.day.ago, updated_at: 2.hours.ago) }
+ let_it_be(:deployment_3) { create(:deployment, :success, project: project, environment: staging, ref: 'master', created_at: 2.days.ago, updated_at: 1.hour.ago) }
def perform_request(params = {})
get api("/projects/#{project.id}/deployments", user), params: params
@@ -36,17 +38,26 @@ RSpec.describe API::Deployments do
context 'with updated_at filters specified' do
it 'returns projects deployments with last update in specified datetime range' do
- perform_request({ updated_before: 30.minutes.ago, updated_after: 90.minutes.ago })
+ perform_request({ updated_before: 30.minutes.ago, updated_after: 90.minutes.ago, order_by: :updated_at })
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response.first['id']).to eq(deployment_3.id)
end
+
+ context 'when forbidden order_by is specified' do
+        it 'returns an error' do
+ perform_request({ updated_before: 30.minutes.ago, updated_after: 90.minutes.ago, order_by: :id })
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include('`updated_at` filter and `updated_at` sorting must be paired')
+ end
+ end
end
    context 'with the environment filter specified' do
it 'returns deployments for the environment' do
- perform_request({ environment: deployment_1.environment.name })
+ perform_request({ environment: production.name })
expect(json_response.size).to eq(1)
expect(json_response.first['iid']).to eq(deployment_1.iid)
@@ -68,7 +79,7 @@ RSpec.describe API::Deployments do
end
it 'returns ordered deployments' do
- expect(json_response.map { |i| i['id'] }).to eq([deployment_2.id, deployment_1.id, deployment_3.id])
+ expect(json_response.map { |i| i['id'] }).to eq([deployment_3.id, deployment_2.id, deployment_1.id])
end
context 'with invalid order_by' do
@@ -475,7 +486,7 @@ RSpec.describe API::Deployments do
let(:project) { create(:project, :repository) }
let!(:deployment) { create(:deployment, :success, project: project) }
- subject { get api("/projects/#{project.id}/deployments?order_by=updated_at&sort=asc", user) }
+ subject { get api("/projects/#{project.id}/deployments?order_by=id&sort=asc", user) }
it 'succeeds', :aggregate_failures do
subject
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index aa1a4643593..5d40e8c529a 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -3,28 +3,18 @@
require 'spec_helper'
RSpec.describe API::Environments do
- let(:user) { create(:user) }
- let(:non_member) { create(:user) }
- let(:project) { create(:project, :private, :repository, namespace: user.namespace) }
- let!(:environment) { create(:environment, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:project) { create(:project, :private, :repository, namespace: user.namespace) }
+ let_it_be_with_reload(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
end
- describe 'GET /projects/:id/environments' do
+ describe 'GET /projects/:id/environments', :aggregate_failures do
context 'as member of the project' do
it 'returns project environments' do
- project_data_keys = %w(
- id description default_branch tag_list
- ssh_url_to_repo http_url_to_repo web_url readme_url
- name name_with_namespace
- path path_with_namespace
- star_count forks_count
- created_at last_activity_at
- avatar_url namespace
- )
-
get api("/projects/#{project.id}/environments", user)
expect(response).to have_gitlab_http_status(:ok)
@@ -33,12 +23,95 @@ RSpec.describe API::Environments do
expect(json_response.size).to eq(1)
expect(json_response.first['name']).to eq(environment.name)
expect(json_response.first['external_url']).to eq(environment.external_url)
- expect(json_response.first['project'].keys).to contain_exactly(*project_data_keys)
- expect(json_response.first).not_to have_key("last_deployment")
+ expect(json_response.first['project']).to match_schema('public_api/v4/project')
+ expect(json_response.first['enable_advanced_logs_querying']).to eq(false)
+ expect(json_response.first).not_to have_key('last_deployment')
+ expect(json_response.first).not_to have_key('gitlab_managed_apps_logs_path')
+ end
+
+ context 'when the user can read pod logs' do
+ context 'with successful deployment on cluster' do
+ let_it_be(:deployment) { create(:deployment, :on_cluster, :success, environment: environment, project: project) }
+
+          it 'returns environment with gitlab_managed_apps_logs_path' do
+ get api("/projects/#{project.id}/environments", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['gitlab_managed_apps_logs_path']).to eq(
+ "/#{project.full_path}/-/logs/k8s.json?cluster_id=#{deployment.cluster_id}"
+ )
+ end
+ end
+
+ context 'when elastic stack is available' do
+ before do
+ allow_next_found_instance_of(Environment) do |env|
+ allow(env).to receive(:elastic_stack_available?).and_return(true)
+ end
+ end
+
+ it 'returns environment with enable_advanced_logs_querying and logs_api_path' do
+ get api("/projects/#{project.id}/environments", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['enable_advanced_logs_querying']).to eq(true)
+ expect(json_response.first['logs_api_path']).to eq(
+ "/#{project.full_path}/-/logs/elasticsearch.json?environment_name=#{environment.name}"
+ )
+ end
+ end
+
+ context 'when elastic stack is not available' do
+ before do
+ allow_next_found_instance_of(Environment) do |env|
+ allow(env).to receive(:elastic_stack_available?).and_return(false)
+ end
+ end
+
+          it 'returns environment with enable_advanced_logs_querying and logs_api_path' do
+ get api("/projects/#{project.id}/environments", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['enable_advanced_logs_querying']).to eq(false)
+ expect(json_response.first['logs_api_path']).to eq(
+ "/#{project.full_path}/-/logs/k8s.json?environment_name=#{environment.name}"
+ )
+ end
+ end
+ end
+
+ context 'when the user cannot read pod logs' do
+ before do
+ allow_next_found_instance_of(User) do |user|
+ allow(user).to receive(:can?).and_call_original
+ allow(user).to receive(:can?).with(:read_pod_logs, project).and_return(false)
+ end
+ end
+
+ it 'does not contain enable_advanced_logs_querying' do
+ get api("/projects/#{project.id}/environments", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(1)
+ expect(json_response.first).not_to have_key('enable_advanced_logs_querying')
+ expect(json_response.first).not_to have_key('logs_api_path')
+ expect(json_response.first).not_to have_key('gitlab_managed_apps_logs_path')
+ end
end
context 'when filtering' do
- let!(:environment2) { create(:environment, project: project) }
+ let_it_be(:environment2) { create(:environment, project: project) }
it 'returns environment by name' do
get api("/projects/#{project.id}/environments?name=#{environment.name}", user)
diff --git a/spec/requests/api/graphql/ci/job_spec.rb b/spec/requests/api/graphql/ci/job_spec.rb
index 78f7d3e149b..b0514a0a963 100644
--- a/spec/requests/api/graphql/ci/job_spec.rb
+++ b/spec/requests/api/graphql/ci/job_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe 'Query.project(fullPath).pipelines.job(id)' do
include GraphqlHelpers
+ around do |example|
+ travel_to(Time.current) { example.run }
+ end
+
let_it_be(:user) { create_default(:user) }
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
@@ -35,13 +39,20 @@ RSpec.describe 'Query.project(fullPath).pipelines.job(id)' do
let(:terminal_type) { 'CiJob' }
it 'retrieves scalar fields' do
+ job_2.update!(
+ created_at: 40.seconds.ago,
+ queued_at: 32.seconds.ago,
+ started_at: 30.seconds.ago,
+ finished_at: 5.seconds.ago
+ )
post_graphql(query, current_user: user)
expect(graphql_data_at(*path)).to match a_hash_including(
'id' => global_id_of(job_2),
'name' => job_2.name,
'allowFailure' => job_2.allow_failure,
- 'duration' => job_2.duration,
+ 'duration' => 25,
+ 'queuedDuration' => 2.0,
'status' => job_2.status.upcase
)
end
diff --git a/spec/requests/api/graphql/ci/pipelines_spec.rb b/spec/requests/api/graphql/ci/pipelines_spec.rb
index 7933251b8e9..f207636283f 100644
--- a/spec/requests/api/graphql/ci/pipelines_spec.rb
+++ b/spec/requests/api/graphql/ci/pipelines_spec.rb
@@ -8,6 +8,130 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:user) { create(:user) }
+ around do |example|
+ travel_to(Time.current) { example.run }
+ end
+
+ describe 'duration fields' do
+ let_it_be(:pipeline) do
+ create(:ci_pipeline, project: project)
+ end
+
+ let(:query_path) do
+ [
+ [:project, { full_path: project.full_path }],
+ [:pipelines],
+ [:nodes]
+ ]
+ end
+
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, 'queuedDuration duration'))
+ end
+
+ before do
+ pipeline.update!(
+ created_at: 1.minute.ago,
+ started_at: 55.seconds.ago
+ )
+ create(:ci_build, :success,
+ pipeline: pipeline,
+ started_at: 55.seconds.ago,
+ finished_at: 10.seconds.ago)
+ pipeline.update_duration
+ pipeline.save!
+
+ post_graphql(query, current_user: user)
+ end
+
+ it 'includes the duration fields' do
+ path = query_path.map(&:first)
+ expect(graphql_data_at(*path, :queued_duration)).to eq [5.0]
+ expect(graphql_data_at(*path, :duration)).to eq [45]
+ end
+ end
+
+ describe '.stages' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project) }
+ let_it_be(:stage) { create(:ci_stage_entity, pipeline: pipeline, project: project) }
+ let_it_be(:other_stage) { create(:ci_stage_entity, pipeline: pipeline, project: project, name: 'other') }
+
+ let(:first_n) { var('Int') }
+ let(:query_path) do
+ [
+ [:project, { full_path: project.full_path }],
+ [:pipelines],
+ [:nodes],
+ [:stages, { first: first_n }],
+ [:nodes]
+ ]
+ end
+
+ let(:query) do
+ with_signature([first_n], wrap_fields(query_graphql_path(query_path, :name)))
+ end
+
+ before_all do
+      # see app/services/ci/ensure_stage_service.rb for an explanation of why we use stage_id
+ create(:ci_build, pipeline: pipeline, stage_id: stage.id, name: 'linux: [foo]')
+ create(:ci_build, pipeline: pipeline, stage_id: stage.id, name: 'linux: [bar]')
+ create(:ci_build, pipeline: pipeline, stage_id: other_stage.id, name: 'linux: [baz]')
+ end
+
+ it 'is null if the user is a guest' do
+ project.add_guest(user)
+
+ post_graphql(query, current_user: user, variables: first_n.with(1))
+
+ expect(graphql_data_at(:project, :pipelines, :nodes)).to contain_exactly a_hash_including('stages' => be_nil)
+ end
+
+ it 'is present if the user has reporter access' do
+ project.add_reporter(user)
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data_at(:project, :pipelines, :nodes, :stages, :nodes, :name))
+ .to contain_exactly(eq(stage.name), eq(other_stage.name))
+ end
+
+ describe '.groups' do
+ let(:query_path) do
+ [
+ [:project, { full_path: project.full_path }],
+ [:pipelines],
+ [:nodes],
+ [:stages],
+ [:nodes],
+ [:groups],
+ [:nodes]
+ ]
+ end
+
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, :name))
+ end
+
+ it 'is empty if the user is a guest' do
+ project.add_guest(user)
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data_at(:project, :pipelines, :nodes, :stages, :nodes, :groups)).to be_empty
+ end
+
+ it 'is present if the user has reporter access' do
+ project.add_reporter(user)
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data_at(:project, :pipelines, :nodes, :stages, :nodes, :groups, :nodes, :name))
+ .to contain_exactly('linux', 'linux')
+ end
+ end
+ end
+
describe '.jobs' do
let(:first_n) { var('Int') }
let(:query_path) do
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
new file mode 100644
index 00000000000..e1f84d23209
--- /dev/null
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.runner(id)' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create_default(:user, :admin) }
+
+ let_it_be(:active_runner) do
+ create(:ci_runner, :instance, description: 'Runner 1', contacted_at: 2.hours.ago,
+ active: true, version: 'adfe156', revision: 'a', locked: true, ip_address: '127.0.0.1', maximum_timeout: 600,
+ access_level: 0, tag_list: %w[tag1 tag2], run_untagged: true)
+ end
+
+ let_it_be(:inactive_runner) do
+ create(:ci_runner, :instance, description: 'Runner 2', contacted_at: 1.day.ago, active: false,
+ version: 'adfe157', revision: 'b', ip_address: '10.10.10.10', access_level: 1, run_untagged: true)
+ end
+
+ def get_runner(id)
+ case id
+ when :active_runner
+ active_runner
+ when :inactive_runner
+ inactive_runner
+ end
+ end
+
+ shared_examples 'runner details fetch' do |runner_id|
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, all_graphql_fields_for('CiRunner')))
+ end
+
+ let(:query_path) do
+ [
+ [:runner, { id: get_runner(runner_id).to_global_id.to_s }]
+ ]
+ end
+
+ it 'retrieves expected fields' do
+ post_graphql(query, current_user: user)
+
+ runner_data = graphql_data_at(:runner)
+ expect(runner_data).not_to be_nil
+
+ runner = get_runner(runner_id)
+ expect(runner_data).to match a_hash_including(
+ 'id' => "gid://gitlab/Ci::Runner/#{runner.id}",
+ 'description' => runner.description,
+ 'contactedAt' => runner.contacted_at&.iso8601,
+ 'version' => runner.version,
+ 'shortSha' => runner.short_sha,
+ 'revision' => runner.revision,
+ 'locked' => runner.locked,
+ 'active' => runner.active,
+ 'status' => runner.status.to_s.upcase,
+ 'maximumTimeout' => runner.maximum_timeout,
+ 'accessLevel' => runner.access_level.to_s.upcase,
+ 'runUntagged' => runner.run_untagged,
+ 'ipAddress' => runner.ip_address,
+ 'runnerType' => 'INSTANCE_TYPE'
+ )
+ expect(runner_data['tagList']).to match_array runner.tag_list
+ end
+ end
+
+ shared_examples 'retrieval by unauthorized user' do |runner_id|
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, all_graphql_fields_for('CiRunner')))
+ end
+
+ let(:query_path) do
+ [
+ [:runner, { id: get_runner(runner_id).to_global_id.to_s }]
+ ]
+ end
+
+ it 'returns null runner' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data_at(:runner)).to be_nil
+ end
+ end
+
+ describe 'for active runner' do
+ it_behaves_like 'runner details fetch', :active_runner
+ end
+
+ describe 'for inactive runner' do
+ it_behaves_like 'runner details fetch', :inactive_runner
+ end
+
+ describe 'by regular user' do
+ let(:user) { create_default(:user) }
+
+ it_behaves_like 'retrieval by unauthorized user', :active_runner
+ end
+
+ describe 'by unauthenticated user' do
+ let(:user) { nil }
+
+ it_behaves_like 'retrieval by unauthorized user', :active_runner
+ end
+
+ describe 'Query limits' do
+ def runner_query(runner)
+ <<~SINGLE
+ runner(id: "#{runner.to_global_id}") {
+ #{all_graphql_fields_for('CiRunner')}
+ }
+ SINGLE
+ end
+
+ let(:single_query) do
+ <<~QUERY
+ {
+ active: #{runner_query(active_runner)}
+ }
+ QUERY
+ end
+
+ let(:double_query) do
+ <<~QUERY
+ {
+ active: #{runner_query(active_runner)}
+ inactive: #{runner_query(inactive_runner)}
+ }
+ QUERY
+ end
+
+ it 'does not execute more queries per runner', :aggregate_failures do
+ # warm-up license cache and so on:
+ post_graphql(single_query, current_user: user)
+
+ control = ActiveRecord::QueryRecorder.new { post_graphql(single_query, current_user: user) }
+
+ expect { post_graphql(double_query, current_user: user) }
+ .not_to exceed_query_limit(control)
+ expect(graphql_data_at(:active)).not_to be_nil
+ expect(graphql_data_at(:inactive)).not_to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
new file mode 100644
index 00000000000..778fe5b129e
--- /dev/null
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Query.runners' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create_default(:user, :admin) }
+
+ describe 'Query.runners' do
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:instance_runner) { create(:ci_runner, :instance, version: 'abc', revision: '123', description: 'Instance runner', ip_address: '127.0.0.1') }
+ let_it_be(:project_runner) { create(:ci_runner, :project, active: false, version: 'def', revision: '456', description: 'Project runner', projects: [project], ip_address: '127.0.0.1') }
+
+ let(:runners_graphql_data) { graphql_data['runners'] }
+
+ let(:params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ #{all_graphql_fields_for('CiRunner')}
+ }
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+        runners(type: #{runner_type}, status: #{status}) {
+ #{fields}
+ }
+ }
+ )
+ end
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ shared_examples 'a working graphql query returning expected runner' do
+ it_behaves_like 'a working graphql query'
+
+ it 'returns expected runner' do
+ expect(runners_graphql_data['nodes'].map { |n| n['id'] }).to contain_exactly(expected_runner.to_global_id.to_s)
+ end
+ end
+
+ context 'runner_type is INSTANCE_TYPE and status is ACTIVE' do
+ let(:runner_type) { 'INSTANCE_TYPE' }
+ let(:status) { 'ACTIVE' }
+
+ let!(:expected_runner) { instance_runner }
+
+ it_behaves_like 'a working graphql query returning expected runner'
+ end
+
+ context 'runner_type is PROJECT_TYPE and status is NOT_CONNECTED' do
+ let(:runner_type) { 'PROJECT_TYPE' }
+ let(:status) { 'NOT_CONNECTED' }
+
+ let!(:expected_runner) { project_runner }
+
+ it_behaves_like 'a working graphql query returning expected runner'
+ end
+ end
+
+ describe 'pagination' do
+ let(:data_path) { [:runners] }
+
+ def pagination_query(params)
+ graphql_query_for(:runners, params, "#{page_info} nodes { id }")
+ end
+
+ def pagination_results_data(runners)
+ runners.map { |runner| GitlabSchema.parse_gid(runner['id'], expected_type: ::Ci::Runner).model_id.to_i }
+ end
+
+ let_it_be(:runners) do
+ common_args = {
+ version: 'abc',
+ revision: '123',
+ ip_address: '127.0.0.1'
+ }
+
+ [
+ create(:ci_runner, :instance, created_at: 4.days.ago, contacted_at: 3.days.ago, **common_args),
+ create(:ci_runner, :instance, created_at: 30.hours.ago, contacted_at: 1.day.ago, **common_args),
+ create(:ci_runner, :instance, created_at: 1.day.ago, contacted_at: 1.hour.ago, **common_args),
+ create(:ci_runner, :instance, created_at: 2.days.ago, contacted_at: 2.days.ago, **common_args),
+ create(:ci_runner, :instance, created_at: 3.days.ago, contacted_at: 1.second.ago, **common_args)
+ ]
+ end
+
+ context 'when sorted by contacted_at ascending' do
+ let(:ordered_runners) { runners.sort_by(&:contacted_at) }
+
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { :CONTACTED_ASC }
+ let(:first_param) { 2 }
+ let(:expected_results) { ordered_runners.map(&:id) }
+ end
+ end
+
+ context 'when sorted by created_at' do
+ let(:ordered_runners) { runners.sort_by(&:created_at).reverse }
+
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { :CREATED_DESC }
+ let(:first_param) { 2 }
+ let(:expected_results) { ordered_runners.map(&:id) }
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/ci/template_spec.rb b/spec/requests/api/graphql/ci/template_spec.rb
new file mode 100644
index 00000000000..1bbef7d7f30
--- /dev/null
+++ b/spec/requests/api/graphql/ci/template_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Querying CI template' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
+ let(:query) do
+ <<~QUERY
+ {
+ project(fullPath: "#{project.full_path}") {
+ name
+ ciTemplate(name: "#{template_name}") {
+ name
+ content
+ }
+ }
+ }
+ QUERY
+ end
+
+ before do
+ post_graphql(query, current_user: user)
+ end
+
+ context 'when the template exists' do
+ let(:template_name) { 'Android' }
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns correct data' do
+ expect(graphql_data.dig('project', 'ciTemplate', 'name')).to eq(template_name)
+ expect(graphql_data.dig('project', 'ciTemplate', 'content')).not_to be_blank
+ end
+ end
+
+ context 'when the template does not exist' do
+ let(:template_name) { 'doesnotexist' }
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns correct data' do
+ expect(graphql_data.dig('project', 'ciTemplate')).to eq(nil)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/group/milestones_spec.rb b/spec/requests/api/graphql/group/milestones_spec.rb
index a5b489d72fd..601cab6aade 100644
--- a/spec/requests/api/graphql/group/milestones_spec.rb
+++ b/spec/requests/api/graphql/group/milestones_spec.rb
@@ -197,18 +197,6 @@ RSpec.describe 'Milestones through GroupQuery' do
}
})
end
-
- context 'when the graphql_milestone_stats feature flag is disabled' do
- before do
- stub_feature_flags(graphql_milestone_stats: false)
- end
-
- it 'returns nil for the stats field' do
- expect(post_query).to eq({
- 'stats' => nil
- })
- end
- end
end
end
end
diff --git a/spec/requests/api/graphql/group/packages_spec.rb b/spec/requests/api/graphql/group/packages_spec.rb
index 85775598b2e..adee556db3a 100644
--- a/spec/requests/api/graphql/group/packages_spec.rb
+++ b/spec/requests/api/graphql/group/packages_spec.rb
@@ -7,46 +7,19 @@ RSpec.describe 'getting a package list for a group' do
let_it_be(:resource) { create(:group, :private) }
let_it_be(:group_two) { create(:group, :private) }
- let_it_be(:project) { create(:project, :repository, group: resource) }
- let_it_be(:another_project) { create(:project, :repository, group: resource) }
- let_it_be(:group_two_project) { create(:project, :repository, group: group_two) }
+ let_it_be(:project1) { create(:project, :repository, group: resource) }
+ let_it_be(:project2) { create(:project, :repository, group: resource) }
let_it_be(:current_user) { create(:user) }
- let_it_be(:package) { create(:package, project: project) }
- let_it_be(:npm_package) { create(:npm_package, project: group_two_project) }
- let_it_be(:maven_package) { create(:maven_package, project: project) }
- let_it_be(:debian_package) { create(:debian_package, project: another_project) }
- let_it_be(:composer_package) { create(:composer_package, project: another_project) }
- let_it_be(:composer_metadatum) do
- create(:composer_metadatum, package: composer_package,
- target_sha: 'afdeh',
- composer_json: { name: 'x', type: 'y', license: 'z', version: 1 })
- end
-
- let(:package_names) { graphql_data_at(:group, :packages, :nodes, :name) }
- let(:target_shas) { graphql_data_at(:group, :packages, :nodes, :metadata, :target_sha) }
- let(:packages) { graphql_data_at(:group, :packages, :nodes) }
-
- let(:fields) do
- <<~QUERY
- nodes {
- #{all_graphql_fields_for('packages'.classify, excluded: ['project'])}
- metadata { #{query_graphql_fragment('ComposerMetadata')} }
- }
- QUERY
- end
-
- let(:query) do
- graphql_query_for(
- 'group',
- { 'fullPath' => resource.full_path },
- query_graphql_field('packages', {}, fields)
- )
- end
+ let(:resource_type) { :group }
it_behaves_like 'group and project packages query'
context 'with a batched query' do
+ let_it_be(:group_two_project) { create(:project, :repository, group: group_two) }
+ let_it_be(:group_one_package) { create(:npm_package, project: project1) }
+ let_it_be(:group_two_package) { create(:npm_package, project: group_two_project) }
+
let(:batch_query) do
<<~QUERY
{
@@ -65,12 +38,7 @@ RSpec.describe 'getting a package list for a group' do
end
it 'returns an error for the second group and data for the first' do
- expect(a_packages_names).to contain_exactly(
- package.name,
- maven_package.name,
- debian_package.name,
- composer_package.name
- )
+ expect(a_packages_names).to contain_exactly(group_one_package.name)
expect_graphql_errors_to_include [/Packages can be requested only for one group at a time/]
expect(graphql_data_at(:b, :packages)).to be(nil)
end
diff --git a/spec/requests/api/graphql/issue/issue_spec.rb b/spec/requests/api/graphql/issue/issue_spec.rb
index e8b8caf6c2d..42ca3348384 100644
--- a/spec/requests/api/graphql/issue/issue_spec.rb
+++ b/spec/requests/api/graphql/issue/issue_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe 'Query.issue(id)' do
post_graphql(query, current_user: current_user)
end
- it "returns the Issue and field #{params['field']}" do
+ it "returns the issue and field #{params['field']}" do
expect(issue_data.keys).to eq([field])
end
end
@@ -86,7 +86,7 @@ RSpec.describe 'Query.issue(id)' do
context 'when selecting multiple fields' do
let(:issue_fields) { ['title', 'description', 'updatedBy { username }'] }
- it 'returns the Issue with the specified fields' do
+ it 'returns the issue with the specified fields' do
post_graphql(query, current_user: current_user)
expect(issue_data.keys).to eq %w[title description updatedBy]
@@ -115,7 +115,7 @@ RSpec.describe 'Query.issue(id)' do
end
end
- context 'when passed a non-Issue gid' do
+ context 'when passed a non-issue gid' do
let(:mr) { create(:merge_request) }
it 'returns an error' do
diff --git a/spec/requests/api/graphql/merge_request/merge_request_spec.rb b/spec/requests/api/graphql/merge_request/merge_request_spec.rb
new file mode 100644
index 00000000000..75dd01a0763
--- /dev/null
+++ b/spec/requests/api/graphql/merge_request/merge_request_spec.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.merge_request(id)' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:merge_request_params) { { 'id' => merge_request.to_global_id.to_s } }
+
+ let(:merge_request_data) { graphql_data['mergeRequest'] }
+ let(:merge_request_fields) { all_graphql_fields_for('MergeRequest'.classify) }
+
+ let(:query) do
+ graphql_query_for('mergeRequest', merge_request_params, merge_request_fields)
+ end
+
+ it_behaves_like 'a working graphql query' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+ end
+
+ it_behaves_like 'a noteable graphql type we can query' do
+ let(:noteable) { merge_request }
+ let(:project) { merge_request.project }
+ let(:path_to_noteable) { [:merge_request] }
+
+ before do
+ project.add_reporter(current_user)
+ end
+
+ def query(fields)
+ graphql_query_for('mergeRequest', merge_request_params, fields)
+ end
+ end
+
+ context 'when the user does not have access to the merge request' do
+ it 'returns nil' do
+ post_graphql(query)
+
+ expect(merge_request_data).to be nil
+ end
+ end
+
+ context 'when the user does have access' do
+ before do
+ project.add_reporter(current_user)
+ end
+
+ it 'returns the merge request' do
+ post_graphql(query, current_user: current_user)
+
+ expect(merge_request_data).to include(
+ 'title' => merge_request.title,
+ 'description' => merge_request.description
+ )
+ end
+
+ context 'when selecting any single field' do
+ where(:field) do
+ scalar_fields_of('MergeRequest').map { |name| [name] }
+ end
+
+ with_them do
+ it_behaves_like 'a working graphql query' do
+ let(:merge_request_fields) do
+ field
+ end
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it "returns the merge request and field #{params['field']}" do
+ expect(merge_request_data.keys).to eq([field])
+ end
+ end
+ end
+ end
+
+ context 'when selecting multiple fields' do
+ let(:merge_request_fields) { ['title', 'description', 'author { username }'] }
+
+ it 'returns the merge request with the specified fields' do
+ post_graphql(query, current_user: current_user)
+
+ expect(merge_request_data.keys).to eq %w[title description author]
+ expect(merge_request_data['title']).to eq(merge_request.title)
+ expect(merge_request_data['description']).to eq(merge_request.description)
+ expect(merge_request_data['author']['username']).to eq(merge_request.author.username)
+ end
+ end
+
+ context 'when passed a non-merge request gid' do
+ let(:issue) { create(:issue) }
+
+ it 'returns an error' do
+ gid = issue.to_global_id.to_s
+ merge_request_params['id'] = gid
+
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_errors).not_to be nil
+ expect(graphql_errors.first['message']).to eq("\"#{gid}\" does not represent an instance of MergeRequest")
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/metadata_query_spec.rb b/spec/requests/api/graphql/metadata_query_spec.rb
index 6344ec371c8..840bd7c018c 100644
--- a/spec/requests/api/graphql/metadata_query_spec.rb
+++ b/spec/requests/api/graphql/metadata_query_spec.rb
@@ -8,16 +8,48 @@ RSpec.describe 'getting project information' do
let(:query) { graphql_query_for('metadata', {}, all_graphql_fields_for('Metadata')) }
context 'logged in' do
- it 'returns version and revision' do
- post_graphql(query, current_user: create(:user))
-
- expect(graphql_errors).to be_nil
- expect(graphql_data).to eq(
+ let(:expected_data) do
+ {
'metadata' => {
'version' => Gitlab::VERSION,
- 'revision' => Gitlab.revision
+ 'revision' => Gitlab.revision,
+ 'kas' => {
+ 'enabled' => Gitlab::Kas.enabled?,
+ 'version' => expected_kas_version,
+ 'externalUrl' => expected_kas_external_url
+ }
}
- )
+ }
+ end
+
+ context 'kas is enabled' do
+ let(:expected_kas_version) { Gitlab::Kas.version }
+ let(:expected_kas_external_url) { Gitlab::Kas.external_url }
+
+ before do
+ allow(Gitlab::Kas).to receive(:enabled?).and_return(true)
+ post_graphql(query, current_user: create(:user))
+ end
+
+ it 'returns version, revision, kas_enabled, kas_version, kas_external_url' do
+ expect(graphql_errors).to be_nil
+ expect(graphql_data).to eq(expected_data)
+ end
+ end
+
+ context 'kas is disabled' do
+ let(:expected_kas_version) { nil }
+ let(:expected_kas_external_url) { nil }
+
+ before do
+ allow(Gitlab::Kas).to receive(:enabled?).and_return(false)
+ post_graphql(query, current_user: create(:user))
+ end
+
+ it 'returns version and revision' do
+ expect(graphql_errors).to be_nil
+ expect(graphql_data).to eq(expected_data)
+ end
end
end
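
Spelled out instead of generated via `all_graphql_fields_for('Metadata')`, the query these examples post is roughly the following; this is a sketch of its shape using only the fields asserted above:

# Approximate query document exercised by the metadata examples above.
metadata_query = <<~QUERY
  {
    metadata {
      version
      revision
      kas {
        enabled
        version
        externalUrl
      }
    }
  }
QUERY
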
diff --git a/spec/requests/api/graphql/mutations/boards/destroy_spec.rb b/spec/requests/api/graphql/mutations/boards/destroy_spec.rb
index a6d894e698d..23e099e94b6 100644
--- a/spec/requests/api/graphql/mutations/boards/destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/boards/destroy_spec.rb
@@ -8,7 +8,8 @@ RSpec.describe Mutations::Boards::Destroy do
let_it_be(:current_user, reload: true) { create(:user) }
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:board) { create(:board, project: project) }
- let_it_be(:other_board) { create(:board, project: project) }
+ let_it_be(:other_board, refind: true) { create(:board, project: project) }
+
let(:mutation) do
variables = {
id: GitlabSchema.id_from_object(board).to_s
diff --git a/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb b/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb
index 42f690f53ed..83309ead352 100644
--- a/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb
@@ -6,72 +6,23 @@ RSpec.describe Mutations::Boards::Lists::Destroy do
include GraphqlHelpers
let_it_be(:current_user, reload: true) { create(:user) }
- let_it_be(:project, reload: true) { create(:project) }
- let_it_be(:board) { create(:board, project: project) }
- let_it_be(:list) { create(:list, board: board) }
- let(:mutation) do
- variables = {
- list_id: GitlabSchema.id_from_object(list).to_s
- }
- graphql_mutation(:destroy_board_list, variables)
- end
-
- subject { post_graphql_mutation(mutation, current_user: current_user) }
-
- def mutation_response
- graphql_mutation_response(:destroy_board_list)
- end
-
- context 'when the user does not have permission' do
- it_behaves_like 'a mutation that returns a top-level access error'
-
- it 'does not destroy the list' do
- expect { subject }.not_to change { List.count }
- end
- end
-
- context 'when the user has permission' do
- before do
- project.add_maintainer(current_user)
- end
-
- context 'when given id is not for a list' do
- let_it_be(:list) { build_stubbed(:issue, project: project) }
-
- it 'returns an error' do
- subject
+ it_behaves_like 'board lists destroy request' do
+ let_it_be(:group, reload: true) { create(:group) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:list, refind: true) { create(:list, board: board) }
- expect(graphql_errors.first['message']).to include('does not represent an instance of List')
- end
+ let(:variables) do
+ {
+ list_id: GitlabSchema.id_from_object(list).to_s
+ }
end
- context 'when everything is ok' do
- it 'destroys the list' do
- expect { subject }.to change { List.count }.from(2).to(1)
- end
-
- it 'returns an empty list' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(mutation_response).to have_key('list')
- expect(mutation_response['list']).to be_nil
- end
+ let(:mutation) do
+ graphql_mutation(:destroy_board_list, variables)
end
- context 'when the list is not destroyable' do
- let_it_be(:list) { create(:list, board: board, list_type: :backlog) }
-
- it 'does not destroy the list' do
- expect { subject }.not_to change { List.count }.from(3)
- end
-
- it 'returns an error and not nil list' do
- subject
-
- expect(mutation_response['errors']).not_to be_empty
- expect(mutation_response['list']).not_to be_nil
- end
- end
+ let(:mutation_response) { graphql_mutation_response(:destroy_board_list) }
+ let(:klass) { List }
end
end
diff --git a/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb b/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb
index 8e24e053211..c7885879a9d 100644
--- a/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb
@@ -11,46 +11,9 @@ RSpec.describe 'Update of an existing board list' do
let_it_be(:list) { create(:list, board: board, position: 0) }
let_it_be(:list2) { create(:list, board: board) }
let_it_be(:input) { { list_id: list.to_global_id.to_s, position: 1, collapsed: true } }
+
let(:mutation) { graphql_mutation(:update_board_list, input) }
let(:mutation_response) { graphql_mutation_response(:update_board_list) }
- context 'the user is not allowed to read board lists' do
- it_behaves_like 'a mutation that returns a top-level access error'
- end
-
- before do
- list.update_preferences_for(current_user, collapsed: false)
- end
-
- context 'when user has permissions to admin board lists' do
- before do
- group.add_reporter(current_user)
- end
-
- it 'updates the list position and collapsed state' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['list']).to include(
- 'position' => 1,
- 'collapsed' => true
- )
- end
- end
-
- context 'when user has permissions to read board lists' do
- before do
- group.add_guest(current_user)
- end
-
- it 'updates the list collapsed state but not the list position' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['list']).to include(
- 'position' => 0,
- 'collapsed' => true
- )
- end
- end
+ it_behaves_like 'a GraphQL request to update board list'
end
diff --git a/spec/requests/api/graphql/mutations/ci/job_play_spec.rb b/spec/requests/api/graphql/mutations/ci/job_play_spec.rb
new file mode 100644
index 00000000000..0874e225259
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/job_play_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'JobPlay' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let_it_be(:job) { create(:ci_build, pipeline: pipeline, name: 'build') }
+
+ let(:mutation) do
+ variables = {
+ id: job.to_global_id.to_s
+ }
+ graphql_mutation(:job_play, variables,
+ <<-QL
+ errors
+ job {
+ id
+ }
+ QL
+ )
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:job_play) }
+
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ it 'returns an error if the user is not allowed to play the job' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ it 'plays a job' do
+ job_id = ::Gitlab::GlobalId.build(job, id: job.id).to_s
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['job']['id']).to eq(job_id)
+ end
+end
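
The `graphql_mutation(:job_play, variables, ...)` helper expands to a conventional GraphQL mutation document; roughly, the request this spec posts has the shape below (the build global ID is a placeholder, not a real record):

# Approximate mutation document built by graphql_mutation(:job_play, ...).
job_play_mutation = <<~QUERY
  mutation {
    jobPlay(input: { id: "gid://gitlab/Ci::Build/1" }) {
      errors
      job {
        id
      }
    }
  }
QUERY
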
diff --git a/spec/requests/api/graphql/mutations/ci/job_retry_spec.rb b/spec/requests/api/graphql/mutations/ci/job_retry_spec.rb
new file mode 100644
index 00000000000..a14935379dc
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/ci/job_retry_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'JobRetry' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let_it_be(:job) { create(:ci_build, :success, pipeline: pipeline, name: 'build') }
+
+ let(:mutation) do
+ variables = {
+ id: job.to_global_id.to_s
+ }
+ graphql_mutation(:job_retry, variables,
+ <<-QL
+ errors
+ job {
+ id
+ }
+ QL
+ )
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:job_retry) }
+
+ before_all do
+ project.add_maintainer(user)
+ end
+
+ it 'returns an error if the user is not allowed to retry the job' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ it 'retries a job' do
+ job_id = ::Gitlab::GlobalId.build(job, id: job.id).to_s
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['job']['id']).to eq(job_id)
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/create_spec.rb b/spec/requests/api/graphql/mutations/issues/create_spec.rb
index 39b408faa90..66450f8c604 100644
--- a/spec/requests/api/graphql/mutations/issues/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/create_spec.rb
@@ -20,7 +20,8 @@ RSpec.describe 'Create an issue' do
'title' => 'new title',
'description' => 'new description',
'confidential' => true,
- 'dueDate' => Date.tomorrow.strftime('%Y-%m-%d')
+ 'dueDate' => Date.tomorrow.strftime('%Y-%m-%d'),
+ 'type' => 'ISSUE'
}
end
@@ -37,7 +38,7 @@ RSpec.describe 'Create an issue' do
project.add_developer(current_user)
end
- it 'updates the issue' do
+ it 'creates the issue' do
post_graphql_mutation(mutation, current_user: current_user)
expect(response).to have_gitlab_http_status(:success)
diff --git a/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb b/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
index b3c9b9d4995..ea5be9f9852 100644
--- a/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
@@ -42,11 +42,34 @@ RSpec.describe 'Setting Due Date of an issue' do
expect(graphql_errors).to include(a_hash_including('message' => error))
end
- it 'updates the issue due date' do
- post_graphql_mutation(mutation, current_user: current_user)
+ context 'when due date value is a valid date' do
+ it 'updates the issue due date' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['issue']['dueDate']).to eq(2.days.since.to_date.to_s)
+ end
+ end
+
+ context 'when due date value is null' do
+ let(:input) { { due_date: nil } }
+
+ it 'updates the issue to remove the due date' do
+ post_graphql_mutation(mutation, current_user: current_user)
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['issue']['dueDate']).to eq(2.days.since.to_date.to_s)
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['issue']['dueDate']).to be nil
+ end
+ end
+
+ context 'when due date argument is not given' do
+ let(:input) { {} }
+
+ it 'returns an error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(graphql_errors).to include(a_hash_including('message' => /Argument dueDate must be provided/))
+ end
end
context 'when the due date value is not a valid time' do
diff --git a/spec/requests/api/graphql/mutations/issues/update_spec.rb b/spec/requests/api/graphql/mutations/issues/update_spec.rb
index 71f25dbbe49..adfa2a2bc08 100644
--- a/spec/requests/api/graphql/mutations/issues/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/update_spec.rb
@@ -14,7 +14,8 @@ RSpec.describe 'Update of an existing issue' do
'title' => 'new title',
'description' => 'new description',
'confidential' => true,
- 'dueDate' => Date.tomorrow.strftime('%Y-%m-%d')
+ 'dueDate' => Date.tomorrow.strftime('%Y-%m-%d'),
+ 'type' => 'ISSUE'
}
end
diff --git a/spec/requests/api/graphql/mutations/labels/create_spec.rb b/spec/requests/api/graphql/mutations/labels/create_spec.rb
index 28284408306..ca3ccc8e06c 100644
--- a/spec/requests/api/graphql/mutations/labels/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/labels/create_spec.rb
@@ -11,7 +11,8 @@ RSpec.describe Mutations::Labels::Create do
{
'title' => 'foo',
'description' => 'some description',
- 'color' => '#FF0000'
+ 'color' => '#FF0000',
+ 'removeOnClose' => true
}
end
diff --git a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
index 749373e7b8d..202e7e7c333 100644
--- a/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/namespace/package_settings/update_spec.rb
@@ -12,7 +12,9 @@ RSpec.describe 'Updating the package settings' do
{
namespace_path: namespace.full_path,
maven_duplicates_allowed: false,
- maven_duplicate_exception_regex: 'foo-.*'
+ maven_duplicate_exception_regex: 'foo-.*',
+ generic_duplicates_allowed: false,
+ generic_duplicate_exception_regex: 'bar-.*'
}
end
@@ -22,6 +24,8 @@ RSpec.describe 'Updating the package settings' do
packageSettings {
mavenDuplicatesAllowed
mavenDuplicateExceptionRegex
+ genericDuplicatesAllowed
+ genericDuplicateExceptionRegex
}
errors
QL
@@ -40,6 +44,8 @@ RSpec.describe 'Updating the package settings' do
expect(mutation_response['errors']).to be_empty
expect(package_settings_response['mavenDuplicatesAllowed']).to eq(params[:maven_duplicates_allowed])
expect(package_settings_response['mavenDuplicateExceptionRegex']).to eq(params[:maven_duplicate_exception_regex])
+ expect(package_settings_response['genericDuplicatesAllowed']).to eq(params[:generic_duplicates_allowed])
+ expect(package_settings_response['genericDuplicateExceptionRegex']).to eq(params[:generic_duplicate_exception_regex])
end
end
@@ -69,8 +75,8 @@ RSpec.describe 'Updating the package settings' do
RSpec.shared_examples 'accepting the mutation request updating the package settings' do
it_behaves_like 'updating the namespace package setting attributes',
- from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT' },
- to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'foo-.*' }
+ from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT', generic_duplicates_allowed: true, generic_duplicate_exception_regex: 'foo' },
+ to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'foo-.*', generic_duplicates_allowed: false, generic_duplicate_exception_regex: 'bar-.*' }
it_behaves_like 'returning a success'
it_behaves_like 'rejecting invalid regex'
diff --git a/spec/requests/api/graphql/mutations/security/ci_configuration/configure_secret_detection_spec.rb b/spec/requests/api/graphql/mutations/security/ci_configuration/configure_secret_detection_spec.rb
new file mode 100644
index 00000000000..23a154b71a0
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/security/ci_configuration/configure_secret_detection_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'ConfigureSecretDetection' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :test_repo) }
+
+ let(:variables) { { project_path: project.full_path } }
+ let(:mutation) { graphql_mutation(:configure_secret_detection, variables) }
+ let(:mutation_response) { graphql_mutation_response(:configureSecretDetection) }
+
+ context 'when authorized' do
+ let_it_be(:user) { project.owner }
+
+ it 'creates a branch with secret detection configured' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors']).to be_empty
+ expect(mutation_response['branch']).not_to be_empty
+ expect(mutation_response['successPath']).not_to be_empty
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/packages/composer_spec.rb b/spec/requests/api/graphql/packages/composer_spec.rb
new file mode 100644
index 00000000000..34137a07c34
--- /dev/null
+++ b/spec/requests/api/graphql/packages/composer_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'package details' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:composer_package) { create(:composer_package, project: project) }
+ let_it_be(:composer_json) { { name: 'name', type: 'type', license: 'license', version: 1 } }
+ let_it_be(:composer_metadatum) do
+ # create the metadatum manually (instead of via the factory) to force the sha to be a string
+ # and avoid an error where gitaly can't find the repository
+ create(:composer_metadatum, package: composer_package, target_sha: 'foo_sha', composer_json: composer_json)
+ end
+
+ let(:depth) { 3 }
+ let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
+ let(:metadata) { query_graphql_fragment('ComposerMetadata') }
+ let(:package_files) { all_graphql_fields_for('PackageFile') }
+ let(:user) { project.owner }
+ let(:package_global_id) { global_id_of(composer_package) }
+ let(:package_details) { graphql_data_at(:package) }
+ let(:metadata_response) { graphql_data_at(:package, :metadata) }
+ let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }
+
+ let(:query) do
+ graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
+ #{all_graphql_fields_for('PackageDetailsType', max_depth: depth, excluded: excluded)}
+ metadata {
+ #{metadata}
+ }
+ packageFiles {
+ nodes {
+ #{package_files}
+ }
+ }
+ FIELDS
+ end
+
+ subject { post_graphql(query, current_user: user) }
+
+ before do
+ subject
+ end
+
+ it_behaves_like 'a working graphql query' do
+ it 'matches the JSON schema' do
+ expect(package_details).to match_schema('graphql/packages/package_details')
+ end
+ end
+
+ describe 'Composer' do
+ it 'has the correct metadata' do
+ expect(metadata_response).to include(
+ 'targetSha' => 'foo_sha',
+ 'composerJson' => composer_json.transform_keys(&:to_s).transform_values(&:to_s)
+ )
+ end
+
+ it 'does not have files' do
+ expect(package_files_response).to be_empty
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/packages/conan_spec.rb b/spec/requests/api/graphql/packages/conan_spec.rb
new file mode 100644
index 00000000000..dc64c5057d5
--- /dev/null
+++ b/spec/requests/api/graphql/packages/conan_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'conan package details' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:conan_package) { create(:conan_package, project: project) }
+
+ let(:package_global_id) { global_id_of(conan_package) }
+ let(:metadata) { query_graphql_fragment('ConanMetadata') }
+ let(:first_file) { conan_package.package_files.find { |f| global_id_of(f) == first_file_response['id'] } }
+
+ let(:depth) { 3 }
+ let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
+ let(:package_files) { all_graphql_fields_for('PackageFile') }
+ let(:package_files_metadata) { query_graphql_fragment('ConanFileMetadata') }
+
+ let(:user) { project.owner }
+ let(:package_details) { graphql_data_at(:package) }
+ let(:metadata_response) { graphql_data_at(:package, :metadata) }
+ let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }
+ let(:first_file_response) { graphql_data_at(:package, :package_files, :nodes, 0) }
+ let(:first_file_response_metadata) { graphql_data_at(:package, :package_files, :nodes, 0, :file_metadata) }
+
+ let(:query) do
+ graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
+ #{all_graphql_fields_for('PackageDetailsType', max_depth: depth, excluded: excluded)}
+ metadata {
+ #{metadata}
+ }
+ packageFiles {
+ nodes {
+ #{package_files}
+ fileMetadata {
+ #{package_files_metadata}
+ }
+ }
+ }
+ FIELDS
+ end
+
+ subject { post_graphql(query, current_user: user) }
+
+ before do
+ subject
+ end
+
+ it_behaves_like 'a working graphql query' do
+ it 'matches the JSON schema' do
+ expect(package_details).to match_schema('graphql/packages/package_details')
+ end
+ end
+
+ it 'has the correct metadata' do
+ expect(metadata_response).to include(
+ 'id' => global_id_of(conan_package.conan_metadatum),
+ 'recipe' => conan_package.conan_metadatum.recipe,
+ 'packageChannel' => conan_package.conan_metadatum.package_channel,
+ 'packageUsername' => conan_package.conan_metadatum.package_username,
+ 'recipePath' => conan_package.conan_metadatum.recipe_path
+ )
+ end
+
+ it 'has the right amount of files' do
+ expect(package_files_response.length).to be(conan_package.package_files.length)
+ end
+
+ it 'has the basic package files data' do
+ expect(first_file_response).to include(
+ 'id' => global_id_of(first_file),
+ 'fileName' => first_file.file_name,
+ 'size' => first_file.size.to_s,
+ 'downloadPath' => first_file.download_path,
+ 'fileSha1' => first_file.file_sha1,
+ 'fileMd5' => first_file.file_md5,
+ 'fileSha256' => first_file.file_sha256
+ )
+ end
+
+ it 'has the correct file metadata' do
+ expect(first_file_response_metadata).to include(
+ 'id' => global_id_of(first_file.conan_file_metadatum),
+ 'packageRevision' => first_file.conan_file_metadatum.package_revision,
+ 'conanPackageReference' => first_file.conan_file_metadatum.conan_package_reference,
+ 'recipeRevision' => first_file.conan_file_metadatum.recipe_revision,
+ 'conanFileType' => first_file.conan_file_metadatum.conan_file_type.upcase
+ )
+ end
+end
diff --git a/spec/requests/api/graphql/packages/maven_spec.rb b/spec/requests/api/graphql/packages/maven_spec.rb
new file mode 100644
index 00000000000..8b6b5ea0986
--- /dev/null
+++ b/spec/requests/api/graphql/packages/maven_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'maven package details' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:maven_package) { create(:maven_package, project: project) }
+
+ let(:package_global_id) { global_id_of(maven_package) }
+ let(:metadata) { query_graphql_fragment('MavenMetadata') }
+ let(:first_file) { maven_package.package_files.find { |f| global_id_of(f) == first_file_response['id'] } }
+
+ let(:depth) { 3 }
+ let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
+ let(:package_files) { all_graphql_fields_for('PackageFile') }
+
+ let(:user) { project.owner }
+ let(:package_details) { graphql_data_at(:package) }
+ let(:metadata_response) { graphql_data_at(:package, :metadata) }
+ let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }
+ let(:first_file_response) { graphql_data_at(:package, :package_files, :nodes, 0) }
+
+ let(:query) do
+ graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
+ #{all_graphql_fields_for('PackageDetailsType', max_depth: depth, excluded: excluded)}
+ metadata {
+ #{metadata}
+ }
+ packageFiles {
+ nodes {
+ #{package_files}
+ }
+ }
+ FIELDS
+ end
+
+ subject { post_graphql(query, current_user: user) }
+
+ shared_examples 'a working maven package' do
+ before do
+ subject
+ end
+
+ it_behaves_like 'a working graphql query' do
+ it 'matches the JSON schema' do
+ expect(package_details).to match_schema('graphql/packages/package_details')
+ end
+ end
+
+ it 'has the correct metadata' do
+ expect(metadata_response).to include(
+ 'id' => global_id_of(maven_package.maven_metadatum),
+ 'path' => maven_package.maven_metadatum.path,
+ 'appGroup' => maven_package.maven_metadatum.app_group,
+ 'appVersion' => maven_package.maven_metadatum.app_version,
+ 'appName' => maven_package.maven_metadatum.app_name
+ )
+ end
+
+ it 'has the right amount of files' do
+ expect(package_files_response.length).to be(maven_package.package_files.length)
+ end
+
+ it 'has the basic package files data' do
+ expect(first_file_response).to include(
+ 'id' => global_id_of(first_file),
+ 'fileName' => first_file.file_name,
+ 'size' => first_file.size.to_s,
+ 'downloadPath' => first_file.download_path,
+ 'fileSha1' => first_file.file_sha1,
+ 'fileMd5' => first_file.file_md5,
+ 'fileSha256' => first_file.file_sha256
+ )
+ end
+ end
+
+ context 'a maven package with version' do
+ it_behaves_like "a working maven package"
+ end
+
+ context 'a versionless maven package' do
+ let_it_be(:maven_metadatum) { create(:maven_metadatum, app_version: nil) }
+ let_it_be(:maven_package) { create(:maven_package, project: project, version: nil, maven_metadatum: maven_metadatum) }
+
+ it_behaves_like "a working maven package"
+
+ it "has an empty version" do
+ subject
+
+ expect(metadata_response['appVersion']).to eq(nil)
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/packages/nuget_spec.rb b/spec/requests/api/graphql/packages/nuget_spec.rb
new file mode 100644
index 00000000000..fa9d8a0e37e
--- /dev/null
+++ b/spec/requests/api/graphql/packages/nuget_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'nuget package details' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:nuget_package) { create(:nuget_package, :with_metadatum, project: project) }
+
+ let(:package_global_id) { global_id_of(nuget_package) }
+ let(:metadata) { query_graphql_fragment('NugetMetadata') }
+ let(:first_file) { nuget_package.package_files.find { |f| global_id_of(f) == first_file_response['id'] } }
+
+ let(:depth) { 3 }
+ let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
+ let(:package_files) { all_graphql_fields_for('PackageFile') }
+
+ let(:user) { project.owner }
+ let(:package_details) { graphql_data_at(:package) }
+ let(:metadata_response) { graphql_data_at(:package, :metadata) }
+ let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }
+ let(:first_file_response) { graphql_data_at(:package, :package_files, :nodes, 0) }
+
+ let(:query) do
+ graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
+ #{all_graphql_fields_for('PackageDetailsType', max_depth: depth, excluded: excluded)}
+ metadata {
+ #{metadata}
+ }
+ packageFiles {
+ nodes {
+ #{package_files}
+ }
+ }
+ FIELDS
+ end
+
+ subject { post_graphql(query, current_user: user) }
+
+ before do
+ subject
+ end
+
+ it_behaves_like 'a working graphql query' do
+ it 'matches the JSON schema' do
+ expect(package_details).to match_schema('graphql/packages/package_details')
+ end
+ end
+
+ it 'has the correct metadata' do
+ expect(metadata_response).to include(
+ 'id' => global_id_of(nuget_package.nuget_metadatum),
+ 'licenseUrl' => nuget_package.nuget_metadatum.license_url,
+ 'projectUrl' => nuget_package.nuget_metadatum.project_url,
+ 'iconUrl' => nuget_package.nuget_metadatum.icon_url
+ )
+ end
+
+ it 'has the right amount of files' do
+ expect(package_files_response.length).to be(nuget_package.package_files.length)
+ end
+
+ it 'has the basic package files data' do
+ expect(first_file_response).to include(
+ 'id' => global_id_of(first_file),
+ 'fileName' => first_file.file_name,
+ 'size' => first_file.size.to_s,
+ 'downloadPath' => first_file.download_path,
+ 'fileSha1' => first_file.file_sha1,
+ 'fileMd5' => first_file.file_md5,
+ 'fileSha256' => first_file.file_sha256
+ )
+ end
+end
diff --git a/spec/requests/api/graphql/packages/package_spec.rb b/spec/requests/api/graphql/packages/package_spec.rb
index a0131c7733e..83ea9ff4dc8 100644
--- a/spec/requests/api/graphql/packages/package_spec.rb
+++ b/spec/requests/api/graphql/packages/package_spec.rb
@@ -17,7 +17,9 @@ RSpec.describe 'package details' do
let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
let(:metadata) { query_graphql_fragment('ComposerMetadata') }
let(:package_files) {all_graphql_fields_for('PackageFile')}
- let(:package_files_metadata) {query_graphql_fragment('ConanFileMetadata')}
+ let(:user) { project.owner }
+ let(:package_global_id) { global_id_of(composer_package) }
+ let(:package_details) { graphql_data_at(:package) }
let(:query) do
graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
@@ -28,22 +30,11 @@ RSpec.describe 'package details' do
packageFiles {
nodes {
#{package_files}
- fileMetadata {
- #{package_files_metadata}
- }
}
}
FIELDS
end
- let(:user) { project.owner }
- let(:package_global_id) { global_id_of(composer_package) }
- let(:package_details) { graphql_data_at(:package) }
- let(:metadata_response) { graphql_data_at(:package, :metadata) }
- let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }
- let(:first_file_response) { graphql_data_at(:package, :package_files, :nodes, 0)}
- let(:first_file_response_metadata) { graphql_data_at(:package, :package_files, :nodes, 0, :file_metadata)}
-
subject { post_graphql(query, current_user: user) }
it_behaves_like 'a working graphql query' do
@@ -56,69 +47,6 @@ RSpec.describe 'package details' do
end
end
- describe 'Packages Metadata' do
- before do
- subject
- end
-
- describe 'Composer' do
- it 'has the correct metadata' do
- expect(metadata_response).to include(
- 'targetSha' => 'foo_sha',
- 'composerJson' => composer_json.transform_keys(&:to_s).transform_values(&:to_s)
- )
- end
-
- it 'does not have files' do
- expect(package_files_response).to be_empty
- end
- end
-
- describe 'Conan' do
- let_it_be(:conan_package) { create(:conan_package, project: project) }
-
- let(:package_global_id) { global_id_of(conan_package) }
- let(:metadata) { query_graphql_fragment('ConanMetadata') }
- let(:first_file) { conan_package.package_files.find { |f| global_id_of(f) == first_file_response['id'] } }
-
- it 'has the correct metadata' do
- expect(metadata_response).to include(
- 'id' => global_id_of(conan_package.conan_metadatum),
- 'recipe' => conan_package.conan_metadatum.recipe,
- 'packageChannel' => conan_package.conan_metadatum.package_channel,
- 'packageUsername' => conan_package.conan_metadatum.package_username,
- 'recipePath' => conan_package.conan_metadatum.recipe_path
- )
- end
-
- it 'has the right amount of files' do
- expect(package_files_response.length).to be(conan_package.package_files.length)
- end
-
- it 'has the basic package files data' do
- expect(first_file_response).to include(
- 'id' => global_id_of(first_file),
- 'fileName' => first_file.file_name,
- 'size' => first_file.size.to_s,
- 'downloadPath' => first_file.download_path,
- 'fileSha1' => first_file.file_sha1,
- 'fileMd5' => first_file.file_md5,
- 'fileSha256' => first_file.file_sha256
- )
- end
-
- it 'has the correct file metadata' do
- expect(first_file_response_metadata).to include(
- 'id' => global_id_of(first_file.conan_file_metadatum),
- 'packageRevision' => first_file.conan_file_metadatum.package_revision,
- 'conanPackageReference' => first_file.conan_file_metadatum.conan_package_reference,
- 'recipeRevision' => first_file.conan_file_metadatum.recipe_revision,
- 'conanFileType' => first_file.conan_file_metadatum.conan_file_type.upcase
- )
- end
- end
- end
-
context 'there are other versions of this package' do
let(:depth) { 3 }
let(:excluded) { %w[metadata project tags pipelines] } # to limit the query complexity
diff --git a/spec/requests/api/graphql/project/issue/design_collection/versions_spec.rb b/spec/requests/api/graphql/project/issue/design_collection/versions_spec.rb
index ee0085718b3..9d98498ca8a 100644
--- a/spec/requests/api/graphql/project/issue/design_collection/versions_spec.rb
+++ b/spec/requests/api/graphql/project/issue/design_collection/versions_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe 'Getting versions related to an issue' do
let(:version_params) { nil }
let(:version_query_fields) { ['edges { node { sha } }'] }
+ let(:edges_path) { %w[project issue designCollection versions edges] }
let(:project) { issue.project }
let(:current_user) { owner }
@@ -50,8 +51,7 @@ RSpec.describe 'Getting versions related to an issue' do
end
def response_values(data = graphql_data, key = 'sha')
- path = %w[project issue designCollection versions edges]
- data.dig(*path).map { |e| e.dig('node', key) }
+ data.dig(*edges_path).map { |e| e.dig('node', key) }
end
before do
@@ -64,6 +64,19 @@ RSpec.describe 'Getting versions related to an issue' do
expect(response_values).to match_array([version_a, version_b, version_c, version_d].map(&:sha))
end
+ context 'with all fields requested' do
+ let(:version_query_fields) do
+ ['edges { node { id sha createdAt author { id } } }']
+ end
+
+ it 'returns correct data' do
+ post_graphql(query, current_user: current_user)
+
+ keys = graphql_data.dig(*edges_path).first['node'].keys
+ expect(keys).to match_array(%w(id sha createdAt author))
+ end
+ end
+
describe 'filter by sha' do
let(:sha) { version_b.sha }
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index 15551005502..438ea9bb4c1 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -311,23 +311,23 @@ RSpec.describe 'getting merge request information nested in a project' do
end
end
- context 'when requesting information about MR interactions' do
+ shared_examples 'when requesting information about MR interactions' do
let_it_be(:user) { create(:user) }
let(:selected_fields) { all_graphql_fields_for('UserMergeRequestInteraction') }
let(:mr_fields) do
query_nodes(
- :reviewers,
+ field,
query_graphql_field(:merge_request_interaction, nil, selected_fields)
)
end
def interaction_data
- graphql_data_at(:project, :merge_request, :reviewers, :nodes, :merge_request_interaction)
+ graphql_data_at(:project, :merge_request, field, :nodes, :merge_request_interaction)
end
- context 'when the user does not have interactions' do
+ context 'when the user is not assigned' do
it 'returns null data' do
post_graphql(query)
@@ -338,7 +338,7 @@ RSpec.describe 'getting merge request information nested in a project' do
context 'when the user is a reviewer, but has not reviewed' do
before do
project.add_guest(user)
- merge_request.merge_request_reviewers.create!(reviewer: user)
+ assign_user(user)
end
it 'returns falsey values' do
@@ -346,8 +346,8 @@ RSpec.describe 'getting merge request information nested in a project' do
expect(interaction_data).to contain_exactly a_hash_including(
'canMerge' => false,
- 'canUpdate' => false,
- 'reviewState' => 'UNREVIEWED',
+ 'canUpdate' => can_update,
+ 'reviewState' => unreviewed,
'reviewed' => false,
'approved' => false
)
@@ -357,7 +357,9 @@ RSpec.describe 'getting merge request information nested in a project' do
context 'when the user has interacted' do
before do
project.add_maintainer(user)
- merge_request.merge_request_reviewers.create!(reviewer: user, state: 'reviewed')
+ assign_user(user)
+ r = merge_request.merge_request_reviewers.find_or_create_by!(reviewer: user)
+ r.update!(state: 'reviewed')
merge_request.approved_by_users << user
end
@@ -392,7 +394,10 @@ RSpec.describe 'getting merge request information nested in a project' do
end
it 'does not suffer from N+1' do
- merge_request.merge_request_reviewers.create!(reviewer: user, state: 'reviewed')
+ assign_user(user)
+ merge_request.merge_request_reviewers
+ .find_or_create_by!(reviewer: user)
+ .update!(state: 'reviewed')
baseline = ActiveRecord::QueryRecorder.new do
post_graphql(query)
@@ -401,7 +406,8 @@ RSpec.describe 'getting merge request information nested in a project' do
expect(interaction_data).to contain_exactly(include(reviewed))
other_users.each do |user|
- merge_request.merge_request_reviewers.create!(reviewer: user)
+ assign_user(user)
+ merge_request.merge_request_reviewers.find_or_create_by!(reviewer: user)
end
expect { post_graphql(query) }.not_to exceed_query_limit(baseline)
@@ -435,4 +441,24 @@ RSpec.describe 'getting merge request information nested in a project' do
end
end
end
+
+ it_behaves_like 'when requesting information about MR interactions' do
+ let(:field) { :reviewers }
+ let(:unreviewed) { 'UNREVIEWED' }
+ let(:can_update) { false }
+
+ def assign_user(user)
+ merge_request.merge_request_reviewers.create!(reviewer: user)
+ end
+ end
+
+ it_behaves_like 'when requesting information about MR interactions' do
+ let(:field) { :assignees }
+ let(:unreviewed) { nil }
+ let(:can_update) { true } # assignees can update MRs
+
+ def assign_user(user)
+ merge_request.assignees << user
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/packages_spec.rb b/spec/requests/api/graphql/project/packages_spec.rb
index 3c04e0caf61..d9ee997eb02 100644
--- a/spec/requests/api/graphql/project/packages_spec.rb
+++ b/spec/requests/api/graphql/project/packages_spec.rb
@@ -7,37 +7,10 @@ RSpec.describe 'getting a package list for a project' do
let_it_be(:resource) { create(:project, :repository) }
let_it_be(:current_user) { create(:user) }
+ let_it_be(:project1) { resource }
+ let_it_be(:project2) { resource }
- let_it_be(:package) { create(:package, project: resource) }
- let_it_be(:maven_package) { create(:maven_package, project: resource) }
- let_it_be(:debian_package) { create(:debian_package, project: resource) }
- let_it_be(:composer_package) { create(:composer_package, project: resource) }
- let_it_be(:composer_metadatum) do
- create(:composer_metadatum, package: composer_package,
- target_sha: 'afdeh',
- composer_json: { name: 'x', type: 'y', license: 'z', version: 1 })
- end
-
- let(:package_names) { graphql_data_at(:project, :packages, :nodes, :name) }
- let(:target_shas) { graphql_data_at(:project, :packages, :nodes, :metadata, :target_sha) }
- let(:packages) { graphql_data_at(:project, :packages, :nodes) }
-
- let(:fields) do
- <<~QUERY
- nodes {
- #{all_graphql_fields_for('packages'.classify, excluded: ['project'])}
- metadata { #{query_graphql_fragment('ComposerMetadata')} }
- }
- QUERY
- end
-
- let(:query) do
- graphql_query_for(
- 'project',
- { 'fullPath' => resource.full_path },
- query_graphql_field('packages', {}, fields)
- )
- end
+ let(:resource_type) { :project }
it_behaves_like 'group and project packages query'
end
diff --git a/spec/requests/api/graphql/project/project_members_spec.rb b/spec/requests/api/graphql/project/project_members_spec.rb
index 984a0adb8c6..c08bb8dc0a0 100644
--- a/spec/requests/api/graphql/project/project_members_spec.rb
+++ b/spec/requests/api/graphql/project/project_members_spec.rb
@@ -78,6 +78,22 @@ RSpec.describe 'getting project members information' do
.to include('path' => %w[query project projectMembers relations],
'message' => a_string_including('invalid value ([OBLIQUE])'))
end
+
+ context 'when project is owned by a member' do
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
+ before_all do
+ project.add_guest(child_user)
+ project.add_guest(invited_user)
+ end
+
+ it 'returns the owner in the response' do
+ fetch_members(project: project)
+
+ expect(graphql_errors).to be_nil
+ expect_array_response(user, child_user, invited_user)
+ end
+ end
end
context 'when unauthenticated' do
diff --git a/spec/requests/api/graphql/project/release_spec.rb b/spec/requests/api/graphql/project/release_spec.rb
index 72197f00df4..7f24d051457 100644
--- a/spec/requests/api/graphql/project/release_spec.rb
+++ b/spec/requests/api/graphql/project/release_spec.rb
@@ -370,23 +370,6 @@ RSpec.describe 'Query.project(fullPath).release(tagName)' do
end
end
- describe 'ensures that the release data can be contolled by a feature flag' do
- context 'when the graphql_release_data feature flag is disabled' do
- let_it_be(:project) { create(:project, :repository, :public) }
- let_it_be(:release) { create(:release, project: project) }
-
- let(:current_user) { developer }
-
- before do
- stub_feature_flags(graphql_release_data: false)
-
- project.add_developer(developer)
- end
-
- it_behaves_like 'no access to the release field'
- end
- end
-
describe 'upcoming release' do
let(:path) { path_prefix }
let(:project) { create(:project, :repository, :private) }
diff --git a/spec/requests/api/graphql/project/releases_spec.rb b/spec/requests/api/graphql/project/releases_spec.rb
index 6e364c7d7b5..43732c2ed18 100644
--- a/spec/requests/api/graphql/project/releases_spec.rb
+++ b/spec/requests/api/graphql/project/releases_spec.rb
@@ -295,23 +295,6 @@ RSpec.describe 'Query.project(fullPath).releases()' do
end
end
- describe 'ensures that the release data can be contolled by a feature flag' do
- context 'when the graphql_release_data feature flag is disabled' do
- let_it_be(:project) { create(:project, :repository, :public) }
- let_it_be(:release) { create(:release, project: project) }
-
- let(:current_user) { developer }
-
- before do
- stub_feature_flags(graphql_release_data: false)
-
- project.add_developer(developer)
- end
-
- it_behaves_like 'no access to any release data'
- end
- end
-
describe 'sorting behavior' do
let_it_be(:today) { Time.now }
let_it_be(:yesterday) { today - 1.day }
diff --git a/spec/requests/api/graphql/project/repository_spec.rb b/spec/requests/api/graphql/project/repository_spec.rb
index a4984688557..bddd300e27f 100644
--- a/spec/requests/api/graphql/project/repository_spec.rb
+++ b/spec/requests/api/graphql/project/repository_spec.rb
@@ -36,6 +36,30 @@ RSpec.describe 'getting a repository in a project' do
end
end
+ context 'as a non-admin' do
+ let(:current_user) { create(:user) }
+
+ before do
+ project.add_role(current_user, :developer)
+ end
+
+ it 'does not return diskPath' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data['project']['repository']).not_to be_nil
+ expect(graphql_data['project']['repository']['diskPath']).to be_nil
+ end
+ end
+
+ context 'as an admin' do
+ it 'returns diskPath' do
+ post_graphql(query, current_user: create(:admin))
+
+ expect(graphql_data['project']['repository']).not_to be_nil
+ expect(graphql_data['project']['repository']['diskPath']).to eq project.disk_path
+ end
+ end
+
context 'when the repository is only accessible to members' do
let(:project) do
create(:project, :public, :repository, repository_access_level: ProjectFeature::PRIVATE)
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index 2cdd7273b18..b367bbaaf43 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -57,6 +57,22 @@ RSpec.describe 'getting project information' do
end
end
+ context 'topics' do
+ it 'includes empty topics array if no topics set' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :topics)).to match([])
+ end
+
+ it 'includes topics array' do
+ project.update!(tag_list: 'topic1, topic2, topic3')
+
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :topics)).to match(%w[topic1 topic2 topic3])
+ end
+ end
+
it 'includes inherited members in project_members' do
group_member = create(:group_member, group: group)
project_member = create(:project_member, project: project)
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index 3a1bcfc69b8..a336d74b135 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe 'GraphQL' do
login_as(user)
get('/')
- post '/api/graphql', params: { query: query }, headers: { 'X-CSRF-Token' => response.session['_csrf_token'] }
+ post '/api/graphql', params: { query: query }, headers: { 'X-CSRF-Token' => session['_csrf_token'] }
expect(graphql_data['echo']).to eq("\"#{user.username}\" says: Hello world")
end
@@ -283,25 +283,50 @@ RSpec.describe 'GraphQL' do
)
end
- it 'paginates datetimes correctly when they have millisecond data' do
- # let's make sure we're actually querying a timestamp, just in case
- expect(Gitlab::Graphql::Pagination::Keyset::QueryBuilder)
- .to receive(:new).with(anything, anything, hash_including('created_at'), anything).and_call_original
+ context 'when new_graphql_keyset_pagination feature flag is off' do
+ before do
+ stub_feature_flags(new_graphql_keyset_pagination: false)
+ end
+
+ it 'paginates datetimes correctly when they have millisecond data' do
+ # let's make sure we're actually querying a timestamp, just in case
+ expect(Gitlab::Graphql::Pagination::Keyset::QueryBuilder)
+ .to receive(:new).with(anything, anything, hash_including('created_at'), anything).and_call_original
+
+ execute_query
+ first_page = graphql_data
+ edges = first_page.dig(*issues_edges)
+ cursor = first_page.dig(*end_cursor)
+
+ expect(edges.count).to eq(6)
+ expect(edges.last['node']['iid']).to eq(issues[4].iid.to_s)
- execute_query
- first_page = graphql_data
- edges = first_page.dig(*issues_edges)
- cursor = first_page.dig(*end_cursor)
+ execute_query(after: cursor)
+ second_page = graphql_data
+ edges = second_page.dig(*issues_edges)
- expect(edges.count).to eq(6)
- expect(edges.last['node']['iid']).to eq(issues[4].iid.to_s)
+ expect(edges.count).to eq(4)
+ expect(edges.last['node']['iid']).to eq(issues[0].iid.to_s)
+ end
+ end
+
+ context 'when new_graphql_keyset_pagination feature flag is on' do
+ it 'paginates datetimes correctly when they have millisecond data' do
+ execute_query
+ first_page = graphql_data
+ edges = first_page.dig(*issues_edges)
+ cursor = first_page.dig(*end_cursor)
- execute_query(after: cursor)
- second_page = graphql_data
- edges = second_page.dig(*issues_edges)
+ expect(edges.count).to eq(6)
+ expect(edges.last['node']['iid']).to eq(issues[4].iid.to_s)
- expect(edges.count).to eq(4)
- expect(edges.last['node']['iid']).to eq(issues[0].iid.to_s)
+ execute_query(after: cursor)
+ second_page = graphql_data
+ edges = second_page.dig(*issues_edges)
+
+ expect(edges.count).to eq(4)
+ expect(edges.last['node']['iid']).to eq(issues[0].iid.to_s)
+ end
end
end
end
diff --git a/spec/requests/api/group_export_spec.rb b/spec/requests/api/group_export_spec.rb
index 50a1e9d0c3d..8309e2ba7c1 100644
--- a/spec/requests/api/group_export_spec.rb
+++ b/spec/requests/api/group_export_spec.rb
@@ -178,4 +178,74 @@ RSpec.describe API::GroupExport do
end
end
end
+
+ describe 'relations export' do
+ let(:path) { "/groups/#{group.id}/export_relations" }
+ let(:download_path) { "/groups/#{group.id}/export_relations/download?relation=labels" }
+ let(:status_path) { "/groups/#{group.id}/export_relations/status" }
+
+ before do
+ stub_feature_flags(group_import_export: true)
+ group.add_owner(user)
+ end
+
+ describe 'POST /groups/:id/export_relations' do
+ it 'accepts the request' do
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+
+ context 'when response is not success' do
+ it 'returns api error' do
+ allow_next_instance_of(BulkImports::ExportService) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.error(message: 'error', http_status: :error))
+ end
+
+ post api(path, user)
+
+ expect(response).to have_gitlab_http_status(:error)
+ end
+ end
+ end
+
+ describe 'GET /groups/:id/export_relations/download' do
+ let(:export) { create(:bulk_import_export, group: group, relation: 'labels') }
+ let(:upload) { create(:bulk_import_export_upload, export: export) }
+
+ context 'when export file exists' do
+ it 'downloads exported group archive' do
+ upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/labels.ndjson.gz'))
+
+ get api(download_path, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when export_file.file does not exist' do
+ it 'returns 404' do
+ allow(upload).to receive(:export_file).and_return(nil)
+
+ get api(download_path, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'GET /groups/:id/export_relations/status' do
+ it 'returns a list of relation export statuses' do
+ create(:bulk_import_export, :started, group: group, relation: 'labels')
+ create(:bulk_import_export, :finished, group: group, relation: 'milestones')
+ create(:bulk_import_export, :failed, group: group, relation: 'badges')
+
+ get api(status_path, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.pluck('relation')).to contain_exactly('labels', 'milestones', 'badges')
+ expect(json_response.pluck('status')).to contain_exactly(-1, 0, 1)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/group_labels_spec.rb b/spec/requests/api/group_labels_spec.rb
index c677e68b285..900ffe6dfc7 100644
--- a/spec/requests/api/group_labels_spec.rb
+++ b/spec/requests/api/group_labels_spec.rb
@@ -290,7 +290,7 @@ RSpec.describe API::GroupLabels do
put api("/groups/#{group.id}/labels", user), params: { name: group_label1.name }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq('new_name, color, description are missing, '\
+ expect(json_response['error']).to eq('new_name, color, description, remove_on_close are missing, '\
'at least one parameter must be provided')
end
end
@@ -337,7 +337,7 @@ RSpec.describe API::GroupLabels do
put api("/groups/#{group.id}/labels/#{valid_group_label_title_1_esc}", user)
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq('new_name, color, description are missing, '\
+ expect(json_response['error']).to eq('new_name, color, description, remove_on_close are missing, '\
'at least one parameter must be provided')
end
end
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 8160a94aef2..ce0018d6d0d 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe API::Helpers do
end
def error!(message, status, header)
- raise StandardError.new("#{status} - #{message}")
+ raise StandardError, "#{status} - #{message}"
end
def set_param(key, value)
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 47d0c872eb6..7a2cec974b9 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -67,26 +67,26 @@ RSpec.describe API::Internal::Kubernetes do
context 'is authenticated for an agent' do
let!(:agent_token) { create(:cluster_agent_token) }
- it 'returns no_content for valid gitops_sync_count' do
- send_request(params: { gitops_sync_count: 10 })
+ it 'returns no_content for valid events' do
+ send_request(params: { gitops_sync_count: 10, k8s_api_proxy_request_count: 5 })
expect(response).to have_gitlab_http_status(:no_content)
end
- it 'returns no_content 0 gitops_sync_count' do
- send_request(params: { gitops_sync_count: 0 })
+ it 'returns no_content for counts of zero' do
+ send_request(params: { gitops_sync_count: 0, k8s_api_proxy_request_count: 0 })
expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns 400 for non number' do
- send_request(params: { gitops_sync_count: 'string' })
+ send_request(params: { gitops_sync_count: 'string', k8s_api_proxy_request_count: 1 })
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'returns 400 for negative number' do
- send_request(params: { gitops_sync_count: '-1' })
+ send_request(params: { gitops_sync_count: -1, k8s_api_proxy_request_count: 1 })
expect(response).to have_gitlab_http_status(:bad_request)
end
diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb
index da0bae8d5e7..07fa1d40f7b 100644
--- a/spec/requests/api/issues/get_project_issues_spec.rb
+++ b/spec/requests/api/issues/get_project_issues_spec.rb
@@ -186,7 +186,7 @@ RSpec.describe API::Issues do
it 'avoids N+1 queries' do
get api("/projects/#{project.id}/issues", user)
- create_list(:issue, 3, project: project, closed_by: user)
+ issues = create_list(:issue, 3, project: project, closed_by: user)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/issues", user)
@@ -195,6 +195,9 @@ RSpec.describe API::Issues do
milestone = create(:milestone, project: project)
create(:issue, project: project, milestone: milestone, closed_by: create(:user))
+ create(:note_on_issue, project: project, noteable: issues[0])
+ create(:note_on_issue, project: project, noteable: issues[1])
+
expect do
get api("/projects/#{project.id}/issues", user)
end.not_to exceed_all_query_limit(control_count)
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index 8f10de59526..125db58ed69 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -115,6 +115,7 @@ RSpec.describe API::Issues do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.dig('author', 'id')).to eq(issue.author.id)
expect(json_response['description']).to eq(issue.description)
+ expect(json_response['issue_type']).to eq('issue')
end
end
@@ -378,6 +379,14 @@ RSpec.describe API::Issues do
expect_paginated_array_response([issue.id, closed_issue.id])
end
+ it 'returns issues with a given issue_type' do
+ issue2 = create(:incident, project: project)
+
+ get api('/issues', user), params: { issue_type: 'incident' }
+
+ expect_paginated_array_response(issue2.id)
+ end
+
it 'returns issues matching given search string for title' do
get api('/issues', user), params: { search: issue.title }
@@ -939,7 +948,17 @@ RSpec.describe API::Issues do
end
end
- describe 'PUT /projects/:id/issues/:issue_id' do
+ describe "POST /projects/:id/issues" do
+ it 'creates a new project issue' do
+ post api("/projects/#{project.id}/issues", user), params: { title: 'new issue' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['title']).to eq('new issue')
+ expect(json_response['issue_type']).to eq('issue')
+ end
+ end
+
+ describe 'PUT /projects/:id/issues/:issue_iid' do
it_behaves_like 'issuable update endpoint' do
let(:entity) { issue }
end
@@ -971,6 +990,14 @@ RSpec.describe API::Issues do
expect(ResourceLabelEvent.last.created_at).to be_like_time(fixed_time)
end
end
+
+ describe 'issue_type param' do
+ it 'allows issue type to be converted' do
+ put api("/projects/#{project.id}/issues/#{issue.iid}", user), params: { issue_type: 'incident' }
+
+ expect(issue.reload.incident?).to be(true)
+ end
+ end
end
describe 'DELETE /projects/:id/issues/:issue_iid' do
diff --git a/spec/requests/api/issues/put_projects_issues_spec.rb b/spec/requests/api/issues/put_projects_issues_spec.rb
index dac721cbea0..38c080059c4 100644
--- a/spec/requests/api/issues/put_projects_issues_spec.rb
+++ b/spec/requests/api/issues/put_projects_issues_spec.rb
@@ -402,6 +402,17 @@ RSpec.describe API::Issues do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq 'opened'
end
+
+ it 'removes labels marked to be removed on issue closed' do
+ removable_label = create(:label, project: project, remove_on_close: true)
+ create(:label_link, target: issue, label: removable_label)
+
+ put api_for_user, params: { state_event: 'close' }
+
+ expect(issue.reload.label_ids).not_to include(removable_label.id)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['state']).to eq 'closed'
+ end
end
describe 'PUT /projects/:id/issues/:issue_iid to update updated_at param' do
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index 26377c40b73..f2ceedf6dbd 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe API::Labels do
put_labels_api(route_type, user, spec_params)
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq('new_name, color, description, priority are missing, '\
+ expect(json_response['error']).to eq('new_name, color, description, priority, remove_on_close are missing, '\
'at least one parameter must be provided')
end
@@ -112,6 +112,14 @@ RSpec.describe API::Labels do
expect(json_response['id']).to eq(expected_response_label_id)
expect(json_response['priority']).to eq(10)
end
+
+ it "returns 200 if remove_on_close is changed (#{route_type} route)" do
+ put_labels_api(route_type, user, spec_params, remove_on_close: true)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['id']).to eq(expected_response_label_id)
+ expect(json_response['remove_on_close']).to eq(true)
+ end
end
it 'returns 200 if a priority is removed (deprecated route)' do
@@ -301,7 +309,8 @@ RSpec.describe API::Labels do
name: valid_label_title_2,
color: '#FFAABB',
description: 'test',
- priority: 2
+ priority: 2,
+ remove_on_close: true
}
expect(response).to have_gitlab_http_status(:created)
@@ -309,6 +318,7 @@ RSpec.describe API::Labels do
expect(json_response['color']).to eq('#FFAABB')
expect(json_response['description']).to eq('test')
expect(json_response['priority']).to eq(2)
+ expect(json_response['remove_on_close']).to eq(true)
end
it 'returns created label when only required params' do
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 3a015e98fb1..4fc5fcf8282 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe API::MavenPackages do
shared_examples 'rejecting the request for non existing maven path' do |expected_status: :not_found|
before do
- if Feature.enabled?(:check_maven_path_first)
+ if Feature.enabled?(:check_maven_path_first, default_enabled: :yaml)
expect(::Packages::Maven::PackageFinder).not_to receive(:new)
end
end
@@ -299,22 +299,6 @@ RSpec.describe API::MavenPackages do
end
end
- context 'with maven_packages_group_level_improvements enabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: true)
- end
-
- it_behaves_like 'handling all conditions'
- end
-
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- end
-
- it_behaves_like 'handling all conditions'
- end
-
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
@@ -346,22 +330,6 @@ RSpec.describe API::MavenPackages do
it_behaves_like 'processing HEAD requests', instance_level: true
- context 'with maven_packages_group_level_improvements enabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: true)
- end
-
- it_behaves_like 'processing HEAD requests', instance_level: true
- end
-
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- end
-
- it_behaves_like 'processing HEAD requests', instance_level: true
- end
-
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
@@ -468,8 +436,7 @@ RSpec.describe API::MavenPackages do
subject
- status = Feature.enabled?(:maven_packages_group_level_improvements, default_enabled: :yaml) ? :not_found : :forbidden
- expect(response).to have_gitlab_http_status(status)
+ expect(response).to have_gitlab_http_status(:not_found)
end
it 'denies download when no private token' do
@@ -594,22 +561,6 @@ RSpec.describe API::MavenPackages do
end
end
- context 'with maven_packages_group_level_improvements enabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: true)
- end
-
- it_behaves_like 'handling all conditions'
- end
-
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- end
-
- it_behaves_like 'handling all conditions'
- end
-
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
@@ -639,22 +590,6 @@ RSpec.describe API::MavenPackages do
let(:path) { package.maven_metadatum.path }
let(:url) { "/groups/#{group.id}/-/packages/maven/#{path}/#{package_file.file_name}" }
- context 'with maven_packages_group_level_improvements enabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: true)
- end
-
- it_behaves_like 'processing HEAD requests'
- end
-
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- end
-
- it_behaves_like 'processing HEAD requests'
- end
-
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
@@ -743,22 +678,6 @@ RSpec.describe API::MavenPackages do
end
end
- context 'with maven_packages_group_level_improvements enabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: true)
- end
-
- it_behaves_like 'handling all conditions'
- end
-
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- end
-
- it_behaves_like 'handling all conditions'
- end
-
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
@@ -789,22 +708,6 @@ RSpec.describe API::MavenPackages do
let(:path) { package.maven_metadatum.path }
let(:url) { "/projects/#{project.id}/packages/maven/#{path}/#{package_file.file_name}" }
- context 'with maven_packages_group_level_improvements enabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: true)
- end
-
- it_behaves_like 'processing HEAD requests'
- end
-
- context 'with maven_packages_group_level_improvements disabled' do
- before do
- stub_feature_flags(maven_packages_group_level_improvements: false)
- end
-
- it_behaves_like 'processing HEAD requests'
- end
-
context 'with check_maven_path_first enabled' do
before do
stub_feature_flags(check_maven_path_first: true)
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 37cb8fb7ee5..a13db1bb414 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe API::MergeRequests do
end
context 'when authenticated' do
- it 'avoids N+1 queries' do
+ it 'avoids N+1 queries', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/330335' do
control = ActiveRecord::QueryRecorder.new do
get api(endpoint_path, user)
end
@@ -142,7 +142,7 @@ RSpec.describe API::MergeRequests do
expect(json_response.last['labels'].first).to match_schema('/public_api/v4/label_basic')
end
- it 'avoids N+1 queries' do
+ it 'avoids N+1 queries', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/330335' do
path = endpoint_path + "?with_labels_details=true"
control = ActiveRecord::QueryRecorder.new do
@@ -973,6 +973,14 @@ RSpec.describe API::MergeRequests do
it_behaves_like 'merge requests list'
+ context 'when :api_caching_merge_requests is disabled' do
+ before do
+ stub_feature_flags(api_caching_merge_requests: false)
+ end
+
+ it_behaves_like 'merge requests list'
+ end
+
it "returns 404 for non public projects" do
project = create(:project, :private)
@@ -1049,7 +1057,7 @@ RSpec.describe API::MergeRequests do
include_context 'with merge requests'
- it 'avoids N+1 queries' do
+ it 'avoids N+1 queries', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/330335' do
control = ActiveRecord::QueryRecorder.new do
get api("/projects/#{project.id}/merge_requests", user)
end.count
@@ -2146,7 +2154,7 @@ RSpec.describe API::MergeRequests do
end
end
- describe 'PUT /projects/:id/merge_reuests/:merge_request_iid' do
+ describe 'PUT /projects/:id/merge_requests/:merge_request_iid' do
it_behaves_like 'issuable update endpoint' do
let(:entity) { merge_request }
end
@@ -2168,6 +2176,68 @@ RSpec.describe API::MergeRequests do
end
end
+ context 'when assignee_id=user2.id' do
+ let(:params) do
+ {
+ assignee_id: user2.id
+ }
+ end
+
+ it 'sets the assignees' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['assignees']).to contain_exactly(
+ a_hash_including('name' => user2.name)
+ )
+ end
+ end
+
+ context 'when only assignee_ids are provided, and the list is empty' do
+ let(:params) do
+ {
+ assignee_ids: []
+ }
+ end
+
+ it 'clears the assignees' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['assignees']).to be_empty
+ end
+ end
+
+ context 'when only assignee_ids are provided, and the list contains the sentinel value' do
+ let(:params) do
+ {
+ assignee_ids: [0]
+ }
+ end
+
+ it 'clears the assignees' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['assignees']).to be_empty
+ end
+ end
+
+ context 'when only assignee_id=0' do
+ let(:params) do
+ {
+ assignee_id: 0
+ }
+ end
+
+ it 'clears the assignees' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['assignees']).to be_empty
+ end
+ end
+
context 'accepts reviewer_ids' do
let(:params) do
{
diff --git a/spec/requests/api/package_files_spec.rb b/spec/requests/api/package_files_spec.rb
index 11170066d6e..137ded050c5 100644
--- a/spec/requests/api/package_files_spec.rb
+++ b/spec/requests/api/package_files_spec.rb
@@ -7,13 +7,13 @@ RSpec.describe API::PackageFiles do
let(:project) { create(:project, :public) }
let(:package) { create(:maven_package, project: project) }
- before do
- project.add_developer(user)
- end
-
describe 'GET /projects/:id/packages/:package_id/package_files' do
let(:url) { "/projects/#{project.id}/packages/#{package.id}/package_files" }
+ before do
+ project.add_developer(user)
+ end
+
context 'without the need for a license' do
context 'project is public' do
it 'returns 200' do
@@ -78,4 +78,77 @@ RSpec.describe API::PackageFiles do
end
end
end
+
+ describe 'DELETE /projects/:id/packages/:package_id/package_files/:package_file_id' do
+ let(:package_file_id) { package.package_files.first.id }
+ let(:url) { "/projects/#{project.id}/packages/#{package.id}/package_files/#{package_file_id}" }
+
+ subject(:api_request) { delete api(url, user) }
+
+ context 'project is public' do
+ context 'without user' do
+ let(:user) { nil }
+
+ it 'returns 403 for non authenticated user', :aggregate_failures do
+ expect { api_request }.not_to change { package.package_files.count }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ it 'returns 403 for a user without access to the project', :aggregate_failures do
+ expect { api_request }.not_to change { package.package_files.count }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'project is private' do
+ let_it_be_with_refind(:project) { create(:project, :private) }
+
+ it 'returns 404 for a user without access to the project', :aggregate_failures do
+ expect { api_request }.not_to change { package.package_files.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 403 for a user without enough permissions', :aggregate_failures do
+ project.add_developer(user)
+
+ expect { api_request }.not_to change { package.package_files.count }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns 204', :aggregate_failures do
+ project.add_maintainer(user)
+
+ expect { api_request }.to change { package.package_files.count }.by(-1)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ context 'without user' do
+ let(:user) { nil }
+
+ it 'returns 404 for non authenticated user', :aggregate_failures do
+ expect { api_request }.not_to change { package.package_files.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'invalid file' do
+ let(:url) { "/projects/#{project.id}/packages/#{package.id}/package_files/999999" }
+
+ it 'returns 404 when the package file does not exist', :aggregate_failures do
+ project.add_maintainer(user)
+
+ expect { api_request }.not_to change { package.package_files.count }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index f9eb9de94db..d28442bd692 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -41,6 +41,7 @@ itself: # project
- reset_approvals_on_push
- runners_token_encrypted
- storage_version
+ - topic_list
- updated_at
remapped_attributes:
avatar: avatar_url
@@ -67,6 +68,7 @@ itself: # project
- readme_url
- shared_with_groups
- ssh_url_to_repo
+ - tag_list
- web_url
build_auto_devops: # auto_devops
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 15871426ec5..f3da99573fe 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -6,12 +6,14 @@ RSpec.describe API::ProjectContainerRepositories do
include ExclusiveLeaseHelpers
let_it_be(:project) { create(:project, :private) }
+ let_it_be(:project2) { create(:project, :public) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
let(:root_repository) { create(:container_repository, :root, project: project) }
let(:test_repository) { create(:container_repository, project: project) }
+ let(:root_repository2) { create(:container_repository, :root, project: project2) }
let(:users) do
{
@@ -24,315 +26,408 @@ RSpec.describe API::ProjectContainerRepositories do
end
let(:api_user) { maintainer }
+ let(:job) { create(:ci_build, :running, user: api_user, project: project) }
+ let(:job2) { create(:ci_build, :running, user: api_user, project: project2) }
- before do
+ let(:method) { :get }
+ let(:params) { {} }
+
+ before_all do
project.add_maintainer(maintainer)
project.add_developer(developer)
project.add_reporter(reporter)
project.add_guest(guest)
- stub_container_registry_config(enabled: true)
+ project2.add_maintainer(maintainer)
+ project2.add_developer(developer)
+ project2.add_reporter(reporter)
+ project2.add_guest(guest)
+ end
+ before do
root_repository
test_repository
- end
- describe 'GET /projects/:id/registry/repositories' do
- let(:url) { "/projects/#{project.id}/registry/repositories" }
-
- subject { get api(url, api_user) }
+ stub_container_registry_config(enabled: true)
+ end
- it_behaves_like 'rejected container repository access', :guest, :forbidden
- it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
+ shared_context 'using API user' do
+ subject { public_send(method, api(url, api_user), params: params) }
+ end
- it_behaves_like 'returns repositories for allowed users', :reporter, 'project' do
- let(:object) { project }
+ shared_context 'using job token' do
+ before do
+ stub_exclusive_lease
+ stub_feature_flags(ci_job_token_scope: true)
end
+
+ subject { public_send(method, api(url), params: params.merge({ job_token: job.token })) }
end
- describe 'DELETE /projects/:id/registry/repositories/:repository_id' do
- subject { delete api("/projects/#{project.id}/registry/repositories/#{root_repository.id}", api_user) }
+ shared_context 'using job token from another project' do
+ before do
+ stub_exclusive_lease
+ stub_feature_flags(ci_job_token_scope: true)
+ end
- it_behaves_like 'rejected container repository access', :developer, :forbidden
- it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a package tracking event', described_class.name, 'delete_repository'
+ subject { public_send(method, api(url), params: { job_token: job2.token }) }
+ end
- context 'for maintainer' do
- let(:api_user) { maintainer }
+ shared_context 'using job token while ci_job_token_scope feature flag is disabled' do
+ before do
+ stub_exclusive_lease
+ stub_feature_flags(ci_job_token_scope: false)
+ end
- it 'schedules removal of repository' do
- expect(DeleteContainerRepositoryWorker).to receive(:perform_async)
- .with(maintainer.id, root_repository.id)
+ subject { public_send(method, api(url), params: params.merge({ job_token: job.token })) }
+ end
- subject
+ shared_examples 'rejected job token scopes' do
+ include_context 'using job token from another project' do
+ it_behaves_like 'rejected container repository access', :maintainer, :forbidden
+ end
- expect(response).to have_gitlab_http_status(:accepted)
- end
+ include_context 'using job token while ci_job_token_scope feature flag is disabled' do
+ it_behaves_like 'rejected container repository access', :maintainer, :forbidden
end
end
- describe 'GET /projects/:id/registry/repositories/:repository_id/tags' do
- subject { get api("/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags", api_user) }
-
- it_behaves_like 'rejected container repository access', :guest, :forbidden
- it_behaves_like 'rejected container repository access', :anonymous, :not_found
-
- context 'for reporter' do
- let(:api_user) { reporter }
-
- before do
- stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA latest))
- end
-
- it_behaves_like 'a package tracking event', described_class.name, 'list_tags'
-
- it 'returns a list of tags' do
- subject
+ describe 'GET /projects/:id/registry/repositories' do
+ let(:url) { "/projects/#{project.id}/registry/repositories" }
- expect(json_response.length).to eq(2)
- expect(json_response.map { |repository| repository['name'] }).to eq %w(latest rootA)
- end
+ ['using API user', 'using job token'].each do |context|
+ context context do
+ include_context context
- it 'returns a matching schema' do
- subject
+ it_behaves_like 'rejected container repository access', :guest, :forbidden unless context == 'using job token'
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('registry/tags')
+ it_behaves_like 'returns repositories for allowed users', :reporter, 'project' do
+ let(:object) { project }
+ end
end
end
+
+ include_examples 'rejected job token scopes'
end
- describe 'DELETE /projects/:id/registry/repositories/:repository_id/tags' do
- subject { delete api("/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags", api_user), params: params }
+ describe 'DELETE /projects/:id/registry/repositories/:repository_id' do
+ let(:method) { :delete }
+ let(:url) { "/projects/#{project.id}/registry/repositories/#{root_repository.id}" }
- context 'disallowed' do
- let(:params) do
- { name_regex_delete: 'v10.*' }
- end
+ ['using API user', 'using job token'].each do |context|
+ context context do
+ include_context context
- it_behaves_like 'rejected container repository access', :developer, :forbidden
- it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a package tracking event', described_class.name, 'delete_tag_bulk'
- end
+ it_behaves_like 'rejected container repository access', :developer, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'a package tracking event', described_class.name, 'delete_repository'
- context 'for maintainer' do
- let(:api_user) { maintainer }
+ context 'for maintainer' do
+ let(:api_user) { maintainer }
- context 'without required parameters' do
- let(:params) { }
+ it 'schedules removal of repository' do
+ expect(DeleteContainerRepositoryWorker).to receive(:perform_async)
+ .with(maintainer.id, root_repository.id)
- it 'returns bad request' do
- subject
+ subject
- expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
end
end
+ end
- context 'without name_regex' do
- let(:params) do
- { keep_n: 100,
- older_than: '1 day',
- other: 'some value' }
- end
-
- it 'returns bad request' do
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
+ include_examples 'rejected job token scopes'
+ end
- context 'passes all declared parameters' do
- let(:params) do
- { name_regex_delete: 'v10.*',
- name_regex_keep: 'v10.1.*',
- keep_n: 100,
- older_than: '1 day',
- other: 'some value' }
- end
+ describe 'GET /projects/:id/registry/repositories/:repository_id/tags' do
+ let(:url) { "/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags" }
- let(:worker_params) do
- { name_regex: nil,
- name_regex_delete: 'v10.*',
- name_regex_keep: 'v10.1.*',
- keep_n: 100,
- older_than: '1 day',
- container_expiration_policy: false }
- end
+ ['using API user', 'using job token'].each do |context|
+ context context do
+ include_context context
- let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
+ it_behaves_like 'rejected container repository access', :guest, :forbidden unless context == 'using job token'
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it 'schedules cleanup of tags repository' do
- stub_last_activity_update
- stub_exclusive_lease(lease_key, timeout: 1.hour)
- expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
- .with(maintainer.id, root_repository.id, worker_params)
+ context 'for reporter' do
+ let(:api_user) { reporter }
- subject
+ before do
+ stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA latest))
+ end
- expect(response).to have_gitlab_http_status(:accepted)
- end
+ it_behaves_like 'a package tracking event', described_class.name, 'list_tags'
- context 'called multiple times in one hour', :clean_gitlab_redis_shared_state do
- it 'returns 400 with an error message' do
- stub_exclusive_lease_taken(lease_key, timeout: 1.hour)
+ it 'returns a list of tags' do
subject
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(response.body).to include('This request has already been made.')
+ expect(json_response.length).to eq(2)
+ expect(json_response.map { |repository| repository['name'] }).to eq %w(latest rootA)
end
- it 'executes service only for the first time' do
- expect(CleanupContainerRepositoryWorker).to receive(:perform_async).once
+ it 'returns a matching schema' do
+ subject
- 2.times { subject }
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('registry/tags')
end
end
end
+ end
- context 'with deprecated name_regex param' do
- let(:params) do
- { name_regex: 'v10.*',
- name_regex_keep: 'v10.1.*',
- keep_n: 100,
- older_than: '1 day',
- other: 'some value' }
- end
-
- let(:worker_params) do
- { name_regex: 'v10.*',
- name_regex_delete: nil,
- name_regex_keep: 'v10.1.*',
- keep_n: 100,
- older_than: '1 day',
- container_expiration_policy: false }
- end
+ include_examples 'rejected job token scopes'
+ end
- let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
+ describe 'DELETE /projects/:id/registry/repositories/:repository_id/tags' do
+ let(:method) { :delete }
+ let(:url) { "/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags" }
- it 'schedules cleanup of tags repository' do
- stub_last_activity_update
- stub_exclusive_lease(lease_key, timeout: 1.hour)
- expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
- .with(maintainer.id, root_repository.id, worker_params)
+ ['using API user', 'using job token'].each do |context|
+ context context do
+ include_context context
- subject
+ context 'disallowed' do
+ let(:params) do
+ { name_regex_delete: 'v10.*' }
+ end
- expect(response).to have_gitlab_http_status(:accepted)
+ it_behaves_like 'rejected container repository access', :developer, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'a package tracking event', described_class.name, 'delete_tag_bulk'
end
- end
- context 'with invalid regex' do
- let(:invalid_regex) { '*v10.' }
- let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
+ context 'for maintainer' do
+ let(:api_user) { maintainer }
- RSpec.shared_examples 'rejecting the invalid regex' do |param_name|
- it 'does not enqueue a job' do
- expect(CleanupContainerRepositoryWorker).not_to receive(:perform_async)
+ context 'without required parameters' do
+ it 'returns bad request' do
+ subject
- subject
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
- it_behaves_like 'returning response status', :bad_request
+ context 'without name_regex' do
+ let(:params) do
+ { keep_n: 100,
+ older_than: '1 day',
+ other: 'some value' }
+ end
- it 'returns an error message' do
- subject
+ it 'returns bad request' do
+ subject
- expect(json_response['error']).to include("#{param_name} is an invalid regexp")
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
- end
- before do
- stub_last_activity_update
- stub_exclusive_lease(lease_key, timeout: 1.hour)
- end
+ context 'passes all declared parameters' do
+ let(:params) do
+ { name_regex_delete: 'v10.*',
+ name_regex_keep: 'v10.1.*',
+ keep_n: 100,
+ older_than: '1 day',
+ other: 'some value' }
+ end
+
+ let(:worker_params) do
+ { name_regex: nil,
+ name_regex_delete: 'v10.*',
+ name_regex_keep: 'v10.1.*',
+ keep_n: 100,
+ older_than: '1 day',
+ container_expiration_policy: false }
+ end
+
+ let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
+
+ it 'schedules cleanup of tags repository' do
+ stub_last_activity_update
+ expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
+ .with(maintainer.id, root_repository.id, worker_params)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+
+ context 'called multiple times in one hour', :clean_gitlab_redis_shared_state do
+ it 'returns 400 with an error message' do
+ stub_exclusive_lease_taken(lease_key, timeout: 1.hour)
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include('This request has already been made.')
+ end
+
+ it 'executes service only for the first time' do
+ expect(CleanupContainerRepositoryWorker).to receive(:perform_async).once
+
+ 2.times { subject }
+ end
+ end
+ end
+
+ context 'with deprecated name_regex param' do
+ let(:params) do
+ { name_regex: 'v10.*',
+ name_regex_keep: 'v10.1.*',
+ keep_n: 100,
+ older_than: '1 day',
+ other: 'some value' }
+ end
+
+ let(:worker_params) do
+ { name_regex: 'v10.*',
+ name_regex_delete: nil,
+ name_regex_keep: 'v10.1.*',
+ keep_n: 100,
+ older_than: '1 day',
+ container_expiration_policy: false }
+ end
+
+ it 'schedules cleanup of tags repository' do
+ stub_last_activity_update
+ expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
+ .with(maintainer.id, root_repository.id, worker_params)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
+
+ context 'with invalid regex' do
+ let(:invalid_regex) { '*v10.' }
+
+ RSpec.shared_examples 'rejecting the invalid regex' do |param_name|
+ it 'does not enqueue a job' do
+ expect(CleanupContainerRepositoryWorker).not_to receive(:perform_async)
+
+ subject
+ end
- %i[name_regex_delete name_regex name_regex_keep].each do |param_name|
- context "for #{param_name}" do
- let(:params) { { param_name => invalid_regex } }
+ it_behaves_like 'returning response status', :bad_request
- it_behaves_like 'rejecting the invalid regex', param_name
+ it 'returns an error message' do
+ subject
+
+ expect(json_response['error']).to include("#{param_name} is an invalid regexp")
+ end
+ end
+
+ before do
+ stub_last_activity_update
+ end
+
+ %i[name_regex_delete name_regex name_regex_keep].each do |param_name|
+ context "for #{param_name}" do
+ let(:params) { { param_name => invalid_regex } }
+
+ it_behaves_like 'rejecting the invalid regex', param_name
+ end
+ end
end
end
end
end
+
+ include_examples 'rejected job token scopes'
end
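
The cleanup parameters exercised above map directly onto the bulk tag delete endpoint. As an illustrative sketch only (host, ids, and token are placeholders, not values from the spec), a client call could look like:

require 'net/http'
require 'uri'

# DELETE /projects/:id/registry/repositories/:repository_id/tags with the
# cleanup parameters used in the examples above. All concrete values here
# are placeholders.
uri = URI('https://gitlab.example.com/api/v4/projects/1/registry/repositories/2/tags')
uri.query = URI.encode_www_form(
  name_regex_delete: 'v10.*',
  name_regex_keep: 'v10.1.*',
  keep_n: 100,
  older_than: '1 day'
)

request = Net::HTTP::Delete.new(uri)
request['PRIVATE-TOKEN'] = 'YOUR_TOKEN'

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code # 202 when cleanup is scheduled, 400 when rate limited or parameters are invalid
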
describe 'GET /projects/:id/registry/repositories/:repository_id/tags/:tag_name' do
- subject { get api("/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags/rootA", api_user) }
+ let(:url) { "/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags/rootA" }
- it_behaves_like 'rejected container repository access', :guest, :forbidden
- it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ ['using API user', 'using job token'].each do |context|
+ context context do
+ include_context context
- context 'for reporter' do
- let(:api_user) { reporter }
+ it_behaves_like 'rejected container repository access', :guest, :forbidden unless context == 'using job token'
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
- before do
- stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA), with_manifest: true)
- end
+ context 'for reporter' do
+ let(:api_user) { reporter }
- it 'returns the details of a tag' do
- subject
+ before do
+ stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA), with_manifest: true)
+ end
- expect(json_response).to include(
- 'name' => 'rootA',
- 'digest' => 'sha256:4c8e63ca4cb663ce6c688cb06f1c372b088dac5b6d7ad7d49cd620d85cf72a15',
- 'revision' => 'd7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac',
- 'total_size' => 2319870)
- end
+ it 'returns the details of a tag' do
+ subject
+
+ expect(json_response).to include(
+ 'name' => 'rootA',
+ 'digest' => 'sha256:4c8e63ca4cb663ce6c688cb06f1c372b088dac5b6d7ad7d49cd620d85cf72a15',
+ 'revision' => 'd7a513a663c1a6dcdba9ed832ca53c02ac2af0c333322cd6ca92936d1d9917ac',
+ 'total_size' => 2319870)
+ end
- it 'returns a matching schema' do
- subject
+ it 'returns a matching schema' do
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('registry/tag')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('registry/tag')
+ end
+ end
end
end
+
+ include_examples 'rejected job token scopes'
end
describe 'DELETE /projects/:id/registry/repositories/:repository_id/tags/:tag_name' do
+ let(:method) { :delete }
+ let(:url) { "/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags/rootA" }
let(:service) { double('service') }
- subject { delete api("/projects/#{project.id}/registry/repositories/#{root_repository.id}/tags/rootA", api_user) }
+ ['using API user', 'using job token'].each do |context|
+ context context do
+ include_context context
- it_behaves_like 'rejected container repository access', :reporter, :forbidden
- it_behaves_like 'rejected container repository access', :anonymous, :not_found
+ it_behaves_like 'rejected container repository access', :reporter, :forbidden
+ it_behaves_like 'rejected container repository access', :anonymous, :not_found
- context 'for developer', :snowplow do
- let(:api_user) { developer }
+ context 'for developer', :snowplow do
+ let(:api_user) { developer }
- context 'when there are multiple tags' do
- before do
- stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA rootB), with_manifest: true)
- end
+ context 'when there are multiple tags' do
+ before do
+ stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA rootB), with_manifest: true)
+ end
- it 'properly removes tag' do
- expect(service).to receive(:execute).with(root_repository) { { status: :success } }
- expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(root_repository.project, api_user, tags: %w[rootA]) { service }
+ it 'properly removes tag' do
+ expect(service).to receive(:execute).with(root_repository) { { status: :success } }
+ expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(root_repository.project, api_user, tags: %w[rootA]) { service }
- subject
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect_snowplow_event(category: described_class.name, action: 'delete_tag')
- end
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect_snowplow_event(category: described_class.name, action: 'delete_tag')
+ end
+ end
- context 'when there\'s only one tag' do
- before do
- stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA), with_manifest: true)
- end
+ context 'when there\'s only one tag' do
+ before do
+ stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA), with_manifest: true)
+ end
- it 'properly removes tag' do
- expect(service).to receive(:execute).with(root_repository) { { status: :success } }
- expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(root_repository.project, api_user, tags: %w[rootA]) { service }
+ it 'properly removes tag' do
+ expect(service).to receive(:execute).with(root_repository) { { status: :success } }
+ expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(root_repository.project, api_user, tags: %w[rootA]) { service }
- subject
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect_snowplow_event(category: described_class.name, action: 'delete_tag')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect_snowplow_event(category: described_class.name, action: 'delete_tag')
+ end
+ end
end
end
end
+
+ include_examples 'rejected job token scopes'
end
end
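
The job token contexts above all share one shape: the same examples are included once per authentication style. A minimal, self-contained sketch of that pattern (the context names and the fake request helper below are illustrative, not GitLab spec helpers):

require 'rspec/autorun'

RSpec.describe 'registry endpoint authentication (pattern sketch)' do
  shared_context 'using API user' do
    let(:auth_params) { { private_token: 'USER-TOKEN' } } # placeholder token
  end

  shared_context 'using job token' do
    let(:auth_params) { { job_token: 'JOB-TOKEN' } } # placeholder token
  end

  # Stand-in for the HTTP call performed by the real spec's `subject`.
  def request_status(params)
    params.key?(:private_token) || params.key?(:job_token) ? :ok : :unauthorized
  end

  ['using API user', 'using job token'].each do |context_name|
    context context_name do
      include_context context_name

      it 'authenticates the request' do
        expect(request_status(auth_params)).to eq(:ok)
      end
    end
  end
end
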
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index f6cdf370e5c..d3b24eb3832 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::ProjectImport do
include WorkhorseHelpers
+ include AfterNextHelpers
include_context 'workhorse headers'
@@ -31,6 +32,12 @@ RSpec.describe API::ProjectImport do
allow(ImportExportUploader).to receive(:workhorse_upload_path).and_return('/')
end
+ it 'executes a limited number of queries' do
+ control_count = ActiveRecord::QueryRecorder.new { subject }.count
+
+ expect(control_count).to be <= 100
+ end
+
it 'schedules an import using a namespace' do
stub_import(namespace)
params[:namespace] = namespace.id
@@ -273,6 +280,75 @@ RSpec.describe API::ProjectImport do
end
end
+ describe 'POST /projects/remote-import' do
+ let(:params) do
+ {
+ path: 'test-import',
+ url: 'http://some.s3.url/file'
+ }
+ end
+
+ it 'returns NOT FOUND when the feature is disabled' do
+ stub_feature_flags(import_project_from_remote_file: false)
+
+ post api('/projects/remote-import', user), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ stub_feature_flags(import_project_from_remote_file: true)
+ end
+
+ context 'when the response is successful' do
+ it 'schedules the import successfully' do
+ project = create(
+ :project,
+ namespace: user.namespace,
+ name: 'test-import',
+ path: 'test-import'
+ )
+
+ service_response = ServiceResponse.success(payload: project)
+ expect_next(::Import::GitlabProjects::CreateProjectFromRemoteFileService)
+ .to receive(:execute)
+ .and_return(service_response)
+
+ post api('/projects/remote-import', user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include({
+ 'id' => project.id,
+ 'name' => 'test-import',
+ 'name_with_namespace' => "#{user.namespace.name} / test-import",
+ 'path' => 'test-import',
+ 'path_with_namespace' => "#{user.namespace.path}/test-import"
+ })
+ end
+ end
+
+ context 'when the service returns an error' do
+ it 'fails to schedule the import' do
+ service_response = ServiceResponse.error(
+ message: 'Failed to import',
+ http_status: :bad_request
+ )
+ expect_next(::Import::GitlabProjects::CreateProjectFromRemoteFileService)
+ .to receive(:execute)
+ .and_return(service_response)
+
+ post api('/projects/remote-import', user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq({
+ 'message' => 'Failed to import'
+ })
+ end
+ end
+ end
+ end
+
describe 'GET /projects/:id/import' do
it 'returns the import status' do
project = create(:project, :import_started)
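
The new POST /projects/remote-import endpoint tested above takes a project path and a remote file URL. An illustrative client call (host and token are placeholders; the endpoint is behind the import_project_from_remote_file feature flag):

require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/remote-import')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = 'YOUR_TOKEN'
request.set_form_data('path' => 'test-import', 'url' => 'http://some.s3.url/file')

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code # 201 when the import is scheduled, 400 when the service returns an error
puts JSON.parse(response.body)
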
diff --git a/spec/requests/api/project_packages_spec.rb b/spec/requests/api/project_packages_spec.rb
index 97414b3b18a..fb1aa65c08d 100644
--- a/spec/requests/api/project_packages_spec.rb
+++ b/spec/requests/api/project_packages_spec.rb
@@ -37,6 +37,16 @@ RSpec.describe API::ProjectPackages do
end
end
+ context 'with terraform module package' do
+ let_it_be(:terraform_module_package) { create(:terraform_module_package, project: project) }
+
+ it 'filters out terraform module packages when no package_type filter is set' do
+ subject
+
+ expect(json_response).not_to include(a_hash_including('package_type' => 'terraform_module'))
+ end
+ end
+
context 'project is private' do
let(:project) { create(:project, :private) }
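
The new example above asserts that terraform_module packages are hidden from the generic package listing unless a package_type filter is passed. An illustrative check against GET /projects/:id/packages (host, project id, and token are placeholders):

require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/1/packages')
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = 'YOUR_TOKEN'

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
packages = JSON.parse(response.body)

# Without an explicit package_type filter, no terraform_module entries are expected.
puts packages.none? { |pkg| pkg['package_type'] == 'terraform_module' }
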
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index a424bc62014..070fd6db3dc 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -53,15 +53,6 @@ RSpec.describe API::ProjectTemplates do
expect(json_response).to satisfy_one { |template| template['key'] == 'Android' }
end
- it 'returns gitlab_ci_syntax_ymls' do
- get api("/projects/#{public_project.id}/templates/gitlab_ci_syntax_ymls")
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(response).to match_response_schema('public_api/v4/template_list')
- expect(json_response).to satisfy_one { |template| template['key'] == 'Artifacts example' }
- end
-
it 'returns licenses' do
get api("/projects/#{public_project.id}/templates/licenses")
@@ -172,14 +163,6 @@ RSpec.describe API::ProjectTemplates do
expect(json_response['name']).to eq('Android')
end
- it 'returns a specific gitlab_ci_syntax_yml' do
- get api("/projects/#{public_project.id}/templates/gitlab_ci_syntax_ymls/Artifacts%20example")
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/template')
- expect(json_response['name']).to eq('Artifacts example')
- end
-
it 'returns a specific metrics_dashboard_yml' do
get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls/Default")
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index b0ecb711283..7f804186bc7 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -223,6 +223,52 @@ RSpec.describe API::Projects do
expect(json_response.find { |hash| hash['id'] == project.id }.keys).not_to include('open_issues_count')
end
+ context 'filter by topic (column tag_list)' do
+ before do
+ project.update!(tag_list: %w(ruby javascript))
+ end
+
+ it 'returns no projects' do
+ get api('/projects', user), params: { topic: 'foo' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_empty
+ end
+
+ it 'returns matching project for a single topic' do
+ get api('/projects', user), params: { topic: 'ruby' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to contain_exactly a_hash_including('id' => project.id)
+ end
+
+ it 'returns matching project for multiple topics' do
+ get api('/projects', user), params: { topic: 'ruby, javascript' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to contain_exactly a_hash_including('id' => project.id)
+ end
+
+ it 'returns no projects if the project matches only some of the topics' do
+ get api('/projects', user), params: { topic: 'ruby, foo' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_empty
+ end
+
+ it 'ignores topic if it is empty' do
+ get api('/projects', user), params: { topic: '' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_present
+ end
+ end
+
context 'and with_issues_enabled=true' do
it 'only returns projects with issues enabled' do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
@@ -302,22 +348,11 @@ RSpec.describe API::Projects do
context 'and with simple=true' do
it 'returns a simplified version of all the projects' do
- expected_keys = %w(
- id description default_branch tag_list
- ssh_url_to_repo http_url_to_repo web_url readme_url
- name name_with_namespace
- path path_with_namespace
- star_count forks_count
- created_at last_activity_at
- avatar_url namespace
- )
-
get api('/projects?simple=true', user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.first.keys).to match_array expected_keys
+ expect(response).to match_response_schema('public_api/v4/projects')
end
end
@@ -1300,6 +1335,7 @@ RSpec.describe API::Projects do
describe 'GET /users/:user_id/starred_projects/' do
before do
user3.update!(starred_projects: [project, project2, project3])
+ user3.reload
end
it 'returns error when user not found' do
@@ -1588,7 +1624,6 @@ RSpec.describe API::Projects do
end
it "does not leave the temporary file in place after uploading, even when the tempfile reaper does not run" do
- stub_env('GITLAB_TEMPFILE_IMMEDIATE_UNLINK', '1')
tempfile = Tempfile.new('foo')
path = tempfile.path
@@ -1648,7 +1683,7 @@ RSpec.describe API::Projects do
let_it_be(:root_group) { create(:group, :public, name: 'root group') }
let_it_be(:project_group) { create(:group, :public, parent: root_group, name: 'project group') }
let_it_be(:shared_group_with_dev_access) { create(:group, :private, parent: root_group, name: 'shared group') }
- let_it_be(:shared_group_with_reporter_access) { create(:group, :private) }
+ let_it_be(:shared_group_with_reporter_access) { create(:group, :public) }
let_it_be(:private_project) { create(:project, :private, group: project_group) }
let_it_be(:public_project) { create(:project, :public, group: project_group) }
@@ -1730,6 +1765,14 @@ RSpec.describe API::Projects do
end
end
+ context 'when shared_visible_only is on' do
+ let(:params) { super().merge(shared_visible_only: true) }
+
+ it_behaves_like 'successful groups response' do
+ let(:expected_groups) { [root_group, project_group, shared_group_with_reporter_access] }
+ end
+ end
+
context 'when search by shared group name' do
let(:params) { super().merge(search: 'shared') }
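
The topic filter examples above show that GET /projects accepts a comma-separated topic list and only returns projects matching every topic. An illustrative request (host and token are placeholders):

require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects')
uri.query = URI.encode_www_form(topic: 'ruby, javascript', simple: true)

request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = 'YOUR_TOKEN'

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
JSON.parse(response.body).each { |project| puts project['path_with_namespace'] }
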
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 70de2e5330b..81ddcd7cf84 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe API::Releases do
project.add_developer(developer)
end
- describe 'GET /projects/:id/releases' do
+ describe 'GET /projects/:id/releases', :use_clean_rails_redis_caching do
context 'when there are two releases' do
let!(:release_1) do
create(:release,
@@ -129,19 +129,60 @@ RSpec.describe API::Releases do
expect(json_response.first['upcoming_release']).to eq(false)
end
- it 'avoids N+1 queries' do
+ it 'avoids N+1 queries', :use_sql_query_cache do
create(:release, :with_evidence, project: project, tag: 'v0.1', author: maintainer)
+ create(:release_link, release: project.releases.first)
- control_count = ActiveRecord::QueryRecorder.new do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/releases", maintainer)
end.count
- create(:release, :with_evidence, project: project, tag: 'v0.1', author: maintainer)
- create(:release, :with_evidence, project: project, tag: 'v0.1', author: maintainer)
+ create_list(:release, 2, :with_evidence, project: project, tag: 'v0.1', author: maintainer)
+ create_list(:release, 2, project: project)
+ create_list(:release_link, 2, release: project.releases.first)
+ create_list(:release_link, 2, release: project.releases.last)
expect do
get api("/projects/#{project.id}/releases", maintainer)
- end.not_to exceed_query_limit(control_count)
+ end.not_to exceed_all_query_limit(control_count)
+ end
+
+ it 'serializes releases on the first request and reads cached data on subsequent requests' do
+ create_list(:release, 2, project: project)
+
+ expect(API::Entities::Release)
+ .to receive(:represent).with(instance_of(Release), any_args)
+ .twice
+
+ 5.times { get api("/projects/#{project.id}/releases", maintainer) }
+ end
+
+ it 'increments the cache key when link is updated' do
+ releases = create_list(:release, 2, project: project)
+
+ expect(API::Entities::Release)
+ .to receive(:represent).with(instance_of(Release), any_args)
+ .exactly(4).times
+
+ 2.times { get api("/projects/#{project.id}/releases", maintainer) }
+
+ releases.each { |release| create(:release_link, release: release) }
+
+ 3.times { get api("/projects/#{project.id}/releases", maintainer) }
+ end
+
+ it 'increments the cache key when evidence is updated' do
+ releases = create_list(:release, 2, project: project)
+
+ expect(API::Entities::Release)
+ .to receive(:represent).with(instance_of(Release), any_args)
+ .exactly(4).times
+
+ 2.times { get api("/projects/#{project.id}/releases", maintainer) }
+
+ releases.each { |release| create(:evidence, release: release) }
+
+ 3.times { get api("/projects/#{project.id}/releases", maintainer) }
end
context 'when tag does not exist in git repository' do
@@ -227,6 +268,20 @@ RSpec.describe API::Releases do
end
end
end
+
+ context 'when releases are public and request user is absent' do
+ let(:project) { create(:project, :repository, :public) }
+
+ it 'returns the releases' do
+ create(:release, project: project, tag: 'v0.1')
+
+ get api("/projects/#{project.id}/releases")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.count).to eq(1)
+ expect(json_response.first['tag_name']).to eq('v0.1')
+ end
+ end
end
describe 'GET /projects/:id/releases/:tag_name' do
@@ -1133,8 +1188,33 @@ RSpec.describe API::Releases do
end
end
+ describe 'Track API events', :snowplow do
+ context 'when tracking event with labels from User-Agent' do
+ it 'adds the tracked User-Agent to the label of the tracked event' do
+ get api("/projects/#{project.id}/releases", maintainer), headers: { 'User-Agent' => described_class::RELEASE_CLI_USER_AGENT }
+
+ assert_snowplow_event('get_releases', true)
+ end
+
+ it 'skips label when User-Agent is invalid' do
+ get api("/projects/#{project.id}/releases", maintainer), headers: { 'User-Agent' => 'invalid_user_agent' }
+ assert_snowplow_event('get_releases', false)
+ end
+ end
+ end
+
def initialize_tags
project.repository.add_tag(maintainer, 'v0.1', commit.id)
project.repository.add_tag(maintainer, 'v0.2', commit.id)
end
+
+ def assert_snowplow_event(action, release_cli, user = maintainer)
+ expect_snowplow_event(
+ category: described_class.name,
+ action: action,
+ project: project,
+ user: user,
+ release_cli: release_cli
+ )
+ end
end
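
The public-releases example above allows unauthenticated reads when the project itself is public. An illustrative anonymous request (host and project id are placeholders):

require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/1/releases')
response = Net::HTTP.get_response(uri)

puts response.code # 200 for a public project even without a token
JSON.parse(response.body).each { |release| puts release['tag_name'] }
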
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index 2157e69e7bf..1f859622760 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe API::Services do
end
end
- Service.available_services_names.each do |service|
+ Integration.available_services_names.each do |service|
describe "PUT /projects/:id/services/#{service.dasherize}" do
include_context service
@@ -51,7 +51,7 @@ RSpec.describe API::Services do
expect(response).to have_gitlab_http_status(:ok)
- current_service = project.services.first
+ current_service = project.integrations.first
events = current_service.event_names.empty? ? ["foo"].freeze : current_service.event_names
query_strings = []
events.each do |event|
@@ -66,7 +66,7 @@ RSpec.describe API::Services do
events.each do |event|
next if event == "foo"
- expect(project.services.first[event]).not_to eq(current_service[event]),
+ expect(project.integrations.first[event]).not_to eq(current_service[event]),
"expected #{!current_service[event]} for event #{event} for service #{current_service.title}, got #{current_service[event]}"
end
end
@@ -114,21 +114,61 @@ RSpec.describe API::Services do
describe "GET /projects/:id/services/#{service.dasherize}" do
include_context service
- # inject some properties into the service
- let!(:initialized_service) { initialize_service(service) }
+ let!(:initialized_service) { initialize_service(service, active: true) }
+
+ let_it_be(:project2) do
+ create(:project, creator_id: user.id, namespace: user.namespace)
+ end
+
+ def deactive_service!
+ return initialized_service.update!(active: false) unless initialized_service.is_a?(PrometheusService)
+
+ # PrometheusService sets `#active` itself within a `before_save`:
+ initialized_service.manual_configuration = false
+ initialized_service.save!
+ end
it 'returns authentication error when unauthenticated' do
get api("/projects/#{project.id}/services/#{dashed_service}")
expect(response).to have_gitlab_http_status(:unauthorized)
end
- it "returns all properties of service #{service}" do
+ it "returns all properties of active service #{service}" do
get api("/projects/#{project.id}/services/#{dashed_service}", user)
+ expect(initialized_service).to be_active
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties'].keys).to match_array(service_instance.api_field_names)
end
+ it "returns all properties of inactive service #{service}" do
+ deactive_service!
+
+ get api("/projects/#{project.id}/services/#{dashed_service}", user)
+
+ expect(initialized_service).not_to be_active
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties'].keys).to match_array(service_instance.api_field_names)
+ end
+
+ it "returns not found if service does not exist" do
+ get api("/projects/#{project2.id}/services/#{dashed_service}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Service Not Found')
+ end
+
+ it "returns not found if service exists but is in `Project#disabled_services`" do
+ expect_next_found_instance_of(Project) do |project|
+ expect(project).to receive(:disabled_services).at_least(:once).and_return([service])
+ end
+
+ get api("/projects/#{project.id}/services/#{dashed_service}", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Service Not Found')
+ end
+
it "returns error when authenticated but not a project owner" do
project.add_developer(user2)
get api("/projects/#{project.id}/services/#{dashed_service}", user2)
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 48f5bd114a1..66c0dcaa36c 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -41,10 +41,12 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['snippet_size_limit']).to eq(50.megabytes)
expect(json_response['spam_check_endpoint_enabled']).to be_falsey
expect(json_response['spam_check_endpoint_url']).to be_nil
+ expect(json_response['spam_check_api_key']).to be_nil
expect(json_response['wiki_page_max_content_bytes']).to be_a(Integer)
expect(json_response['require_admin_approval_after_user_signup']).to eq(true)
expect(json_response['personal_access_token_prefix']).to be_nil
expect(json_response['admin_mode']).to be(false)
+ expect(json_response['whats_new_variant']).to eq('all_tiers')
end
end
@@ -121,7 +123,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
issues_create_limit: 300,
raw_blob_request_limit: 300,
spam_check_endpoint_enabled: true,
- spam_check_endpoint_url: 'https://example.com/spam_check',
+ spam_check_endpoint_url: 'grpc://example.com/spam_check',
+ spam_check_api_key: 'SPAM_CHECK_API_KEY',
disabled_oauth_sign_in_sources: 'unknown',
import_sources: 'github,bitbucket',
wiki_page_max_content_bytes: 12345,
@@ -166,7 +169,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['issues_create_limit']).to eq(300)
expect(json_response['raw_blob_request_limit']).to eq(300)
expect(json_response['spam_check_endpoint_enabled']).to be_truthy
- expect(json_response['spam_check_endpoint_url']).to eq('https://example.com/spam_check')
+ expect(json_response['spam_check_endpoint_url']).to eq('grpc://example.com/spam_check')
+ expect(json_response['spam_check_api_key']).to eq('SPAM_CHECK_API_KEY')
expect(json_response['disabled_oauth_sign_in_sources']).to eq([])
expect(json_response['import_sources']).to match_array(%w(github bitbucket))
expect(json_response['wiki_page_max_content_bytes']).to eq(12345)
@@ -459,13 +463,32 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
context "missing spam_check_endpoint_url value when spam_check_endpoint_enabled is true" do
it "returns a blank parameter error message" do
- put api("/application/settings", admin), params: { spam_check_endpoint_enabled: true }
+ put api("/application/settings", admin), params: { spam_check_endpoint_enabled: true, spam_check_api_key: "SPAM_CHECK_API_KEY" }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error']).to eq('spam_check_endpoint_url is missing')
end
end
+ context "missing spam_check_api_key value when spam_check_endpoint_enabled is true" do
+ it "returns a blank parameter error message" do
+ put api("/application/settings", admin), params: { spam_check_endpoint_enabled: true, spam_check_endpoint_url: "https://example.com/spam_check" }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('spam_check_api_key is missing')
+ end
+ end
+
+ context "overly long spam_check_api_key" do
+ it "fails to update the settings with too long spam_check_api_key" do
+ put api("/application/settings", admin), params: { spam_check_api_key: "0123456789" * 500 }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ message = json_response["message"]
+ expect(message["spam_check_api_key"]).to include(a_string_matching("is too long"))
+ end
+ end
+
context "personal access token prefix settings" do
context "handles validation errors" do
it "fails to update the settings with too long prefix" do
@@ -485,5 +508,32 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
end
end
end
+
+ context 'whats_new_variant setting' do
+ before do
+ Gitlab::CurrentSettings.current_application_settings.whats_new_variant_disabled!
+ end
+
+ it 'updates setting' do
+ new_value = 'all_tiers'
+ put api("/application/settings", admin),
+ params: {
+ whats_new_variant: new_value
+ }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['whats_new_variant']).to eq(new_value)
+ end
+
+ it 'fails to update setting with invalid value' do
+ put api("/application/settings", admin),
+ params: {
+ whats_new_variant: 'invalid_value'
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('whats_new_variant does not have a valid value')
+ end
+ end
end
end
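
The settings examples above add spam_check_api_key (required alongside spam_check_endpoint_url when spam checking is enabled) and a validated whats_new_variant. An illustrative admin update (host and token are placeholders):

require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/application/settings')
request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = 'ADMIN_TOKEN'
request.set_form_data(
  'spam_check_endpoint_enabled' => 'true',
  'spam_check_endpoint_url' => 'grpc://example.com/spam_check',
  'spam_check_api_key' => 'SPAM_CHECK_API_KEY',
  'whats_new_variant' => 'all_tiers'
)

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code # 200 when valid, 400 for a missing key or an unknown whats_new_variant
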
diff --git a/spec/requests/api/terraform/modules/v1/packages_spec.rb b/spec/requests/api/terraform/modules/v1/packages_spec.rb
new file mode 100644
index 00000000000..d318b22cf27
--- /dev/null
+++ b/spec/requests/api/terraform/modules/v1/packages_spec.rb
@@ -0,0 +1,360 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Terraform::Modules::V1::Packages do
+ include PackagesManagerApiSpecHelpers
+ include WorkhorseHelpers
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:project) { create(:project, namespace: group) }
+ let_it_be(:package) { create(:terraform_module_package, project: project) }
+ let_it_be(:personal_access_token) { create(:personal_access_token) }
+ let_it_be(:user) { personal_access_token.user }
+ let_it_be(:job) { create(:ci_build, :running, user: user) }
+ let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
+ let_it_be(:project_deploy_token) { create(:project_deploy_token, deploy_token: deploy_token, project: project) }
+
+ let(:headers) { {} }
+
+ let(:tokens) do
+ {
+ personal_access_token: personal_access_token.token,
+ deploy_token: deploy_token.token,
+ job_token: job.token
+ }
+ end
+
+ describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/versions' do
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/versions") }
+ let(:headers) { {} }
+
+ subject { get(url, headers: headers) }
+
+ context 'with valid namespace' do
+ where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | true | 'returns terraform module packages' | :success
+ :public | :guest | true | :personal_access_token | true | 'returns terraform module packages' | :success
+ :public | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | true | 'returns no terraform module packages' | :success
+ :public | :guest | false | :personal_access_token | true | 'returns no terraform module packages' | :success
+ :public | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | :personal_access_token | true | 'returns no terraform module packages' | :success
+ :private | :developer | true | :personal_access_token | true | 'returns terraform module packages' | :success
+ :private | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | :job_token | true | 'returns terraform module packages' | :success
+ :public | :guest | true | :job_token | true | 'returns no terraform module packages' | :success
+ :public | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | true | 'returns no terraform module packages' | :success
+ :public | :guest | false | :job_token | true | 'returns no terraform module packages' | :success
+ :public | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | true | 'returns terraform module packages' | :success
+ :private | :guest | true | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
+ let(:headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
+
+ before do
+ group.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
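
The permission matrices in this new spec are driven by RSpec::Parameterized::TableSyntax. A self-contained sketch of the same table-driven idea in plain RSpec (the rows and the status_for helper below are illustrative, not the GitLab permission rules):

require 'rspec/autorun'

RSpec.describe 'table-driven permission checks (pattern sketch)' do
  rows = [
    # visibility, member, expected_status
    [:public,  true,  :success],
    [:public,  false, :success],
    [:private, true,  :success],
    [:private, false, :forbidden]
  ]

  # Toy permission rule standing in for the real API behaviour.
  def status_for(visibility, member)
    (visibility == :public || member) ? :success : :forbidden
  end

  rows.each do |visibility, member, expected_status|
    it "returns #{expected_status} for a #{visibility} project when member=#{member}" do
      expect(status_for(visibility, member)).to eq(expected_status)
    end
  end
end
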
+ describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/:module_version/download' do
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}/download") }
+ let(:headers) { {} }
+
+ subject { get(url, headers: headers) }
+
+ context 'with valid namespace' do
+ where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | true | 'grants terraform module download' | :success
+ :public | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :developer | true | :personal_access_token | true | 'grants terraform module download' | :success
+ :private | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | :job_token | true | 'grants terraform module download' | :success
+ :public | :guest | true | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :guest | false | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | true | 'grants terraform module download' | :success
+ :private | :guest | true | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
+ let(:headers) { user_role == :anonymous ? {} : { 'Authorization' => "Bearer #{token}" } }
+
+ before do
+ group.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
+ describe 'GET /api/v4/packages/terraform/modules/v1/:module_namespace/:module_name/:module_system/:module_version/file' do
+ let(:tokens) do
+ {
+ personal_access_token: ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = personal_access_token.id }.encoded,
+ job_token: ::Gitlab::JWTToken.new.tap { |jwt| jwt['token'] = job.token }.encoded
+ }
+ end
+
+ subject { get(url, headers: headers) }
+
+ context 'with valid namespace' do
+ where(:visibility, :user_role, :member, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ :public | :developer | true | :personal_access_token | true | 'grants terraform module package file access' | :success
+ :public | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :developer | true | :personal_access_token | true | 'grants terraform module package file access' | :success
+ :private | :guest | true | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | :job_token | true | 'grants terraform module package file access' | :success
+ :public | :guest | true | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :guest | false | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :public | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | :job_token | true | 'grants terraform module package file access' | :success
+ :private | :guest | true | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :guest | false | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
+ let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}/file?token=#{token}") }
+
+ before do
+ group.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
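
The download endpoint tested above resolves a module by namespace, name, system, and version. An illustrative download request (host, path segments, and token are placeholders; per the table above the token may be a personal access token or a CI job token):

require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/packages/terraform/modules/v1/my-group/my-module/my-system/1.0.0/download')
request = Net::HTTP::Get.new(uri)
request['Authorization'] = 'Bearer YOUR_TOKEN'

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code
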
+ describe 'PUT /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version/file/authorize' do
+ include_context 'workhorse headers'
+
+ let(:url) { api("/projects/#{project.id}/packages/terraform/modules/mymodule/mysystem/1.0.0/file/authorize") }
+ let(:headers) { {} }
+
+ subject { put(url, headers: headers) }
+
+ context 'with valid project' do
+ where(:visibility, :user_role, :member, :token_header, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'process terraform module workhorse authorization' | :success
+ :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'process terraform module workhorse authorization' | :success
+ :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'JOB-TOKEN' | :job_token | true | 'process terraform module workhorse authorization' | :success
+ :public | :guest | true | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'JOB-TOKEN' | :job_token | true | 'process terraform module workhorse authorization' | :success
+ :private | :guest | true | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | true | 'process terraform module workhorse authorization' | :success
+ :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | true | 'process terraform module workhorse authorization' | :success
+ :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | false | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+ let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
+
+ before do
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+ end
+ end
+
+ describe 'PUT /api/v4/projects/:project_id/packages/terraform/modules/:module_name/:module_system/:module_version/file' do
+ include_context 'workhorse headers'
+
+ let_it_be(:file_name) { 'module-system-v1.0.0.tgz' }
+
+ let(:url) { "/projects/#{project.id}/packages/terraform/modules/mymodule/mysystem/1.0.0/file" }
+ let(:headers) { {} }
+ let(:params) { { file: temp_file(file_name) } }
+ let(:file_key) { :file }
+ let(:send_rewritten_field) { true }
+
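+ # workhorse_finalize (spec helper) issues the request gitlab-workhorse would
+ # send after a successful /authorize call, including the rewritten file field.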
+ subject do
+ workhorse_finalize(
+ api(url),
+ method: :put,
+ file_key: file_key,
+ params: params,
+ headers: headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+
+ context 'with valid project' do
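+ # Same permission matrix as the authorize endpoint above; rows that succeed
+ # use the 'process terraform module upload' shared example and expect :created.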
+ where(:visibility, :user_role, :member, :token_header, :token_type, :valid_token, :shared_examples_name, :expected_status) do
+ :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'process terraform module upload' | :created
+ :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :anonymous | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'process terraform module upload' | :created
+ :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'PRIVATE-TOKEN' | :personal_access_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :anonymous | false | 'PRIVATE-TOKEN' | :personal_access_token | true | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'JOB-TOKEN' | :job_token | true | 'process terraform module upload' | :created
+ :public | :guest | true | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :guest | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :public | :developer | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :guest | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'JOB-TOKEN' | :job_token | true | 'process terraform module upload' | :created
+ :private | :guest | true | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :forbidden
+ :private | :developer | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | true | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :guest | false | 'JOB-TOKEN' | :job_token | true | 'rejects terraform module packages access' | :not_found
+ :private | :developer | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :guest | false | 'JOB-TOKEN' | :job_token | false | 'rejects terraform module packages access' | :unauthorized
+ :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | true | 'process terraform module upload' | :created
+ :public | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | false | 'rejects terraform module packages access' | :unauthorized
+ :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | true | 'process terraform module upload' | :created
+ :private | :developer | true | 'DEPLOY-TOKEN' | :deploy_token | false | 'rejects terraform module packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
+ let(:user_headers) { user_role == :anonymous ? {} : { token_header => token } }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+
+ before do
+ project.update!(visibility: visibility.to_s)
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
+
+ context 'failed package file save' do
+ let(:user_headers) { { 'PRIVATE-TOKEN' => personal_access_token.token } }
+ let(:headers) { user_headers.merge(workhorse_headers) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'does not create a package record', :aggregate_failures do
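+ # Stub the package file service constructor to raise so neither the package
+ # nor the package file record is persisted.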
+ allow(Packages::CreatePackageFileService).to receive(:new).and_raise(StandardError)
+
+ expect { subject }
+ .to change { project.packages.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(0)
+ expect(response).to have_gitlab_http_status(:error)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 01a24be9f20..71fdd986f20 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1449,6 +1449,48 @@ RSpec.describe API::Users do
end
end
+ describe "PUT /user/:id/credit_card_validation" do
+ let(:credit_card_validated_time) { Time.utc(2020, 1, 1) }
+
+ context 'when unauthenticated' do
+ it 'returns authentication error' do
+ put api("/user/#{user.id}/credit_card_validation"), params: { credit_card_validated_at: credit_card_validated_time }
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when authenticated as non-admin' do
+ it "does not allow updating user's credit card validation", :aggregate_failures do
+ put api("/user/#{user.id}/credit_card_validation", user), params: { credit_card_validated_at: credit_card_validated_time }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when authenticated as admin' do
+ it "updates user's credit card validation", :aggregate_failures do
+ put api("/user/#{user.id}/credit_card_validation", admin), params: { credit_card_validated_at: credit_card_validated_time }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(user.reload.credit_card_validated_at).to eq(credit_card_validated_time)
+ end
+
+ it "returns 400 error if credit_card_validated_at is missing" do
+ put api("/user/#{user.id}/credit_card_validation", admin), params: {}
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns a 404 error when the user is not found' do
+ put api("/user/#{non_existing_record_id}/credit_card_validation", admin), params: { credit_card_validated_at: credit_card_validated_time }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 User Not Found')
+ end
+ end
+ end
+
describe "DELETE /users/:id/identities/:provider" do
let(:test_user) { create(:omniauth_user, provider: 'ldapmain') }
diff --git a/spec/requests/groups/autocomplete_sources_spec.rb b/spec/requests/groups/autocomplete_sources_spec.rb
new file mode 100644
index 00000000000..d053e0fe773
--- /dev/null
+++ b/spec/requests/groups/autocomplete_sources_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'groups autocomplete' do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:group) { create(:group, :private) }
+
+ before_all do
+ group.add_developer(user)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ describe '#issues' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:incident) { create(:incident, project: project) }
+
+ let(:none) { [] }
+ let(:all) { [issue, incident] }
+
+ where(:issue_types, :expected) do
+ nil | :all
+ '' | :all
+ 'invalid' | :none
+ 'issue' | :issue
+ 'incident' | :incident
+ end
+
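+ # :expected is resolved with public_send, so :issue/:incident map to the
+ # records above and :all/:none to the lets defined just before the table.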
+ with_them do
+ it 'returns the correct response', :aggregate_failures do
+ issues = Array(expected).flat_map { |sym| public_send(sym) }
+
+ get issues_group_autocomplete_sources_path(group, issue_types: issue_types)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an(Array)
+ expect(json_response.size).to eq(issues.size)
+ expect(json_response.map { |issue| issue['iid'] })
+ .to match_array(issues.map(&:iid))
+ end
+ end
+ end
+
+ describe '#milestones' do
+ it 'returns the correct response' do
+ parent_group = create(:group, :private)
+ group.update!(parent: parent_group)
+ sub_group = create(:group, :private, parent: group)
+ create(:milestone, group: parent_group)
+ create(:milestone, group: sub_group)
+ group_milestone = create(:milestone, group: group)
+
+ get milestones_group_autocomplete_sources_path(group)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.count).to eq(1)
+ expect(json_response.first).to include(
+ 'iid' => group_milestone.iid, 'title' => group_milestone.title
+ )
+ end
+ end
+end
diff --git a/spec/requests/groups/email_campaigns_controller_spec.rb b/spec/requests/groups/email_campaigns_controller_spec.rb
index a77f600ea1e..48297ec4cb6 100644
--- a/spec/requests/groups/email_campaigns_controller_spec.rb
+++ b/spec/requests/groups/email_campaigns_controller_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe Groups::EmailCampaignsController do
- include InProductMarketingHelper
using RSpec::Parameterized::TableSyntax
describe 'GET #index', :snowplow do
@@ -13,7 +12,7 @@ RSpec.describe Groups::EmailCampaignsController do
let(:track) { 'create' }
let(:series) { '0' }
let(:schema) { described_class::EMAIL_CAMPAIGNS_SCHEMA_URL }
- let(:subject_line_text) { subject_line(track.to_sym, series.to_i) }
+ let(:subject_line_text) { Gitlab::Email::Message::InProductMarketing.for(track.to_sym).new(group: group, series: series.to_i).subject_line }
let(:data) do
{
namespace_id: group.id,
diff --git a/spec/requests/invite_registration_spec.rb b/spec/requests/invite_registration_spec.rb
new file mode 100644
index 00000000000..167cf4b1de7
--- /dev/null
+++ b/spec/requests/invite_registration_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Registering from an invite' do
+ let(:com) { true }
+
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(com)
+ end
+
+ describe 'GET /users/sign_up/invites/new' do
+ subject(:request) { get '/users/sign_up/invites/new' }
+
+ context 'when on .com' do
+ it 'renders the template with expected text', :aggregate_failures do
+ request
+
+ expect(response).to render_template('layouts/simple_registration')
+ expect(response).to render_template(:new)
+ expect(response.body).to include('Join your team')
+ end
+ end
+
+ context 'when not on .com' do
+ let(:com) { false }
+
+ it 'returns not found' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'POST /users/sign_up/invites' do
+ subject(:request) do
+ post '/users/sign_up/invites',
+ params: {
+ user: {
+ first_name: 'first',
+ last_name: 'last',
+ username: 'new_username',
+ email: 'new@user.com',
+ password: 'Any_password'
+ }
+ }
+ end
+
+ context 'when on .com' do
+ it 'creates a user' do
+ expect { request }.to change(User, :count).by(1)
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+
+ context 'when not on .com' do
+ let(:com) { false }
+
+ it 'returns not found' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb b/spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb
index 5d2f3e98bb4..7d5eb1c9685 100644
--- a/spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb
+++ b/spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb
@@ -24,20 +24,6 @@ RSpec.describe 'Projects::Ci::PrometheusMetrics::HistogramsController' do
expect(response).to have_gitlab_http_status(:not_found)
end
end
-
- context 'with the feature flag disabled' do
- before do
- stub_feature_flags(ci_accept_frontend_prometheus_metrics: false)
- end
-
- it 'returns 202 Accepted' do
- post histograms_route(histograms: [
- { name: :pipeline_graph_link_calculation_duration_seconds, value: 1 }
- ])
-
- expect(response).to have_gitlab_http_status(:accepted)
- end
- end
end
def histograms_route(params = {})
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 972caec6eb3..f24f815e9c6 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -18,7 +18,11 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
throttle_authenticated_web_requests_per_period: 100,
throttle_authenticated_web_period_in_seconds: 1,
throttle_authenticated_protected_paths_request_per_period: 100,
- throttle_authenticated_protected_paths_in_seconds: 1
+ throttle_authenticated_protected_paths_in_seconds: 1,
+ throttle_unauthenticated_packages_api_requests_per_period: 100,
+ throttle_unauthenticated_packages_api_period_in_seconds: 1,
+ throttle_authenticated_packages_api_requests_per_period: 100,
+ throttle_authenticated_packages_api_period_in_seconds: 1
}
end
@@ -435,6 +439,186 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
end
end
+ describe 'Packages API' do
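+ # Verifies that the dedicated packages API throttles take precedence: when
+ # enabled they apply even if the general throttle is stricter, and when
+ # disabled the general (un)authenticated throttle acts as the fallback.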
+ let(:request_method) { 'GET' }
+
+ context 'unauthenticated' do
+ let_it_be(:project) { create(:project, :public) }
+
+ let(:throttle_setting_prefix) { 'throttle_unauthenticated_packages_api' }
+ let(:packages_path_that_does_not_require_authentication) { "/api/v4/projects/#{project.id}/packages/conan/v1/ping" }
+
+ def do_request
+ get packages_path_that_does_not_require_authentication
+ end
+
+ before do
+ settings_to_set[:throttle_unauthenticated_packages_api_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_unauthenticated_packages_api_period_in_seconds] = period_in_seconds
+ end
+
+ context 'when unauthenticated packages api throttle is disabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_packages_api_enabled] = false
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'allows requests over the rate limit' do
+ (1 + requests_per_period).times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when unauthenticated api throttle is enabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_unauthenticated_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the unauthenticated api rate limit' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+ end
+ end
+
+ context 'when unauthenticated packages api throttle is enabled' do
+ before do
+ settings_to_set[:throttle_unauthenticated_packages_api_requests_per_period] = requests_per_period # 1
+ settings_to_set[:throttle_unauthenticated_packages_api_period_in_seconds] = period_in_seconds # 10_000
+ settings_to_set[:throttle_unauthenticated_packages_api_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the rate limit' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+
+ context 'when unauthenticated api throttle is lower' do
+ before do
+ settings_to_set[:throttle_unauthenticated_requests_per_period] = 0
+ settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_unauthenticated_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'ignores unauthenticated api throttle' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+ end
+
+ it_behaves_like 'tracking when dry-run mode is set' do
+ let(:throttle_name) { 'throttle_unauthenticated_packages_api' }
+ end
+ end
+ end
+
+ context 'authenticated', :api do
+ let_it_be(:project) { create(:project, :internal) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
+ let_it_be(:other_user) { create(:user) }
+ let_it_be(:other_user_token) { create(:personal_access_token, user: other_user) }
+
+ let(:throttle_setting_prefix) { 'throttle_authenticated_packages_api' }
+ let(:api_partial_url) { "/projects/#{project.id}/packages/conan/v1/ping" }
+
+ before do
+ stub_application_setting(settings_to_set)
+ end
+
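+ # request_args/other_user_request_args feed the 'rate-limited
+ # token-authenticated requests' shared examples, covering tokens passed as a
+ # query parameter and as request headers.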
+ context 'with the token in the query string' do
+ let(:request_args) { [api(api_partial_url, personal_access_token: token), {}] }
+ let(:other_user_request_args) { [api(api_partial_url, personal_access_token: other_user_token), {}] }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'with the token in the headers' do
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, personal_access_token_headers(other_user_token)) }
+
+ it_behaves_like 'rate-limited token-authenticated requests'
+ end
+
+ context 'precedence over authenticated api throttle' do
+ before do
+ settings_to_set[:throttle_authenticated_packages_api_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_authenticated_packages_api_period_in_seconds] = period_in_seconds
+ end
+
+ def do_request
+ get api(api_partial_url, personal_access_token: token)
+ end
+
+ context 'when authenticated packages api throttle is enabled' do
+ before do
+ settings_to_set[:throttle_authenticated_packages_api_enabled] = true
+ end
+
+ context 'when authenticated api throttle is lower' do
+ before do
+ settings_to_set[:throttle_authenticated_api_requests_per_period] = 0
+ settings_to_set[:throttle_authenticated_api_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_authenticated_api_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'ignores authenticated api throttle' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+ end
+ end
+
+ context 'when authenticated packages api throttle is disabled' do
+ before do
+ settings_to_set[:throttle_authenticated_packages_api_enabled] = false
+ end
+
+ context 'when authenticated api throttle is enabled' do
+ before do
+ settings_to_set[:throttle_authenticated_api_requests_per_period] = requests_per_period
+ settings_to_set[:throttle_authenticated_api_period_in_seconds] = period_in_seconds
+ settings_to_set[:throttle_authenticated_api_enabled] = true
+ stub_application_setting(settings_to_set)
+ end
+
+ it 'rejects requests over the authenticated api rate limit' do
+ requests_per_period.times do
+ do_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ expect_rejection { do_request }
+ end
+ end
+ end
+ end
+ end
+ end
+
describe 'throttle bypass header' do
let(:headers) { {} }
let(:bypass_header) { 'gitlab-bypass-rate-limiting' }
diff --git a/spec/requests/terraform/services_controller_spec.rb b/spec/requests/terraform/services_controller_spec.rb
new file mode 100644
index 00000000000..54f7348513e
--- /dev/null
+++ b/spec/requests/terraform/services_controller_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Terraform::ServicesController do
+ describe 'GET /.well-known/terraform.json' do
+ subject { get '/.well-known/terraform.json' }
+
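+ # Terraform service discovery: the modules.v1 key advertises the base path of
+ # the module registry API that Terraform uses to resolve module sources.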
+ it 'responds with terraform service discovery' do
+ subject
+
+ expect(json_response['modules.v1']).to eq("/api/#{::API::API.version}/packages/terraform/modules/v1/")
+ end
+ end
+end
diff --git a/spec/requests/whats_new_controller_spec.rb b/spec/requests/whats_new_controller_spec.rb
index ffb31bdf9bb..d4976a2bba3 100644
--- a/spec/requests/whats_new_controller_spec.rb
+++ b/spec/requests/whats_new_controller_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe WhatsNewController, :clean_gitlab_redis_cache do
ReleaseHighlight.instance_variable_set(:@file_paths, nil)
end
- describe 'whats_new_path' do
+ describe 'GET #index' do
let(:item) { double(:item) }
let(:highlights) { double(:highlight, items: [item], map: [item].map, next_page: 2) }
@@ -35,5 +35,17 @@ RSpec.describe WhatsNewController, :clean_gitlab_redis_cache do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'with whats_new_variant = disabled' do
+ before do
+ Gitlab::CurrentSettings.current_application_settings.whats_new_variant_disabled!
+ end
+
+ it 'returns a 404' do
+ get whats_new_path, xhr: true
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
end
diff --git a/spec/rubocop/cop/active_model_errors_direct_manipulation_spec.rb b/spec/rubocop/cop/active_model_errors_direct_manipulation_spec.rb
new file mode 100644
index 00000000000..37fcdb38907
--- /dev/null
+++ b/spec/rubocop/cop/active_model_errors_direct_manipulation_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../rubocop/cop/active_model_errors_direct_manipulation'
+
+RSpec.describe RuboCop::Cop::ActiveModelErrorsDirectManipulation do
+ subject(:cop) { described_class.new }
+
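+ # expect_offense takes annotated source: the ^^^ markers underline the
+ # offending range and the [...] placeholder matches the rest of the cop's message.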
+ context 'when modifying errors' do
+ it 'registers an offense' do
+ expect_offense(<<~PATTERN)
+ user.errors[:name] << 'msg'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid manipulating errors hash directly. [...]
+ PATTERN
+ end
+
+ context 'when assigning' do
+ it 'registers an offense' do
+ expect_offense(<<~PATTERN)
+ user.errors[:name] = []
+ ^^^^^^^^^^^^^^^^^^^^^^^ Avoid manipulating errors hash directly. [...]
+ PATTERN
+ end
+ end
+ end
+
+ context 'when modifying errors.messages' do
+ it 'registers an offense' do
+ expect_offense(<<~PATTERN)
+ user.errors.messages[:name] << 'msg'
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid manipulating errors hash directly. [...]
+ PATTERN
+ end
+
+ context 'when assigning' do
+ it 'registers an offense' do
+ expect_offense(<<~PATTERN)
+ user.errors.messages[:name] = []
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid manipulating errors hash directly. [...]
+ PATTERN
+ end
+ end
+ end
+
+ context 'when modifying errors.details' do
+ it 'registers an offense' do
+ expect_offense(<<~PATTERN)
+ user.errors.details[:name] << {}
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid manipulating errors hash directly. [...]
+ PATTERN
+ end
+
+ context 'when assigning' do
+ it 'registers an offense' do
+ expect_offense(<<~PATTERN)
+ user.errors.details[:name] = []
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Avoid manipulating errors hash directly. [...]
+ PATTERN
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb b/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
index 8bfa57031d7..962efc23453 100644
--- a/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
+++ b/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
@@ -6,173 +6,77 @@ require_relative '../../../rubocop/cop/inject_enterprise_edition_module'
RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
subject(:cop) { described_class.new }
- it 'flags the use of `prepend_if_ee EE` in the middle of a file' do
+ it 'flags the use of `prepend_mod_with` in the middle of a file' do
expect_offense(<<~SOURCE)
class Foo
- prepend_if_ee 'EE::Foo'
- ^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
+ prepend_mod_with('Foo')
+ ^^^^^^^^^^^^^^^^^^^^^^^ Injecting extension modules must be done on the last line of this file, outside of any class or module definitions
end
SOURCE
end
- it 'flags the use of `prepend_if_ee QA::EE` in the middle of a file' do
+ it 'flags the use of `include_mod_with` in the middle of a file' do
expect_offense(<<~SOURCE)
class Foo
- prepend_if_ee 'QA::EE::Foo'
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
+ include_mod_with('Foo')
+ ^^^^^^^^^^^^^^^^^^^^^^^ Injecting extension modules must be done on the last line of this file, outside of any class or module definitions
end
SOURCE
end
-
- it 'does not flag the use of `prepend_if_ee EEFoo` in the middle of a file' do
- expect_no_offenses(<<~SOURCE)
- class Foo
- prepend_if_ee 'EEFoo'
- end
- SOURCE
- end
-
- it 'flags the use of `prepend_if_ee EE::Foo::Bar` in the middle of a file' do
- expect_offense(<<~SOURCE)
- class Foo
- prepend_if_ee 'EE::Foo::Bar'
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- end
- SOURCE
- end
-
- it 'flags the use of `prepend_if_ee(EE::Foo::Bar)` in the middle of a file' do
- expect_offense(<<~SOURCE)
- class Foo
- prepend_if_ee('EE::Foo::Bar')
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- end
- SOURCE
- end
-
- it 'flags the use of `prepend_if_ee EE::Foo::Bar::Baz` in the middle of a file' do
+ it 'flags the use of `extend_mod_with` in the middle of a file' do
expect_offense(<<~SOURCE)
class Foo
- prepend_if_ee 'EE::Foo::Bar::Baz'
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- end
- SOURCE
- end
-
- it 'flags the use of `prepend_if_ee ::EE` in the middle of a file' do
- expect_offense(<<~SOURCE)
- class Foo
- prepend_if_ee '::EE::Foo'
- ^^^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- end
- SOURCE
- end
-
- it 'flags the use of `include_if_ee EE` in the middle of a file' do
- expect_offense(<<~SOURCE)
- class Foo
- include_if_ee 'EE::Foo'
- ^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- end
- SOURCE
- end
-
- it 'flags the use of `include_if_ee ::EE` in the middle of a file' do
- expect_offense(<<~SOURCE)
- class Foo
- include_if_ee '::EE::Foo'
- ^^^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- end
- SOURCE
- end
-
- it 'flags the use of `extend_if_ee EE` in the middle of a file' do
- expect_offense(<<~SOURCE)
- class Foo
- extend_if_ee 'EE::Foo'
- ^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- end
- SOURCE
- end
-
- it 'flags the use of `extend_if_ee ::EE` in the middle of a file' do
- expect_offense(<<~SOURCE)
- class Foo
- extend_if_ee '::EE::Foo'
- ^^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- end
- SOURCE
- end
-
- it 'does not flag prepending of regular modules' do
- expect_no_offenses(<<~SOURCE)
- class Foo
- prepend_if_ee 'Foo'
- end
- SOURCE
- end
-
- it 'does not flag including of regular modules' do
- expect_no_offenses(<<~SOURCE)
- class Foo
- include_if_ee 'Foo'
- end
- SOURCE
- end
-
- it 'does not flag extending using regular modules' do
- expect_no_offenses(<<~SOURCE)
- class Foo
- extend_if_ee 'Foo'
+ extend_mod_with('Foo')
+ ^^^^^^^^^^^^^^^^^^^^^^ Injecting extension modules must be done on the last line of this file, outside of any class or module definitions
end
SOURCE
end
- it 'does not flag the use of `prepend_if_ee EE` on the last line' do
+ it 'does not flag the use of `prepend_mod_with` on the last line' do
expect_no_offenses(<<~SOURCE)
class Foo
end
- Foo.prepend_if_ee('EE::Foo')
+ Foo.prepend_mod_with('Foo')
SOURCE
end
- it 'does not flag the use of `include_if_ee EE` on the last line' do
+ it 'does not flag the use of `include_mod_with` on the last line' do
expect_no_offenses(<<~SOURCE)
class Foo
end
- Foo.include_if_ee('EE::Foo')
+ Foo.include_mod_with('Foo')
SOURCE
end
- it 'does not flag the use of `extend_if_ee EE` on the last line' do
+ it 'does not flag the use of `extend_mod_with` on the last line' do
expect_no_offenses(<<~SOURCE)
class Foo
end
- Foo.extend_if_ee('EE::Foo')
+ Foo.extend_mod_with('Foo')
SOURCE
end
- it 'does not flag the double use of `X_if_ee` on the last line' do
+ it 'does not flag the double use of `X_mod_with` on the last line' do
expect_no_offenses(<<~SOURCE)
class Foo
end
- Foo.extend_if_ee('EE::Foo')
- Foo.include_if_ee('EE::Foo')
- Foo.prepend_if_ee('EE::Foo')
+ Foo.extend_mod_with('Foo')
+ Foo.include_mod_with('Foo')
+ Foo.prepend_mod_with('Foo')
SOURCE
end
- it 'does not flag the use of `prepend_if_ee EE` as long as all injections are at the end of the file' do
+ it 'does not flag the use of `prepend_mod_with` as long as all injections are at the end of the file' do
expect_no_offenses(<<~SOURCE)
class Foo
end
- Foo.include_if_ee('EE::Foo')
- Foo.prepend_if_ee('EE::Foo')
+ Foo.include_mod_with('Foo')
+ Foo.prepend_mod_with('Foo')
Foo.include(Bar)
# comment on prepending Bar
@@ -183,27 +87,27 @@ RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
it 'autocorrects offenses by just disabling the Cop' do
expect_offense(<<~SOURCE)
class Foo
- prepend_if_ee 'EE::Foo'
- ^^^^^^^^^^^^^^^^^^^^^^^ Injecting EE modules must be done on the last line of this file, outside of any class or module definitions
- include_if_ee 'Bar'
+ prepend_mod_with('Foo')
+ ^^^^^^^^^^^^^^^^^^^^^^^ Injecting extension modules must be done on the last line of this file, outside of any class or module definitions
+ include Bar
end
SOURCE
expect_correction(<<~SOURCE)
class Foo
- prepend_if_ee 'EE::Foo' # rubocop: disable Cop/InjectEnterpriseEditionModule
- include_if_ee 'Bar'
+ prepend_mod_with('Foo') # rubocop: disable Cop/InjectEnterpriseEditionModule
+ include Bar
end
SOURCE
end
- it 'disallows the use of prepend to inject an EE module' do
+ it 'disallows the use of prepend to inject an extension module' do
expect_offense(<<~SOURCE)
class Foo
end
Foo.prepend(EE::Foo)
- ^^^^^^^^^^^^^^^^^^^^ EE modules must be injected using `include_if_ee`, `extend_if_ee`, or `prepend_if_ee`
+ ^^^^^^^^^^^^^^^^^^^^ EE modules must be injected using `include_mod_with`, `extend_mod_with`, or `prepend_mod_with`
SOURCE
end
@@ -213,7 +117,7 @@ RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
end
Foo.prepend(QA::EE::Foo)
- ^^^^^^^^^^^^^^^^^^^^^^^^ EE modules must be injected using `include_if_ee`, `extend_if_ee`, or `prepend_if_ee`
+ ^^^^^^^^^^^^^^^^^^^^^^^^ EE modules must be injected using `include_mod_with`, `extend_mod_with`, or `prepend_mod_with`
SOURCE
end
@@ -223,7 +127,7 @@ RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
end
Foo.extend(EE::Foo)
- ^^^^^^^^^^^^^^^^^^^ EE modules must be injected using `include_if_ee`, `extend_if_ee`, or `prepend_if_ee`
+ ^^^^^^^^^^^^^^^^^^^ EE modules must be injected using `include_mod_with`, `extend_mod_with`, or `prepend_mod_with`
SOURCE
end
@@ -233,37 +137,37 @@ RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
end
Foo.include(EE::Foo)
- ^^^^^^^^^^^^^^^^^^^^ EE modules must be injected using `include_if_ee`, `extend_if_ee`, or `prepend_if_ee`
+ ^^^^^^^^^^^^^^^^^^^^ EE modules must be injected using `include_mod_with`, `extend_mod_with`, or `prepend_mod_with`
SOURCE
end
- it 'disallows the use of prepend_if_ee without a String' do
+ it 'disallows the use of prepend_mod_with without a String' do
expect_offense(<<~SOURCE)
class Foo
end
- Foo.prepend_if_ee(EE::Foo)
- ^^^^^^^ EE modules to inject must be specified as a String
+ Foo.prepend_mod_with(Foo)
+ ^^^ extension modules to inject must be specified as a String
SOURCE
end
- it 'disallows the use of include_if_ee without a String' do
+ it 'disallows the use of include_mod_with without a String' do
expect_offense(<<~SOURCE)
class Foo
end
- Foo.include_if_ee(EE::Foo)
- ^^^^^^^ EE modules to inject must be specified as a String
+ Foo.include_mod_with(Foo)
+ ^^^ extension modules to inject must be specified as a String
SOURCE
end
- it 'disallows the use of extend_if_ee without a String' do
+ it 'disallows the use of extend_mod_with without a String' do
expect_offense(<<~SOURCE)
class Foo
end
- Foo.extend_if_ee(EE::Foo)
- ^^^^^^^ EE modules to inject must be specified as a String
+ Foo.extend_mod_with(Foo)
+ ^^^ extension modules to inject must be specified as a String
SOURCE
end
end
diff --git a/spec/rubocop/cop/performance/ar_count_each_spec.rb b/spec/rubocop/cop/performance/ar_count_each_spec.rb
index fa7a1aba426..4aeb9e13b18 100644
--- a/spec/rubocop/cop/performance/ar_count_each_spec.rb
+++ b/spec/rubocop/cop/performance/ar_count_each_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-require_relative '../../../../rubocop/cop/performance/ar_count_each.rb'
+require_relative '../../../../rubocop/cop/performance/ar_count_each'
RSpec.describe RuboCop::Cop::Performance::ARCountEach do
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb b/spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb
index 127c858a549..e95220756ed 100644
--- a/spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb
+++ b/spec/rubocop/cop/performance/ar_exists_and_present_blank_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-require_relative '../../../../rubocop/cop/performance/ar_exists_and_present_blank.rb'
+require_relative '../../../../rubocop/cop/performance/ar_exists_and_present_blank'
RSpec.describe RuboCop::Cop::Performance::ARExistsAndPresentBlank do
subject(:cop) { described_class.new }
diff --git a/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb b/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb
new file mode 100644
index 00000000000..90cc7f7827b
--- /dev/null
+++ b/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Analytics::CycleAnalytics::StageEntity do
+ let(:stage) { build(:cycle_analytics_project_stage, start_event_identifier: :merge_request_created, end_event_identifier: :merge_request_merged) }
+
+ subject(:entity_json) { described_class.new(Analytics::CycleAnalytics::StagePresenter.new(stage)).as_json }
+
+ it 'exposes start and end event descriptions' do
+ expect(entity_json).to have_key(:start_event_html_description)
+ expect(entity_json).to have_key(:end_event_html_description)
+ end
+end
diff --git a/spec/serializers/ci/codequality_mr_diff_entity_spec.rb b/spec/serializers/ci/codequality_mr_diff_entity_spec.rb
index 82708908d95..4f161c36b06 100644
--- a/spec/serializers/ci/codequality_mr_diff_entity_spec.rb
+++ b/spec/serializers/ci/codequality_mr_diff_entity_spec.rb
@@ -4,18 +4,18 @@ require 'spec_helper'
RSpec.describe Ci::CodequalityMrDiffEntity do
let(:entity) { described_class.new(mr_diff_report) }
- let(:mr_diff_report) { Gitlab::Ci::Reports::CodequalityMrDiff.new(codequality_report) }
+ let(:mr_diff_report) { Gitlab::Ci::Reports::CodequalityMrDiff.new(codequality_report.all_degradations) }
let(:codequality_report) { Gitlab::Ci::Reports::CodequalityReports.new }
- let(:degradation_1) { build(:codequality_degradation_1) }
- let(:degradation_2) { build(:codequality_degradation_2) }
+ let(:major) { build(:codequality_degradation, :major) }
+ let(:minor) { build(:codequality_degradation, :minor) }
describe '#as_json' do
subject(:report) { entity.as_json }
context 'when quality report has degradations' do
before do
- codequality_report.add_degradation(degradation_1)
- codequality_report.add_degradation(degradation_2)
+ codequality_report.add_degradation(major)
+ codequality_report.add_degradation(minor)
end
it 'contains correct codequality mr diff report', :aggregate_failures do
diff --git a/spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb b/spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb
index 906ca36041f..6afbc3b8353 100644
--- a/spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb
+++ b/spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb
@@ -4,18 +4,18 @@ require 'spec_helper'
RSpec.describe Ci::CodequalityMrDiffReportSerializer do
let(:serializer) { described_class.new.represent(mr_diff_report) }
- let(:mr_diff_report) { Gitlab::Ci::Reports::CodequalityMrDiff.new(codequality_report) }
+ let(:mr_diff_report) { Gitlab::Ci::Reports::CodequalityMrDiff.new(codequality_report.all_degradations) }
let(:codequality_report) { Gitlab::Ci::Reports::CodequalityReports.new }
- let(:degradation_1) { build(:codequality_degradation_1) }
- let(:degradation_2) { build(:codequality_degradation_2) }
+ let(:major) { build(:codequality_degradation, :major) }
+ let(:minor) { build(:codequality_degradation, :minor) }
describe '#to_json' do
subject { serializer.as_json }
context 'when quality report has degradations' do
before do
- codequality_report.add_degradation(degradation_1)
- codequality_report.add_degradation(degradation_2)
+ codequality_report.add_degradation(major)
+ codequality_report.add_degradation(minor)
end
it 'matches the schema' do
diff --git a/spec/serializers/ci/downloadable_artifact_entity_spec.rb b/spec/serializers/ci/downloadable_artifact_entity_spec.rb
new file mode 100644
index 00000000000..34a271e7422
--- /dev/null
+++ b/spec/serializers/ci/downloadable_artifact_entity_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DownloadableArtifactEntity do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports) }
+ let(:user) { create(:user) }
+ let(:request) { EntityRequest.new({ current_user: user }) }
+ let(:entity) { described_class.new(pipeline, request: request) }
+
+ describe '#as_json' do
+ subject { entity.as_json }
+
+ it 'contains required fields', :aggregate_failures do
+ expect(subject).to include(:artifacts)
+ expect(subject[:artifacts].size).to eq(1)
+ end
+
+ context 'when user cannot read job artifact' do
+ let!(:build) { create(:ci_build, :success, :artifacts, :non_public_artifacts, pipeline: pipeline) }
+
+ it 'returns only artifacts readable by user', :aggregate_failures do
+ expect(subject[:artifacts].size).to eq(1)
+ expect(subject[:artifacts].first[:name]).to eq("test:codequality")
+ end
+ end
+ end
+end
diff --git a/spec/serializers/ci/downloadable_artifact_serializer_spec.rb b/spec/serializers/ci/downloadable_artifact_serializer_spec.rb
new file mode 100644
index 00000000000..90f159a06f9
--- /dev/null
+++ b/spec/serializers/ci/downloadable_artifact_serializer_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DownloadableArtifactSerializer do
+ let(:pipeline) { create(:ci_pipeline, :with_codequality_reports) }
+ let(:user) { create(:user) }
+ let(:serializer) { described_class.new(current_user: user).represent(pipeline) }
+
+ describe '#as_json' do
+ subject { serializer.as_json }
+
+ it 'matches schema' do
+ expect(subject).to match_schema('entities/downloadable_artifact')
+ end
+ end
+end
diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb
index 83ea0d649e8..054406e4e65 100644
--- a/spec/serializers/ci/pipeline_entity_spec.rb
+++ b/spec/serializers/ci/pipeline_entity_spec.rb
@@ -155,7 +155,7 @@ RSpec.describe Ci::PipelineEntity do
it 'has a correct failure reason' do
expect(subject[:failure_reason])
- .to eq 'CI/CD YAML configuration error!'
+ .to eq 'The pipeline failed due to an error on the CI/CD configuration file.'
end
end
@@ -239,23 +239,23 @@ RSpec.describe Ci::PipelineEntity do
end
context 'when pipeline has failed builds' do
- let_it_be(:pipeline) { create(:ci_pipeline, user: user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
let_it_be(:build) { create(:ci_build, :success, pipeline: pipeline) }
let_it_be(:failed_1) { create(:ci_build, :failed, pipeline: pipeline) }
let_it_be(:failed_2) { create(:ci_build, :failed, pipeline: pipeline) }
context 'when the user can retry the pipeline' do
- it 'exposes these failed builds' do
- allow(entity).to receive(:can_retry?).and_return(true)
+ before do
+ project.add_maintainer(user)
+ end
+ it 'exposes these failed builds' do
expect(subject[:failed_builds].map { |b| b[:id] }).to contain_exactly(failed_1.id, failed_2.id)
end
end
context 'when the user cannot retry the pipeline' do
it 'is nil' do
- allow(entity).to receive(:can_retry?).and_return(false)
-
expect(subject[:failed_builds]).to be_nil
end
end
diff --git a/spec/serializers/context_commits_diff_entity_spec.rb b/spec/serializers/context_commits_diff_entity_spec.rb
new file mode 100644
index 00000000000..e8f38527f5b
--- /dev/null
+++ b/spec/serializers/context_commits_diff_entity_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ContextCommitsDiffEntity do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:mrcc1) { create(:merge_request_context_commit, merge_request: merge_request, sha: "cfe32cf61b73a0d5e9f13e774abde7ff789b1660") }
+ let_it_be(:mrcc2) { create(:merge_request_context_commit, merge_request: merge_request, sha: "ae73cb07c9eeaf35924a10f713b364d32b2dd34f") }
+
+ context 'as json' do
+ subject { ContextCommitsDiffEntity.represent(merge_request.context_commits_diff).as_json }
+
+ it 'exposes commits_count' do
+ expect(subject[:commits_count]).to eq(2)
+ end
+
+ it 'exposes showing_context_commits_diff' do
+ expect(subject).to have_key(:showing_context_commits_diff)
+ end
+
+ it 'exposes diffs_path' do
+ expect(subject[:diffs_path]).to eq(Gitlab::Routing.url_helpers.diffs_project_merge_request_path(merge_request.project, merge_request, only_context_commits: true))
+ end
+ end
+end
diff --git a/spec/serializers/diffs_metadata_entity_spec.rb b/spec/serializers/diffs_metadata_entity_spec.rb
index f6993d4652e..b1cbe7e216e 100644
--- a/spec/serializers/diffs_metadata_entity_spec.rb
+++ b/spec/serializers/diffs_metadata_entity_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe DiffsMetadataEntity do
:start_version, :latest_diff, :latest_version_path,
:added_lines, :removed_lines, :render_overflow_warning,
:email_patch_path, :plain_diff_path,
- :merge_request_diffs, :context_commits,
+ :merge_request_diffs, :context_commits, :context_commits_diff,
:definition_path_prefix, :source_branch_exists,
:can_merge, :conflict_resolution_path, :has_conflicts,
:project_name, :project_path, :user_full_name, :username,
diff --git a/spec/serializers/group_issuable_autocomplete_entity_spec.rb b/spec/serializers/group_issuable_autocomplete_entity_spec.rb
new file mode 100644
index 00000000000..86ef9dea23b
--- /dev/null
+++ b/spec/serializers/group_issuable_autocomplete_entity_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupIssuableAutocompleteEntity do
+ let(:group) { build_stubbed(:group) }
+ let(:project) { build_stubbed(:project, group: group) }
+ let(:issue) { build_stubbed(:issue, project: project) }
+
+ describe '#represent' do
+ subject { described_class.new(issue, parent_group: group).as_json }
+
+ it 'includes the iid, title, and reference' do
+ expect(subject).to include(:iid, :title, :reference)
+ end
+ end
+end
diff --git a/spec/serializers/issue_entity_spec.rb b/spec/serializers/issue_entity_spec.rb
index 82ea26fae40..76f8cf644c6 100644
--- a/spec/serializers/issue_entity_spec.rb
+++ b/spec/serializers/issue_entity_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe IssueEntity do
before do
project.add_developer(member)
public_project.add_developer(member)
- Issues::MoveService.new(public_project, member).execute(issue, project)
+ Issues::MoveService.new(project: public_project, current_user: member).execute(issue, project)
end
context 'when user cannot read target project' do
@@ -61,7 +61,7 @@ RSpec.describe IssueEntity do
before do
Issues::DuplicateService
- .new(project, member)
+ .new(project: project, current_user: member)
.execute(issue, new_issue)
end
diff --git a/spec/serializers/job_entity_spec.rb b/spec/serializers/job_entity_spec.rb
index 1cbf1914c0c..f31cfcb8499 100644
--- a/spec/serializers/job_entity_spec.rb
+++ b/spec/serializers/job_entity_spec.rb
@@ -21,6 +21,10 @@ RSpec.describe JobEntity do
subject { entity.as_json }
+ it 'contains :complete to indicate whether the job is complete' do
+ expect(subject).to include(:complete)
+ end
+
it 'contains paths to job page action' do
expect(subject).to include(:build_path)
end
diff --git a/spec/serializers/member_serializer_spec.rb b/spec/serializers/member_serializer_spec.rb
index f7415214e95..687d69f86ea 100644
--- a/spec/serializers/member_serializer_spec.rb
+++ b/spec/serializers/member_serializer_spec.rb
@@ -30,39 +30,6 @@ RSpec.describe MemberSerializer do
.from(nil).to(true)
.and change(group_member, :last_blocked_owner).from(nil).to(false)
end
-
- context "with LastGroupOwnerAssigner query improvements" do
- it "avoids N+1 database queries for last group owner assignment in MembersPresenter" do
- group_member = create(:group_member, group: group)
- control_count = ActiveRecord::QueryRecorder.new { member_last_owner_with_preload([group_member]) }.count
- group_members = create_list(:group_member, 3, group: group)
-
- expect { member_last_owner_with_preload(group_members) }.not_to exceed_query_limit(control_count)
- end
-
- it "avoids N+1 database queries for last blocked owner assignment in MembersPresenter" do
- group_member = create(:group_member, group: group)
- control_count = ActiveRecord::QueryRecorder.new { member_last_blocked_owner_with_preload([group_member]) }.count
- group_members = create_list(:group_member, 3, group: group)
-
- expect { member_last_blocked_owner_with_preload(group_members) }.not_to exceed_query_limit(control_count)
- end
-
- def member_last_owner_with_preload(members)
- assigner_with_preload(members)
- members.map { |m| group.member_last_owner?(m) }
- end
-
- def member_last_blocked_owner_with_preload(members)
- assigner_with_preload(members)
- members.map { |m| group.member_last_blocked_owner?(m) }
- end
-
- def assigner_with_preload(members)
- MembersPreloader.new(members).preload_all
- Members::LastGroupOwnerAssigner.new(group, members).execute
- end
- end
end
context 'project member' do
diff --git a/spec/serializers/merge_requests/pipeline_entity_spec.rb b/spec/serializers/merge_requests/pipeline_entity_spec.rb
index 03a049401c1..6970b547f12 100644
--- a/spec/serializers/merge_requests/pipeline_entity_spec.rb
+++ b/spec/serializers/merge_requests/pipeline_entity_spec.rb
@@ -42,6 +42,4 @@ RSpec.describe MergeRequests::PipelineEntity do
expect(entity.as_json).not_to include(:coverage)
end
end
-
- it_behaves_like 'public artifacts'
end
diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb
index 5756656d146..128f1922887 100644
--- a/spec/serializers/pipeline_details_entity_spec.rb
+++ b/spec/serializers/pipeline_details_entity_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe PipelineDetailsEntity do
expect(subject[:details])
.to include :duration, :finished_at
expect(subject[:details])
- .to include :stages, :artifacts, :manual_actions, :scheduled_actions
+ .to include :stages, :manual_actions, :scheduled_actions
expect(subject[:details][:status]).to include :icon, :favicon, :text, :label
end
@@ -70,6 +70,20 @@ RSpec.describe PipelineDetailsEntity do
expect(subject[:flags][:retryable]).to eq false
end
end
+
+ it 'does not contain code_quality_build_path in details' do
+ expect(subject[:details]).not_to include :code_quality_build_path
+ end
+
+ context 'when option code_quality_walkthrough is set and pipeline is a success' do
+ let(:entity) do
+ described_class.represent(pipeline, request: request, code_quality_walkthrough: true)
+ end
+
+ it 'contains details.code_quality_build_path' do
+ expect(subject[:details]).to include :code_quality_build_path
+ end
+ end
end
context 'when pipeline is cancelable' do
@@ -184,7 +198,5 @@ RSpec.describe PipelineDetailsEntity do
expect(source_jobs[child_pipeline.id][:name]).to eq('child')
end
end
-
- it_behaves_like 'public artifacts'
end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index 6028da301f3..1111290cade 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -155,7 +155,7 @@ RSpec.describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expected_queries = Gitlab.ee? ? 39 : 36
+ expected_queries = Gitlab.ee? ? 33 : 30
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
@@ -176,7 +176,7 @@ RSpec.describe PipelineSerializer do
# pipeline. With the same ref this check is cached but if refs are
# different then there is an extra query per ref
# https://gitlab.com/gitlab-org/gitlab-foss/issues/46368
- expected_queries = Gitlab.ee? ? 42 : 39
+ expected_queries = Gitlab.ee? ? 36 : 33
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
@@ -202,7 +202,7 @@ RSpec.describe PipelineSerializer do
# Existing numbers are high and require performance optimization
# Ongoing issue:
# https://gitlab.com/gitlab-org/gitlab/-/issues/225156
- expected_queries = Gitlab.ee? ? 82 : 76
+ expected_queries = Gitlab.ee? ? 77 : 70
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
@@ -221,8 +221,7 @@ RSpec.describe PipelineSerializer do
create(:ci_build, :scheduled, project: project, environment: env.name)
recorded = ActiveRecord::QueryRecorder.new { subject }
- expected_queries = Gitlab.ee? ? 61 : 57
-
+ expected_queries = Gitlab.ee? ? 56 : 52
expect(recorded.count).to be_within(1).of(expected_queries)
expect(recorded.cached_count).to eq(0)
end
diff --git a/spec/serializers/test_case_entity_spec.rb b/spec/serializers/test_case_entity_spec.rb
index e2b0f722f41..cdeefd2fec5 100644
--- a/spec/serializers/test_case_entity_spec.rb
+++ b/spec/serializers/test_case_entity_spec.rb
@@ -41,47 +41,19 @@ RSpec.describe TestCaseEntity do
end
end
- context 'when feature is enabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: true)
- end
-
- context 'when attachment is present' do
- let(:test_case) { build(:report_test_case, :failed_with_attachment, job: job) }
-
- it 'returns the attachment_url' do
- expect(subject).to include(:attachment_url)
- end
- end
-
- context 'when attachment is not present' do
- let(:test_case) { build(:report_test_case, job: job) }
+ context 'when attachment is present' do
+ let(:test_case) { build(:report_test_case, :failed_with_attachment, job: job) }
- it 'returns a nil attachment_url' do
- expect(subject[:attachment_url]).to be_nil
- end
+ it 'returns the attachment_url' do
+ expect(subject).to include(:attachment_url)
end
end
- context 'when feature is disabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: false)
- end
-
- context 'when attachment is present' do
- let(:test_case) { build(:report_test_case, :failed_with_attachment, job: job) }
-
- it 'returns no attachment_url' do
- expect(subject).not_to include(:attachment_url)
- end
- end
-
- context 'when attachment is not present' do
- let(:test_case) { build(:report_test_case, job: job) }
+ context 'when attachment is not present' do
+ let(:test_case) { build(:report_test_case, job: job) }
- it 'returns no attachment_url' do
- expect(subject).not_to include(:attachment_url)
- end
+ it 'returns a nil attachment_url' do
+ expect(subject[:attachment_url]).to be_nil
end
end
end
diff --git a/spec/services/admin/propagate_service_template_spec.rb b/spec/services/admin/propagate_service_template_spec.rb
index d95d31ceaea..406da790a66 100644
--- a/spec/services/admin/propagate_service_template_spec.rb
+++ b/spec/services/admin/propagate_service_template_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Admin::PropagateServiceTemplate do
context 'with a project that has another service' do
before do
- BambooService.create!(
+ Integrations::Bamboo.create!(
active: true,
project: project,
properties: {
@@ -50,10 +50,10 @@ RSpec.describe Admin::PropagateServiceTemplate do
end
it 'does not create the service if it exists already' do
- Service.build_from_integration(service_template, project_id: project.id).save!
+ Integration.build_from_integration(service_template, project_id: project.id).save!
expect { described_class.propagate(service_template) }
- .not_to change { Service.count }
+ .not_to change { Integration.count }
end
end
end
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
index 9bd71ea6f64..86a6cdee52d 100644
--- a/spec/services/alert_management/process_prometheus_alert_service_spec.rb
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -5,38 +5,27 @@ require 'spec_helper'
RSpec.describe AlertManagement::ProcessPrometheusAlertService do
let_it_be(:project, reload: true) { create(:project, :repository) }
- before do
- allow(ProjectServiceWorker).to receive(:perform_async)
- end
+ let(:service) { described_class.new(project, payload) }
describe '#execute' do
- let(:service) { described_class.new(project, payload) }
- let(:source) { 'Prometheus' }
- let(:auto_close_incident) { true }
- let(:create_issue) { true }
- let(:send_email) { true }
- let(:incident_management_setting) do
- double(
- auto_close_incident?: auto_close_incident,
- create_issue?: create_issue,
- send_email?: send_email
- )
- end
+ include_context 'incident management settings enabled'
+
+ subject(:execute) { service.execute }
before do
- allow(service)
- .to receive(:incident_management_setting)
- .and_return(incident_management_setting)
+ stub_licensed_features(oncall_schedules: false, generic_alert_fingerprinting: false)
end
- subject(:execute) { service.execute }
-
context 'when alert payload is valid' do
- let(:parsed_payload) { Gitlab::AlertManagement::Payload.parse(project, payload, monitoring_tool: source) }
- let(:fingerprint) { parsed_payload.gitlab_fingerprint }
+ let_it_be(:starts_at) { '2020-04-27T10:10:22.265949279Z' }
+ let_it_be(:title) { 'Alert title' }
+ let_it_be(:fingerprint) { [starts_at, title, 'vector(1)'].join('/') }
+ let_it_be(:source) { 'Prometheus' }
+
+ let(:prometheus_status) { 'firing' }
let(:payload) do
{
- 'status' => status,
+ 'status' => prometheus_status,
'labels' => {
'alertname' => 'GitalyFileServerDown',
'channel' => 'gitaly',
@@ -46,196 +35,32 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
'annotations' => {
'description' => 'Alert description',
'runbook' => 'troubleshooting/gitaly-down.md',
- 'title' => 'Alert title'
+ 'title' => title
},
- 'startsAt' => '2020-04-27T10:10:22.265949279Z',
+ 'startsAt' => starts_at,
'endsAt' => '2020-04-27T10:20:22.265949279Z',
- 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1',
- 'fingerprint' => 'b6ac4d42057c43c1'
+ 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1'
}
end
- let(:status) { 'firing' }
-
- context 'when Prometheus alert status is firing' do
- context 'when alert with the same fingerprint already exists' do
- let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint) }
-
- it_behaves_like 'adds an alert management alert event'
- it_behaves_like 'processes incident issues'
- it_behaves_like 'Alert Notification Service sends notification email'
-
- context 'existing alert is resolved' do
- let!(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: fingerprint) }
-
- it_behaves_like 'creates an alert management alert'
- it_behaves_like 'Alert Notification Service sends notification email'
- end
-
- context 'existing alert is ignored' do
- let!(:alert) { create(:alert_management_alert, :ignored, project: project, fingerprint: fingerprint) }
-
- it_behaves_like 'adds an alert management alert event'
- it_behaves_like 'Alert Notification Service sends no notifications'
- end
-
- context 'existing alert is acknowledged' do
- let!(:alert) { create(:alert_management_alert, :acknowledged, project: project, fingerprint: fingerprint) }
-
- it_behaves_like 'adds an alert management alert event'
- it_behaves_like 'Alert Notification Service sends no notifications'
- end
-
- context 'two existing alerts, one resolved one open' do
- let!(:resolved_alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: fingerprint) }
- let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint) }
-
- it_behaves_like 'adds an alert management alert event'
- it_behaves_like 'Alert Notification Service sends notification email'
- end
-
- context 'when auto-creation of issues is disabled' do
- let(:create_issue) { false }
-
- it_behaves_like 'does not process incident issues'
- end
-
- context 'when emails are disabled' do
- let(:send_email) { false }
-
- it_behaves_like 'Alert Notification Service sends no notifications'
- end
- end
-
- context 'when alert does not exist' do
- context 'when alert can be created' do
- it_behaves_like 'creates an alert management alert'
- it_behaves_like 'Alert Notification Service sends notification email'
- it_behaves_like 'processes incident issues'
-
- it_behaves_like 'creates single system note based on the source of the alert'
-
- context 'when auto-alert creation is disabled' do
- let(:create_issue) { false }
-
- it_behaves_like 'does not process incident issues'
- end
-
- context 'when emails are disabled' do
- let(:send_email) { false }
-
- it_behaves_like 'Alert Notification Service sends no notifications'
- end
- end
-
- context 'when alert cannot be created' do
- let(:errors) { double(messages: { hosts: ['hosts array is over 255 chars'] })}
-
- before do
- allow(service).to receive(:alert).and_call_original
- allow(service).to receive_message_chain(:alert, :save).and_return(false)
- allow(service).to receive_message_chain(:alert, :errors).and_return(errors)
- end
-
- it_behaves_like 'Alert Notification Service sends no notifications', http_status: :bad_request
- it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
-
- it 'writes a warning to the log' do
- expect(Gitlab::AppLogger).to receive(:warn).with(
- message: 'Unable to create AlertManagement::Alert from Prometheus',
- project_id: project.id,
- alert_errors: { hosts: ['hosts array is over 255 chars'] }
- )
-
- execute
- end
- end
-
- it { is_expected.to be_success }
- end
- end
-
- context 'when Prometheus alert status is resolved' do
- let(:status) { 'resolved' }
- let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint, monitoring_tool: source) }
-
- context 'when auto_resolve_incident set to true' do
- context 'when status can be changed' do
- it_behaves_like 'Alert Notification Service sends notification email'
- it_behaves_like 'does not process incident issues'
-
- it 'resolves an existing alert without error' do
- expect(Gitlab::AppLogger).not_to receive(:warn)
- expect { execute }.to change { alert.reload.resolved? }.to(true)
- end
-
- it_behaves_like 'creates status-change system note for an auto-resolved alert'
-
- context 'existing issue' do
- let!(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint) }
-
- it 'closes the issue' do
- issue = alert.issue
-
- expect { execute }
- .to change { issue.reload.state }
- .from('opened')
- .to('closed')
- end
-
- it 'creates a resource state event' do
- expect { execute }.to change(ResourceStateEvent, :count).by(1)
- end
- end
- end
-
- context 'when status change did not succeed' do
- before do
- allow(AlertManagement::Alert).to receive(:for_fingerprint).and_return([alert])
- allow(alert).to receive(:resolve).and_return(false)
- end
-
- it 'writes a warning to the log' do
- expect(Gitlab::AppLogger).to receive(:warn).with(
- message: 'Unable to update AlertManagement::Alert status to resolved',
- project_id: project.id,
- alert_id: alert.id
- )
-
- execute
- end
-
- it_behaves_like 'Alert Notification Service sends notification email'
- end
-
- it { is_expected.to be_success }
- end
+ it_behaves_like 'processes new firing alert'
- context 'when auto_resolve_incident set to false' do
- let(:auto_close_incident) { false }
+ context 'with resolving payload' do
+ let(:prometheus_status) { 'resolved' }
- it 'does not resolve an existing alert' do
- expect { execute }.not_to change { alert.reload.resolved? }
- end
-
- it_behaves_like 'creates single system note based on the source of the alert'
- end
-
- context 'when emails are disabled' do
- let(:send_email) { false }
-
- it_behaves_like 'Alert Notification Service sends no notifications'
- end
+ it_behaves_like 'processes recovery alert'
end
context 'environment given' do
let(:environment) { create(:environment, project: project) }
+ let(:alert) { project.alert_management_alerts.last }
- it 'sets the environment' do
+ before do
payload['labels']['gitlab_environment_name'] = environment.name
- execute
+ end
- alert = project.alert_management_alerts.last
+ it 'sets the environment' do
+ execute
expect(alert.environment).to eq(environment)
end
@@ -243,12 +68,14 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'prometheus alert given' do
let(:prometheus_alert) { create(:prometheus_alert, project: project) }
+ let(:alert) { project.alert_management_alerts.last }
- it 'sets the prometheus alert and environment' do
+ before do
payload['labels']['gitlab_alert_id'] = prometheus_alert.prometheus_metric_id
- execute
+ end
- alert = project.alert_management_alerts.last
+ it 'sets the prometheus alert and environment' do
+ execute
expect(alert.prometheus_alert).to eq(prometheus_alert)
expect(alert.environment).to eq(prometheus_alert.environment)
@@ -259,10 +86,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
context 'when alert payload is invalid' do
let(:payload) { {} }
- it 'responds with bad_request' do
- expect(execute).to be_error
- expect(execute.http_status).to eq(:bad_request)
- end
+ it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request
end
end
end
diff --git a/spec/services/analytics/cycle_analytics/stages/list_service_spec.rb b/spec/services/analytics/cycle_analytics/stages/list_service_spec.rb
new file mode 100644
index 00000000000..24f0123ed3b
--- /dev/null
+++ b/spec/services/analytics/cycle_analytics/stages/list_service_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Analytics::CycleAnalytics::Stages::ListService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:value_stream) { Analytics::CycleAnalytics::ProjectValueStream.build_default_value_stream(project) }
+ let(:stages) { subject.payload[:stages] }
+
+ subject { described_class.new(parent: project, current_user: user).execute }
+
+ before_all do
+ project.add_reporter(user)
+ end
+
+ it 'returns only the default stages' do
+ expect(stages.size).to eq(Gitlab::Analytics::CycleAnalytics::DefaultStages.all.size)
+ end
+
+ it 'provides the default stages as non-persisted objects' do
+ expect(stages.map(&:id)).to all(be_nil)
+ end
+end
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 258b3d25aee..56c1284927d 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -336,6 +336,32 @@ RSpec.describe ApplicationSettings::UpdateService do
end
end
+ context 'when package registry rate limits are passed' do
+ let(:params) do
+ {
+ throttle_unauthenticated_packages_api_enabled: 1,
+ throttle_unauthenticated_packages_api_period_in_seconds: 500,
+ throttle_unauthenticated_packages_api_requests_per_period: 20,
+ throttle_authenticated_packages_api_enabled: 1,
+ throttle_authenticated_packages_api_period_in_seconds: 600,
+ throttle_authenticated_packages_api_requests_per_period: 10
+ }
+ end
+
+ it 'updates package registry throttle settings' do
+ subject.execute
+
+ application_settings.reload
+
+ expect(application_settings.throttle_unauthenticated_packages_api_enabled).to be_truthy
+ expect(application_settings.throttle_unauthenticated_packages_api_period_in_seconds).to eq(500)
+ expect(application_settings.throttle_unauthenticated_packages_api_requests_per_period).to eq(20)
+ expect(application_settings.throttle_authenticated_packages_api_enabled).to be_truthy
+ expect(application_settings.throttle_authenticated_packages_api_period_in_seconds).to eq(600)
+ expect(application_settings.throttle_authenticated_packages_api_requests_per_period).to eq(10)
+ end
+ end
+
context 'when issues_create_limit is passed' do
let(:params) do
{
diff --git a/spec/services/auto_merge/base_service_spec.rb b/spec/services/auto_merge/base_service_spec.rb
index 1d33dc15838..3f535b83788 100644
--- a/spec/services/auto_merge/base_service_spec.rb
+++ b/spec/services/auto_merge/base_service_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe AutoMerge::BaseService do
context 'when failed to save merge request' do
before do
- allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid.new }
+ allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
end
it 'does not yield block' do
@@ -195,7 +195,7 @@ RSpec.describe AutoMerge::BaseService do
context 'when failed to save' do
before do
- allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid.new }
+ allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
end
it 'does not yield block' do
@@ -213,7 +213,7 @@ RSpec.describe AutoMerge::BaseService do
context 'when failed to save merge request' do
before do
- allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid.new }
+ allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
end
it 'returns error status' do
@@ -260,7 +260,7 @@ RSpec.describe AutoMerge::BaseService do
context 'when failed to save' do
before do
- allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid.new }
+ allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
end
it 'returns error status' do
diff --git a/spec/services/boards/lists/destroy_service_spec.rb b/spec/services/boards/lists/destroy_service_spec.rb
index 4c512b96065..d5358bcc1e1 100644
--- a/spec/services/boards/lists/destroy_service_spec.rb
+++ b/spec/services/boards/lists/destroy_service_spec.rb
@@ -3,11 +3,20 @@
require 'spec_helper'
RSpec.describe Boards::Lists::DestroyService do
+ let_it_be(:user) { create(:user) }
+
+ let(:list_type) { :list }
+
describe '#execute' do
context 'when board parent is a project' do
- let(:project) { create(:project) }
- let(:board) { create(:board, project: project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:list) { create(:list, board: board) }
+ let_it_be(:closed_list) { board.lists.closed.first }
+
+ let(:params) do
+ { board: board }
+ end
let(:parent) { project }
@@ -15,9 +24,14 @@ RSpec.describe Boards::Lists::DestroyService do
end
context 'when board parent is a group' do
- let(:group) { create(:group) }
- let(:board) { create(:board, group: group) }
- let(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:list) { create(:list, board: board) }
+ let_it_be(:closed_list) { board.lists.closed.first }
+
+ let(:params) do
+ { board: board }
+ end
let(:parent) { group }
diff --git a/spec/services/boards/lists/update_service_spec.rb b/spec/services/boards/lists/update_service_spec.rb
index 10fed9b7aac..21216e1b945 100644
--- a/spec/services/boards/lists/update_service_spec.rb
+++ b/spec/services/boards/lists/update_service_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe Boards::Lists::UpdateService do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
let!(:list) { create(:list, board: board, position: 0) }
+ let!(:list2) { create(:list, board: board, position: 1) }
describe '#execute' do
let(:service) { described_class.new(board.resource_parent, user, params) }
diff --git a/spec/services/boards/visits/create_service_spec.rb b/spec/services/boards/visits/create_service_spec.rb
index a9a8754825b..8910345d170 100644
--- a/spec/services/boards/visits/create_service_spec.rb
+++ b/spec/services/boards/visits/create_service_spec.rb
@@ -7,47 +7,20 @@ RSpec.describe Boards::Visits::CreateService do
let(:user) { create(:user) }
context 'when a project board' do
- let(:project) { create(:project) }
- let(:project_board) { create(:board, project: project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:board) { create(:board, project: project) }
- subject(:service) { described_class.new(project_board.resource_parent, user) }
+ let_it_be(:model) { BoardProjectRecentVisit }
- it 'returns nil when there is no user' do
- service.current_user = nil
-
- expect(service.execute(project_board)).to eq nil
- end
-
- it 'returns nil when database is read only' do
- allow(Gitlab::Database).to receive(:read_only?) { true }
-
- expect(service.execute(project_board)).to eq nil
- end
-
- it 'records the visit' do
- expect(BoardProjectRecentVisit).to receive(:visited!).once
-
- service.execute(project_board)
- end
+ it_behaves_like 'boards recent visit create service'
end
context 'when a group board' do
- let(:group) { create(:group) }
- let(:group_board) { create(:board, group: group) }
-
- subject(:service) { described_class.new(group_board.resource_parent, user) }
-
- it 'returns nil when there is no user' do
- service.current_user = nil
-
- expect(service.execute(group_board)).to eq nil
- end
-
- it 'records the visit' do
- expect(BoardGroupRecentVisit).to receive(:visited!).once
+ let_it_be(:group) { create(:group) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:model) { BoardGroupRecentVisit }
- service.execute(group_board)
- end
+ it_behaves_like 'boards recent visit create service'
end
end
end
diff --git a/spec/services/branches/delete_service_spec.rb b/spec/services/branches/delete_service_spec.rb
index 291431c1723..727cadc5a50 100644
--- a/spec/services/branches/delete_service_spec.rb
+++ b/spec/services/branches/delete_service_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Branches::DeleteService do
context 'when Gitlab::Git::CommandError is raised' do
before do
allow(repository).to receive(:rm_branch) do
- raise Gitlab::Git::CommandError.new('Could not update patch')
+ raise Gitlab::Git::CommandError, 'Could not update patch'
end
end
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
index 479309572a5..8369eb48088 100644
--- a/spec/services/bulk_create_integration_service_spec.rb
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe BulkCreateIntegrationService do
context 'with a group association' do
let!(:group) { create(:group) }
- let(:created_integration) { Service.find_by(group: group) }
+ let(:created_integration) { Integration.find_by(group: group) }
let(:batch) { Group.where(id: group.id) }
let(:association) { 'group' }
@@ -86,7 +86,7 @@ RSpec.describe BulkCreateIntegrationService do
context 'with a group association' do
let!(:subgroup) { create(:group, parent: group) }
let(:integration) { create(:jira_service, group: group, project: nil, inherit_from_id: instance_integration.id) }
- let(:created_integration) { Service.find_by(group: subgroup) }
+ let(:created_integration) { Integration.find_by(group: subgroup) }
let(:batch) { Group.where(id: subgroup.id) }
let(:association) { 'group' }
let(:inherit_from_id) { instance_integration.id }
diff --git a/spec/services/bulk_imports/export_service_spec.rb b/spec/services/bulk_imports/export_service_spec.rb
new file mode 100644
index 00000000000..2414f7c5ca7
--- /dev/null
+++ b/spec/services/bulk_imports/export_service_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::ExportService do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ subject { described_class.new(portable: group, user: user) }
+
+ describe '#execute' do
+ it 'schedules RelationExportWorker for each top level relation' do
+ expect(subject).to receive(:execute).and_return(ServiceResponse.success).and_call_original
+ top_level_relations = BulkImports::FileTransfer.config_for(group).portable_relations
+
+ top_level_relations.each do |relation|
+ expect(BulkImports::RelationExportWorker)
+ .to receive(:perform_async)
+ .with(user.id, group.id, group.class.name, relation)
+ end
+
+ subject.execute
+ end
+
+ context 'when exception occurs' do
+ it 'does not schedule RelationExportWorker' do
+ service = described_class.new(portable: nil, user: user)
+
+ expect(service)
+ .to receive(:execute)
+ .and_return(ServiceResponse.error(message: 'Gitlab::ImportExport::Error', http_status: :unprocessable_entity))
+ .and_call_original
+ expect(BulkImports::RelationExportWorker).not_to receive(:perform_async)
+
+ service.execute
+ end
+ end
+ end
+end
diff --git a/spec/services/bulk_imports/relation_export_service_spec.rb b/spec/services/bulk_imports/relation_export_service_spec.rb
new file mode 100644
index 00000000000..bf286998df2
--- /dev/null
+++ b/spec/services/bulk_imports/relation_export_service_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::RelationExportService do
+ let_it_be(:jid) { 'jid' }
+ let_it_be(:relation) { 'labels' }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:label) { create(:group_label, group: group) }
+ let_it_be(:export_path) { "#{Dir.tmpdir}/relation_export_service_spec/tree" }
+ let_it_be_with_reload(:export) { create(:bulk_import_export, group: group, relation: relation) }
+
+ before do
+ group.add_owner(user)
+
+ allow(export).to receive(:export_path).and_return(export_path)
+ end
+
+ after :all do
+ FileUtils.rm_rf(export_path)
+ end
+
+ subject { described_class.new(user, group, relation, jid) }
+
+ describe '#execute' do
+ it 'exports specified relation and marks export as finished' do
+ subject.execute
+
+ expect(export.reload.upload.export_file).to be_present
+ expect(export.finished?).to eq(true)
+ end
+
+ it 'removes temp export files' do
+ subject.execute
+
+ expect(Dir.exist?(export_path)).to eq(false)
+ end
+
+ it 'exports specified relation and marks export as finished' do
+ subject.execute
+
+ expect(export.upload.export_file).to be_present
+ end
+
+ context 'when export record does not exist' do
+ let(:another_group) { create(:group) }
+
+ subject { described_class.new(user, another_group, relation, jid) }
+
+ it 'creates export record' do
+ another_group.add_owner(user)
+
+ expect { subject.execute }
+ .to change { another_group.bulk_import_exports.count }
+ .from(0)
+ .to(1)
+ end
+ end
+
+ context 'when there is existing export present' do
+ let(:upload) { create(:bulk_import_export_upload, export: export) }
+
+ it 'removes existing export before exporting' do
+ upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/labels.ndjson.gz'))
+
+ expect_next_instance_of(BulkImports::ExportUpload) do |upload|
+ expect(upload).to receive(:remove_export_file!)
+ end
+
+ subject.execute
+ end
+ end
+
+ context 'when exception occurs during export' do
+ shared_examples 'tracks exception' do |exception_class|
+ it 'tracks exception' do
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(exception_class, portable_id: group.id, portable_type: group.class.name)
+ .and_call_original
+
+ subject.execute
+ end
+ end
+
+ before do
+ allow_next_instance_of(BulkImports::ExportUpload) do |upload|
+ allow(upload).to receive(:save!).and_raise(StandardError)
+ end
+ end
+
+ it 'marks export as failed' do
+ subject.execute
+
+ expect(export.reload.failed?).to eq(true)
+ end
+
+ include_examples 'tracks exception', StandardError
+
+ context 'when passed relation is not supported' do
+ let(:relation) { 'unsupported' }
+
+ include_examples 'tracks exception', ActiveRecord::RecordInvalid
+ end
+
+ context 'when user is not allowed to perform export' do
+ let(:another_user) { create(:user) }
+
+ subject { described_class.new(another_user, group, relation, jid) }
+
+ include_examples 'tracks exception', Gitlab::ImportExport::Error
+ end
+ end
+ end
+end
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index e20bcd44923..cd50a2a5708 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe BulkUpdateIntegrationService do
let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
let(:batch) do
- Service.inherited_descendants_from_self_or_ancestors_from(subgroup_integration).where(id: group_integration.id..integration.id)
+ Integration.inherited_descendants_from_self_or_ancestors_from(subgroup_integration).where(id: group_integration.id..integration.id)
end
let_it_be(:group) { create(:group) }
diff --git a/spec/services/chat_names/find_user_service_spec.rb b/spec/services/chat_names/find_user_service_spec.rb
index a29b243ad2c..9bbad09cd0d 100644
--- a/spec/services/chat_names/find_user_service_spec.rb
+++ b/spec/services/chat_names/find_user_service_spec.rb
@@ -4,13 +4,13 @@ require 'spec_helper'
RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state do
describe '#execute' do
- let(:service) { create(:service) }
+ let(:integration) { create(:service) }
- subject { described_class.new(service, params).execute }
+ subject { described_class.new(integration, params).execute }
context 'find user mapping' do
let(:user) { create(:user) }
- let!(:chat_name) { create(:chat_name, user: user, service: service) }
+ let!(:chat_name) { create(:chat_name, user: user, integration: integration) }
context 'when existing user is requested' do
let(:params) { { team_id: chat_name.team_id, user_id: chat_name.chat_id } }
@@ -28,7 +28,7 @@ RSpec.describe ChatNames::FindUserService, :clean_gitlab_redis_shared_state do
end
it 'only updates an existing timestamp once within a certain time frame' do
- service = described_class.new(service, params)
+ service = described_class.new(integration, params)
expect(chat_name.last_used_at).to be_nil
diff --git a/spec/services/ci/change_variable_service_spec.rb b/spec/services/ci/change_variable_service_spec.rb
index 7acdd4e834f..f86a87132b1 100644
--- a/spec/services/ci/change_variable_service_spec.rb
+++ b/spec/services/ci/change_variable_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Ci::ChangeVariableService do
let(:service) { described_class.new(container: group, current_user: user, params: params) }
let_it_be(:user) { create(:user) }
+
let(:group) { create(:group) }
describe '#execute' do
diff --git a/spec/services/ci/change_variables_service_spec.rb b/spec/services/ci/change_variables_service_spec.rb
index 5f1207eaf58..b710ca78554 100644
--- a/spec/services/ci/change_variables_service_spec.rb
+++ b/spec/services/ci/change_variables_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Ci::ChangeVariablesService do
let(:service) { described_class.new(container: group, current_user: user, params: params) }
let_it_be(:user) { create(:user) }
+
let(:group) { spy(:group, variables: []) }
let(:params) { { variables_attributes: [{ key: 'new_variable', value: 'variable_value' }] } }
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index dd10fb017aa..8bab7856375 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -3,9 +3,11 @@
require 'spec_helper'
RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
+ include Ci::SourcePipelineHelpers
+
let_it_be(:user) { create(:user) }
let(:upstream_project) { create(:project, :repository) }
- let_it_be(:downstream_project) { create(:project, :repository) }
+ let_it_be(:downstream_project, refind: true) { create(:project, :repository) }
let!(:upstream_pipeline) do
create(:ci_pipeline, :running, project: upstream_project)
@@ -394,6 +396,47 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
end
end
+ context 'when relationship between pipelines is cyclical' do
+ before do
+ pipeline_a = create(:ci_pipeline, project: upstream_project)
+ pipeline_b = create(:ci_pipeline, project: downstream_project)
+ pipeline_c = create(:ci_pipeline, project: upstream_project)
+
+ create_source_pipeline(pipeline_a, pipeline_b)
+ create_source_pipeline(pipeline_b, pipeline_c)
+ create_source_pipeline(pipeline_c, upstream_pipeline)
+ end
+
+ it 'does not create a new pipeline' do
+ expect { service.execute(bridge) }
+ .not_to change { Ci::Pipeline.count }
+ end
+
+ it 'changes status of the bridge build' do
+ service.execute(bridge)
+
+ expect(bridge.reload).to be_failed
+ expect(bridge.failure_reason).to eq 'pipeline_loop_detected'
+ end
+
+ context 'when ci_drop_cyclical_triggered_pipelines is not enabled' do
+ before do
+ stub_feature_flags(ci_drop_cyclical_triggered_pipelines: false)
+ end
+
+ it 'creates a new pipeline' do
+ expect { service.execute(bridge) }
+ .to change { Ci::Pipeline.count }
+ end
+
+ it 'does not fail the bridge build' do
+ service.execute(bridge)
+
+ expect(bridge.reload).not_to be_failed
+ end
+ end
+ end
+
context 'when downstream pipeline creation errors out' do
let(:stub_config) { false }
diff --git a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
index d4e9946ac46..b3b8e34dd8e 100644
--- a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::CreatePipelineService, '#execute' do
let_it_be(:group) { create(:group, name: 'my-organization') }
+
let(:upstream_project) { create(:project, :repository, name: 'upstream', group: group) }
let(:downstram_project) { create(:project, :repository, name: 'downstream', group: group) }
let(:user) { create(:user) }
diff --git a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
index 6320a16d646..42c3f52541b 100644
--- a/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/custom_config_content_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::CreatePipelineService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
+
let(:ref) { 'refs/heads/master' }
let(:service) { described_class.new(project, user, { ref: ref }) }
diff --git a/spec/services/ci/create_pipeline_service/dry_run_spec.rb b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
index c21a4ef0917..0fb500f5729 100644
--- a/spec/services/ci/create_pipeline_service/dry_run_spec.rb
+++ b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::CreatePipelineService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
+
let(:ref) { 'refs/heads/master' }
let(:service) { described_class.new(project, user, { ref: ref }) }
diff --git a/spec/services/ci/create_pipeline_service/environment_spec.rb b/spec/services/ci/create_pipeline_service/environment_spec.rb
index 0ed63012325..e77591298ad 100644
--- a/spec/services/ci/create_pipeline_service/environment_spec.rb
+++ b/spec/services/ci/create_pipeline_service/environment_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::CreatePipelineService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:developer) { create(:user) }
+
let(:service) { described_class.new(project, user, ref: 'master') }
let(:user) { developer }
diff --git a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
index 90b8baa23a7..94500a550c6 100644
--- a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::CreatePipelineService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.owner }
+
let(:service) { described_class.new(project, user, { ref: 'refs/heads/master' }) }
let(:content) do
<<~EOY
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 5ea75c2253b..512cf546e6a 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::CreatePipelineService, '#execute' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:ref_name) { 'master' }
let(:service) do
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 98c85234fe7..9fdce1ae926 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Ci::CreatePipelineService do
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:user, reload: true) { project.owner }
+
let(:ref_name) { 'refs/heads/master' }
before do
@@ -101,14 +102,6 @@ RSpec.describe Ci::CreatePipelineService do
execute_service
end
- describe 'recording a conversion event' do
- it 'schedules a record conversion event worker' do
- expect(Experiments::RecordConversionEventWorker).to receive(:perform_async).with(:ci_syntax_templates_b, user.id)
-
- pipeline
- end
- end
-
context 'when merge requests already exist for this source branch' do
let(:merge_request_1) do
create(:merge_request, source_branch: 'feature', target_branch: "master", source_project: project)
@@ -539,7 +532,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'pull it from Auto-DevOps' do
pipeline = execute_service
expect(pipeline).to be_auto_devops_source
- expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality eslint-sast secret_detection_default_branch test])
+ expect(pipeline.builds.map(&:name)).to match_array(%w[brakeman-sast build code_quality eslint-sast secret_detection_default_branch semgrep-sast test])
end
end
diff --git a/spec/services/ci/create_web_ide_terminal_service_spec.rb b/spec/services/ci/create_web_ide_terminal_service_spec.rb
index c1acf8fd60c..0804773442d 100644
--- a/spec/services/ci/create_web_ide_terminal_service_spec.rb
+++ b/spec/services/ci/create_web_ide_terminal_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::CreateWebIdeTerminalService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:ref) { 'master' }
describe '#execute' do
@@ -20,6 +21,13 @@ RSpec.describe Ci::CreateWebIdeTerminalService do
expect(subject[:pipeline].stages.count).to eq(1)
expect(subject[:pipeline].builds.count).to eq(1)
end
+
+ it 'calls ensure_project_iid explicitly' do
+ expect_next_instance_of(Ci::Pipeline) do |instance|
+ expect(instance).to receive(:ensure_project_iid!).twice
+ end
+ subject
+ end
end
before do
diff --git a/spec/services/ci/delete_unit_tests_service_spec.rb b/spec/services/ci/delete_unit_tests_service_spec.rb
new file mode 100644
index 00000000000..4c63c513d48
--- /dev/null
+++ b/spec/services/ci/delete_unit_tests_service_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DeleteUnitTestsService do
+ describe '#execute' do
+ let!(:unit_test_1) { create(:ci_unit_test) }
+ let!(:unit_test_2) { create(:ci_unit_test) }
+ let!(:unit_test_3) { create(:ci_unit_test) }
+ let!(:unit_test_4) { create(:ci_unit_test) }
+ let!(:unit_test_1_recent_failure) { create(:ci_unit_test_failure, unit_test: unit_test_1) }
+ let!(:unit_test_1_old_failure) { create(:ci_unit_test_failure, unit_test: unit_test_1, failed_at: 15.days.ago) }
+ let!(:unit_test_2_old_failure) { create(:ci_unit_test_failure, unit_test: unit_test_2, failed_at: 15.days.ago) }
+ let!(:unit_test_3_old_failure) { create(:ci_unit_test_failure, unit_test: unit_test_3, failed_at: 15.days.ago) }
+ let!(:unit_test_4_old_failure) { create(:ci_unit_test_failure, unit_test: unit_test_4, failed_at: 15.days.ago) }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ described_class.new.execute
+ end
+
+ it 'does not delete unit test failures not older than 14 days' do
+ expect(unit_test_1_recent_failure.reload).to be_persisted
+ end
+
+ it 'deletes unit test failures older than 14 days' do
+ ids = [
+ unit_test_1_old_failure,
+ unit_test_2_old_failure,
+ unit_test_3_old_failure,
+ unit_test_4_old_failure
+ ].map(&:id)
+
+ result = Ci::UnitTestFailure.where(id: ids)
+
+ expect(result).to be_empty
+ end
+
+ it 'deletes unit tests that have no more associated unit test failures' do
+ ids = [
+ unit_test_2,
+ unit_test_3,
+ unit_test_4
+ ].map(&:id)
+
+ result = Ci::UnitTest.where(id: ids)
+
+ expect(result).to be_empty
+ end
+ end
+end
diff --git a/spec/services/ci/destroy_pipeline_service_spec.rb b/spec/services/ci/destroy_pipeline_service_spec.rb
index 6977c99e335..302233cea5a 100644
--- a/spec/services/ci/destroy_pipeline_service_spec.rb
+++ b/spec/services/ci/destroy_pipeline_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe ::Ci::DestroyPipelineService do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
let!(:pipeline) { create(:ci_pipeline, :success, project: project, sha: project.commit.id) }
subject { described_class.new(project, user).execute(pipeline) }
@@ -17,13 +18,16 @@ RSpec.describe ::Ci::DestroyPipelineService do
expect { pipeline.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
- it 'clears the cache', :use_clean_rails_memory_store_caching do
+ it 'clears the cache', :use_clean_rails_redis_caching do
create(:commit_status, :success, pipeline: pipeline, ref: pipeline.ref)
expect(project.pipeline_status.has_status?).to be_truthy
subject
+ # We need to reset lazy_latest_pipeline cache to simulate a new request
+ BatchLoader::Executor.clear_current
+
# Need to use find to avoid memoization
expect(Project.find(project.id).pipeline_status.has_status?).to be_falsey
end
@@ -57,6 +61,10 @@ RSpec.describe ::Ci::DestroyPipelineService do
expect { artifact.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
+
+ it 'inserts deleted objects for object storage files' do
+ expect { subject }.to change { Ci::DeletedObject.count }
+ end
end
end
end
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index 3dbf2dbb8f1..613bbe45e68 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Ci::ExpirePipelineCacheService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
subject { described_class.new }
describe '#execute' do
@@ -14,12 +15,14 @@ RSpec.describe Ci::ExpirePipelineCacheService do
new_mr_pipelines_path = "/#{project.full_path}/-/merge_requests/new.json"
pipeline_path = "/#{project.full_path}/-/pipelines/#{pipeline.id}.json"
graphql_pipeline_path = "/api/graphql:pipelines/id/#{pipeline.id}"
+ graphql_pipeline_sha_path = "/api/graphql:pipelines/sha/#{pipeline.sha}"
expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
expect(store).to receive(:touch).with(pipelines_path)
expect(store).to receive(:touch).with(new_mr_pipelines_path)
expect(store).to receive(:touch).with(pipeline_path)
expect(store).to receive(:touch).with(graphql_pipeline_path)
+ expect(store).to receive(:touch).with(graphql_pipeline_sha_path)
end
subject.execute(pipeline)
@@ -49,7 +52,7 @@ RSpec.describe Ci::ExpirePipelineCacheService do
let(:project_with_repo) { create(:project, :repository) }
let!(:pipeline_with_commit) { create(:ci_pipeline, :success, project: project_with_repo, sha: project_with_repo.commit.id) }
- it 'clears the cache', :use_clean_rails_memory_store_caching do
+ it 'clears the cache', :use_clean_rails_redis_caching do
create(:commit_status, :success, pipeline: pipeline_with_commit, ref: pipeline_with_commit.ref)
# Sanity check
@@ -59,6 +62,9 @@ RSpec.describe Ci::ExpirePipelineCacheService do
pipeline_with_commit.destroy!
+ # We need to reset lazy_latest_pipeline cache to simulate a new request
+ BatchLoader::Executor.clear_current
+
# Need to use find to avoid memoization
expect(Project.find(project_with_repo.id).pipeline_status.has_status?).to be_falsey
end
diff --git a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
index 0cbeaa5446b..e25dd351bb3 100644
--- a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
+++ b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
describe '#execute' do
let_it_be(:project) { create(:project, :auto_devops, :repository) }
let_it_be(:user) { create(:user) }
+
let(:pull_request) { create(:external_pull_request, project: project) }
before do
diff --git a/spec/services/ci/find_exposed_artifacts_service_spec.rb b/spec/services/ci/find_exposed_artifacts_service_spec.rb
index 287f5c4b929..32d96471f16 100644
--- a/spec/services/ci/find_exposed_artifacts_service_spec.rb
+++ b/spec/services/ci/find_exposed_artifacts_service_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Ci::FindExposedArtifactsService do
end
let_it_be(:project) { create(:project) }
+
let(:user) { nil }
after do
diff --git a/spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb b/spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb
index 5d747a09f2a..63bc7a1caf8 100644
--- a/spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb
+++ b/spec/services/ci/generate_codequality_mr_diff_report_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Ci::GenerateCodequalityMrDiffReportService do
subject { service.execute(base_pipeline, head_pipeline) }
context 'when head pipeline has codequality mr diff report' do
- let!(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports, source_project: project) }
+ let!(:merge_request) { create(:merge_request, :with_codequality_mr_diff_reports, source_project: project, id: 123456789) }
let!(:service) { described_class.new(project, nil, id: merge_request.id) }
let!(:head_pipeline) { merge_request.head_pipeline }
let!(:base_pipeline) { nil }
@@ -18,7 +18,7 @@ RSpec.describe Ci::GenerateCodequalityMrDiffReportService do
it 'returns status and data', :aggregate_failures do
expect_any_instance_of(Ci::PipelineArtifact) do |instance|
expect(instance).to receive(:present)
- expect(instance).to receive(:for_files).with(merge_request.new_paths).and_call_original
+ expect(instance).to receive(:for_files).with(merge_request).and_call_original
end
expect(subject[:status]).to eq(:parsed)
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 22aa9e62c6f..97c65dc005e 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::JobArtifacts::CreateService do
let_it_be(:project) { create(:project) }
+
let(:service) { described_class.new(job) }
let(:job) { create(:ci_build, project: project) }
let(:artifacts_sha256) { '0' * 64 }
diff --git a/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb b/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb
new file mode 100644
index 00000000000..b1a4741851b
--- /dev/null
+++ b/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::JobArtifacts::DestroyAssociationsService do
+ let(:artifacts) { Ci::JobArtifact.all }
+ let(:service) { described_class.new(artifacts) }
+
+ let_it_be(:artifact, refind: true) do
+ create(:ci_job_artifact)
+ end
+
+ before do
+ artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
+ artifact.save!
+ end
+
+ describe '#destroy_records' do
+ it 'removes artifacts without updating statistics' do
+ expect(ProjectStatistics).not_to receive(:increment_statistic)
+
+ expect { service.destroy_records }.to change { Ci::JobArtifact.count }
+ end
+
+ context 'when there are no artifacts' do
+ let(:artifacts) { Ci::JobArtifact.none }
+
+ it 'does not raise error' do
+ expect { service.destroy_records }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#update_statistics' do
+ before do
+ service.destroy_records
+ end
+
+ it 'updates project statistics' do
+ expect(ProjectStatistics).to receive(:increment_statistic).once
+ .with(artifact.project, :build_artifacts_size, -artifact.file.size)
+
+ service.update_statistics
+ end
+
+ context 'when there are no artifacts' do
+ let(:artifacts) { Ci::JobArtifact.none }
+
+ it 'does not raise error' do
+ expect { service.update_statistics }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 52aaf73d67e..2cedbf93d74 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Ci::JobArtifacts::DestroyBatchService do
- include ExclusiveLeaseHelpers
-
let(:artifacts) { Ci::JobArtifact.all }
let(:service) { described_class.new(artifacts, pick_up_at: Time.current) }
@@ -25,14 +23,6 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
expect { subject }.to change { Ci::DeletedObject.count }.by(1)
end
- it 'resets project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact.project, :build_artifacts_size, -artifact.file.size)
- .and_call_original
-
- execute
- end
-
it 'does not remove the files' do
expect { execute }.not_to change { artifact.file.exists? }
end
@@ -44,6 +34,29 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
execute
end
+
+ context 'ProjectStatistics' do
+ it 'resets project statistics' do
+ expect(ProjectStatistics).to receive(:increment_statistic).once
+ .with(artifact.project, :build_artifacts_size, -artifact.file.size)
+ .and_call_original
+
+ execute
+ end
+
+ context 'with update_stats: false' do
+ it 'does not update project statistics' do
+ expect(ProjectStatistics).not_to receive(:increment_statistic)
+
+ service.execute(update_stats: false)
+ end
+
+ it 'returns size statistics' do
+ expect(service.execute(update_stats: false)).to match(
+ a_hash_including(statistics_updates: { artifact.project => -artifact.file.size }))
+ end
+ end
+ end
end
context 'when failed to destroy artifact' do
@@ -65,16 +78,12 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
context 'when there are no artifacts' do
let(:artifacts) { Ci::JobArtifact.none }
- before do
- artifact.destroy!
- end
-
it 'does not raise error' do
expect { execute }.not_to raise_error
end
it 'reports the number of destroyed artifacts' do
- is_expected.to eq(destroyed_artifacts_count: 0, status: :success)
+ is_expected.to eq(destroyed_artifacts_count: 0, statistics_updates: {}, status: :success)
end
end
end
diff --git a/spec/services/ci/parse_dotenv_artifact_service_spec.rb b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
index 91b81af9fd1..7536e04f2de 100644
--- a/spec/services/ci/parse_dotenv_artifact_service_spec.rb
+++ b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::ParseDotenvArtifactService do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
let(:build) { create(:ci_build, pipeline: pipeline, project: project) }
let(:service) { described_class.new(project, nil) }
@@ -24,7 +25,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
context 'when parse error happens' do
before do
- allow(service).to receive(:scan_line!) { raise described_class::ParserError.new('Invalid Format') }
+ allow(service).to receive(:scan_line!) { raise described_class::ParserError, 'Invalid Format' }
end
it 'returns error' do
diff --git a/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb b/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
index 0c48f15d726..5568052e346 100644
--- a/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/create_code_quality_mr_diff_report_service_spec.rb
@@ -4,58 +4,76 @@ require 'spec_helper'
RSpec.describe ::Ci::PipelineArtifacts::CreateCodeQualityMrDiffReportService do
describe '#execute' do
- subject(:pipeline_artifact) { described_class.new.execute(pipeline) }
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+ let(:head_pipeline) { create(:ci_pipeline, :success, :with_codequality_reports, project: project, merge_requests_as_head_pipeline: [merge_request]) }
+ let(:base_pipeline) { create(:ci_pipeline, :success, project: project, ref: merge_request.target_branch, sha: merge_request.diff_base_sha) }
- context 'when pipeline has codequality reports' do
- let(:project) { create(:project, :repository) }
+ subject { described_class.new(head_pipeline).execute }
- describe 'pipeline completed status' do
- using RSpec::Parameterized::TableSyntax
+ context 'when there are codequality reports' do
+ context 'when pipeline passes' do
+ context 'when degradations are present' do
+ context 'when degradations already present in target branch pipeline' do
+ before do
+ create(:ci_build, :success, :codequality_reports, name: 'codequality', pipeline: base_pipeline, project: project)
+ end
- where(:status, :result) do
- :success | 1
- :failed | 1
- :canceled | 1
- :skipped | 1
- end
+ it "does not persist a pipeline artifact" do
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
+ end
+ end
+
+ context 'when degradation is not present in target branch pipeline' do
+ before do
+ create(:ci_build, :success, :codequality_reports_without_degradation, name: 'codequality', pipeline: base_pipeline, project: project)
+ end
- with_them do
- let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, status: status, project: project) }
+ it 'persists a pipeline artifact' do
+ expect { subject }.to change { Ci::PipelineArtifact.count }.by(1)
+ end
- it 'creates a pipeline artifact' do
- expect { pipeline_artifact }.to change(Ci::PipelineArtifact, :count).by(result)
- end
+ it 'persists the default file name' do
+ subject
- it 'persists the default file name' do
- expect(pipeline_artifact.file.filename).to eq('code_quality_mr_diff.json')
- end
+ pipeline_artifact = Ci::PipelineArtifact.first
- it 'sets expire_at to 1 week' do
- freeze_time do
- expect(pipeline_artifact.expire_at).to eq(1.week.from_now)
+ expect(pipeline_artifact.file.filename).to eq('code_quality_mr_diff.json')
end
- end
- end
- end
- context 'when pipeline artifact has already been created' do
- let(:pipeline) { create(:ci_pipeline, :with_codequality_reports, project: project) }
+ it 'sets expire_at to 1 week' do
+ freeze_time do
+ subject
+
+ pipeline_artifact = Ci::PipelineArtifact.first
+
+ expect(pipeline_artifact.expire_at).to eq(1.week.from_now)
+ end
+ end
- it 'does not persist the same artifact twice' do
- 2.times { described_class.new.execute(pipeline) }
+ it 'does not persist the same artifact twice' do
+ 2.times { described_class.new(head_pipeline).execute }
- expect(Ci::PipelineArtifact.count).to eq(1)
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
+ end
+ end
end
end
end
- context 'when pipeline is not completed and codequality report does not exist' do
- let(:pipeline) { create(:ci_pipeline, :running) }
+ context 'when there are no codequality reports for head pipeline' do
+ let(:head_pipeline) { create(:ci_pipeline, :success, project: project, merge_requests_as_head_pipeline: [merge_request]) }
+
+ it "does not persist a pipeline artifact" do
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
+ end
+ end
- it 'does not persist data' do
- pipeline_artifact
+ context 'when there are no codequality reports for base pipeline' do
+ let(:head_pipeline) { create(:ci_pipeline, :success, project: project, merge_requests_as_head_pipeline: [merge_request]) }
- expect(Ci::PipelineArtifact.count).to eq(0)
+ it "does not persist a pipeline artifact" do
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
end
end
end
diff --git a/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb
index 3dc4f35df22..eb664043567 100644
--- a/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService do
stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 1)
stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
- create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
+ create_list(:ci_pipeline_artifact, 2, :unlocked, expire_at: 1.week.ago)
end
it 'destroys one artifact' do
@@ -46,7 +46,7 @@ RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService do
before do
stub_const('Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
- create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
+ create_list(:ci_pipeline_artifact, 2, :unlocked, expire_at: 1.week.ago)
end
it 'destroys all expired artifacts' do
@@ -60,7 +60,21 @@ RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService do
context 'when artifacts are not expired' do
before do
- create(:ci_pipeline_artifact, expire_at: 2.days.from_now)
+ create(:ci_pipeline_artifact, :unlocked, expire_at: 2.days.from_now)
+ end
+
+ it 'does not destroy pipeline artifacts' do
+ expect { subject }.not_to change { Ci::PipelineArtifact.count }
+ end
+
+ it 'reports the number of destroyed artifacts' do
+ is_expected.to eq(0)
+ end
+ end
+
+ context 'when pipeline is locked' do
+ before do
+ create(:ci_pipeline_artifact, expire_at: 2.weeks.ago)
end
it 'does not destroy pipeline artifacts' do
diff --git a/spec/services/ci/pipeline_bridge_status_service_spec.rb b/spec/services/ci/pipeline_bridge_status_service_spec.rb
index 584b23bb3aa..1346f68c952 100644
--- a/spec/services/ci/pipeline_bridge_status_service_spec.rb
+++ b/spec/services/ci/pipeline_bridge_status_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::PipelineBridgeStatusService do
let(:user) { build(:user) }
let_it_be(:project) { create(:project) }
+
let(:pipeline) { build(:ci_pipeline, project: project) }
describe '#execute' do
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index bc8b6b2d113..a66d3898c5c 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
require 'spec_helper'
-require_relative 'shared_processing_service.rb'
-require_relative 'shared_processing_service_tests_with_yaml.rb'
+require_relative 'shared_processing_service'
+require_relative 'shared_processing_service_tests_with_yaml'
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
it_behaves_like 'Pipeline Processing Service'
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_post_test_needs_deploy_is_stage.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_post_test_needs_deploy_is_stage.yml
new file mode 100644
index 00000000000..03d5781395d
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_post_test_needs_deploy_is_stage.yml
@@ -0,0 +1,50 @@
+config:
+ stages: [build, test, post_test, deploy]
+
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+ when: manual
+
+ post_test:
+ stage: post_test
+ script: exit 0
+ needs: [test]
+
+ deploy:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ post_test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ post_test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ post_test: skipped
+ deploy: pending
+ jobs:
+ build: success
+ test: manual
+ post_test: skipped
+ deploy: pending
diff --git a/spec/services/ci/pipeline_trigger_service_spec.rb b/spec/services/ci/pipeline_trigger_service_spec.rb
index 36055779a2e..080ca1cf0cd 100644
--- a/spec/services/ci/pipeline_trigger_service_spec.rb
+++ b/spec/services/ci/pipeline_trigger_service_spec.rb
@@ -13,12 +13,35 @@ RSpec.describe Ci::PipelineTriggerService do
describe '#execute' do
let_it_be(:user) { create(:user) }
+
let(:result) { described_class.new(project, user, params).execute }
before do
project.add_developer(user)
end
+ shared_examples 'detecting an unprocessable pipeline trigger' do
+ context 'when the pipeline was not created successfully' do
+ let(:fail_pipeline) do
+ receive(:execute).and_wrap_original do |original, *args|
+ pipeline = original.call(*args)
+ pipeline.update!(failure_reason: 'unknown_failure')
+ pipeline
+ end
+ end
+
+ before do
+ allow_next(Ci::CreatePipelineService).to fail_pipeline
+ end
+
+ it 'has the correct status code' do
+ expect { result }.to change { Ci::Pipeline.count }
+ expect(result).to be_error
+ expect(result.http_status).to eq(:unprocessable_entity)
+ end
+ end
+ end
+
context 'with a trigger token' do
let(:trigger) { create(:ci_trigger, project: project, owner: user) }
@@ -62,7 +85,7 @@ RSpec.describe Ci::PipelineTriggerService do
it 'ignores [ci skip] and create as general' do
expect { result }.to change { Ci::Pipeline.count }.by(1)
- expect(result[:status]).to eq(:success)
+ expect(result).to be_success
end
end
@@ -77,19 +100,22 @@ RSpec.describe Ci::PipelineTriggerService do
expect(result[:pipeline].trigger_requests.last.variables).to be_nil
end
end
+
+ it_behaves_like 'detecting an unprocessable pipeline trigger'
end
- context 'when params have a non-existsed ref' do
+ context 'when params have a non-existent ref' do
let(:params) { { token: trigger.token, ref: 'invalid-ref', variables: nil } }
it 'does not trigger a pipeline' do
expect { result }.not_to change { Ci::Pipeline.count }
- expect(result[:http_status]).to eq(400)
+ expect(result).to be_error
+ expect(result.http_status).to eq(:bad_request)
end
end
end
- context 'when params have a non-existsed trigger token' do
+ context 'when params have a non-existent trigger token' do
let(:params) { { token: 'invalid-token', ref: nil, variables: nil } }
it 'does not trigger a pipeline' do
@@ -172,14 +198,17 @@ RSpec.describe Ci::PipelineTriggerService do
expect(job.sourced_pipelines.last.pipeline_id).to eq(result[:pipeline].id)
end
end
+
+ it_behaves_like 'detecting an unprocessable pipeline trigger'
end
- context 'when params have a non-existsed ref' do
+ context 'when params have a non-existent ref' do
let(:params) { { token: job.token, ref: 'invalid-ref', variables: nil } }
- it 'does not job a pipeline' do
+ it 'does not trigger a job in the pipeline' do
expect { result }.not_to change { Ci::Pipeline.count }
- expect(result[:http_status]).to eq(400)
+ expect(result).to be_error
+ expect(result.http_status).to eq(:bad_request)
end
end
end
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index 254bd19c808..b5bf0adadaf 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -3,8 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::ProcessPipelineService do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:pipeline) do
create(:ci_empty_pipeline, ref: 'master', project: project)
@@ -24,8 +23,6 @@ RSpec.describe Ci::ProcessPipelineService do
stub_ci_pipeline_to_return_yaml_file
stub_not_protect_default_branch
- project.add_developer(user)
-
allow(subject).to receive(:metrics).and_return(metrics)
end
@@ -69,6 +66,14 @@ RSpec.describe Ci::ProcessPipelineService do
subject.execute
end
+ it 'logs the project and pipeline id' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(event: 'update_retried_is_used',
+ project_id: project.id,
+ pipeline_id: pipeline.id)
+
+ subject.execute
+ end
+
context 'when the previous build has already retried column true' do
before do
build_retried.update_columns(retried: true)
diff --git a/spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb b/spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb
index 2eef852b0f4..0b100af5902 100644
--- a/spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb
+++ b/spec/services/ci/prometheus_metrics/observe_histograms_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::PrometheusMetrics::ObserveHistogramsService do
let_it_be(:project) { create(:project) }
+
let(:params) { {} }
subject(:execute) { described_class.new(project, params).execute }
@@ -54,32 +55,6 @@ RSpec.describe Ci::PrometheusMetrics::ObserveHistogramsService do
end
end
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(ci_accept_frontend_prometheus_metrics: false)
- end
-
- let(:params) do
- {
- histograms: [
- { name: 'pipeline_graph_link_calculation_duration_seconds', value: '4' }
- ]
- }
- end
-
- it 'does not register the metrics' do
- execute
-
- expect(histogram_data).to be_nil
- end
-
- it 'returns an empty body and status code' do
- is_expected.to be_success
- expect(subject.http_status).to eq(:accepted)
- expect(subject.payload).to eq({})
- end
- end
-
def histogram_data(name = :pipeline_graph_link_calculation_duration_seconds)
Gitlab::Metrics.registry.get(name)&.get({})
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 02b48e8ba06..839a3c53f07 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -7,6 +7,7 @@ module Ci
let_it_be(:group) { create(:group) }
let_it_be(:project, reload: true) { create(:project, group: group, shared_runners_enabled: false, group_runners_enabled: false) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
let!(:shared_runner) { create(:ci_runner, :instance) }
let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
let!(:group_runner) { create(:ci_runner, :group, groups: [group]) }
@@ -81,31 +82,69 @@ module Ci
let!(:build2_project2) { FactoryBot.create :ci_build, pipeline: pipeline2 }
let!(:build1_project3) { FactoryBot.create :ci_build, pipeline: pipeline3 }
- it 'prefers projects without builds first' do
- # it gets for one build from each of the projects
- expect(execute(shared_runner)).to eq(build1_project1)
- expect(execute(shared_runner)).to eq(build1_project2)
- expect(execute(shared_runner)).to eq(build1_project3)
-
- # then it gets a second build from each of the projects
- expect(execute(shared_runner)).to eq(build2_project1)
- expect(execute(shared_runner)).to eq(build2_project2)
+ context 'when using fair scheduling' do
+ context 'when all builds are pending' do
+ it 'prefers projects without builds first' do
+ # it gets one build from each of the projects
+ expect(execute(shared_runner)).to eq(build1_project1)
+ expect(execute(shared_runner)).to eq(build1_project2)
+ expect(execute(shared_runner)).to eq(build1_project3)
+
+ # then it gets a second build from each of the projects
+ expect(execute(shared_runner)).to eq(build2_project1)
+ expect(execute(shared_runner)).to eq(build2_project2)
+
+ # finally, it gets the third build from the first project
+ expect(execute(shared_runner)).to eq(build3_project1)
+ end
+ end
- # in the end the third build
- expect(execute(shared_runner)).to eq(build3_project1)
+ context 'when some builds transition to success' do
+ it 'equalises number of running builds' do
+ # after finishing the first build for project 1, get a second build from the same project
+ expect(execute(shared_runner)).to eq(build1_project1)
+ build1_project1.reload.success
+ expect(execute(shared_runner)).to eq(build2_project1)
+
+ expect(execute(shared_runner)).to eq(build1_project2)
+ build1_project2.reload.success
+ expect(execute(shared_runner)).to eq(build2_project2)
+ expect(execute(shared_runner)).to eq(build1_project3)
+ expect(execute(shared_runner)).to eq(build3_project1)
+ end
+ end
end
- it 'equalises number of running builds' do
- # after finishing the first build for project 1, get a second build from the same project
- expect(execute(shared_runner)).to eq(build1_project1)
- build1_project1.reload.success
- expect(execute(shared_runner)).to eq(build2_project1)
+ context 'when using DEFCON mode that disables fair scheduling' do
+ before do
+ stub_feature_flags(ci_queueing_disaster_recovery: true)
+ end
+
+ context 'when all builds are pending' do
+ it 'returns builds in order of creation (FIFO)' do
+ # builds are returned in creation order, regardless of project
+ expect(execute(shared_runner)).to eq(build1_project1)
+ expect(execute(shared_runner)).to eq(build2_project1)
+ expect(execute(shared_runner)).to eq(build3_project1)
+ expect(execute(shared_runner)).to eq(build1_project2)
+ expect(execute(shared_runner)).to eq(build2_project2)
+ expect(execute(shared_runner)).to eq(build1_project3)
+ end
+ end
- expect(execute(shared_runner)).to eq(build1_project2)
- build1_project2.reload.success
- expect(execute(shared_runner)).to eq(build2_project2)
- expect(execute(shared_runner)).to eq(build1_project3)
- expect(execute(shared_runner)).to eq(build3_project1)
+ context 'when some builds transition to success' do
+ it 'returns builds in order of creation (FIFO)' do
+ expect(execute(shared_runner)).to eq(build1_project1)
+ build1_project1.reload.success
+ expect(execute(shared_runner)).to eq(build2_project1)
+
+ expect(execute(shared_runner)).to eq(build3_project1)
+ build2_project1.reload.success
+ expect(execute(shared_runner)).to eq(build1_project2)
+ expect(execute(shared_runner)).to eq(build2_project2)
+ expect(execute(shared_runner)).to eq(build1_project3)
+ end
+ end
end
end
@@ -477,10 +516,6 @@ module Ci
end
end
- before do
- stub_feature_flags(ci_validate_build_dependencies_override: false)
- end
-
let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
let!(:pending_job) do
@@ -491,37 +526,7 @@ module Ci
subject { execute(specific_runner) }
- context 'when validates for dependencies is enabled' do
- before do
- stub_feature_flags(ci_validate_build_dependencies_override: false)
- end
-
- it_behaves_like 'validation is active'
-
- context 'when the main feature flag is enabled for a specific project' do
- before do
- stub_feature_flags(ci_validate_build_dependencies: pipeline.project)
- end
-
- it_behaves_like 'validation is active'
- end
-
- context 'when the main feature flag is enabled for a different project' do
- before do
- stub_feature_flags(ci_validate_build_dependencies: create(:project))
- end
-
- it_behaves_like 'validation is not active'
- end
- end
-
- context 'when validates for dependencies is disabled' do
- before do
- stub_feature_flags(ci_validate_build_dependencies_override: true)
- end
-
- it_behaves_like 'validation is not active'
- end
+ it_behaves_like 'validation is active'
end
context 'when build is degenerated' do
diff --git a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
index 6c69a7f3b11..a741e3b49e7 100644
--- a/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
+++ b/spec/services/ci/resource_groups/assign_resource_from_resource_group_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:service) { described_class.new(project, user) }
describe '#execute' do
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 7dd3d963e56..86bda868625 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Ci::RetryBuildService do
end
let_it_be_with_refind(:build) { create(:ci_build, pipeline: pipeline, stage_id: stage.id) }
+
let(:user) { developer }
let(:service) do
diff --git a/spec/services/ci/stop_environments_service_spec.rb b/spec/services/ci/stop_environments_service_spec.rb
index 5a0b7f23556..d5ef67c871c 100644
--- a/spec/services/ci/stop_environments_service_spec.rb
+++ b/spec/services/ci/stop_environments_service_spec.rb
@@ -188,6 +188,7 @@ RSpec.describe Ci::StopEnvironmentsService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:environments) { Environment.available }
before_all do
diff --git a/spec/services/clusters/applications/prometheus_update_service_spec.rb b/spec/services/clusters/applications/prometheus_update_service_spec.rb
index 076ff0210c9..615bfc44045 100644
--- a/spec/services/clusters/applications/prometheus_update_service_spec.rb
+++ b/spec/services/clusters/applications/prometheus_update_service_spec.rb
@@ -9,83 +9,102 @@ RSpec.describe Clusters::Applications::PrometheusUpdateService do
let(:cluster) { create(:cluster, :provided_by_user, :with_installed_helm, projects: [project]) }
let(:application) { create(:clusters_applications_prometheus, :installed, cluster: cluster) }
let(:empty_alerts_values_update_yaml) { "---\nalertmanager:\n enabled: false\nserverFiles:\n alerts: {}\n" }
- let!(:patch_command) { application.patch_command(empty_alerts_values_update_yaml) }
let(:helm_client) { instance_double(::Gitlab::Kubernetes::Helm::API) }
subject(:service) { described_class.new(application, project) }
- before do
- allow(service).to receive(:patch_command).with(empty_alerts_values_update_yaml).and_return(patch_command)
- allow(service).to receive(:helm_api).and_return(helm_client)
+ context 'when prometheus is a Clusters::Integrations::Prometheus' do
+ let(:application) { create(:clusters_integrations_prometheus, cluster: cluster) }
+
+ it 'raises NotImplementedError' do
+ expect { service.execute }.to raise_error(NotImplementedError)
+ end
end
- context 'when there are no errors' do
- before do
- expect(helm_client).to receive(:update).with(patch_command)
+ context 'when prometheus is externally installed' do
+ let(:application) { create(:clusters_applications_prometheus, :externally_installed, cluster: cluster) }
- allow(::ClusterWaitForAppUpdateWorker)
- .to receive(:perform_in)
- .and_return(nil)
+ it 'raises NotImplementedError' do
+ expect { service.execute }.to raise_error(NotImplementedError)
end
+ end
- it 'make the application updating' do
- expect(application.cluster).not_to be_nil
-
- service.execute
+ context 'when prometheus is a Clusters::Applications::Prometheus' do
+ let!(:patch_command) { application.patch_command(empty_alerts_values_update_yaml) }
- expect(application).to be_updating
+ before do
+ allow(service).to receive(:patch_command).with(empty_alerts_values_update_yaml).and_return(patch_command)
+ allow(service).to receive(:helm_api).and_return(helm_client)
end
- it 'updates current config' do
- prometheus_config_service = spy(:prometheus_config_service)
+ context 'when there are no errors' do
+ before do
+ expect(helm_client).to receive(:update).with(patch_command)
- expect(Clusters::Applications::PrometheusConfigService)
- .to receive(:new)
- .with(project, cluster, application)
- .and_return(prometheus_config_service)
+ allow(::ClusterWaitForAppUpdateWorker)
+ .to receive(:perform_in)
+ .and_return(nil)
+ end
- expect(prometheus_config_service)
- .to receive(:execute)
- .and_return(YAML.safe_load(empty_alerts_values_update_yaml))
+ it 'marks the application as updating' do
+ expect(application.cluster).not_to be_nil
- service.execute
- end
+ service.execute
- it 'schedules async update status check' do
- expect(::ClusterWaitForAppUpdateWorker).to receive(:perform_in).once
+ expect(application).to be_updating
+ end
- service.execute
- end
- end
+ it 'updates current config' do
+ prometheus_config_service = spy(:prometheus_config_service)
- context 'when k8s cluster communication fails' do
- before do
- error = ::Kubeclient::HttpError.new(500, 'system failure', nil)
- allow(helm_client).to receive(:update).and_raise(error)
- end
+ expect(Clusters::Applications::PrometheusConfigService)
+ .to receive(:new)
+ .with(project, cluster, application)
+ .and_return(prometheus_config_service)
+
+ expect(prometheus_config_service)
+ .to receive(:execute)
+ .and_return(YAML.safe_load(empty_alerts_values_update_yaml))
- it 'make the application update errored' do
- service.execute
+ service.execute
+ end
- expect(application).to be_update_errored
- expect(application.status_reason).to match(/kubernetes error:/i)
+ it 'schedules async update status check' do
+ expect(::ClusterWaitForAppUpdateWorker).to receive(:perform_in).once
+
+ service.execute
+ end
end
- end
- context 'when application cannot be persisted' do
- let(:application) { build(:clusters_applications_prometheus, :installed) }
+ context 'when k8s cluster communication fails' do
+ before do
+ error = ::Kubeclient::HttpError.new(500, 'system failure', nil)
+ allow(helm_client).to receive(:update).and_raise(error)
+ end
- before do
- allow(application).to receive(:make_updating!).once
- .and_raise(ActiveRecord::RecordInvalid.new(application))
+ it 'marks the application update as errored' do
+ service.execute
+
+ expect(application).to be_update_errored
+ expect(application.status_reason).to match(/kubernetes error:/i)
+ end
end
- it 'make the application update errored' do
- expect(helm_client).not_to receive(:update)
+ context 'when application cannot be persisted' do
+ let(:application) { build(:clusters_applications_prometheus, :installed) }
+
+ before do
+ allow(application).to receive(:make_updating!).once
+ .and_raise(ActiveRecord::RecordInvalid.new(application))
+ end
+
+ it 'marks the application update as errored' do
+ expect(helm_client).not_to receive(:update)
- service.execute
+ service.execute
- expect(application).to be_update_errored
+ expect(application).to be_update_errored
+ end
end
end
end
diff --git a/spec/services/clusters/applications/schedule_update_service_spec.rb b/spec/services/clusters/applications/schedule_update_service_spec.rb
index 01a75a334e6..2cbcb861938 100644
--- a/spec/services/clusters/applications/schedule_update_service_spec.rb
+++ b/spec/services/clusters/applications/schedule_update_service_spec.rb
@@ -10,6 +10,32 @@ RSpec.describe Clusters::Applications::ScheduleUpdateService do
freeze_time { example.run }
end
+ context 'when the application is a Clusters::Integrations::Prometheus' do
+ let(:application) { create(:clusters_integrations_prometheus) }
+
+ it 'does nothing' do
+ service = described_class.new(application, project)
+
+ expect(::ClusterUpdateAppWorker).not_to receive(:perform_in)
+ expect(::ClusterUpdateAppWorker).not_to receive(:perform_async)
+
+ service.execute
+ end
+ end
+
+ context 'when the application is externally installed' do
+ let(:application) { create(:clusters_applications_prometheus, :externally_installed) }
+
+ it 'does nothing' do
+ service = described_class.new(application, project)
+
+ expect(::ClusterUpdateAppWorker).not_to receive(:perform_in)
+ expect(::ClusterUpdateAppWorker).not_to receive(:perform_async)
+
+ service.execute
+ end
+ end
+
context 'when application is able to be updated' do
context 'when the application was recently scheduled' do
it 'schedules worker with a backoff delay' do
diff --git a/spec/services/clusters/integrations/create_service_spec.rb b/spec/services/clusters/integrations/create_service_spec.rb
index cfc0943b6ad..14653236ab1 100644
--- a/spec/services/clusters/integrations/create_service_spec.rb
+++ b/spec/services/clusters/integrations/create_service_spec.rb
@@ -6,79 +6,64 @@ RSpec.describe Clusters::Integrations::CreateService, '#execute' do
let_it_be(:project) { create(:project) }
let_it_be_with_reload(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- let(:params) do
- { application_type: 'prometheus', enabled: true }
- end
-
let(:service) do
described_class.new(container: project, cluster: cluster, current_user: project.owner, params: params)
end
- it 'creates a new Prometheus instance' do
- expect(service.execute).to be_success
-
- expect(cluster.integration_prometheus).to be_present
- expect(cluster.integration_prometheus).to be_persisted
- expect(cluster.integration_prometheus).to be_enabled
- end
-
- context 'enabled param is false' do
- let(:params) do
- { application_type: 'prometheus', enabled: false }
- end
-
- it 'creates a new uninstalled Prometheus instance' do
- expect(service.execute).to be_success
+ shared_examples_for 'a cluster integration' do |application_type|
+ let(:integration) { cluster.public_send("integration_#{application_type}") }
- expect(cluster.integration_prometheus).to be_present
- expect(cluster.integration_prometheus).to be_persisted
- expect(cluster.integration_prometheus).not_to be_enabled
- end
- end
+ context 'when enabled param is true' do
+ let(:params) do
+ { application_type: application_type, enabled: true }
+ end
- context 'unauthorized user' do
- let(:service) do
- unauthorized_user = create(:user)
+ it 'creates a new enabled integration' do
+ expect(service.execute).to be_success
- described_class.new(container: project, cluster: cluster, current_user: unauthorized_user, params: params)
+ expect(integration).to be_present
+ expect(integration).to be_persisted
+ expect(integration).to be_enabled
+ end
end
- it 'does not create a new Prometheus instance' do
- expect(service.execute).to be_error
+ context 'when enabled param is false' do
+ let(:params) do
+ { application_type: application_type, enabled: false }
+ end
- expect(cluster.integration_prometheus).to be_nil
- end
- end
+ it 'creates a new disabled integration' do
+ expect(service.execute).to be_success
- context 'prometheus record exists' do
- before do
- create(:clusters_integrations_prometheus, cluster: cluster)
+ expect(integration).to be_present
+ expect(integration).to be_persisted
+ expect(integration).not_to be_enabled
+ end
end
- it 'updates the Prometheus instance' do
- expect(service.execute).to be_success
-
- expect(cluster.integration_prometheus).to be_present
- expect(cluster.integration_prometheus).to be_persisted
- expect(cluster.integration_prometheus).to be_enabled
- end
+ context 'when integration already exists' do
+ before do
+ create(:"clusters_integrations_#{application_type}", cluster: cluster, enabled: false)
+ end
- context 'enabled param is false' do
let(:params) do
- { application_type: 'prometheus', enabled: false }
+ { application_type: application_type, enabled: true }
end
- it 'updates the Prometheus instance as uninstalled' do
+ it 'updates the integration' do
+ expect(integration).not_to be_enabled
+
expect(service.execute).to be_success
- expect(cluster.integration_prometheus).to be_present
- expect(cluster.integration_prometheus).to be_persisted
- expect(cluster.integration_prometheus).not_to be_enabled
+ expect(integration.reload).to be_enabled
end
end
end
- context 'for an un-supported application type' do
+ it_behaves_like 'a cluster integration', 'prometheus'
+ it_behaves_like 'a cluster integration', 'elastic_stack'
+
+ context 'when application_type is invalid' do
let(:params) do
{ application_type: 'something_else', enabled: true }
end
@@ -87,4 +72,22 @@ RSpec.describe Clusters::Integrations::CreateService, '#execute' do
expect { service.execute}.to raise_error(ArgumentError)
end
end
+
+ context 'when user is unauthorized' do
+ let(:params) do
+ { application_type: 'prometheus', enabled: true }
+ end
+
+ let(:service) do
+ unauthorized_user = create(:user)
+
+ described_class.new(container: project, cluster: cluster, current_user: unauthorized_user, params: params)
+ end
+
+ it 'returns error and does not create a new integration record' do
+ expect(service.execute).to be_error
+
+ expect(cluster.integration_prometheus).to be_nil
+ end
+ end
end
diff --git a/spec/services/clusters/management/create_project_service_spec.rb b/spec/services/clusters/management/create_project_service_spec.rb
deleted file mode 100644
index 5d8cc71faa4..00000000000
--- a/spec/services/clusters/management/create_project_service_spec.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Management::CreateProjectService do
- let(:cluster) { create(:cluster, :project) }
- let(:current_user) { create(:user) }
-
- subject { described_class.new(cluster, current_user: current_user).execute }
-
- shared_examples 'management project is not required' do
- it 'does not create a project' do
- expect { subject }.not_to change(cluster, :management_project)
- end
- end
-
- context ':auto_create_cluster_management_project feature flag is disabled' do
- before do
- stub_feature_flags(auto_create_cluster_management_project: false)
- end
-
- include_examples 'management project is not required'
- end
-
- context 'cluster already has a management project' do
- let(:cluster) { create(:cluster, :management_project) }
-
- include_examples 'management project is not required'
- end
-
- shared_examples 'creates a management project' do
- let(:project_params) do
- {
- name: "#{cluster.name} Cluster Management",
- description: 'This project is automatically generated and will be used to manage your Kubernetes cluster. [More information](/help/user/clusters/management_project)',
- namespace_id: namespace&.id,
- visibility_level: Gitlab::VisibilityLevel::PRIVATE
- }
- end
-
- it 'creates a management project' do
- expect(Projects::CreateService).to receive(:new)
- .with(current_user, project_params)
- .and_call_original
-
- subject
-
- management_project = cluster.management_project
-
- expect(management_project).to be_present
- expect(management_project).to be_private
- expect(management_project.name).to eq "#{cluster.name} Cluster Management"
- expect(management_project.namespace).to eq namespace
- end
- end
-
- context 'project cluster' do
- let(:cluster) { create(:cluster, projects: [project]) }
- let(:project) { create(:project, namespace: current_user.namespace) }
- let(:namespace) { project.namespace }
-
- include_examples 'creates a management project'
- end
-
- context 'group cluster' do
- let(:cluster) { create(:cluster, :group, user: current_user) }
- let(:namespace) { cluster.group }
-
- before do
- namespace.add_user(current_user, Gitlab::Access::MAINTAINER)
- end
-
- include_examples 'creates a management project'
- end
-
- context 'instance cluster' do
- let(:cluster) { create(:cluster, :instance, user: current_user) }
- let(:namespace) { create(:group) }
-
- before do
- stub_application_setting(instance_administrators_group: namespace)
-
- namespace.add_user(current_user, Gitlab::Access::MAINTAINER)
- end
-
- include_examples 'creates a management project'
- end
-
- describe 'error handling' do
- let(:project) { cluster.project }
-
- before do
- allow(Projects::CreateService).to receive(:new)
- .and_return(double(execute: project))
- end
-
- context 'project is invalid' do
- let(:errors) { double(full_messages: ["Error message"]) }
- let(:project) { instance_double(Project, errors: errors) }
-
- it { expect { subject }.to raise_error(described_class::CreateError, /Failed to create project/) }
- end
-
- context 'instance administrators group is missing' do
- let(:cluster) { create(:cluster, :instance) }
-
- it { expect { subject }.to raise_error(described_class::CreateError, /Instance administrators group not found/) }
- end
-
- context 'cluster is invalid' do
- before do
- allow(cluster).to receive(:update).and_return(false)
- end
-
- it { expect { subject }.to raise_error(described_class::CreateError, /Failed to update cluster/) }
- end
-
- context 'unknown cluster type' do
- before do
- allow(cluster).to receive(:cluster_type).and_return("unknown_type")
- end
-
- it { expect { subject }.to raise_error(NotImplementedError) }
- end
- end
-end
diff --git a/spec/services/container_expiration_policies/cleanup_service_spec.rb b/spec/services/container_expiration_policies/cleanup_service_spec.rb
index 746e3464427..c6faae7449d 100644
--- a/spec/services/container_expiration_policies/cleanup_service_spec.rb
+++ b/spec/services/container_expiration_policies/cleanup_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ContainerExpirationPolicies::CleanupService do
- let_it_be(:repository, reload: true) { create(:container_repository) }
+ let_it_be(:repository, reload: true) { create(:container_repository, expiration_policy_started_at: 30.minutes.ago) }
let_it_be(:project) { repository.project }
let(:service) { described_class.new(repository) }
@@ -11,59 +11,35 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
describe '#execute' do
subject { service.execute }
- context 'with a successful cleanup tags service execution' do
- let(:cleanup_tags_service_params) { project.container_expiration_policy.policy_params.merge('container_expiration_policy' => true) }
- let(:cleanup_tags_service) { instance_double(Projects::ContainerRepository::CleanupTagsService) }
+ shared_examples 'cleaning up a container repository' do
+ context 'with a successful cleanup tags service execution' do
+ let(:cleanup_tags_service_params) { project.container_expiration_policy.policy_params.merge('container_expiration_policy' => true) }
+ let(:cleanup_tags_service) { instance_double(Projects::ContainerRepository::CleanupTagsService) }
- it 'completely clean up the repository' do
- expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).with(project, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
- expect(cleanup_tags_service).to receive(:execute).with(repository).and_return(status: :success)
+ it 'completely cleans up the repository' do
+ expect(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new).with(project, nil, cleanup_tags_service_params).and_return(cleanup_tags_service)
+ expect(cleanup_tags_service).to receive(:execute).with(repository).and_return(status: :success)
- response = subject
+ response = subject
- aggregate_failures "checking the response and container repositories" do
- expect(response.success?).to eq(true)
- expect(response.payload).to include(cleanup_status: :finished, container_repository_id: repository.id)
- expect(ContainerRepository.waiting_for_cleanup.count).to eq(0)
- expect(repository.reload.cleanup_unscheduled?).to be_truthy
- expect(repository.expiration_policy_started_at).to eq(nil)
- expect(repository.expiration_policy_completed_at).not_to eq(nil)
+ aggregate_failures "checking the response and container repositories" do
+ expect(response.success?).to eq(true)
+ expect(response.payload).to include(cleanup_status: :finished, container_repository_id: repository.id)
+ expect(ContainerRepository.waiting_for_cleanup.count).to eq(0)
+ expect(repository.reload.cleanup_unscheduled?).to be_truthy
+ expect(repository.expiration_policy_completed_at).not_to eq(nil)
+ expect(repository.expiration_policy_started_at).not_to eq(nil)
+ end
end
end
- end
-
- context 'without a successful cleanup tags service execution' do
- let(:cleanup_tags_service_response) { { status: :error, message: 'timeout' } }
-
- before do
- expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).and_return(double(execute: cleanup_tags_service_response))
- end
- it 'partially clean up the repository' do
- response = subject
+ context 'without a successful cleanup tags service execution' do
+ let(:cleanup_tags_service_response) { { status: :error, message: 'timeout' } }
- aggregate_failures "checking the response and container repositories" do
- expect(response.success?).to eq(true)
- expect(response.payload).to include(cleanup_status: :unfinished, container_repository_id: repository.id)
- expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
- expect(repository.reload.cleanup_unfinished?).to be_truthy
- expect(repository.expiration_policy_started_at).not_to eq(nil)
- expect(repository.expiration_policy_completed_at).to eq(nil)
- end
- end
-
- context 'with a truncated cleanup tags service response' do
- let(:cleanup_tags_service_response) do
- {
- status: :error,
- original_size: 1000,
- before_truncate_size: 800,
- after_truncate_size: 200,
- before_delete_size: 100,
- deleted_size: 100
- }
+ before do
+ expect(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new).and_return(double(execute: cleanup_tags_service_response))
end
it 'partially clean up the repository' do
@@ -71,49 +47,179 @@ RSpec.describe ContainerExpirationPolicies::CleanupService do
aggregate_failures "checking the response and container repositories" do
expect(response.success?).to eq(true)
- expect(response.payload)
- .to include(
- cleanup_status: :unfinished,
- container_repository_id: repository.id,
- cleanup_tags_service_original_size: 1000,
- cleanup_tags_service_before_truncate_size: 800,
- cleanup_tags_service_after_truncate_size: 200,
- cleanup_tags_service_before_delete_size: 100,
- cleanup_tags_service_deleted_size: 100
- )
+ expect(response.payload).to include(cleanup_status: :unfinished, container_repository_id: repository.id)
expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
expect(repository.reload.cleanup_unfinished?).to be_truthy
expect(repository.expiration_policy_started_at).not_to eq(nil)
expect(repository.expiration_policy_completed_at).to eq(nil)
end
end
+
+ context 'with a truncated cleanup tags service response' do
+ let(:cleanup_tags_service_response) do
+ {
+ status: :error,
+ original_size: 1000,
+ before_truncate_size: 800,
+ after_truncate_size: 200,
+ before_delete_size: 100,
+ deleted_size: 100
+ }
+ end
+
+ it 'partially cleans up the repository' do
+ response = subject
+
+ aggregate_failures "checking the response and container repositories" do
+ expect(response.success?).to eq(true)
+ expect(response.payload)
+ .to include(
+ cleanup_status: :unfinished,
+ container_repository_id: repository.id,
+ cleanup_tags_service_original_size: 1000,
+ cleanup_tags_service_before_truncate_size: 800,
+ cleanup_tags_service_after_truncate_size: 200,
+ cleanup_tags_service_before_delete_size: 100,
+ cleanup_tags_service_deleted_size: 100
+ )
+ expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
+ expect(repository.reload.cleanup_unfinished?).to be_truthy
+ expect(repository.expiration_policy_started_at).not_to eq(nil)
+ expect(repository.expiration_policy_completed_at).to eq(nil)
+ end
+ end
+ end
end
- end
- context 'with no repository' do
- let(:service) { described_class.new(nil) }
+ context 'with no repository' do
+ let(:service) { described_class.new(nil) }
+
+ it 'returns an error response' do
+ expect(subject.success?).to eq(false)
+ expect(subject.message).to eq('no repository')
+ end
+ end
- it 'returns an error response' do
- response = subject
+ context 'with an invalid policy' do
+ let(:policy) { repository.project.container_expiration_policy }
- expect(response.success?).to eq(false)
+ before do
+ policy.name_regex = nil
+ policy.enabled = true
+ repository.expiration_policy_cleanup_status = :cleanup_ongoing
+ end
+
+ it 'returns an error response' do
+ expect { subject }.to change { repository.expiration_policy_cleanup_status }.from('cleanup_ongoing').to('cleanup_unscheduled')
+ expect(subject.success?).to eq(false)
+ expect(subject.message).to eq('invalid policy')
+ expect(policy).not_to be_enabled
+ end
+ end
+
+ context 'with a network error' do
+ before do
+ expect(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new).and_raise(Faraday::TimeoutError)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Faraday::TimeoutError)
+
+ expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
+ expect(repository.reload.cleanup_unfinished?).to be_truthy
+ expect(repository.expiration_policy_started_at).not_to eq(nil)
+ expect(repository.expiration_policy_completed_at).to eq(nil)
+ end
end
end
- context 'with a network error' do
+ context 'with loopless enabled' do
+ let(:policy) { repository.project.container_expiration_policy }
+
before do
- expect(Projects::ContainerRepository::CleanupTagsService)
- .to receive(:new).and_raise(Faraday::TimeoutError)
+ policy.update!(enabled: true)
+ policy.update_column(:next_run_at, 5.minutes.ago)
end
- it 'raises an error' do
- expect { subject }.to raise_error(Faraday::TimeoutError)
+ it_behaves_like 'cleaning up a container repository'
+
+ context 'next run scheduling' do
+ let_it_be_with_reload(:repository2) { create(:container_repository, project: project) }
+ let_it_be_with_reload(:repository3) { create(:container_repository, project: project) }
+
+ before do
+ cleanup_tags_service = instance_double(Projects::ContainerRepository::CleanupTagsService)
+ allow(Projects::ContainerRepository::CleanupTagsService)
+ .to receive(:new).and_return(cleanup_tags_service)
+ allow(cleanup_tags_service).to receive(:execute).and_return(status: :success)
+ end
+
+ shared_examples 'not scheduling the next run' do
+ it 'does not schedule the next run' do
+ expect(policy).not_to receive(:schedule_next_run!)
+
+ expect { subject }.not_to change { policy.reload.next_run_at }
+ end
+ end
+
+ shared_examples 'scheduling the next run' do
+ it 'schedules the next run' do
+ expect(policy).to receive(:schedule_next_run!).and_call_original
+
+ expect { subject }.to change { policy.reload.next_run_at }
+ end
+ end
+
+ context 'with cleanups started_at before policy next_run_at' do
+ before do
+ ContainerRepository.update_all(expiration_policy_started_at: 10.minutes.ago)
+ end
+
+ it_behaves_like 'not scheduling the next run'
+ end
+
+ context 'with cleanups started_at around policy next_run_at' do
+ before do
+ repository3.update!(expiration_policy_started_at: policy.next_run_at + 10.minutes.ago)
+ end
- expect(ContainerRepository.waiting_for_cleanup.count).to eq(1)
- expect(repository.reload.cleanup_unfinished?).to be_truthy
- expect(repository.expiration_policy_started_at).not_to eq(nil)
- expect(repository.expiration_policy_completed_at).to eq(nil)
+ it_behaves_like 'not scheduling the next run'
+ end
+
+ context 'with only the current repository started_at before the policy next_run_at' do
+ before do
+ repository2.update!(expiration_policy_started_at: policy.next_run_at + 10.minutes)
+ repository3.update!(expiration_policy_started_at: policy.next_run_at + 12.minutes)
+ end
+
+ it_behaves_like 'scheduling the next run'
+ end
+
+ context 'with cleanups started_at after policy next_run_at' do
+ before do
+ ContainerRepository.update_all(expiration_policy_started_at: policy.next_run_at + 10.minutes)
+ end
+
+ it_behaves_like 'scheduling the next run'
+ end
+
+ context 'with a future policy next_run_at' do
+ before do
+ policy.update_column(:next_run_at, 5.minutes.from_now)
+ end
+
+ it_behaves_like 'not scheduling the next run'
+ end
end
end
+
+ context 'with loopless disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
+ end
+
+ it_behaves_like 'cleaning up a container repository'
+ end
end
end
diff --git a/spec/services/deployments/create_service_spec.rb b/spec/services/deployments/create_service_spec.rb
index 0bb5949ddb1..0f2a6ce32e1 100644
--- a/spec/services/deployments/create_service_spec.rb
+++ b/spec/services/deployments/create_service_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Deployments::CreateService do
expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- expect(Deployments::ExecuteHooksWorker).to receive(:perform_async)
+ expect(Deployments::HooksWorker).to receive(:perform_async)
expect(service.execute).to be_persisted
end
@@ -37,7 +37,7 @@ RSpec.describe Deployments::CreateService do
expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
- expect(Deployments::ExecuteHooksWorker).not_to receive(:perform_async)
+ expect(Deployments::HooksWorker).not_to receive(:perform_async)
expect(service.execute).to be_persisted
end
@@ -57,7 +57,7 @@ RSpec.describe Deployments::CreateService do
expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
- expect(Deployments::ExecuteHooksWorker).not_to receive(:perform_async)
+ expect(Deployments::HooksWorker).not_to receive(:perform_async)
described_class.new(environment.reload, user, params).execute
end
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 372805cc0fd..4d15258a186 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Deployments::UpdateEnvironmentService do
before do
allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
- allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
+ allow(Deployments::HooksWorker).to receive(:perform_async)
job.success! # Create/Succeed deployment
end
@@ -161,6 +161,7 @@ RSpec.describe Deployments::UpdateEnvironmentService do
context 'when deployment was created by an external CD system' do
before do
deployment.update_column(:deployable_id, nil)
+ deployment.reload
end
it 'guesses the deployment tier' do
diff --git a/spec/services/discussions/capture_diff_note_positions_service_spec.rb b/spec/services/discussions/capture_diff_note_positions_service_spec.rb
index be53b02a4c1..25e5f549bee 100644
--- a/spec/services/discussions/capture_diff_note_positions_service_spec.rb
+++ b/spec/services/discussions/capture_diff_note_positions_service_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe Discussions::CaptureDiffNotePositionsService do
context 'and position of the discussion changed on target branch head' do
it 'diff positions are created for the first notes of the discussions' do
- MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
+ MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author).execute(merge_request)
service.execute
verify_diff_note_position!(first_discussion_note, new_line: first_new_line)
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index f93622dc25a..2e1de367da3 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -202,7 +202,7 @@ RSpec.describe DraftNotes::PublishService do
expect(newrev).to be_present
# Generates new MR revision at DB level
- refresh = MergeRequests::RefreshService.new(project, user)
+ refresh = MergeRequests::RefreshService.new(project: project, current_user: user)
refresh.execute(oldrev, newrev, merge_request.source_branch_ref)
expect { publish(draft: draft) }.to change { Suggestion.count }.by(1)
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
index 128fab114fe..2e0c162ebc1 100644
--- a/spec/services/feature_flags/create_service_spec.rb
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe FeatureFlags::CreateService do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
+
let(:user) { developer }
before_all do
diff --git a/spec/services/feature_flags/destroy_service_spec.rb b/spec/services/feature_flags/destroy_service_spec.rb
index b35de02c628..ee30474873c 100644
--- a/spec/services/feature_flags/destroy_service_spec.rb
+++ b/spec/services/feature_flags/destroy_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe FeatureFlags::DestroyService do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
+
let(:user) { developer }
let!(:feature_flag) { create(:operations_feature_flag, project: project) }
diff --git a/spec/services/feature_flags/disable_service_spec.rb b/spec/services/feature_flags/disable_service_spec.rb
index de0f70bf552..4b2137be35c 100644
--- a/spec/services/feature_flags/disable_service_spec.rb
+++ b/spec/services/feature_flags/disable_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe FeatureFlags::DisableService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:params) { {} }
let(:service) { described_class.new(project, user, params) }
diff --git a/spec/services/feature_flags/enable_service_spec.rb b/spec/services/feature_flags/enable_service_spec.rb
index 88c8028f6c5..c0008b1933f 100644
--- a/spec/services/feature_flags/enable_service_spec.rb
+++ b/spec/services/feature_flags/enable_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe FeatureFlags::EnableService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:params) { {} }
let(:service) { described_class.new(project, user, params) }
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
index 9639cf3081d..1a127a0d472 100644
--- a/spec/services/feature_flags/update_service_spec.rb
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe FeatureFlags::UpdateService do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
+
let(:user) { developer }
let(:feature_flag) { create(:operations_feature_flag, project: project, active: true) }
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index 52df21897b9..19694a0a354 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Git::BranchHooksService do
+RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state do
include RepoHelpers
include ProjectForksHelper
@@ -116,8 +116,6 @@ RSpec.describe Git::BranchHooksService do
allow_next_instance_of(Gitlab::Git::Diff) do |diff|
allow(diff).to receive(:new_path).and_return('.gitlab-ci.yml')
end
-
- allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
end
let!(:commit_author) { create(:user, email: sample_commit.author_email) }
@@ -127,23 +125,11 @@ RSpec.describe Git::BranchHooksService do
end
it 'tracks the event' do
- execute_service
-
- expect(Gitlab::UsageDataCounters::HLLRedisCounter)
- .to have_received(:track_event).with(*tracking_params)
- end
-
- context 'when the FF usage_data_unique_users_committing_ciconfigfile is disabled' do
- before do
- stub_feature_flags(usage_data_unique_users_committing_ciconfigfile: false)
- end
+ time = Time.zone.now
- it 'does not track the event' do
- execute_service
+ execute_service
- expect(Gitlab::UsageDataCounters::HLLRedisCounter)
- .not_to have_received(:track_event).with(*tracking_params)
- end
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: 'o_pipeline_authoring_unique_users_committing_ciconfigfile', start_date: time, end_date: time + 7.days)).to eq(1)
end
context 'when usage ping is disabled' do
@@ -155,7 +141,7 @@ RSpec.describe Git::BranchHooksService do
execute_service
expect(Gitlab::UsageDataCounters::HLLRedisCounter)
- .not_to have_received(:track_event).with(*tracking_params)
+ .not_to receive(:track_event).with(*tracking_params)
end
end
@@ -166,7 +152,7 @@ RSpec.describe Git::BranchHooksService do
execute_service
expect(Gitlab::UsageDataCounters::HLLRedisCounter)
- .not_to have_received(:track_event).with(*tracking_params)
+ .not_to receive(:track_event).with(*tracking_params)
end
end
@@ -179,7 +165,7 @@ RSpec.describe Git::BranchHooksService do
execute_service
expect(Gitlab::UsageDataCounters::HLLRedisCounter)
- .not_to have_received(:track_event).with(*tracking_params)
+ .not_to receive(:track_event).with(*tracking_params)
end
end
end
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index df9a48d7b1c..151c2a1d014 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe Git::WikiPushService, services: true do
process_changes do
write_new_page
update_page(wiki_page_a.title)
- delete_page(wiki_page_b.page.path)
+ delete_page(wiki_page_b.page)
end
end
@@ -198,7 +198,7 @@ RSpec.describe Git::WikiPushService, services: true do
context 'when a page we do not know about has been deleted' do
def run_service
wiki_page = create(:wiki_page, wiki: wiki)
- process_changes { delete_page(wiki_page.page.path) }
+ process_changes { delete_page(wiki_page.page) }
end
it 'create a new meta-data record' do
@@ -350,8 +350,8 @@ RSpec.describe Git::WikiPushService, services: true do
git_wiki.update_page(page.path, title, 'markdown', 'Hey', commit_details)
end
- def delete_page(path)
- git_wiki.delete_page(path, commit_details)
+ def delete_page(page)
+ wiki.delete_page(page, 'commit message')
end
def commit_details
diff --git a/spec/services/groups/autocomplete_service_spec.rb b/spec/services/groups/autocomplete_service_spec.rb
new file mode 100644
index 00000000000..00d0ad3b347
--- /dev/null
+++ b/spec/services/groups/autocomplete_service_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::AutocompleteService do
+ let_it_be(:group, refind: true) { create(:group, :nested, :private, avatar: fixture_file_upload('spec/fixtures/dk.png')) }
+ let_it_be(:sub_group) { create(:group, :private, parent: group) }
+
+ let(:user) { create(:user) }
+
+ subject { described_class.new(group, user) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ def expect_labels_to_equal(labels, expected_labels)
+ extract_title = lambda { |label| label['title'] }
+ expect(labels.map(&extract_title)).to match_array(expected_labels.map(&extract_title))
+ end
+
+ describe '#labels_as_hash' do
+ let!(:label1) { create(:group_label, group: group) }
+ let!(:label2) { create(:group_label, group: group) }
+ let!(:sub_group_label) { create(:group_label, group: sub_group) }
+ let!(:parent_group_label) { create(:group_label, group: group.parent) }
+
+ it 'returns labels from own group and ancestor groups' do
+ results = subject.labels_as_hash(nil)
+
+ expected_labels = [label1, label2, parent_group_label]
+
+ expect_labels_to_equal(results, expected_labels)
+ end
+ end
+
+ describe '#issues' do
+ let(:project) { create(:project, group: group) }
+ let(:sub_group_project) { create(:project, group: sub_group) }
+
+ let!(:project_issue) { create(:issue, project: project) }
+ let!(:sub_group_project_issue) { create(:issue, confidential: true, project: sub_group_project) }
+
+ it 'returns issues in group and subgroups' do
+ issues = subject.issues
+
+ expect(issues.map(&:iid)).to contain_exactly(project_issue.iid, sub_group_project_issue.iid)
+ expect(issues.map(&:title)).to contain_exactly(project_issue.title, sub_group_project_issue.title)
+ end
+
+ it 'returns only confidential issues if confidential_only is true' do
+ issues = subject.issues(confidential_only: true)
+
+ expect(issues.map(&:iid)).to contain_exactly(sub_group_project_issue.iid)
+ expect(issues.map(&:title)).to contain_exactly(sub_group_project_issue.title)
+ end
+ end
+
+ describe '#merge_requests' do
+ let(:project) { create(:project, :repository, group: group) }
+ let(:sub_group_project) { create(:project, :repository, group: sub_group) }
+
+ let!(:project_mr) { create(:merge_request, source_project: project) }
+ let!(:sub_group_project_mr) { create(:merge_request, source_project: sub_group_project) }
+
+ it 'returns merge requests in group and subgroups' do
+ expect(subject.merge_requests.map(&:iid)).to contain_exactly(project_mr.iid, sub_group_project_mr.iid)
+ expect(subject.merge_requests.map(&:title)).to contain_exactly(project_mr.title, sub_group_project_mr.title)
+ end
+ end
+
+ describe '#milestones' do
+ let!(:group_milestone) { create(:milestone, group: group) }
+ let!(:subgroup_milestone) { create(:milestone, group: sub_group) }
+
+ before do
+ sub_group.add_maintainer(user)
+ end
+
+ context 'when group is public' do
+ let(:public_group) { create(:group, :public) }
+ let(:public_subgroup) { create(:group, :public, parent: public_group) }
+
+ before do
+ group_milestone.update!(group: public_group)
+ subgroup_milestone.update!(group: public_subgroup)
+ end
+
+ it 'returns milestones from groups and subgroups' do
+ subject = described_class.new(public_subgroup, user)
+
+ expect(subject.milestones.map(&:iid)).to contain_exactly(group_milestone.iid, subgroup_milestone.iid)
+ expect(subject.milestones.map(&:title)).to contain_exactly(group_milestone.title, subgroup_milestone.title)
+ end
+ end
+
+ it 'returns milestones from group' do
+ expect(subject.milestones.map(&:iid)).to contain_exactly(group_milestone.iid)
+ expect(subject.milestones.map(&:title)).to contain_exactly(group_milestone.title)
+ end
+
+ it 'returns milestones from groups and subgroups' do
+ milestones = described_class.new(sub_group, user).milestones
+
+ expect(milestones.map(&:iid)).to contain_exactly(group_milestone.iid, subgroup_milestone.iid)
+ expect(milestones.map(&:title)).to contain_exactly(group_milestone.title, subgroup_milestone.title)
+ end
+
+ it 'returns only milestones that user can read' do
+ user = create(:user)
+ sub_group.add_guest(user)
+
+ milestones = described_class.new(sub_group, user).milestones
+
+ expect(milestones.map(&:iid)).to contain_exactly(subgroup_milestone.iid)
+ expect(milestones.map(&:title)).to contain_exactly(subgroup_milestone.title)
+ end
+ end
+end
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index f0cd42c1948..dca5497de06 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -164,9 +164,9 @@ RSpec.describe Groups::CreateService, '#execute' do
let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
it 'creates a service from the instance-level integration' do
- expect(created_group.services.count).to eq(1)
- expect(created_group.services.first.api_url).to eq(instance_integration.api_url)
- expect(created_group.services.first.inherit_from_id).to eq(instance_integration.id)
+ expect(created_group.integrations.count).to eq(1)
+ expect(created_group.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(instance_integration.id)
end
context 'with an active group-level integration' do
@@ -179,9 +179,9 @@ RSpec.describe Groups::CreateService, '#execute' do
end
it 'creates a service from the group-level integration' do
- expect(created_group.services.count).to eq(1)
- expect(created_group.services.first.api_url).to eq(group_integration.api_url)
- expect(created_group.services.first.inherit_from_id).to eq(group_integration.id)
+ expect(created_group.integrations.count).to eq(1)
+ expect(created_group.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(group_integration.id)
end
context 'with an active subgroup' do
@@ -194,9 +194,9 @@ RSpec.describe Groups::CreateService, '#execute' do
end
it 'creates a service from the subgroup-level integration' do
- expect(created_group.services.count).to eq(1)
- expect(created_group.services.first.api_url).to eq(subgroup_integration.api_url)
- expect(created_group.services.first.inherit_from_id).to eq(subgroup_integration.id)
+ expect(created_group.integrations.count).to eq(1)
+ expect(created_group.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(created_group.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
end
end
end
diff --git a/spec/services/groups/open_issues_count_service_spec.rb b/spec/services/groups/open_issues_count_service_spec.rb
index 740e9846119..fca09bfdebe 100644
--- a/spec/services/groups/open_issues_count_service_spec.rb
+++ b/spec/services/groups/open_issues_count_service_spec.rb
@@ -57,4 +57,15 @@ RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_cac
it_behaves_like 'a counter caching service with threshold'
end
end
+
+ describe '#clear_all_cache_keys' do
+ it 'calls `Rails.cache.delete` with the correct keys' do
+ expect(Rails.cache).to receive(:delete)
+ .with(['groups', 'open_issues_count_service', 1, group.id, described_class::PUBLIC_COUNT_KEY])
+ expect(Rails.cache).to receive(:delete)
+ .with(['groups', 'open_issues_count_service', 1, group.id, described_class::TOTAL_COUNT_KEY])
+
+ subject.clear_all_cache_keys
+ end
+ end
end
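
The new #clear_all_cache_keys example above pins down the exact cache key layout the counter service is expected to use: a versioned array key of the form ['groups', 'open_issues_count_service', version, group.id, count_key]. A minimal sketch of clearing both counters by hand, assuming only what those expectations show (the version component appears as 1, and the two count-key constants are defined on the service class):

    # Sketch only: mirrors the keys asserted in the spec above.
    [
      Groups::OpenIssuesCountService::PUBLIC_COUNT_KEY,
      Groups::OpenIssuesCountService::TOTAL_COUNT_KEY
    ].each do |count_key|
      Rails.cache.delete(['groups', 'open_issues_count_service', 1, group.id, count_key])
    end
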
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 3a1197970f4..2fbd5eeef5f 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -240,6 +240,7 @@ RSpec.describe Groups::TransferService do
end
context 'when the group is allowed to be transferred' do
+ let_it_be(:new_parent_group, reload: true) { create(:group, :public) }
let_it_be(:new_parent_group_integration) { create(:slack_service, group: new_parent_group, project: nil, webhook: 'http://new-group.slack.com') }
before do
@@ -273,17 +274,16 @@ RSpec.describe Groups::TransferService do
end
context 'with a group integration' do
- let_it_be(:instance_integration) { create(:slack_service, :instance, webhook: 'http://project.slack.com') }
-
- let(:new_created_integration) { Service.find_by(group: group) }
+ let(:new_created_integration) { Integration.find_by(group: group) }
context 'with an inherited integration' do
+ let_it_be(:instance_integration) { create(:slack_service, :instance, webhook: 'http://project.slack.com') }
let_it_be(:group_integration) { create(:slack_service, group: group, project: nil, webhook: 'http://group.slack.com', inherit_from_id: instance_integration.id) }
it 'replaces inherited integrations', :aggregate_failures do
expect(new_created_integration.webhook).to eq(new_parent_group_integration.webhook)
expect(PropagateIntegrationWorker).to have_received(:perform_async).with(new_created_integration.id)
- expect(Service.count).to eq(3)
+ expect(Integration.count).to eq(3)
end
end
@@ -603,6 +603,7 @@ RSpec.describe Groups::TransferService do
create(:group_member, :owner, group: new_parent_group, user: user)
create(:group, :private, parent: group, require_two_factor_authentication: true)
group.update!(require_two_factor_authentication: true)
+ new_parent_group.reload # make sure traversal_ids are reloaded
end
it 'does not update group two factor authentication setting' do
diff --git a/spec/services/import/gitlab_projects/create_project_from_remote_file_service_spec.rb b/spec/services/import/gitlab_projects/create_project_from_remote_file_service_spec.rb
new file mode 100644
index 00000000000..3c461c91ff0
--- /dev/null
+++ b/spec/services/import/gitlab_projects/create_project_from_remote_file_service_spec.rb
@@ -0,0 +1,182 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Import::GitlabProjects::CreateProjectFromRemoteFileService do
+ let(:remote_url) { 'https://external.file.path/file' }
+
+ let(:params) do
+ {
+ path: 'path',
+ namespace: user.namespace,
+ name: 'name',
+ remote_import_url: remote_url
+ }
+ end
+
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(user, params) }
+
+ it 'creates a project and returns a successful response' do
+ stub_headers_for(remote_url, {
+ 'content-type' => 'application/gzip',
+ 'content-length' => '10'
+ })
+
+ response = nil
+ expect { response = subject.execute }
+ .to change(Project, :count).by(1)
+
+ expect(response).to be_success
+ expect(response.http_status).to eq(:ok)
+ expect(response.payload).to be_instance_of(Project)
+ expect(response.payload.name).to eq('name')
+ expect(response.payload.path).to eq('path')
+ expect(response.payload.namespace).to eq(user.namespace)
+ end
+
+ context 'when the file url is invalid' do
+    it 'returns an error response with the reason for the failure' do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+
+ params[:remote_import_url] = 'https://localhost/file'
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq('Requests to localhost are not allowed')
+ end
+ end
+
+ context 'validate file type' do
+    it 'returns an error response when the file type is not provided' do
+ stub_headers_for(remote_url, { 'content-length' => '10' })
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message)
+ .to eq("Missing 'ContentType' header")
+ end
+
+    it 'returns an error response when the file type is not allowed' do
+ stub_headers_for(remote_url, {
+ 'content-type' => 'application/js',
+ 'content-length' => '10'
+ })
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message)
+ .to eq("Remote file content type 'application/js' not allowed. (Allowed content types: application/gzip)")
+ end
+ end
+
+  context 'validate file size' do
+    it 'returns an error response when the file size is not provided' do
+ stub_headers_for(remote_url, { 'content-type' => 'application/gzip' })
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message)
+ .to eq("Missing 'ContentLength' header")
+ end
+
+    it 'returns an error response when the file size is not numeric' do
+ stub_headers_for(remote_url, {
+ 'content-type' => 'application/gzip',
+ 'content-length' => 'some text'
+ })
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message)
+ .to eq("Missing 'ContentLength' header")
+ end
+
+    it 'returns an error response when the file is larger than allowed' do
+ stub_headers_for(remote_url, {
+ 'content-type' => 'application/gzip',
+ 'content-length' => 11.gigabytes.to_s
+ })
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message)
+ .to eq('Remote file larger than limit. (limit 10 GB)')
+ end
+ end
+
+ context 'when required parameters are not provided' do
+ let(:params) { {} }
+
+    it 'returns an error response with the reason for the failure' do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq("Parameter 'path' is required")
+
+ expect(subject.errors.full_messages).to match_array([
+ "Missing 'ContentLength' header",
+ "Missing 'ContentType' header",
+ "Parameter 'namespace' is required",
+ "Parameter 'path' is required",
+ "Parameter 'remote_import_url' is required"
+ ])
+ end
+ end
+
+ context 'when the project is invalid' do
+    it 'returns an error response with the reason for the failure' do
+ create(:project, namespace: user.namespace, path: 'path')
+
+ stub_headers_for(remote_url, {
+ 'content-type' => 'application/gzip',
+ 'content-length' => '10'
+ })
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq('Path has already been taken')
+ end
+ end
+
+ def stub_headers_for(url, headers = {})
+ allow(Gitlab::HTTP)
+ .to receive(:head)
+ .with(url)
+ .and_return(double(headers: headers))
+ end
+end
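
The new spec above exercises Import::GitlabProjects::CreateProjectFromRemoteFileService only through its public surface: a constructor taking the current user and a params hash, and #execute returning a response object that answers success?, http_status, payload and message. A minimal usage sketch under that assumption, limited to the calls the spec itself makes (the params are the spec's own fixtures):

    # Sketch only: drives the service the same way the spec does.
    params = {
      path: 'path',
      namespace: user.namespace,
      name: 'name',
      remote_import_url: 'https://external.file.path/file'
    }

    response = ::Import::GitlabProjects::CreateProjectFromRemoteFileService.new(user, params).execute

    if response.success?
      response.payload   # the newly created Project
    else
      response.message   # e.g. "Missing 'ContentType' header"
    end
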
diff --git a/spec/services/import/gitlab_projects/create_project_from_uploaded_file_service_spec.rb b/spec/services/import/gitlab_projects/create_project_from_uploaded_file_service_spec.rb
new file mode 100644
index 00000000000..a0e04a9a696
--- /dev/null
+++ b/spec/services/import/gitlab_projects/create_project_from_uploaded_file_service_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Import::GitlabProjects::CreateProjectFromUploadedFileService do
+ let(:file_upload) do
+ fixture_file_upload('spec/features/projects/import_export/test_project_export.tar.gz')
+ end
+
+ let(:params) do
+ {
+ path: 'path',
+ namespace: user.namespace,
+ name: 'name',
+ file: file_upload
+ }
+ end
+
+ let_it_be(:user) { create(:user) }
+
+ subject { described_class.new(user, params) }
+
+ it 'creates a project and returns a successful response' do
+ response = nil
+ expect { response = subject.execute }
+ .to change(Project, :count).by(1)
+
+ expect(response).to be_success
+ expect(response.http_status).to eq(:ok)
+ expect(response.payload).to be_instance_of(Project)
+ expect(response.payload.name).to eq('name')
+ expect(response.payload.path).to eq('path')
+ expect(response.payload.namespace).to eq(user.namespace)
+ end
+
+ context 'when required parameters are not provided' do
+ let(:params) { {} }
+
+    it 'returns an error response with the reason for the failure' do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq("Parameter 'path' is required")
+
+ expect(subject.errors.full_messages).to match_array([
+ "Parameter 'namespace' is required",
+ "Parameter 'path' is required",
+ "Parameter 'file' is required"
+ ])
+ end
+ end
+
+ context 'when the project is invalid' do
+    it 'returns an error response with the reason for the failure' do
+ create(:project, namespace: user.namespace, path: 'path')
+
+ response = nil
+ expect { response = subject.execute }
+ .not_to change(Project, :count)
+
+ expect(response).not_to be_success
+ expect(response.http_status).to eq(:bad_request)
+ expect(response.message).to eq('Path has already been taken')
+ end
+ end
+end
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index c749f282cd3..dfdfb57111c 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -101,6 +101,22 @@ RSpec.describe Issuable::BulkUpdateService do
end
end
+ shared_examples 'scheduling cached group count clear' do
+ it 'schedules worker' do
+ expect(Issuables::ClearGroupsIssueCounterWorker).to receive(:perform_async)
+
+ bulk_update(issuables, params)
+ end
+ end
+
+ shared_examples 'not scheduling cached group count clear' do
+ it 'does not schedule worker' do
+ expect(Issuables::ClearGroupsIssueCounterWorker).not_to receive(:perform_async)
+
+ bulk_update(issuables, params)
+ end
+ end
+
context 'with issuables at a project level' do
let(:parent) { project }
@@ -131,6 +147,11 @@ RSpec.describe Issuable::BulkUpdateService do
expect(project.issues.opened).to be_empty
expect(project.issues.closed).not_to be_empty
end
+
+ it_behaves_like 'scheduling cached group count clear' do
+ let(:issuables) { issues }
+ let(:params) { { state_event: 'close' } }
+ end
end
describe 'reopen issues' do
@@ -149,6 +170,11 @@ RSpec.describe Issuable::BulkUpdateService do
expect(project.issues.closed).to be_empty
expect(project.issues.opened).not_to be_empty
end
+
+ it_behaves_like 'scheduling cached group count clear' do
+ let(:issuables) { issues }
+ let(:params) { { state_event: 'reopen' } }
+ end
end
describe 'updating merge request assignee' do
@@ -231,6 +257,10 @@ RSpec.describe Issuable::BulkUpdateService do
let(:milestone) { create(:milestone, project: project) }
it_behaves_like 'updates milestones'
+
+ it_behaves_like 'not scheduling cached group count clear' do
+ let(:params) { { milestone_id: milestone.id } }
+ end
end
describe 'updating labels' do
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index a988ab81754..1426ef2a1f6 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
end
it 'creates a resource label event' do
- described_class.new(project, user).execute(issuable, old_labels: [])
+ described_class.new(project: project, current_user: user).execute(issuable, old_labels: [])
event = issuable.reload.resource_label_events.last
expect(event).not_to be_nil
@@ -66,7 +66,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
context 'on issuable create' do
let(:issuable) { build(:issue, project: project) }
- subject { described_class.new(project, user).execute(issuable, old_labels: [], is_update: false) }
+ subject { described_class.new(project: project, current_user: user).execute(issuable, old_labels: [], is_update: false) }
it 'does not create system note for title and description' do
issuable.save!
diff --git a/spec/services/issuable/destroy_label_links_service_spec.rb b/spec/services/issuable/destroy_label_links_service_spec.rb
new file mode 100644
index 00000000000..bbc69e266c9
--- /dev/null
+++ b/spec/services/issuable/destroy_label_links_service_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issuable::DestroyLabelLinksService do
+ describe '#execute' do
+ context 'when target is an Issue' do
+ let_it_be(:target) { create(:issue) }
+
+ it_behaves_like 'service deleting label links of an issuable'
+ end
+
+ context 'when target is a MergeRequest' do
+ let_it_be(:target) { create(:merge_request) }
+
+ it_behaves_like 'service deleting label links of an issuable'
+ end
+ end
+end
diff --git a/spec/services/issuable/destroy_service_spec.rb b/spec/services/issuable/destroy_service_spec.rb
index fa4902e5237..c72d48d5b77 100644
--- a/spec/services/issuable/destroy_service_spec.rb
+++ b/spec/services/issuable/destroy_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Issuable::DestroyService do
let(:group) { create(:group, :public) }
let(:project) { create(:project, :public, group: group) }
- subject(:service) { described_class.new(project, user) }
+ subject(:service) { described_class.new(project: project, current_user: user) }
describe '#execute' do
context 'when issuable is an issue' do
@@ -31,6 +31,10 @@ RSpec.describe Issuable::DestroyService do
it_behaves_like 'service deleting todos' do
let(:issuable) { issue }
end
+
+ it_behaves_like 'service deleting label links' do
+ let(:issuable) { issue }
+ end
end
context 'when issuable is a merge request' do
@@ -54,6 +58,10 @@ RSpec.describe Issuable::DestroyService do
it_behaves_like 'service deleting todos' do
let(:issuable) { merge_request }
end
+
+ it_behaves_like 'service deleting label links' do
+ let(:issuable) { merge_request }
+ end
end
end
end
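
A pattern repeated across the hunks above and below is the switch from positional to keyword arguments when instantiating issuable services. A before/after sketch of the calling convention, taken directly from how these specs construct the services (project:, current_user:, and params: where an options hash is passed):

    # Old convention (removed in these hunks): positional arguments.
    Issuable::DestroyService.new(project, user).execute(issuable)

    # New convention (added in these hunks): keyword arguments.
    Issuable::DestroyService.new(project: project, current_user: user).execute(issuable)

    # Services that take options pass them under an explicit params: keyword.
    Issues::UpdateService.new(project: project, current_user: user, params: opts).execute(issue)
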
diff --git a/spec/services/issue_rebalancing_service_spec.rb b/spec/services/issue_rebalancing_service_spec.rb
index 7b3d4213b24..1c7f74264b7 100644
--- a/spec/services/issue_rebalancing_service_spec.rb
+++ b/spec/services/issue_rebalancing_service_spec.rb
@@ -3,31 +3,35 @@
require 'spec_helper'
RSpec.describe IssueRebalancingService do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
let_it_be(:user) { project.creator }
let_it_be(:start) { RelativePositioning::START_POSITION }
let_it_be(:max_pos) { RelativePositioning::MAX_POSITION }
let_it_be(:min_pos) { RelativePositioning::MIN_POSITION }
let_it_be(:clump_size) { 300 }
- let_it_be(:unclumped) do
- (0..clump_size).to_a.map do |i|
+ let_it_be(:unclumped, reload: true) do
+ (1..clump_size).to_a.map do |i|
create(:issue, project: project, author: user, relative_position: start + (1024 * i))
end
end
- let_it_be(:end_clump) do
- (0..clump_size).to_a.map do |i|
+ let_it_be(:end_clump, reload: true) do
+ (1..clump_size).to_a.map do |i|
create(:issue, project: project, author: user, relative_position: max_pos - i)
end
end
- let_it_be(:start_clump) do
- (0..clump_size).to_a.map do |i|
+ let_it_be(:start_clump, reload: true) do
+ (1..clump_size).to_a.map do |i|
create(:issue, project: project, author: user, relative_position: min_pos + i)
end
end
+ before do
+ stub_feature_flags(issue_rebalancing_with_retry: false)
+ end
+
def issues_in_position_order
project.reload.issues.reorder(relative_position: :asc).to_a
end
@@ -101,19 +105,70 @@ RSpec.describe IssueRebalancingService do
end
end
+ shared_examples 'rebalancing is retried on statement timeout exceptions' do
+ subject { described_class.new(project.issues.first) }
+
+ it 'retries update statement' do
+ call_count = 0
+ allow(subject).to receive(:run_update_query) do
+ call_count += 1
+ if call_count < 13
+ raise(ActiveRecord::QueryCanceled)
+ else
+ call_count = 0 if call_count == 13 + 16 # 16 = 17 sub-batches - 1 call that succeeded as part of 5th batch
+ true
+ end
+ end
+
+ # call math:
+      # batches start at 100 and are split in half after every 3 retries when ActiveRecord::QueryCanceled (statement timeout) is raised.
+      # The first 12 calls raise ActiveRecord::QueryCanceled and the 13th call succeeds:
+      # 1. batch size 100 => 3 calls, raise ActiveRecord::QueryCanceled
+      # 2. 100/2=50 => 3 calls + 3 above = 6 calls, raise ActiveRecord::QueryCanceled
+      # 3. 50/2=25 => 3 calls + 6 above = 9 calls, raise ActiveRecord::QueryCanceled
+      # 4. 25/2=12 => 3 calls + 9 above = 12 calls, raise ActiveRecord::QueryCanceled
+ # 5. 12/2=6 => 1 call + 12 above = 13 calls, run successfully
+ #
+ # so out of 100 elements we created batches of 6 items => 100/6 = 17 sub-batches of 6 or less elements
+ #
+ # project.issues.count: 900 issues, so 9 batches of 100 => 9 * (13+16) = 261
+ expect(subject).to receive(:update_positions).exactly(261).times.and_call_original
+
+ subject.execute
+ end
+ end
+
context 'when issue_rebalancing_optimization feature flag is on' do
before do
stub_feature_flags(issue_rebalancing_optimization: true)
end
it_behaves_like 'IssueRebalancingService shared examples'
+
+ context 'when issue_rebalancing_with_retry feature flag is on' do
+ before do
+ stub_feature_flags(issue_rebalancing_with_retry: true)
+ end
+
+ it_behaves_like 'IssueRebalancingService shared examples'
+ it_behaves_like 'rebalancing is retried on statement timeout exceptions'
+ end
end
- context 'when issue_rebalancing_optimization feature flag is on' do
+ context 'when issue_rebalancing_optimization feature flag is off' do
before do
stub_feature_flags(issue_rebalancing_optimization: false)
end
it_behaves_like 'IssueRebalancingService shared examples'
+
+ context 'when issue_rebalancing_with_retry feature flag is on' do
+ before do
+ stub_feature_flags(issue_rebalancing_with_retry: true)
+ end
+
+ it_behaves_like 'IssueRebalancingService shared examples'
+ it_behaves_like 'rebalancing is retried on statement timeout exceptions'
+ end
end
end
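
The retry shared example above encodes its expected call count in a comment, and the arithmetic can be checked independently. A small sketch that reproduces those numbers under the assumptions the comment states (the batch size starts at 100 and halves after every 3 statement-timeout failures, the 13th call succeeds, and 900 issues are processed in 9 batches of 100):

    # Sketch only: reproduces the call math from the spec comment above.
    failing_calls = [100, 50, 25, 12].size * 3                  # 3 failed calls at each batch size => 12
    calls_until_success = failing_calls + 1                     # the 13th call (batch size 6) succeeds
    sub_batches = (100 / 6.0).ceil                               # 17 sub-batches of at most 6 items
    calls_per_batch = calls_until_success + (sub_batches - 1)    # 13 + 16 = 29
    calls_per_batch * 9                                          # => 261, the expected update_positions count
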
diff --git a/spec/services/issues/after_create_service_spec.rb b/spec/services/issues/after_create_service_spec.rb
index bc9be3211d3..6b720d6e687 100644
--- a/spec/services/issues/after_create_service_spec.rb
+++ b/spec/services/issues/after_create_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Issues::AfterCreateService do
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:issue) { create(:issue, project: project, author: current_user, milestone: milestone, assignee_ids: [assignee.id]) }
- subject(:after_create_service) { described_class.new(project, current_user) }
+ subject(:after_create_service) { described_class.new(project: project, current_user: current_user) }
describe '#execute' do
it 'creates a pending todo for new assignee' do
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index 80fe2474ecd..3f506ec58b0 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper.rb'
+require 'spec_helper'
RSpec.describe Issues::BuildService do
let_it_be(:project) { create(:project, :repository) }
@@ -15,7 +15,7 @@ RSpec.describe Issues::BuildService do
end
def build_issue(issue_params = {})
- described_class.new(project, user, issue_params).execute
+ described_class.new(project: project, current_user: user, params: issue_params).execute
end
context 'for a single discussion' do
@@ -41,7 +41,7 @@ RSpec.describe Issues::BuildService do
describe '#items_for_discussions' do
it 'has an item for each discussion' do
create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.source_project, line_number: 13)
- service = described_class.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid)
+ service = described_class.new(project: project, current_user: user, params: { merge_request_to_resolve_discussions_of: merge_request.iid })
service.execute
@@ -50,7 +50,7 @@ RSpec.describe Issues::BuildService do
end
describe '#item_for_discussion' do
- let(:service) { described_class.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid) }
+ let(:service) { described_class.new(project: project, current_user: user, params: { merge_request_to_resolve_discussions_of: merge_request.iid }) }
it 'mentions the author of the note' do
discussion = create(:diff_note_on_merge_request, author: create(:user, username: 'author')).to_discussion
@@ -184,9 +184,9 @@ RSpec.describe Issues::BuildService do
end
it 'cannot set invalid type' do
- expect do
- build_issue(issue_type: 'invalid type')
- end.to raise_error(ArgumentError, "'invalid type' is not a valid issue_type")
+ issue = build_issue(issue_type: 'invalid type')
+
+ expect(issue).to be_issue
end
end
end
diff --git a/spec/services/issues/clone_service_spec.rb b/spec/services/issues/clone_service_spec.rb
index 44180a322ca..abbcb1c1d48 100644
--- a/spec/services/issues/clone_service_spec.rb
+++ b/spec/services/issues/clone_service_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Issues::CloneService do
let(:with_notes) { false }
subject(:clone_service) do
- described_class.new(old_project, user)
+ described_class.new(project: old_project, current_user: user)
end
shared_context 'user can clone issue' do
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 3cf45143594..8950bdd465f 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -3,24 +3,52 @@
require 'spec_helper'
RSpec.describe Issues::CloseService do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user, email: "user@example.com") }
- let(:user2) { create(:user, email: "user2@example.com") }
- let(:guest) { create(:user) }
- let(:issue) { create(:issue, title: "My issue", project: project, assignees: [user2], author: create(:user)) }
+ subject(:close_issue) { described_class.new(project: project, current_user: user).close_issue(issue) }
+
+ let_it_be(:project, refind: true) { create(:project, :repository) }
+ let_it_be(:label1) { create(:label, project: project) }
+ let_it_be(:label2) { create(:label, project: project, remove_on_close: true) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:user) { create(:user, email: "user@example.com") }
+ let_it_be(:user2) { create(:user, email: "user2@example.com") }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:closing_merge_request) { create(:merge_request, source_project: project) }
+
let(:external_issue) { ExternalIssue.new('JIRA-123', project) }
- let(:closing_merge_request) { create(:merge_request, source_project: project) }
- let(:closing_commit) { create(:commit, project: project) }
- let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) }
+ let!(:issue) { create(:issue, title: "My issue", project: project, assignees: [user2], author: author) }
- before do
+ before_all do
project.add_maintainer(user)
project.add_developer(user2)
project.add_guest(guest)
end
+ shared_examples 'removes labels marked for removal from issue when closed' do
+ before do
+ issue.update!(label_ids: [label1.id, label2.id])
+ end
+
+ it 'removes labels marked for removal' do
+ expect do
+ close_issue
+ end.to change { issue.reload.label_ids }.from(containing_exactly(label1.id, label2.id)).to(containing_exactly(label1.id))
+ end
+
+ it 'creates system notes for the removed labels' do
+ expect do
+ close_issue
+ end.to change(ResourceLabelEvent, :count).by(1)
+
+ expect(ResourceLabelEvent.last.slice(:action, :issue_id, :label_id)).to eq(
+ 'action' => 'remove',
+ 'issue_id' => issue.id,
+ 'label_id' => label2.id
+ )
+ end
+ end
+
describe '#execute' do
- let(:service) { described_class.new(project, user) }
+ let(:service) { described_class.new(project: project, current_user: user) }
it 'checks if the user is authorized to update the issue' do
expect(service).to receive(:can?).with(user, :update_issue, issue)
@@ -87,18 +115,18 @@ RSpec.describe Issues::CloseService do
project.reload
expect(project.external_issue_tracker).to receive(:close_issue)
- described_class.new(project, user).close_issue(external_issue)
+ described_class.new(project: project, current_user: user).close_issue(external_issue)
end
end
- context 'with innactive external issue tracker supporting close_issue' do
+ context 'with inactive external issue tracker supporting close_issue' do
let!(:external_issue_tracker) { create(:jira_service, project: project, active: false) }
it 'does not close the issue on the external issue tracker' do
project.reload
expect(project.external_issue_tracker).not_to receive(:close_issue)
- described_class.new(project, user).close_issue(external_issue)
+ described_class.new(project: project, current_user: user).close_issue(external_issue)
end
end
@@ -109,7 +137,7 @@ RSpec.describe Issues::CloseService do
project.reload
expect(project.external_issue_tracker).not_to receive(:close_issue)
- described_class.new(project, user).close_issue(external_issue)
+ described_class.new(project: project, current_user: user).close_issue(external_issue)
end
end
end
@@ -117,10 +145,12 @@ RSpec.describe Issues::CloseService do
context "closed by a merge request", :sidekiq_might_not_need_inline do
subject(:close_issue) do
perform_enqueued_jobs do
- described_class.new(project, user).close_issue(issue, closed_via: closing_merge_request)
+ described_class.new(project: project, current_user: user).close_issue(issue, closed_via: closing_merge_request)
end
end
+ it_behaves_like 'removes labels marked for removal from issue when closed'
+
it 'mentions closure via a merge request' do
close_issue
@@ -184,10 +214,18 @@ RSpec.describe Issues::CloseService do
end
context "closed by a commit", :sidekiq_might_not_need_inline do
- it 'mentions closure via a commit' do
+ subject(:close_issue) do
perform_enqueued_jobs do
- described_class.new(project, user).close_issue(issue, closed_via: closing_commit)
+ described_class.new(project: project, current_user: user).close_issue(issue, closed_via: closing_commit)
end
+ end
+
+ let(:closing_commit) { create(:commit, project: project) }
+
+ it_behaves_like 'removes labels marked for removal from issue when closed'
+
+ it 'mentions closure via a commit' do
+ close_issue
email = ActionMailer::Base.deliveries.last
@@ -199,9 +237,8 @@ RSpec.describe Issues::CloseService do
context 'when user cannot read the commit' do
it 'does not mention the commit id' do
project.project_feature.update_attribute(:repository_access_level, ProjectFeature::DISABLED)
- perform_enqueued_jobs do
- described_class.new(project, user).close_issue(issue, closed_via: closing_commit)
- end
+
+ close_issue
email = ActionMailer::Base.deliveries.last
body_text = email.body.parts.map(&:body).join(" ")
@@ -216,10 +253,20 @@ RSpec.describe Issues::CloseService do
context "valid params" do
subject(:close_issue) do
perform_enqueued_jobs do
- described_class.new(project, user).close_issue(issue)
+ described_class.new(project: project, current_user: user).close_issue(issue)
end
end
+ it 'verifies the number of queries' do
+ recorded = ActiveRecord::QueryRecorder.new { close_issue }
+ expected_queries = 32
+
+ expect(recorded.count).to be <= expected_queries
+ expect(recorded.cached_count).to eq(0)
+ end
+
+ it_behaves_like 'removes labels marked for removal from issue when closed'
+
it 'closes the issue' do
close_issue
@@ -230,7 +277,7 @@ RSpec.describe Issues::CloseService do
it 'records closed user' do
close_issue
- expect(issue.closed_by_id).to be(user.id)
+ expect(issue.reload.closed_by_id).to be(user.id)
end
it 'sends email to user2 about assign of new issue', :sidekiq_might_not_need_inline do
@@ -249,11 +296,23 @@ RSpec.describe Issues::CloseService do
end
it 'marks todos as done' do
+ todo = create(:todo, :assigned, user: user, project: project, target: issue, author: user2)
+
close_issue
expect(todo.reload).to be_done
end
+ context 'when closing the issue fails' do
+ it 'does not assign a closed_by value for the issue' do
+ allow(issue).to receive(:close).and_return(false)
+
+ close_issue
+
+ expect(issue.closed_by_id).to be_nil
+ end
+ end
+
context 'when there is an associated Alert Management Alert' do
context 'when alert can be resolved' do
let!(:alert) { create(:alert_management_alert, issue: issue, project: project) }
@@ -303,26 +362,32 @@ RSpec.describe Issues::CloseService do
end
context 'when issue is not confidential' do
+ it_behaves_like 'removes labels marked for removal from issue when closed'
+
it 'executes issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
- described_class.new(project, user).close_issue(issue)
+ close_issue
end
end
context 'when issue is confidential' do
- it 'executes confidential issue hooks' do
- issue = create(:issue, :confidential, project: project)
+ let(:issue) { create(:issue, :confidential, project: project) }
+
+ it_behaves_like 'removes labels marked for removal from issue when closed'
+ it 'executes confidential issue hooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
- described_class.new(project, user).close_issue(issue)
+ close_issue
end
end
context 'internal issues disabled' do
+ let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) }
+
before do
project.issues_enabled = false
project.save!
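
The 'removes labels marked for removal from issue when closed' shared example above is reused across every close path in this spec (merge request, commit, and plain close). Its contract, restated as a compact sketch using only names from the diff (label2 is the label created with remove_on_close: true):

    # Sketch only: the behavior the shared example locks in.
    issue.update!(label_ids: [label1.id, label2.id])

    Issues::CloseService.new(project: project, current_user: user).close_issue(issue)

    issue.reload.label_ids              # => [label1.id]    (label2 removed on close)
    ResourceLabelEvent.last.action      # => 'remove'
    ResourceLabelEvent.last.label_id    # => label2.id
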
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 83c6373c335..9c84242d8ae 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Issues::CreateService do
let_it_be(:assignee) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
- let(:issue) { described_class.new(project, user, opts).execute }
+ let(:issue) { described_class.new(project: project, current_user: user, params: opts).execute }
context 'when params are valid' do
let_it_be(:labels) { create_pair(:label, project: project) }
@@ -44,7 +44,7 @@ RSpec.describe Issues::CreateService do
end
context 'when skip_system_notes is true' do
- let(:issue) { described_class.new(project, user, opts).execute(skip_system_notes: true) }
+ let(:issue) { described_class.new(project: project, current_user: user, params: opts).execute(skip_system_notes: true) }
it 'does not call Issuable::CommonSystemNotesService' do
expect(Issuable::CommonSystemNotesService).not_to receive(:new)
@@ -96,7 +96,7 @@ RSpec.describe Issues::CreateService do
end
it 'filters out params that cannot be set without the :admin_issue permission' do
- issue = described_class.new(project, guest, opts).execute
+ issue = described_class.new(project: project, current_user: guest, params: opts).execute
expect(issue).to be_persisted
expect(issue.title).to eq('Awesome issue')
@@ -108,7 +108,7 @@ RSpec.describe Issues::CreateService do
end
it 'creates confidential issues' do
- issue = described_class.new(project, guest, confidential: true).execute
+ issue = described_class.new(project: project, current_user: guest, params: { confidential: true }).execute
expect(issue.confidential).to be_truthy
end
@@ -117,7 +117,7 @@ RSpec.describe Issues::CreateService do
it 'moves the issue to the end, in an asynchronous worker' do
expect(IssuePlacementWorker).to receive(:perform_async).with(be_nil, Integer)
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
end
context 'when label belongs to project group' do
@@ -204,7 +204,7 @@ RSpec.describe Issues::CreateService do
it 'invalidates open issues counter for assignees when issue is assigned' do
project.add_maintainer(assignee)
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
expect(assignee.assigned_open_issues_count).to eq 1
end
@@ -230,7 +230,7 @@ RSpec.describe Issues::CreateService do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
end
it 'executes confidential issue hooks when issue is confidential' do
@@ -239,7 +239,7 @@ RSpec.describe Issues::CreateService do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
end
context 'after_save callback to store_mentions' do
@@ -283,7 +283,7 @@ RSpec.describe Issues::CreateService do
it 'removes assignee when user id is invalid' do
opts = { title: 'Title', description: 'Description', assignee_ids: [-1] }
- issue = described_class.new(project, user, opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts).execute
expect(issue.assignees).to be_empty
end
@@ -291,7 +291,7 @@ RSpec.describe Issues::CreateService do
it 'removes assignee when user id is 0' do
opts = { title: 'Title', description: 'Description', assignee_ids: [0] }
- issue = described_class.new(project, user, opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts).execute
expect(issue.assignees).to be_empty
end
@@ -300,7 +300,7 @@ RSpec.describe Issues::CreateService do
project.add_maintainer(assignee)
opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] }
- issue = described_class.new(project, user, opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts).execute
expect(issue.assignees).to eq([assignee])
end
@@ -318,7 +318,7 @@ RSpec.describe Issues::CreateService do
project.update!(visibility_level: level)
opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] }
- issue = described_class.new(project, user, opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts).execute
expect(issue.assignees).to be_empty
end
@@ -328,7 +328,7 @@ RSpec.describe Issues::CreateService do
end
it_behaves_like 'issuable record that supports quick actions' do
- let(:issuable) { described_class.new(project, user, params).execute }
+ let(:issuable) { described_class.new(project: project, current_user: user, params: params).execute }
end
context 'Quick actions' do
@@ -368,14 +368,14 @@ RSpec.describe Issues::CreateService do
let(:opts) { { discussion_to_resolve: discussion.id, merge_request_to_resolve_discussions_of: merge_request.iid } }
it 'resolves the discussion' do
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
discussion.first_note.reload
expect(discussion.resolved?).to be(true)
end
it 'added a system note to the discussion' do
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first
@@ -383,17 +383,19 @@ RSpec.describe Issues::CreateService do
end
it 'assigns the title and description for the issue' do
- issue = described_class.new(project, user, opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts).execute
expect(issue.title).not_to be_nil
expect(issue.description).not_to be_nil
end
it 'can set nil explicitly to the title and description' do
- issue = described_class.new(project, user,
- merge_request_to_resolve_discussions_of: merge_request,
- description: nil,
- title: nil).execute
+ issue = described_class.new(project: project, current_user: user,
+ params: {
+ merge_request_to_resolve_discussions_of: merge_request,
+ description: nil,
+ title: nil
+ }).execute
expect(issue.description).to be_nil
expect(issue.title).to be_nil
@@ -404,14 +406,14 @@ RSpec.describe Issues::CreateService do
let(:opts) { { merge_request_to_resolve_discussions_of: merge_request.iid } }
it 'resolves the discussion' do
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
discussion.first_note.reload
expect(discussion.resolved?).to be(true)
end
it 'added a system note to the discussion' do
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first
@@ -419,17 +421,19 @@ RSpec.describe Issues::CreateService do
end
it 'assigns the title and description for the issue' do
- issue = described_class.new(project, user, opts).execute
+ issue = described_class.new(project: project, current_user: user, params: opts).execute
expect(issue.title).not_to be_nil
expect(issue.description).not_to be_nil
end
it 'can set nil explicitly to the title and description' do
- issue = described_class.new(project, user,
- merge_request_to_resolve_discussions_of: merge_request,
- description: nil,
- title: nil).execute
+ issue = described_class.new(project: project, current_user: user,
+ params: {
+ merge_request_to_resolve_discussions_of: merge_request,
+ description: nil,
+ title: nil
+ }).execute
expect(issue.description).to be_nil
expect(issue.title).to be_nil
@@ -454,7 +458,7 @@ RSpec.describe Issues::CreateService do
end
subject do
- described_class.new(project, user, params)
+ described_class.new(project: project, current_user: user, params: params)
end
before do
diff --git a/spec/services/issues/duplicate_service_spec.rb b/spec/services/issues/duplicate_service_spec.rb
index 0b5bc3f32ef..0eb0bbb1480 100644
--- a/spec/services/issues/duplicate_service_spec.rb
+++ b/spec/services/issues/duplicate_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Issues::DuplicateService do
let(:canonical_issue) { create(:issue, project: canonical_project) }
let(:duplicate_issue) { create(:issue, project: duplicate_project) }
- subject { described_class.new(duplicate_project, user, {}) }
+ subject { described_class.new(project: duplicate_project, current_user: user) }
describe '#execute' do
context 'when the issues passed are the same' do
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 2f29a2e2022..76588860957 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Issues::MoveService do
end
subject(:move_service) do
- described_class.new(old_project, user)
+ described_class.new(project: old_project, current_user: user)
end
shared_context 'user can move issue' do
diff --git a/spec/services/issues/referenced_merge_requests_service_spec.rb b/spec/services/issues/referenced_merge_requests_service_spec.rb
index bf7a4c97e48..dc55ba8ebea 100644
--- a/spec/services/issues/referenced_merge_requests_service_spec.rb
+++ b/spec/services/issues/referenced_merge_requests_service_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper.rb'
+require 'spec_helper'
RSpec.describe Issues::ReferencedMergeRequestsService do
def create_referencing_mr(attributes = {})
@@ -26,7 +26,7 @@ RSpec.describe Issues::ReferencedMergeRequestsService do
let_it_be(:referencing_mr) { create_referencing_mr(source_project: project, source_branch: 'csv') }
let_it_be(:referencing_mr_other_project) { create_referencing_mr(source_project: other_project, source_branch: 'csv') }
- let(:service) { described_class.new(project, user) }
+ let(:service) { described_class.new(project: project, current_user: user) }
describe '#execute' do
it 'returns a list of sorted merge requests' do
diff --git a/spec/services/issues/related_branches_service_spec.rb b/spec/services/issues/related_branches_service_spec.rb
index c9c029bca4f..7a4bae7f852 100644
--- a/spec/services/issues/related_branches_service_spec.rb
+++ b/spec/services/issues/related_branches_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Issues::RelatedBranchesService do
let(:user) { developer }
- subject { described_class.new(issue.project, user) }
+ subject { described_class.new(project: issue.project, current_user: user) }
before do
issue.project.add_developer(developer)
@@ -95,7 +95,7 @@ RSpec.describe Issues::RelatedBranchesService do
merge_request.create_cross_references!(user)
referenced_merge_requests = Issues::ReferencedMergeRequestsService
- .new(issue.project, user)
+ .new(project: issue.project, current_user: user)
.referenced_merge_requests(issue)
expect(referenced_merge_requests).not_to be_empty
diff --git a/spec/services/issues/reopen_service_spec.rb b/spec/services/issues/reopen_service_spec.rb
index ffe74cca9cf..746a9105531 100644
--- a/spec/services/issues/reopen_service_spec.rb
+++ b/spec/services/issues/reopen_service_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Issues::ReopenService do
project.add_guest(guest)
perform_enqueued_jobs do
- described_class.new(project, guest).execute(issue)
+ described_class.new(project: project, current_user: guest).execute(issue)
end
end
@@ -33,11 +33,11 @@ RSpec.describe Issues::ReopenService do
issue.assignees << user
expect_any_instance_of(User).to receive(:invalidate_issue_cache_counts)
- described_class.new(project, user).execute(issue)
+ described_class.new(project: project, current_user: user).execute(issue)
end
it 'refreshes the number of opened issues' do
- service = described_class.new(project, user)
+ service = described_class.new(project: project, current_user: user)
expect { service.execute(issue) }
.to change { project.open_issues_count }.from(0).to(1)
@@ -50,14 +50,14 @@ RSpec.describe Issues::ReopenService do
expect(service).to receive(:delete_cache).and_call_original
end
- described_class.new(project, user).execute(issue)
+ described_class.new(project: project, current_user: user).execute(issue)
end
context 'issue is incident type' do
let(:issue) { create(:incident, :closed, project: project) }
let(:current_user) { user }
- subject { described_class.new(project, user).execute(issue) }
+ subject { described_class.new(project: project, current_user: user).execute(issue) }
it_behaves_like 'an incident management tracked event', :incident_management_incident_reopened
end
@@ -67,7 +67,7 @@ RSpec.describe Issues::ReopenService do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :issue_hooks)
- described_class.new(project, user).execute(issue)
+ described_class.new(project: project, current_user: user).execute(issue)
end
end
@@ -78,7 +78,7 @@ RSpec.describe Issues::ReopenService do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks)
expect(project).to receive(:execute_services).with(an_instance_of(Hash), :confidential_issue_hooks)
- described_class.new(project, user).execute(issue)
+ described_class.new(project: project, current_user: user).execute(issue)
end
end
end
diff --git a/spec/services/issues/reorder_service_spec.rb b/spec/services/issues/reorder_service_spec.rb
index 78b937a1caf..15668a3aa23 100644
--- a/spec/services/issues/reorder_service_spec.rb
+++ b/spec/services/issues/reorder_service_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe Issues::ReorderService do
match_params = { move_between_ids: [issue2.id, issue3.id], board_group_id: group.id }
expect(Issues::UpdateService)
- .to receive(:new).with(project, user, match_params)
+ .to receive(:new).with(project: project, current_user: user, params: match_params)
.and_return(double(execute: build(:issue)))
subject.execute(issue1)
@@ -95,6 +95,6 @@ RSpec.describe Issues::ReorderService do
end
def service(params)
- described_class.new(project, user, params)
+ described_class.new(project: project, current_user: user, params: params)
end
end
diff --git a/spec/services/issues/resolve_discussions_spec.rb b/spec/services/issues/resolve_discussions_spec.rb
index 9fbc9cbcca6..1ac71b966bc 100644
--- a/spec/services/issues/resolve_discussions_spec.rb
+++ b/spec/services/issues/resolve_discussions_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'spec_helper.rb'
+require 'spec_helper'
RSpec.describe Issues::ResolveDiscussions do
let(:project) { create(:project, :repository) }
@@ -11,7 +11,7 @@ RSpec.describe Issues::ResolveDiscussions do
DummyService.class_eval do
include ::Issues::ResolveDiscussions
- def initialize(*args)
+ def initialize(project:, current_user: nil, params: {})
super
filter_resolve_discussion_params
end
@@ -26,7 +26,7 @@ RSpec.describe Issues::ResolveDiscussions do
let(:other_merge_request) { create(:merge_request, source_project: project, source_branch: "fix") }
describe "#merge_request_for_resolving_discussion" do
- let(:service) { DummyService.new(project, user, merge_request_to_resolve_discussions_of: merge_request.iid) }
+ let(:service) { DummyService.new(project: project, current_user: user, params: { merge_request_to_resolve_discussions_of: merge_request.iid }) }
it "finds the merge request" do
expect(service.merge_request_to_resolve_discussions_of).to eq(merge_request)
@@ -45,10 +45,12 @@ RSpec.describe Issues::ResolveDiscussions do
describe "#discussions_to_resolve" do
it "contains a single discussion when matching merge request and discussion are passed" do
service = DummyService.new(
- project,
- user,
- discussion_to_resolve: discussion.id,
- merge_request_to_resolve_discussions_of: merge_request.iid
+ project: project,
+ current_user: user,
+ params: {
+ discussion_to_resolve: discussion.id,
+ merge_request_to_resolve_discussions_of: merge_request.iid
+ }
)
# We need to compare discussion id's because the Discussion-objects are rebuilt
# which causes the object-id's not to be different.
@@ -63,9 +65,9 @@ RSpec.describe Issues::ResolveDiscussions do
project: merge_request.target_project,
line_number: 15)])
service = DummyService.new(
- project,
- user,
- merge_request_to_resolve_discussions_of: merge_request.iid
+ project: project,
+ current_user: user,
+ params: { merge_request_to_resolve_discussions_of: merge_request.iid }
)
# We need to compare discussion id's because the Discussion-objects are rebuilt
# which causes the object-id's not to be different.
@@ -81,9 +83,9 @@ RSpec.describe Issues::ResolveDiscussions do
line_number: 15
)])
service = DummyService.new(
- project,
- user,
- merge_request_to_resolve_discussions_of: merge_request.iid
+ project: project,
+ current_user: user,
+ params: { merge_request_to_resolve_discussions_of: merge_request.iid }
)
# We need to compare discussion id's because the Discussion-objects are rebuilt
# which causes the object-id's not to be different.
@@ -94,10 +96,12 @@ RSpec.describe Issues::ResolveDiscussions do
it "is empty when a discussion and another merge request are passed" do
service = DummyService.new(
- project,
- user,
- discussion_to_resolve: discussion.id,
- merge_request_to_resolve_discussions_of: other_merge_request.iid
+ project: project,
+ current_user: user,
+ params: {
+ discussion_to_resolve: discussion.id,
+ merge_request_to_resolve_discussions_of: other_merge_request.iid
+ }
)
expect(service.discussions_to_resolve).to be_empty
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index fd42a84e405..8c97dd95ced 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Issues::UpdateService, :mailer do
end
def update_issue(opts)
- described_class.new(project, user, opts).execute(issue)
+ described_class.new(project: project, current_user: user, params: opts).execute(issue)
end
context 'valid params' do
@@ -165,20 +165,38 @@ RSpec.describe Issues::UpdateService, :mailer do
expect(user2.assigned_open_issues_count).to eq 1
end
- it 'sorts issues as specified by parameters' do
- issue1 = create(:issue, project: project, assignees: [user3])
- issue2 = create(:issue, project: project, assignees: [user3])
+ context 'when changing relative position' do
+ let(:issue1) { create(:issue, project: project, assignees: [user3]) }
+ let(:issue2) { create(:issue, project: project, assignees: [user3]) }
- [issue, issue1, issue2].each do |issue|
- issue.move_to_end
- issue.save!
+ before do
+ [issue, issue1, issue2].each do |issue|
+ issue.move_to_end
+ issue.save!
+ end
end
- opts[:move_between_ids] = [issue1.id, issue2.id]
+ it 'sorts issues as specified by parameters' do
+ opts[:move_between_ids] = [issue1.id, issue2.id]
- update_issue(opts)
+ update_issue(opts)
- expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
+ expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position)
+ end
+
+      context 'when block_issue_repositioning flag is enabled' do
+ before do
+ stub_feature_flags(block_issue_repositioning: true)
+ end
+
+ it 'raises error' do
+ old_position = issue.relative_position
+ opts[:move_between_ids] = [issue1.id, issue2.id]
+
+ expect { update_issue(opts) }.to raise_error(::Gitlab::RelativePositioning::IssuePositioningDisabled)
+ expect(issue.reload.relative_position).to eq(old_position)
+ end
+ end
end
it 'does not rebalance even if needed if the flag is disabled' do
@@ -269,7 +287,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts[:move_between_ids] = [issue_1.id, issue_2.id]
opts[:board_group_id] = group.id
- described_class.new(issue_3.project, user, opts).execute(issue_3)
+ described_class.new(project: issue_3.project, current_user: user, params: opts).execute(issue_3)
expect(issue_2.relative_position).to be_between(issue_1.relative_position, issue_2.relative_position)
end
end
@@ -282,7 +300,12 @@ RSpec.describe Issues::UpdateService, :mailer do
end
it 'filters out params that cannot be set without the :admin_issue permission' do
- described_class.new(project, guest, opts.merge(confidential: true)).execute(issue)
+ described_class.new(
+ project: project, current_user: guest, params: opts.merge(
+ confidential: true,
+ issue_type: 'test_case'
+ )
+ ).execute(issue)
expect(issue).to be_valid
expect(issue.title).to eq 'New title'
@@ -293,6 +316,7 @@ RSpec.describe Issues::UpdateService, :mailer do
expect(issue.due_date).to be_nil
expect(issue.discussion_locked).to be_falsey
expect(issue.confidential).to be_falsey
+ expect(issue.issue_type).to eql('issue')
end
end
@@ -650,7 +674,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts = { label_ids: [label.id] }
perform_enqueued_jobs do
- @issue = described_class.new(project, user, opts).execute(issue)
+ @issue = described_class.new(project: project, current_user: user, params: opts).execute(issue)
end
should_email(subscriber)
@@ -666,7 +690,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts = { label_ids: [label.id, label2.id] }
perform_enqueued_jobs do
- @issue = described_class.new(project, user, opts).execute(issue)
+ @issue = described_class.new(project: project, current_user: user, params: opts).execute(issue)
end
should_not_email(subscriber)
@@ -677,7 +701,7 @@ RSpec.describe Issues::UpdateService, :mailer do
opts = { label_ids: [label2.id] }
perform_enqueued_jobs do
- @issue = described_class.new(project, user, opts).execute(issue)
+ @issue = described_class.new(project: project, current_user: user, params: opts).execute(issue)
end
should_not_email(subscriber)
@@ -709,7 +733,7 @@ RSpec.describe Issues::UpdateService, :mailer do
line_number: 1
}
}
- service = described_class.new(project, user, params)
+ service = described_class.new(project: project, current_user: user, params: params)
expect(Spam::SpamActionService).not_to receive(:new)
@@ -785,7 +809,7 @@ RSpec.describe Issues::UpdateService, :mailer do
context 'updating labels' do
let(:label3) { create(:label, project: project) }
- let(:result) { described_class.new(project, user, params).execute(issue).reload }
+ let(:result) { described_class.new(project: project, current_user: user, params: params).execute(issue).reload }
context 'when add_label_ids and label_ids are passed' do
let(:params) { { label_ids: [label.id], add_label_ids: [label3.id] } }
@@ -983,14 +1007,14 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'raises an error for invalid move ids within a project' do
opts = { move_between_ids: [9000, non_existing_record_id] }
- expect { described_class.new(issue.project, user, opts).execute(issue) }
+ expect { described_class.new(project: issue.project, current_user: user, params: opts).execute(issue) }
.to raise_error(ActiveRecord::RecordNotFound)
end
it 'raises an error for invalid move ids within a group' do
opts = { move_between_ids: [9000, non_existing_record_id], board_group_id: create(:group).id }
- expect { described_class.new(issue.project, user, opts).execute(issue) }
+ expect { described_class.new(project: issue.project, current_user: user, params: opts).execute(issue) }
.to raise_error(ActiveRecord::RecordNotFound)
end
end
@@ -1014,13 +1038,13 @@ RSpec.describe Issues::UpdateService, :mailer do
with_them do
it 'broadcasts to the issues channel based on ActionCable and feature flag values' do
- expect(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(action_cable_in_app_enabled)
+ allow(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(action_cable_in_app_enabled)
stub_feature_flags(broadcast_issue_updates: feature_flag_enabled)
if should_broadcast
- expect(IssuesChannel).to receive(:broadcast_to).with(issue, event: 'updated')
+ expect(GraphqlTriggers).to receive(:issuable_assignees_updated).with(issue)
else
- expect(IssuesChannel).not_to receive(:broadcast_to)
+ expect(GraphqlTriggers).not_to receive(:issuable_assignees_updated).with(issue)
end
update_issue(update_params)
@@ -1030,7 +1054,7 @@ RSpec.describe Issues::UpdateService, :mailer do
it_behaves_like 'issuable record that supports quick actions' do
let(:existing_issue) { create(:issue, project: project) }
- let(:issuable) { described_class.new(project, user, params).execute(existing_issue) }
+ let(:issuable) { described_class.new(project: project, current_user: user, params: params).execute(existing_issue) }
end
end
end
diff --git a/spec/services/issues/zoom_link_service_spec.rb b/spec/services/issues/zoom_link_service_spec.rb
index 8e8adc516cf..19db892fcae 100644
--- a/spec/services/issues/zoom_link_service_spec.rb
+++ b/spec/services/issues/zoom_link_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Issues::ZoomLinkService do
let_it_be(:issue) { create(:issue) }
let(:project) { issue.project }
- let(:service) { described_class.new(issue, user) }
+ let(:service) { described_class.new(project: project, current_user: user, params: { issue: issue }) }
let(:zoom_link) { 'https://zoom.us/j/123456789' }
before do
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
index 9ee0b80edcd..355dbd0c712 100644
--- a/spec/services/labels/available_labels_service_spec.rb
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -36,6 +36,15 @@ RSpec.describe Labels::AvailableLabelsService do
expect(result).to include(project_label, group_label)
expect(result).not_to include(other_project_label, other_group_label)
end
+
+ it 'does not cause additional queries for finding labels' do
+ label_titles = [project_label.title]
+ control_count = ActiveRecord::QueryRecorder.new { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }
+
+ new_label = create(:label, project: project)
+ label_titles = [project_label.title, new_label.title]
+ expect { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }.not_to exceed_query_limit(control_count)
+ end
end
end
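The new query-count example guards against per-title label lookups. One way a find_or_create_by_titles method could keep a constant number of queries is to fetch all existing labels for the requested titles in a single query and only build the missing ones; the sketch below assumes a plain ActiveRecord-style scope and is not GitLab's actual implementation:

  # Hypothetical sketch: resolve many titles with one lookup, then create only the misses.
  def find_or_create_by_titles(titles, label_scope)
    existing = label_scope.where(title: titles).index_by(&:title) # single query for all titles
    titles.map do |title|
      existing[title] || label_scope.create!(title: title)        # create only when missing
    end
  end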
diff --git a/spec/services/labels/find_or_create_service_spec.rb b/spec/services/labels/find_or_create_service_spec.rb
index aa9eb0e6a0d..3ea2727dc60 100644
--- a/spec/services/labels/find_or_create_service_spec.rb
+++ b/spec/services/labels/find_or_create_service_spec.rb
@@ -25,6 +25,35 @@ RSpec.describe Labels::FindOrCreateService do
project.add_developer(user)
end
+ context 'when existing_labels_by_title is provided' do
+ let(:preloaded_label) { build(:label, title: 'Security') }
+
+ before do
+ params.merge!(
+ existing_labels_by_title: {
+ 'Security' => preloaded_label
+ })
+ end
+
+ context 'when label exists' do
+ it 'returns preloaded label' do
+ expect(service.execute).to eq preloaded_label
+ end
+ end
+
+ context 'when label does not exist' do
+ before do
+ params[:title] = 'Audit'
+ end
+
+ it 'does not generate an additional label search' do
+ expect(LabelsFinder).not_to receive(:new)
+
+ service.execute
+ end
+ end
+ end
+
context 'when label does not exist at group level' do
it 'creates a new label at project level' do
expect { service.execute }.to change(project.labels, :count).by(1)
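The existing_labels_by_title examples above suggest the find-or-create path can accept a pre-built title-to-label hash and skip the finder entirely when the title is already present. A compact sketch of that short-circuit, with invented names:

  # Hypothetical: consult the preloaded hash first, fall back to a finder only on a miss.
  def find_label(title, existing_labels_by_title: {}, finder:)
    existing_labels_by_title[title] || finder.call(title)
  end

  # Usage (finder would wrap LabelsFinder plus label creation in the real service):
  #   find_label('Security',
  #               existing_labels_by_title: { 'Security' => security_label },
  #               finder: ->(t) { create_label(t) })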
diff --git a/spec/services/lfs/push_service_spec.rb b/spec/services/lfs/push_service_spec.rb
index f67284ff48d..58fb2f3fb9b 100644
--- a/spec/services/lfs/push_service_spec.rb
+++ b/spec/services/lfs/push_service_spec.rb
@@ -63,6 +63,7 @@ RSpec.describe Lfs::PushService do
it 'returns a failure when submitting a batch fails' do
expect(lfs_client).to receive(:batch!) { raise 'failed' }
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
expect(service.execute).to eq(status: :error, message: 'failed')
end
@@ -70,6 +71,7 @@ RSpec.describe Lfs::PushService do
stub_lfs_batch(lfs_object)
expect(lfs_client).to receive(:upload!) { raise 'failed' }
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).and_call_original
expect(service.execute).to eq(status: :error, message: 'failed')
end
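The two added expectations assert that batch and upload failures are reported through Gitlab::ErrorTracking before the service returns its error hash. A sketch of that rescue-and-log shape, assuming a log_exception call like the one stubbed above; the surrounding method is simplified and not the real Lfs::PushService:

  # Simplified rescue-log-return flow; Gitlab::ErrorTracking.log_exception is the call the
  # spec expects, everything else here is illustrative.
  def execute
    push_objects!
    success
  rescue StandardError => e
    Gitlab::ErrorTracking.log_exception(e)
    { status: :error, message: e.message }
  end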
diff --git a/spec/services/merge_requests/add_context_service_spec.rb b/spec/services/merge_requests/add_context_service_spec.rb
index 27b46a9023c..448be27efe8 100644
--- a/spec/services/merge_requests/add_context_service_spec.rb
+++ b/spec/services/merge_requests/add_context_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe MergeRequests::AddContextService do
let(:commits) { ["874797c3a73b60d2187ed6e2fcabd289ff75171e"] }
let(:raw_repository) { project.repository.raw }
- subject(:service) { described_class.new(project, admin, merge_request: merge_request, commits: commits) }
+ subject(:service) { described_class.new(project: project, current_user: admin, params: { merge_request: merge_request, commits: commits }) }
describe "#execute" do
context "when admin mode is enabled", :enable_admin_mode do
@@ -32,7 +32,7 @@ RSpec.describe MergeRequests::AddContextService do
let(:user) { create(:user) }
let(:merge_request1) { create(:merge_request, source_project: project, author: user) }
- subject(:service) { described_class.new(project, user, merge_request: merge_request, commits: commits) }
+ subject(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, commits: commits }) }
it "doesn't add context commit" do
subject.execute
@@ -42,7 +42,7 @@ RSpec.describe MergeRequests::AddContextService do
end
context "when the commits array is empty" do
- subject(:service) { described_class.new(project, admin, merge_request: merge_request, commits: []) }
+ subject(:service) { described_class.new(project: project, current_user: admin, params: { merge_request: merge_request, commits: [] }) }
it "doesn't add context commit" do
subject.execute
diff --git a/spec/services/merge_requests/add_spent_time_service_spec.rb b/spec/services/merge_requests/add_spent_time_service_spec.rb
new file mode 100644
index 00000000000..db3380e9582
--- /dev/null
+++ b/spec/services/merge_requests/add_spent_time_service_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::AddSpentTimeService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be_with_reload(:merge_request) { create(:merge_request, :simple, :unique_branches, source_project: project) }
+
+ let(:duration) { 1500 }
+ let(:params) { { spend_time: { duration: duration, user_id: user.id } } }
+ let(:service) { described_class.new(project: project, current_user: user, params: params) }
+
+ describe '#execute' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'creates a new timelog with the specified duration' do
+ expect { service.execute(merge_request) }.to change { Timelog.count }.from(0).to(1)
+
+ timelog = merge_request.timelogs.last
+
+ expect(timelog).not_to be_nil
+ expect(timelog.time_spent).to eq(1500)
+ end
+
+ it 'creates a system note with the time added' do
+ expect { service.execute(merge_request) }.to change { Note.count }.from(0).to(1)
+
+ system_note = merge_request.notes.last
+
+ expect(system_note).not_to be_nil
+ expect(system_note.note_html).to include('added 25m of time spent')
+ end
+
+ it 'saves usage data' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_time_spent_changed_action).once.with(user: user)
+
+ service.execute(merge_request)
+ end
+
+ it 'is more efficient than using the full update-service' do
+ other_mr = create(:merge_request, :simple, :unique_branches, source_project: project)
+
+ update_service = ::MergeRequests::UpdateService.new(project: project, current_user: user, params: params)
+ other_mr.reload
+
+ expect { service.execute(merge_request) }
+ .to issue_fewer_queries_than { update_service.execute(other_mr) }
+ end
+
+ context 'when duration is nil' do
+ let(:duration) { nil }
+
+ it 'does not create a timelog with the specified duration' do
+ expect { service.execute(merge_request) }.not_to change { Timelog.count }
+ expect(merge_request).not_to be_valid
+ end
+ end
+ end
+end
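For reference, the 1500 in the new spec is a duration in seconds, which the system note renders as 25m (1500 / 60). A minimal usage sketch of the service under the parameters the spec sets up, with the note text shown as plain arithmetic rather than GitLab's real formatter:

  # Hypothetical usage; the real service persists a Timelog and a system note.
  params  = { spend_time: { duration: 1500, user_id: user.id } }
  service = MergeRequests::AddSpentTimeService.new(project: project, current_user: user, params: params)
  service.execute(merge_request)

  minutes = params.dig(:spend_time, :duration) / 60 # => 25, hence "added 25m of time spent"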
diff --git a/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb b/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
index 6edaa91b8b2..8d1abe5ea89 100644
--- a/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
+++ b/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe ::MergeRequests::AddTodoWhenBuildFailsService do
let(:ref) { merge_request.source_branch }
let(:service) do
- described_class.new(project, user, commit_message: 'Awesome message')
+ described_class.new(project: project, current_user: user, params: { commit_message: 'Awesome message' })
end
let(:todo_service) { spy('todo service') }
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index e1f28e32164..cbbd193a411 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe MergeRequests::AfterCreateService do
let_it_be(:merge_request) { create(:merge_request) }
subject(:after_create_service) do
- described_class.new(merge_request.target_project, merge_request.author)
+ described_class.new(project: merge_request.target_project, current_user: merge_request.author)
end
describe '#execute' do
@@ -191,7 +191,7 @@ RSpec.describe MergeRequests::AfterCreateService do
it 'calls MergeRequests::LinkLfsObjectsService#execute' do
service = instance_spy(MergeRequests::LinkLfsObjectsService)
- allow(MergeRequests::LinkLfsObjectsService).to receive(:new).with(merge_request.target_project).and_return(service)
+ allow(MergeRequests::LinkLfsObjectsService).to receive(:new).with(project: merge_request.target_project).and_return(service)
execute_service
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index df9a98c5540..d30b2721a36 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe MergeRequests::ApprovalService do
let(:project) { merge_request.project }
let!(:todo) { create(:todo, user: user, project: project, target: merge_request) }
- subject(:service) { described_class.new(project, user) }
+ subject(:service) { described_class.new(project: project, current_user: user) }
before do
project.add_developer(user)
diff --git a/spec/services/merge_requests/assign_issues_service_spec.rb b/spec/services/merge_requests/assign_issues_service_spec.rb
index 6398e8c533e..b857f26c052 100644
--- a/spec/services/merge_requests/assign_issues_service_spec.rb
+++ b/spec/services/merge_requests/assign_issues_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe MergeRequests::AssignIssuesService do
let(:project) { create(:project, :public, :repository) }
let(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, :simple, source_project: project, author: user, description: "fixes #{issue.to_reference}") }
- let(:service) { described_class.new(project, user, merge_request: merge_request) }
+ let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request }) }
before do
project.add_developer(user)
@@ -37,10 +37,12 @@ RSpec.describe MergeRequests::AssignIssuesService do
it 'accepts precomputed data for closes_issues' do
issue2 = create(:issue, project: project)
- service2 = described_class.new(project,
- user,
- merge_request: merge_request,
- closes_issues: [issue, issue2])
+ service2 = described_class.new(project: project,
+ current_user: user,
+ params: {
+ merge_request: merge_request,
+ closes_issues: [issue, issue2]
+ })
expect(service2.assignable_issues.count).to eq 2
end
@@ -52,10 +54,12 @@ RSpec.describe MergeRequests::AssignIssuesService do
it 'ignores external issues' do
external_issue = ExternalIssue.new('JIRA-123', project)
service = described_class.new(
- project,
- user,
- merge_request: merge_request,
- closes_issues: [external_issue]
+ project: project,
+ current_user: user,
+ params: {
+ merge_request: merge_request,
+ closes_issues: [external_issue]
+ }
)
expect(service.assignable_issues.count).to eq 0
diff --git a/spec/services/merge_requests/base_service_spec.rb b/spec/services/merge_requests/base_service_spec.rb
index d8ba2bc43fb..7911392ef19 100644
--- a/spec/services/merge_requests/base_service_spec.rb
+++ b/spec/services/merge_requests/base_service_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe MergeRequests::BaseService do
}
end
- subject { MergeRequests::CreateService.new(project, project.owner, params) }
+ subject { MergeRequests::CreateService.new(project: project, current_user: project.owner, params: params) }
describe '#execute_hooks' do
shared_examples 'enqueues Jira sync worker' do
diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb
index 8adf6d69f73..5a6a9df3f44 100644
--- a/spec/services/merge_requests/build_service_spec.rb
+++ b/spec/services/merge_requests/build_service_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe MergeRequests::BuildService do
end
let(:service) do
- described_class.new(project, user, params)
+ described_class.new(project: project, current_user: user, params: params)
end
before do
@@ -100,7 +100,7 @@ RSpec.describe MergeRequests::BuildService do
context 'with force_remove_source_branch parameter when the user is authorized' do
let(:mr_params) { params.merge(force_remove_source_branch: '1') }
let(:source_project) { fork_project(project, user) }
- let(:merge_request) { described_class.new(project, user, mr_params).execute }
+ let(:merge_request) { described_class.new(project: project, current_user: user, params: mr_params).execute }
before do
project.add_reporter(user)
diff --git a/spec/services/merge_requests/cleanup_refs_service_spec.rb b/spec/services/merge_requests/cleanup_refs_service_spec.rb
index a1822a4d5ba..e8690ae5bf2 100644
--- a/spec/services/merge_requests/cleanup_refs_service_spec.rb
+++ b/spec/services/merge_requests/cleanup_refs_service_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe MergeRequests::CleanupRefsService do
context 'when merge request has merge ref' do
before do
MergeRequests::MergeToRefService
- .new(merge_request.project, merge_request.author)
+ .new(project: merge_request.project, current_user: merge_request.author)
.execute(merge_request)
end
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 48f56b3ec68..f6336a85a25 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe MergeRequests::CloseService do
it_behaves_like 'merge request reviewers cache counters invalidator'
context 'valid params' do
- let(:service) { described_class.new(project, user, {}) }
+ let(:service) { described_class.new(project: project, current_user: user) }
before do
allow(service).to receive(:execute_hooks)
@@ -73,7 +73,7 @@ RSpec.describe MergeRequests::CloseService do
expect(metrics_service).to receive(:close)
- described_class.new(project, user, {}).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
it 'calls the merge request activity counter' do
@@ -81,11 +81,11 @@ RSpec.describe MergeRequests::CloseService do
.to receive(:track_close_mr_action)
.with(user: user)
- described_class.new(project, user, {}).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
- service = described_class.new(project, user, {})
+ service = described_class.new(project: project, current_user: user)
expect { service.execute(merge_request) }
.to change { project.open_merge_requests_count }.from(1).to(0)
@@ -96,19 +96,19 @@ RSpec.describe MergeRequests::CloseService do
expect(service).to receive(:execute_for_merge_request).with(merge_request)
end
- described_class.new(project, user).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
it 'schedules CleanupRefsService' do
expect(MergeRequests::CleanupRefsService).to receive(:schedule).with(merge_request)
- described_class.new(project, user).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
context 'current user is not authorized to close merge request' do
before do
perform_enqueued_jobs do
- @merge_request = described_class.new(project, guest).execute(merge_request)
+ @merge_request = described_class.new(project: project, current_user: guest).execute(merge_request)
end
end
diff --git a/spec/services/merge_requests/create_from_issue_service_spec.rb b/spec/services/merge_requests/create_from_issue_service_spec.rb
index 6528edfc8b7..749b30bff5f 100644
--- a/spec/services/merge_requests/create_from_issue_service_spec.rb
+++ b/spec/services/merge_requests/create_from_issue_service_spec.rb
@@ -11,8 +11,8 @@ RSpec.describe MergeRequests::CreateFromIssueService do
let(:milestone_id) { create(:milestone, project: project).id }
let(:issue) { create(:issue, project: project, milestone_id: milestone_id) }
let(:custom_source_branch) { 'custom-source-branch' }
- let(:service) { described_class.new(project, user, service_params) }
- let(:service_with_custom_source_branch) { described_class.new(project, user, branch_name: custom_source_branch, **service_params) }
+ let(:service) { described_class.new(project: project, current_user: user, mr_params: service_params) }
+ let(:service_with_custom_source_branch) { described_class.new(project: project, current_user: user, mr_params: { branch_name: custom_source_branch, **service_params }) }
before do
project.add_developer(user)
@@ -21,14 +21,14 @@ RSpec.describe MergeRequests::CreateFromIssueService do
describe '#execute' do
shared_examples_for 'a service that creates a merge request from an issue' do
it 'returns an error when user can not create merge request on target project' do
- result = described_class.new(project, create(:user), service_params).execute
+ result = described_class.new(project: project, current_user: create(:user), mr_params: service_params).execute
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Not allowed to create merge request')
end
it 'returns an error with invalid issue iid' do
- result = described_class.new(project, user, issue_iid: -1).execute
+ result = described_class.new(project: project, current_user: user, mr_params: { issue_iid: -1 }).execute
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('Invalid issue iid')
@@ -123,7 +123,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
end
context 'when ref branch is set', :sidekiq_might_not_need_inline do
- subject { described_class.new(project, user, ref: 'feature', **service_params).execute }
+ subject { described_class.new(project: project, current_user: user, mr_params: { ref: 'feature', **service_params }).execute }
it 'sets the merge request source branch to the new issue branch' do
expect(subject[:merge_request].source_branch).to eq(issue.to_branch_name)
@@ -134,7 +134,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
end
context 'when the ref is a tag' do
- subject { described_class.new(project, user, ref: 'v1.0.0', **service_params).execute }
+ subject { described_class.new(project: project, current_user: user, mr_params: { ref: 'v1.0.0', **service_params }).execute }
it 'sets the merge request source branch to the new issue branch' do
expect(subject[:merge_request].source_branch).to eq(issue.to_branch_name)
@@ -150,7 +150,7 @@ RSpec.describe MergeRequests::CreateFromIssueService do
end
context 'when ref branch does not exist' do
- subject { described_class.new(project, user, ref: 'no-such-branch', **service_params).execute }
+ subject { described_class.new(project: project, current_user: user, mr_params: { ref: 'no-such-branch', **service_params }).execute }
it 'creates a merge request' do
expect { subject }.to change(target_project.merge_requests, :count).by(1)
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index 3e2e940dc24..a0ac168f3d7 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- let(:service) { described_class.new(project, actor, params) }
+ let(:service) { described_class.new(project: project, current_user: actor, params: params) }
let(:actor) { user }
let(:params) { {} }
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index f2bc55103f0..b2351ab53bd 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
}
end
- let(:service) { described_class.new(project, user, opts) }
+ let(:service) { described_class.new(project: project, current_user: user, params: opts) }
let(:merge_request) { service.execute }
before do
@@ -347,12 +347,12 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
}
end
- let(:issuable) { described_class.new(project, user, params).execute }
+ let(:issuable) { described_class.new(project: project, current_user: user, params: params).execute }
end
context 'Quick actions' do
context 'with assignee and milestone in params and command' do
- let(:merge_request) { described_class.new(project, user, opts).execute }
+ let(:merge_request) { described_class.new(project: project, current_user: user, params: opts).execute }
let(:milestone) { create(:milestone, project: project) }
let(:opts) do
@@ -390,7 +390,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
it 'removes assignee_id when user id is invalid' do
opts = { title: 'Title', description: 'Description', assignee_ids: [-1] }
- merge_request = described_class.new(project, user, opts).execute
+ merge_request = described_class.new(project: project, current_user: user, params: opts).execute
expect(merge_request.assignee_ids).to be_empty
end
@@ -398,7 +398,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
it 'removes assignee_id when user id is 0' do
opts = { title: 'Title', description: 'Description', assignee_ids: [0] }
- merge_request = described_class.new(project, user, opts).execute
+ merge_request = described_class.new(project: project, current_user: user, params: opts).execute
expect(merge_request.assignee_ids).to be_empty
end
@@ -407,7 +407,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
project.add_maintainer(user2)
opts = { title: 'Title', description: 'Description', assignee_ids: [user2.id] }
- merge_request = described_class.new(project, user, opts).execute
+ merge_request = described_class.new(project: project, current_user: user, params: opts).execute
expect(merge_request.assignees).to eq([user2])
end
@@ -426,7 +426,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
it 'invalidates open merge request counter for assignees when merge request is assigned' do
project.add_maintainer(user2)
- described_class.new(project, user, opts).execute
+ described_class.new(project: project, current_user: user, params: opts).execute
expect(user2.assigned_open_merge_requests_count).to eq 1
end
@@ -445,7 +445,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
project.update!(visibility_level: level)
opts = { title: 'Title', description: 'Description', assignee_ids: [user2.id] }
- merge_request = described_class.new(project, user, opts).execute
+ merge_request = described_class.new(project: project, current_user: user, params: opts).execute
expect(merge_request.assignee_id).to be_nil
end
@@ -473,7 +473,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'raises an error' do
- expect { described_class.new(project, user, opts).execute }
+ expect { described_class.new(project: project, current_user: user, params: opts).execute }
.to raise_error Gitlab::Access::AccessDeniedError
end
end
@@ -485,7 +485,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'raises an error' do
- expect { described_class.new(project, user, opts).execute }
+ expect { described_class.new(project: project, current_user: user, params: opts).execute }
.to raise_error Gitlab::Access::AccessDeniedError
end
end
@@ -497,7 +497,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'creates the merge request', :sidekiq_might_not_need_inline do
- merge_request = described_class.new(project, user, opts).execute
+ merge_request = described_class.new(project: project, current_user: user, params: opts).execute
expect(merge_request).to be_persisted
end
@@ -505,7 +505,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
it 'does not create the merge request when the target project is archived' do
target_project.update!(archived: true)
- expect { described_class.new(project, user, opts).execute }
+ expect { described_class.new(project: project, current_user: user, params: opts).execute }
.to raise_error Gitlab::Access::AccessDeniedError
end
end
@@ -529,7 +529,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it 'ignores source_project_id' do
- merge_request = described_class.new(project, user, opts).execute
+ merge_request = described_class.new(project: project, current_user: user, params: opts).execute
expect(merge_request.source_project_id).to eq(project.id)
end
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index aec5a3b3fa3..24a1a8b3113 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe MergeRequests::FfMergeService do
describe '#execute' do
context 'valid params' do
- let(:service) { described_class.new(project, user, valid_merge_params) }
+ let(:service) { described_class.new(project: project, current_user: user, params: valid_merge_params) }
def execute_ff_merge
perform_enqueued_jobs do
@@ -92,7 +92,7 @@ RSpec.describe MergeRequests::FfMergeService do
end
context 'error handling' do
- let(:service) { described_class.new(project, user, valid_merge_params.merge(commit_message: 'Awesome message')) }
+ let(:service) { described_class.new(project: project, current_user: user, params: valid_merge_params.merge(commit_message: 'Awesome message')) }
before do
allow(Gitlab::AppLogger).to receive(:error)
diff --git a/spec/services/merge_requests/get_urls_service_spec.rb b/spec/services/merge_requests/get_urls_service_spec.rb
index 053752626dc..5f81e1728fa 100644
--- a/spec/services/merge_requests/get_urls_service_spec.rb
+++ b/spec/services/merge_requests/get_urls_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe MergeRequests::GetUrlsService do
include ProjectForksHelper
let(:project) { create(:project, :public, :repository) }
- let(:service) { described_class.new(project) }
+ let(:service) { described_class.new(project: project) }
let(:source_branch) { "merge-test" }
let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" }
let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/merge_requests/#{merge_request.iid}" }
@@ -106,7 +106,7 @@ RSpec.describe MergeRequests::GetUrlsService do
let!(:merge_request) { create(:merge_request, source_project: forked_project, target_project: project, source_branch: source_branch) }
let(:changes) { existing_branch_changes }
# Source project is now the forked one
- let(:service) { described_class.new(forked_project) }
+ let(:service) { described_class.new(project: forked_project) }
before do
allow(forked_project).to receive(:empty_repo?).and_return(false)
diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
index cc595aab04b..0bf18f16abb 100644
--- a/spec/services/merge_requests/handle_assignees_change_service_spec.rb
+++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
let_it_be(:old_assignees) { create_list(:user, 3) }
let(:options) { {} }
- let(:service) { described_class.new(project, user) }
+ let(:service) { described_class.new(project: project, current_user: user) }
before_all do
project.add_maintainer(user)
@@ -38,18 +38,6 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
async_execute
end
-
- context 'when async_handle_merge_request_assignees_change feature is disabled' do
- before do
- stub_feature_flags(async_handle_merge_request_assignees_change: false)
- end
-
- it 'calls #execute' do
- expect(service).to receive(:execute).with(merge_request, old_assignees, options)
-
- async_execute
- end
- end
end
describe '#execute' do
diff --git a/spec/services/merge_requests/link_lfs_objects_service_spec.rb b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
index c1765e3a2ab..2fb6bbaf02f 100644
--- a/spec/services/merge_requests/link_lfs_objects_service_spec.rb
+++ b/spec/services/merge_requests/link_lfs_objects_service_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe MergeRequests::LinkLfsObjectsService, :sidekiq_inline do
)
end
- subject { described_class.new(target_project) }
+ subject { described_class.new(project: target_project) }
shared_examples_for 'linking LFS objects' do
context 'when source project is the same as target project' do
diff --git a/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb b/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb
index 1075f6f9034..4d7bd3d8800 100644
--- a/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb
+++ b/spec/services/merge_requests/mark_reviewer_reviewed_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe MergeRequests::MarkReviewerReviewedService do
let(:merge_request) { create(:merge_request, reviewers: [current_user]) }
let(:reviewer) { merge_request.merge_request_reviewers.find_by(user_id: current_user.id) }
let(:project) { merge_request.project }
- let(:service) { described_class.new(project, current_user) }
+ let(:service) { described_class.new(project: project, current_user: current_user) }
let(:result) { service.execute(merge_request) }
before do
@@ -16,7 +16,7 @@ RSpec.describe MergeRequests::MarkReviewerReviewedService do
describe '#execute' do
describe 'invalid permissions' do
- let(:service) { described_class.new(project, create(:user)) }
+ let(:service) { described_class.new(project: project, current_user: create(:user)) }
it 'returns an error' do
expect(result[:status]).to eq :error
@@ -24,7 +24,7 @@ RSpec.describe MergeRequests::MarkReviewerReviewedService do
end
describe 'reviewer does not exist' do
- let(:service) { described_class.new(project, create(:user)) }
+ let(:service) { described_class.new(project: project, current_user: create(:user)) }
it 'returns an error' do
expect(result[:status]).to eq :error
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index c73cbad9d2f..ac39fb59c62 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe MergeRequests::MergeService do
+ include ExclusiveLeaseHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
@@ -15,11 +17,14 @@ RSpec.describe MergeRequests::MergeService do
end
describe '#execute' do
- let(:service) { described_class.new(project, user, merge_params) }
+ let(:service) { described_class.new(project: project, current_user: user, params: merge_params) }
let(:merge_params) do
{ commit_message: 'Awesome message', sha: merge_request.diff_head_sha }
end
+ let(:lease_key) { "merge_requests_merge_service:#{merge_request.id}" }
+ let!(:lease) { stub_exclusive_lease(lease_key) }
+
context 'valid params' do
before do
allow(service).to receive(:execute_hooks)
@@ -90,6 +95,20 @@ RSpec.describe MergeRequests::MergeService do
end
end
+ context 'running the service multiple times' do
+ it 'is idempotent' do
+ 2.times { service.execute(merge_request) }
+
+ expect(merge_request.merge_error).to be_falsey
+ expect(merge_request).to be_valid
+ expect(merge_request).to be_merged
+
+ commit_messages = project.repository.commits('master', limit: 2).map(&:message)
+ expect(commit_messages.uniq.size).to eq(2)
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
+ end
+
context 'when an invalid sha is passed' do
let(:merge_request) do
create(:merge_request, :simple,
@@ -209,7 +228,7 @@ RSpec.describe MergeRequests::MergeService do
context 'source branch removal' do
context 'when the source branch is protected' do
let(:service) do
- described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
end
before do
@@ -225,7 +244,7 @@ RSpec.describe MergeRequests::MergeService do
context 'when the source branch is the default branch' do
let(:service) do
- described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
end
before do
@@ -251,7 +270,7 @@ RSpec.describe MergeRequests::MergeService do
end
context 'when the merger set the source branch not to be removed' do
- let(:service) { described_class.new(project, user, merge_params.merge('should_remove_source_branch' => false)) }
+ let(:service) { described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => false)) }
it 'does not delete the source branch' do
expect(::MergeRequests::DeleteSourceBranchWorker).not_to receive(:perform_async)
@@ -263,7 +282,7 @@ RSpec.describe MergeRequests::MergeService do
context 'when MR merger set the source branch to be removed' do
let(:service) do
- described_class.new(project, user, merge_params.merge('should_remove_source_branch' => true))
+ described_class.new(project: project, current_user: user, params: merge_params.merge('should_remove_source_branch' => true))
end
it 'removes the source branch using the current user' do
@@ -306,10 +325,12 @@ RSpec.describe MergeRequests::MergeService do
end
it 'logs and saves error if user is not authorized' do
+ stub_exclusive_lease
+
unauthorized_user = create(:user)
project.add_reporter(unauthorized_user)
- service = described_class.new(project, unauthorized_user)
+ service = described_class.new(project: project, current_user: unauthorized_user)
service.execute(merge_request)
@@ -423,6 +444,7 @@ RSpec.describe MergeRequests::MergeService do
merge_request.project.update!(merge_method: merge_method)
error_message = 'Only fast-forward merge is allowed for your project. Please update your source branch'
allow(service).to receive(:execute_hooks)
+ expect(lease).to receive(:cancel)
service.execute(merge_request)
@@ -473,5 +495,17 @@ RSpec.describe MergeRequests::MergeService do
end
end
end
+
+ context 'when another sidekiq worker is already running' do
+ before do
+ stub_exclusive_lease_taken(lease_key)
+ end
+
+ it 'does not execute the service' do
+ expect(service).not_to receive(:commit)
+
+ service.execute(merge_request)
+ end
+ end
end
end
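The new lease-related examples (stub_exclusive_lease, stub_exclusive_lease_taken, and the expectation that the lease is cancelled) imply the merge service now guards #execute with an exclusive lease keyed on the merge request. A rough sketch of that guard, assuming a Gitlab::ExclusiveLease-style try_obtain/cancel API; the timeout and the commit method body are illustrative:

  # Illustrative guard: skip the merge when another worker already holds the lease,
  # and release the lease when this run finishes or raises.
  def execute(merge_request)
    lease_key = "merge_requests_merge_service:#{merge_request.id}" # key used in the spec above
    lease = Gitlab::ExclusiveLease.new(lease_key, timeout: 15.minutes.to_i)

    return unless lease.try_obtain # another worker is already merging: do nothing

    begin
      commit(merge_request) # placeholder for the actual merge work
    ensure
      lease.cancel # release the lease whether the merge succeeded or raised
    end
  end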
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 938165a807c..bb764ff5672 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe MergeRequests::MergeToRefService do
describe '#execute' do
let(:service) do
- described_class.new(project, user, **params)
+ described_class.new(project: project, current_user: user, params: params)
end
let(:params) { { commit_message: 'Awesome message', should_remove_source_branch: true, sha: merge_request.diff_head_sha } }
@@ -94,7 +94,7 @@ RSpec.describe MergeRequests::MergeToRefService do
it 'returns an error when Gitlab::Git::CommandError is raised during merge' do
allow(project.repository).to receive(:merge_to_ref) do
- raise Gitlab::Git::CommandError.new('Failed to create merge commit')
+ raise Gitlab::Git::CommandError, 'Failed to create merge commit'
end
result = service.execute(merge_request)
@@ -111,11 +111,11 @@ RSpec.describe MergeRequests::MergeToRefService do
end
let(:merge_ref_service) do
- described_class.new(project, user, {})
+ described_class.new(project: project, current_user: user)
end
let(:merge_service) do
- MergeRequests::MergeService.new(project, user, { sha: merge_request.diff_head_sha })
+ MergeRequests::MergeService.new(project: project, current_user: user, params: { sha: merge_request.diff_head_sha })
end
context 'when merge commit' do
diff --git a/spec/services/merge_requests/mergeability_check_service_spec.rb b/spec/services/merge_requests/mergeability_check_service_spec.rb
index e0baf5af8b4..65599b7e046 100644
--- a/spec/services/merge_requests/mergeability_check_service_spec.rb
+++ b/spec/services/merge_requests/mergeability_check_service_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
described_class.new(merge_request).async_execute
end
- context 'when read only DB' do
+ context 'when read-only DB' do
before do
allow(Gitlab::Database).to receive(:read_only?) { true }
end
@@ -232,7 +232,7 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
context 'when MR cannot be merged and has outdated merge ref' do
before do
- MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
+ MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author).execute(merge_request)
merge_request.mark_as_unmergeable!
end
@@ -258,7 +258,7 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
end
end
- context 'when read only DB' do
+ context 'when read-only DB' do
it 'returns ServiceResponse.error' do
allow(Gitlab::Database).to receive(:read_only?) { true }
@@ -332,7 +332,7 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
context 'when MR is mergeable but merge-ref is already updated' do
before do
- MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
+ MergeRequests::MergeToRefService.new(project: project, current_user: merge_request.author).execute(merge_request)
merge_request.mark_as_mergeable!
end
@@ -361,7 +361,7 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
context 'merge with conflicts' do
it 'calls MergeToRefService with true allow_conflicts param' do
expect(MergeRequests::MergeToRefService).to receive(:new)
- .with(project, merge_request.author, { allow_conflicts: true }).and_call_original
+ .with(project: project, current_user: merge_request.author, params: { allow_conflicts: true }).and_call_original
subject
end
@@ -373,7 +373,7 @@ RSpec.describe MergeRequests::MergeabilityCheckService, :clean_gitlab_redis_shar
it 'calls MergeToRefService with false allow_conflicts param' do
expect(MergeRequests::MergeToRefService).to receive(:new)
- .with(project, merge_request.author, { allow_conflicts: false }).and_call_original
+ .with(project: project, current_user: merge_request.author, params: { allow_conflicts: false }).and_call_original
subject
end
diff --git a/spec/services/merge_requests/post_merge_service_spec.rb b/spec/services/merge_requests/post_merge_service_spec.rb
index 247b053e729..14804aa33d4 100644
--- a/spec/services/merge_requests/post_merge_service_spec.rb
+++ b/spec/services/merge_requests/post_merge_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe MergeRequests::PostMergeService do
let_it_be(:merge_request, reload: true) { create(:merge_request, assignees: [user]) }
let_it_be(:project) { merge_request.project }
- subject { described_class.new(project, user).execute(merge_request) }
+ subject { described_class.new(project: project, current_user: user).execute(merge_request) }
before do
project.add_maintainer(user)
@@ -22,7 +22,6 @@ RSpec.describe MergeRequests::PostMergeService do
it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
# Cache the counter before the MR changed state.
project.open_merge_requests_count
- merge_request.update!(state: 'merged')
expect { subject }.to change { project.open_merge_requests_count }.from(1).to(0)
end
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index b5086ea3a82..87c3fc6a2d8 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let_it_be(:user3) { create(:user, developer_projects: [project]) }
let_it_be(:forked_project) { fork_project(project, user1, repository: true) }
- let(:service) { described_class.new(project, user1, changes, push_options) }
+ let(:service) { described_class.new(project: project, current_user: user1, changes: changes, push_options: push_options) }
let(:source_branch) { 'fix' }
let(:target_branch) { 'feature' }
let(:title) { 'my title' }
diff --git a/spec/services/merge_requests/pushed_branches_service_spec.rb b/spec/services/merge_requests/pushed_branches_service_spec.rb
index cd6af4c275e..59424263ec5 100644
--- a/spec/services/merge_requests/pushed_branches_service_spec.rb
+++ b/spec/services/merge_requests/pushed_branches_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe MergeRequests::PushedBranchesService do
let(:project) { create(:project) }
- let!(:service) { described_class.new(project, nil, changes: pushed_branches) }
+ let!(:service) { described_class.new(project: project, current_user: nil, params: { changes: pushed_branches }) }
context 'when branches pushed' do
let(:pushed_branches) do
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 653fcf12a76..a46f3cf6148 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe MergeRequests::RebaseService do
let(:repository) { project.repository.raw }
let(:skip_ci) { false }
- subject(:service) { described_class.new(project, user, {}) }
+ subject(:service) { described_class.new(project: project, current_user: user) }
before do
project.add_maintainer(user)
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index f9b76db877b..6e6b4a91e0d 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe MergeRequests::RefreshService do
end
context 'push to origin repo source branch' do
- let(:refresh_service) { service.new(@project, @user) }
+ let(:refresh_service) { service.new(project: @project, current_user: @user) }
let(:notification_service) { spy('notification_service') }
before do
@@ -187,7 +187,7 @@ RSpec.describe MergeRequests::RefreshService do
context 'when pipeline exists for the source branch' do
let!(:pipeline) { create(:ci_empty_pipeline, ref: @merge_request.source_branch, project: @project, sha: @commits.first.sha)}
- subject { service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master') }
+ subject { service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/master') }
it 'updates the head_pipeline_id for @merge_request', :sidekiq_might_not_need_inline do
expect { subject }.to change { @merge_request.reload.head_pipeline_id }.from(nil).to(pipeline.id)
@@ -198,12 +198,12 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- shared_examples 'Pipelines for merge requests' do
+ context 'Pipelines for merge requests', :sidekiq_inline do
before do
stub_ci_pipeline_yaml_file(config)
end
- subject { service.new(project, @user).execute(@oldrev, @newrev, ref) }
+ subject { service.new(project: project, current_user: @user).execute(@oldrev, @newrev, ref) }
let(:ref) { 'refs/heads/master' }
let(:project) { @project }
@@ -291,11 +291,11 @@ RSpec.describe MergeRequests::RefreshService do
context "when MergeRequestUpdateWorker is retried by an exception" do
it 'does not re-create a duplicate detached merge request pipeline' do
expect do
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master')
+ service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/master')
end.to change { @merge_request.pipelines_for_merge_request.count }.by(1)
expect do
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/master')
+ service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/master')
end.not_to change { @merge_request.pipelines_for_merge_request.count }
end
end
@@ -364,20 +364,8 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- context 'when the code_review_async_pipeline_creation feature flag is on', :sidekiq_inline do
- it_behaves_like 'Pipelines for merge requests'
- end
-
- context 'when the code_review_async_pipeline_creation feature flag is off', :sidekiq_inline do
- before do
- stub_feature_flags(code_review_async_pipeline_creation: false)
- end
-
- it_behaves_like 'Pipelines for merge requests'
- end
-
context 'push to origin repo source branch' do
- let(:refresh_service) { service.new(@project, @user) }
+ let(:refresh_service) { service.new(project: @project, current_user: @user) }
let(:notification_service) { spy('notification_service') }
before do
@@ -409,7 +397,7 @@ RSpec.describe MergeRequests::RefreshService do
context 'push to origin repo target branch', :sidekiq_might_not_need_inline do
context 'when all MRs to the target branch had diffs' do
before do
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
reload_mrs
end
@@ -438,7 +426,7 @@ RSpec.describe MergeRequests::RefreshService do
# feature all along.
empty_fork_merge_request.update_columns(target_branch: 'feature')
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
reload_mrs
empty_fork_merge_request.reload
end
@@ -461,7 +449,7 @@ RSpec.describe MergeRequests::RefreshService do
# Merge master -> feature branch
@project.repository.merge(@user, @merge_request.diff_head_sha, @merge_request, 'Test message')
commit = @project.repository.commit('feature')
- service.new(@project, @user).execute(@oldrev, commit.id, 'refs/heads/feature')
+ service.new(project: @project, current_user: @user).execute(@oldrev, commit.id, 'refs/heads/feature')
reload_mrs
end
@@ -479,7 +467,7 @@ RSpec.describe MergeRequests::RefreshService do
end
context 'push to fork repo source branch', :sidekiq_might_not_need_inline do
- let(:refresh_service) { service.new(@fork_project, @user) }
+ let(:refresh_service) { service.new(project: @fork_project, current_user: @user) }
def refresh
allow(refresh_service).to receive(:execute_hooks)
@@ -546,7 +534,7 @@ RSpec.describe MergeRequests::RefreshService do
context 'push to fork repo target branch', :sidekiq_might_not_need_inline do
describe 'changes to merge requests' do
before do
- service.new(@fork_project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ service.new(project: @fork_project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
reload_mrs
end
@@ -563,7 +551,7 @@ RSpec.describe MergeRequests::RefreshService do
describe 'merge request diff' do
it 'does not reload the diff of the merge request made from fork' do
expect do
- service.new(@fork_project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ service.new(project: @fork_project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
end.not_to change { @fork_merge_request.reload.merge_request_diff }
end
end
@@ -594,28 +582,28 @@ RSpec.describe MergeRequests::RefreshService do
it 'reloads a new diff for a push to the forked project' do
expect do
- service.new(@fork_project, @user).execute(@oldrev, first_commit, 'refs/heads/master')
+ service.new(project: @fork_project, current_user: @user).execute(@oldrev, first_commit, 'refs/heads/master')
reload_mrs
end.to change { forked_master_mr.merge_request_diffs.count }.by(1)
end
it 'reloads a new diff for a force push to the source branch' do
expect do
- service.new(@fork_project, @user).execute(@oldrev, force_push_commit, 'refs/heads/master')
+ service.new(project: @fork_project, current_user: @user).execute(@oldrev, force_push_commit, 'refs/heads/master')
reload_mrs
end.to change { forked_master_mr.merge_request_diffs.count }.by(1)
end
it 'reloads a new diff for a force push to the target branch' do
expect do
- service.new(@project, @user).execute(@oldrev, force_push_commit, 'refs/heads/master')
+ service.new(project: @project, current_user: @user).execute(@oldrev, force_push_commit, 'refs/heads/master')
reload_mrs
end.to change { forked_master_mr.merge_request_diffs.count }.by(1)
end
it 'reloads a new diff for a push to the target project that contains a commit in the MR' do
expect do
- service.new(@project, @user).execute(@oldrev, first_commit, 'refs/heads/master')
+ service.new(project: @project, current_user: @user).execute(@oldrev, first_commit, 'refs/heads/master')
reload_mrs
end.to change { forked_master_mr.merge_request_diffs.count }.by(1)
end
@@ -626,7 +614,7 @@ RSpec.describe MergeRequests::RefreshService do
branch_name: 'master')
expect do
- service.new(@project, @user).execute(@newrev, new_commit, 'refs/heads/master')
+ service.new(project: @project, current_user: @user).execute(@newrev, new_commit, 'refs/heads/master')
reload_mrs
end.not_to change { forked_master_mr.merge_request_diffs.count }
end
@@ -635,7 +623,7 @@ RSpec.describe MergeRequests::RefreshService do
context 'push to origin repo target branch after fork project was removed' do
before do
@fork_project.destroy!
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/feature')
reload_mrs
end
@@ -651,7 +639,7 @@ RSpec.describe MergeRequests::RefreshService do
end
context 'push new branch that exists in a merge request' do
- let(:refresh_service) { service.new(@fork_project, @user) }
+ let(:refresh_service) { service.new(project: @fork_project, current_user: @user) }
it 'refreshes the merge request', :sidekiq_might_not_need_inline do
expect(refresh_service).to receive(:execute_hooks)
@@ -700,7 +688,7 @@ RSpec.describe MergeRequests::RefreshService do
source_branch: 'close-by-commit',
source_project: project)
- refresh_service = service.new(project, user)
+ refresh_service = service.new(project: project, current_user: user)
allow(refresh_service).to receive(:execute_hooks)
refresh_service.execute(@oldrev, @newrev, 'refs/heads/close-by-commit')
@@ -723,7 +711,7 @@ RSpec.describe MergeRequests::RefreshService do
source_branch: 'close-by-commit',
source_project: forked_project)
- refresh_service = service.new(forked_project, user)
+ refresh_service = service.new(project: forked_project, current_user: user)
allow(refresh_service).to receive(:execute_hooks)
refresh_service.execute(@oldrev, @newrev, 'refs/heads/close-by-commit')
@@ -734,7 +722,7 @@ RSpec.describe MergeRequests::RefreshService do
end
context 'marking the merge request as draft' do
- let(:refresh_service) { service.new(@project, @user) }
+ let(:refresh_service) { service.new(project: @project, current_user: @user) }
before do
allow(refresh_service).to receive(:execute_hooks)
@@ -814,7 +802,7 @@ RSpec.describe MergeRequests::RefreshService do
end
describe 'updating merge_commit' do
- let(:service) { described_class.new(project, user) }
+ let(:service) { described_class.new(project: project, current_user: user) }
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
@@ -902,7 +890,7 @@ RSpec.describe MergeRequests::RefreshService do
end
let(:auto_merge_strategy) { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
- let(:refresh_service) { service.new(project, user) }
+ let(:refresh_service) { service.new(project: project, current_user: user) }
before do
target_project.merge_method = merge_method
diff --git a/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb b/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb
index 3152a4e3861..b333d4af6cf 100644
--- a/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb
+++ b/spec/services/merge_requests/reload_merge_head_diff_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe MergeRequests::ReloadMergeHeadDiffService do
describe '#execute' do
before do
MergeRequests::MergeToRefService
- .new(merge_request.project, merge_request.author)
+ .new(project: merge_request.project, current_user: merge_request.author)
.execute(merge_request)
end
diff --git a/spec/services/merge_requests/remove_approval_service_spec.rb b/spec/services/merge_requests/remove_approval_service_spec.rb
index 4ef2da290e1..ef6a0ec69bd 100644
--- a/spec/services/merge_requests/remove_approval_service_spec.rb
+++ b/spec/services/merge_requests/remove_approval_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe MergeRequests::RemoveApprovalService do
let(:merge_request) { create(:merge_request, source_project: project) }
let!(:existing_approval) { create(:approval, merge_request: merge_request) }
- subject(:service) { described_class.new(project, user) }
+ subject(:service) { described_class.new(project: project, current_user: user) }
def execute!
service.execute(merge_request)
diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb
index 8541d597581..b9df31b6727 100644
--- a/spec/services/merge_requests/reopen_service_spec.rb
+++ b/spec/services/merge_requests/reopen_service_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe MergeRequests::ReopenService do
it_behaves_like 'merge request reviewers cache counters invalidator'
context 'valid params' do
- let(:service) { described_class.new(project, user, {}) }
+ let(:service) { described_class.new(project: project, current_user: user) }
before do
allow(service).to receive(:execute_hooks)
@@ -65,7 +65,7 @@ RSpec.describe MergeRequests::ReopenService do
it 'caches merge request closing issues' do
expect(merge_request).to receive(:cache_merge_request_closes_issues!)
- described_class.new(project, user, {}).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
it 'updates metrics' do
@@ -78,7 +78,7 @@ RSpec.describe MergeRequests::ReopenService do
expect(service).to receive(:reopen)
- described_class.new(project, user, {}).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
it 'calls the merge request activity counter' do
@@ -86,11 +86,11 @@ RSpec.describe MergeRequests::ReopenService do
.to receive(:track_reopen_mr_action)
.with(user: user)
- described_class.new(project, user, {}).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
it 'refreshes the number of open merge requests for a valid MR' do
- service = described_class.new(project, user, {})
+ service = described_class.new(project: project, current_user: user)
expect { service.execute(merge_request) }
.to change { project.open_merge_requests_count }.from(0).to(1)
@@ -99,7 +99,7 @@ RSpec.describe MergeRequests::ReopenService do
context 'current user is not authorized to reopen merge request' do
before do
perform_enqueued_jobs do
- @merge_request = described_class.new(project, guest).execute(merge_request)
+ @merge_request = described_class.new(project: project, current_user: guest).execute(merge_request)
end
end
diff --git a/spec/services/merge_requests/request_review_service_spec.rb b/spec/services/merge_requests/request_review_service_spec.rb
index 5cb4120852a..8bc31df605c 100644
--- a/spec/services/merge_requests/request_review_service_spec.rb
+++ b/spec/services/merge_requests/request_review_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe MergeRequests::RequestReviewService do
let(:merge_request) { create(:merge_request, reviewers: [user]) }
let(:reviewer) { merge_request.find_reviewer(user) }
let(:project) { merge_request.project }
- let(:service) { described_class.new(project, current_user) }
+ let(:service) { described_class.new(project: project, current_user: current_user) }
let(:result) { service.execute(merge_request, user) }
let(:todo_service) { spy('todo service') }
let(:notification_service) { spy('notification service') }
@@ -26,7 +26,7 @@ RSpec.describe MergeRequests::RequestReviewService do
describe '#execute' do
describe 'invalid permissions' do
- let(:service) { described_class.new(project, create(:user)) }
+ let(:service) { described_class.new(project: project, current_user: create(:user)) }
it 'returns an error' do
expect(result[:status]).to eq :error
diff --git a/spec/services/merge_requests/resolve_todos_service_spec.rb b/spec/services/merge_requests/resolve_todos_service_spec.rb
index 3e6f2ea3f5d..53bd259f0f4 100644
--- a/spec/services/merge_requests/resolve_todos_service_spec.rb
+++ b/spec/services/merge_requests/resolve_todos_service_spec.rb
@@ -23,18 +23,6 @@ RSpec.describe MergeRequests::ResolveTodosService do
async_execute
end
-
- context 'when resolve_merge_request_todos_async feature is disabled' do
- before do
- stub_feature_flags(resolve_merge_request_todos_async: false)
- end
-
- it 'calls #execute' do
- expect(service).to receive(:execute)
-
- async_execute
- end
- end
end
describe '#execute' do
diff --git a/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
index 874cf66659a..74f3a1b06fc 100644
--- a/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
+++ b/spec/services/merge_requests/resolved_discussion_notification_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe MergeRequests::ResolvedDiscussionNotificationService do
let(:user) { create(:user) }
let(:project) { merge_request.project }
- subject { described_class.new(project, user) }
+ subject { described_class.new(project: project, current_user: user) }
describe "#execute" do
context "when not all discussions are resolved" do
diff --git a/spec/services/merge_requests/retarget_chain_service_spec.rb b/spec/services/merge_requests/retarget_chain_service_spec.rb
index 3937fbe58c3..87bde4a1400 100644
--- a/spec/services/merge_requests/retarget_chain_service_spec.rb
+++ b/spec/services/merge_requests/retarget_chain_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe MergeRequests::RetargetChainService do
let_it_be(:merge_request, reload: true) { create(:merge_request, assignees: [user]) }
let_it_be(:project) { merge_request.project }
- subject { described_class.new(project, user).execute(merge_request) }
+ subject { described_class.new(project: project, current_user: user).execute(merge_request) }
before do
project.add_maintainer(user)
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index acbd0a42fcd..149748cdabc 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe MergeRequests::SquashService do
include GitHelpers
- let(:service) { described_class.new(project, user, { merge_request: merge_request }) }
+ let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request }) }
let(:user) { project.owner }
let(:project) { create(:project, :repository) }
let(:repository) { project.repository.raw }
@@ -62,7 +62,7 @@ RSpec.describe MergeRequests::SquashService do
end
it 'will still perform the squash when a custom squash commit message has been provided' do
- service = described_class.new(project, user, { merge_request: merge_request, squash_commit_message: 'A custom commit message' })
+ service = described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: 'A custom commit message' })
expect(merge_request.target_project.repository).to receive(:squash).and_return('sha')
@@ -98,7 +98,7 @@ RSpec.describe MergeRequests::SquashService do
end
context 'if a message was provided' do
- let(:service) { described_class.new(project, user, { merge_request: merge_request, squash_commit_message: message }) }
+ let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: message }) }
let(:message) { 'My custom message' }
let(:squash_sha) { service.execute[:squash_sha] }
diff --git a/spec/services/merge_requests/update_assignees_service_spec.rb b/spec/services/merge_requests/update_assignees_service_spec.rb
index de03aab5418..076161c9029 100644
--- a/spec/services/merge_requests/update_assignees_service_spec.rb
+++ b/spec/services/merge_requests/update_assignees_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
project.add_developer(user3)
end
- let(:service) { described_class.new(project, user, opts) }
+ let(:service) { described_class.new(project: project, current_user: user, params: opts) }
let(:opts) { { assignee_ids: [user2.id] } }
describe 'execute' do
@@ -36,8 +36,24 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
end
context 'when the parameters are valid' do
+ context 'when using sentinel values' do
+ let(:opts) { { assignee_ids: [0] } }
+
+ it 'removes all assignees' do
+ expect { update_merge_request }.to change(merge_request, :assignees).to([])
+ end
+ end
+
+ context 'when the assignee_ids parameter is an empty list' do
+ let(:opts) { { assignee_ids: [] } }
+
+ it 'removes all assignees' do
+ expect { update_merge_request }.to change(merge_request, :assignees).to([])
+ end
+ end
+
it 'updates the MR, and queues the more expensive work for later' do
- expect_next(MergeRequests::HandleAssigneesChangeService, project, user) do |service|
+ expect_next(MergeRequests::HandleAssigneesChangeService, project: project, current_user: user) do |service|
expect(service)
.to receive(:async_execute)
.with(merge_request, [user3], execute_hooks: true)
@@ -56,7 +72,7 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
end
it 'is more efficient than using the full update-service' do
- allow_next(MergeRequests::HandleAssigneesChangeService, project, user) do |service|
+ allow_next(MergeRequests::HandleAssigneesChangeService, project: project, current_user: user) do |service|
expect(service)
.to receive(:async_execute)
.with(merge_request, [user3], execute_hooks: true)
@@ -69,7 +85,7 @@ RSpec.describe MergeRequests::UpdateAssigneesService do
source_project: merge_request.project,
author: merge_request.author)
- update_service = ::MergeRequests::UpdateService.new(project, user, opts)
+ update_service = ::MergeRequests::UpdateService.new(project: project, current_user: user, params: opts)
expect { service.execute(merge_request) }
.to issue_fewer_queries_than { update_service.execute(other_mr) }
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 8c010855eb2..a85fbd77d70 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
def update_merge_request(opts)
- @merge_request = MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ @merge_request = MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
@merge_request.reload
end
@@ -64,7 +64,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
}
end
- let(:service) { described_class.new(project, current_user, opts) }
+ let(:service) { described_class.new(project: project, current_user: current_user, params: opts) }
let(:current_user) { user }
before do
@@ -99,7 +99,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_description_edit_action).once.with(user: user)
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request2)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request2)
end
it 'tracks Draft/WIP marking' do
@@ -108,7 +108,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:title] = "WIP: #{opts[:title]}"
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request2)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request2)
end
it 'tracks Draft/WIP un-marking' do
@@ -117,7 +117,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:title] = "Non-draft/wip title string"
- MergeRequests::UpdateService.new(project, user, opts).execute(draft_merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(draft_merge_request)
end
context 'when MR is locked' do
@@ -128,7 +128,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:discussion_locked] = true
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
end
@@ -139,7 +139,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:discussion_locked] = false
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
end
end
@@ -154,7 +154,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:discussion_locked] = false
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
end
@@ -165,7 +165,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:discussion_locked] = true
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
end
end
@@ -184,7 +184,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
spent_at: Date.parse('2021-02-24')
}
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
it 'tracks milestone change' do
@@ -193,7 +193,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:milestone] = milestone
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
it 'tracks labels change' do
@@ -202,7 +202,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:label_ids] = [label2.id]
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
context 'reviewers' do
@@ -213,7 +213,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:reviewers] = [user2]
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
end
@@ -224,7 +224,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts[:reviewers] = merge_request.reviewers
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
+ MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request)
end
end
end
@@ -439,7 +439,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
let(:milestone) { create(:milestone, project: project) }
let(:req_opts) { { source_branch: 'feature', target_branch: 'master' } }
- subject { MergeRequests::UpdateService.new(project, user, opts).execute(merge_request) }
+ subject { MergeRequests::UpdateService.new(project: project, current_user: user, params: opts).execute(merge_request) }
context 'when mentionable attributes change' do
let(:opts) { { description: "Description with #{user.to_reference}" }.merge(req_opts) }
@@ -486,7 +486,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
}
end
- let(:service) { described_class.new(project, user, opts) }
+ let(:service) { described_class.new(project: project, current_user: user, params: opts) }
context 'without pipeline' do
before do
@@ -547,7 +547,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
context 'with a non-authorised user' do
let(:visitor) { create(:user) }
- let(:service) { described_class.new(project, visitor, opts) }
+ let(:service) { described_class.new(project: project, current_user: visitor, params: opts) }
before do
merge_request.update_attribute(:merge_error, 'Error')
@@ -805,7 +805,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts = { title: 'New title' }
perform_enqueued_jobs do
- @merge_request = described_class.new(project, user, opts).execute(merge_request)
+ @merge_request = described_class.new(project: project, current_user: user, params: opts).execute(merge_request)
end
should_email(subscriber)
@@ -818,7 +818,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts = { title: 'Draft: New title' }
perform_enqueued_jobs do
- @merge_request = described_class.new(project, user, opts).execute(merge_request)
+ @merge_request = described_class.new(project: project, current_user: user, params: opts).execute(merge_request)
end
should_not_email(subscriber)
@@ -840,7 +840,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts = { label_ids: [label.id] }
perform_enqueued_jobs do
- @merge_request = described_class.new(project, user, opts).execute(merge_request)
+ @merge_request = described_class.new(project: project, current_user: user, params: opts).execute(merge_request)
end
should_email(subscriber)
@@ -856,7 +856,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts = { label_ids: [label.id, label2.id] }
perform_enqueued_jobs do
- @merge_request = described_class.new(project, user, opts).execute(merge_request)
+ @merge_request = described_class.new(project: project, current_user: user, params: opts).execute(merge_request)
end
should_not_email(subscriber)
@@ -867,7 +867,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
opts = { label_ids: [label2.id] }
perform_enqueued_jobs do
- @merge_request = described_class.new(project, user, opts).execute(merge_request)
+ @merge_request = described_class.new(project: project, current_user: user, params: opts).execute(merge_request)
end
should_not_email(subscriber)
@@ -933,7 +933,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
it 'creates a `MergeRequestsClosingIssues` record for each issue' do
issue_closing_opts = { description: "Closes #{first_issue.to_reference} and #{second_issue.to_reference}" }
- service = described_class.new(project, user, issue_closing_opts)
+ service = described_class.new(project: project, current_user: user, params: issue_closing_opts)
allow(service).to receive(:execute_hooks)
service.execute(merge_request)
@@ -945,7 +945,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
create(:merge_requests_closing_issues, issue: first_issue, merge_request: merge_request)
create(:merge_requests_closing_issues, issue: second_issue, merge_request: merge_request)
- service = described_class.new(project, user, description: "not closing any issues")
+ service = described_class.new(project: project, current_user: user, params: { description: "not closing any issues" })
allow(service).to receive(:execute_hooks)
service.execute(merge_request.reload)
@@ -1002,7 +1002,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
it 'unassigns assignee when user id is 0' do
merge_request.update!(assignee_ids: [user.id])
- expect_next_instance_of(MergeRequests::HandleAssigneesChangeService, project, user) do |service|
+ expect_next_instance_of(MergeRequests::HandleAssigneesChangeService, project: project, current_user: user) do |service|
expect(service)
.to receive(:async_execute)
.with(merge_request, [user])
@@ -1014,7 +1014,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
it 'saves assignee when user id is valid' do
- expect_next_instance_of(MergeRequests::HandleAssigneesChangeService, project, user) do |service|
+ expect_next_instance_of(MergeRequests::HandleAssigneesChangeService, project: project, current_user: user) do |service|
expect(service)
.to receive(:async_execute)
.with(merge_request, [user3])
@@ -1052,6 +1052,35 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
end
+ context 'when adding time spent' do
+ let(:spend_time) { { duration: 1800, user_id: user3.id } }
+
+ context ':use_specialized_service' do
+ context 'when true' do
+ it 'passes the update action to ::MergeRequests::AddSpentTimeService' do
+ expect(::MergeRequests::AddSpentTimeService)
+ .to receive(:new).and_call_original
+
+ update_merge_request(spend_time: spend_time, use_specialized_service: true)
+ end
+ end
+
+ context 'when false or nil' do
+ before do
+ expect(::MergeRequests::AddSpentTimeService).not_to receive(:new)
+ end
+
+ it 'does not pass the update action to ::MergeRequests::AddSpentTimeService when false' do
+ update_merge_request(spend_time: spend_time, use_specialized_service: false)
+ end
+
+ it 'does not pass the update action to ::MergeRequests::AddSpentTimeService when nil' do
+ update_merge_request(spend_time: spend_time, use_specialized_service: nil)
+ end
+ end
+ end
+ end
+
include_examples 'issuable update service' do
let(:open_issuable) { merge_request }
let(:closed_issuable) { create(:closed_merge_request, source_project: project) }
@@ -1145,7 +1174,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
it_behaves_like 'issuable record that supports quick actions' do
let(:existing_merge_request) { create(:merge_request, source_project: project) }
- let(:issuable) { described_class.new(project, user, params).execute(existing_merge_request) }
+ let(:issuable) { described_class.new(project: project, current_user: user, params: params).execute(existing_merge_request) }
end
end
end
diff --git a/spec/services/namespaces/package_settings/update_service_spec.rb b/spec/services/namespaces/package_settings/update_service_spec.rb
index fa0c58e4c9b..030bc03038e 100644
--- a/spec/services/namespaces/package_settings/update_service_spec.rb
+++ b/spec/services/namespaces/package_settings/update_service_spec.rb
@@ -32,7 +32,9 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService do
end
shared_examples 'updating the namespace package setting' do
- it_behaves_like 'updating the namespace package setting attributes', from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT' }, to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE' }
+ it_behaves_like 'updating the namespace package setting attributes',
+ from: { maven_duplicates_allowed: true, maven_duplicate_exception_regex: 'SNAPSHOT', generic_duplicates_allowed: true, generic_duplicate_exception_regex: 'foo' },
+ to: { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE', generic_duplicates_allowed: false, generic_duplicate_exception_regex: 'bar' }
it_behaves_like 'returning a success'
@@ -60,7 +62,12 @@ RSpec.describe ::Namespaces::PackageSettings::UpdateService do
context 'with existing namespace package setting' do
let_it_be(:package_settings) { create(:namespace_package_setting, namespace: namespace) }
- let_it_be(:params) { { maven_duplicates_allowed: false, maven_duplicate_exception_regex: 'RELEASE' } }
+ let_it_be(:params) do
+ { maven_duplicates_allowed: false,
+ maven_duplicate_exception_regex: 'RELEASE',
+ generic_duplicates_allowed: false,
+ generic_duplicate_exception_regex: 'bar' }
+ end
where(:user_role, :shared_examples_name) do
:maintainer | 'updating the namespace package setting'
diff --git a/spec/services/notes/build_service_spec.rb b/spec/services/notes/build_service_spec.rb
index deeab66c4e9..b7b08390dcd 100644
--- a/spec/services/notes/build_service_spec.rb
+++ b/spec/services/notes/build_service_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe Notes::BuildService do
let(:user) { create(:user) }
it 'returns `Discussion to reply to cannot be found` error' do
- expect(new_note.errors.first).to include("Discussion to reply to cannot be found")
+ expect(new_note.errors.added?(:base, "Discussion to reply to cannot be found")).to be true
end
end
end
diff --git a/spec/services/notes/copy_service_spec.rb b/spec/services/notes/copy_service_spec.rb
index fd44aa7cf40..d9b6bafd7ff 100644
--- a/spec/services/notes/copy_service_spec.rb
+++ b/spec/services/notes/copy_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Notes::CopyService do
let_it_be(:noteable) { create(:issue) }
it 'validates that we cannot copy notes to the same Noteable' do
- expect { described_class.new(noteable, noteable) }.to raise_error(ArgumentError)
+ expect { described_class.new(nil, noteable, noteable) }.to raise_error(ArgumentError)
end
end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index d28cb118529..31263feb947 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -176,7 +176,7 @@ RSpec.describe Notes::CreateService do
end
it 'note is associated with a note diff file' do
- MergeRequests::MergeToRefService.new(merge_request.project, merge_request.author).execute(merge_request)
+ MergeRequests::MergeToRefService.new(project: merge_request.project, current_user: merge_request.author).execute(merge_request)
note = described_class.new(project_with_repo, user, new_opts).execute
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index c098500b78a..9692bb08379 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -103,6 +103,30 @@ RSpec.describe Notes::QuickActionsService do
expect(Timelog.last.note_id).to eq(note.id)
end
end
+
+ context 'adds a system note' do
+ context 'when not specifying a date' do
+ let(:note_text) { "/spend 1h" }
+
+ it 'does not include the date' do
+ _, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
+
+ expect(Note.last.note).to eq('added 1h of time spent')
+ end
+ end
+
+ context 'when specifying a date' do
+ let(:note_text) { "/spend 1h 2020-01-01" }
+
+ it 'does include the date' do
+ _, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
+
+ expect(Note.last.note).to eq('added 1h of time spent at 2020-01-01')
+ end
+ end
+ end
end
end
@@ -214,25 +238,25 @@ RSpec.describe Notes::QuickActionsService do
end
end
- describe '.noteable_update_service' do
+ describe '.noteable_update_service_class' do
include_context 'note on noteable'
it 'returns Issues::UpdateService for a note on an issue' do
note = create(:note_on_issue, project: project)
- expect(described_class.noteable_update_service(note)).to eq(Issues::UpdateService)
+ expect(described_class.noteable_update_service_class(note)).to eq(Issues::UpdateService)
end
it 'returns MergeRequests::UpdateService for a note on a merge request' do
note = create(:note_on_merge_request, project: project)
- expect(described_class.noteable_update_service(note)).to eq(MergeRequests::UpdateService)
+ expect(described_class.noteable_update_service_class(note)).to eq(MergeRequests::UpdateService)
end
it 'returns Commits::TagService for a note on a commit' do
note = create(:note_on_commit, project: project)
- expect(described_class.noteable_update_service(note)).to eq(Commits::TagService)
+ expect(described_class.noteable_update_service_class(note)).to eq(Commits::TagService)
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 6eff768eac2..c3a0766cb17 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -412,7 +412,7 @@ RSpec.describe NotificationService, :mailer do
it_should_not_email!
end
- context 'do exist' do
+ context 'do exist and note is not confidential' do
let!(:issue_email_participant) { issue.issue_email_participants.create!(email: 'service.desk@example.com') }
before do
@@ -422,6 +422,18 @@ RSpec.describe NotificationService, :mailer do
it_should_email!
end
+
+ context 'do exist and note is confidential' do
+ let(:note) { create(:note, noteable: issue, project: project, confidential: true) }
+ let!(:issue_email_participant) { issue.issue_email_participants.create!(email: 'service.desk@example.com') }
+
+ before do
+ issue.update!(external_author: 'service.desk@example.com')
+ project.update!(service_desk_enabled: true)
+ end
+
+ it_should_not_email!
+ end
end
describe '#new_note' do
diff --git a/spec/services/packages/debian/generate_distribution_key_service_spec.rb b/spec/services/packages/debian/generate_distribution_key_service_spec.rb
new file mode 100644
index 00000000000..b31830c2d3b
--- /dev/null
+++ b/spec/services/packages/debian/generate_distribution_key_service_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::GenerateDistributionKeyService do
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { {} }
+
+ subject { described_class.new(current_user: user, params: params) }
+
+ let(:response) { subject.execute }
+
+ context 'with a user' do
+ it 'returns a Hash', :aggregate_failures do
+ expect(GPGME::Ctx).to receive(:new).with(armor: true, offline: true).and_call_original
+ expect(User).to receive(:random_password).with(no_args).and_call_original
+
+ expect(response).to be_a Hash
+ expect(response.keys).to contain_exactly(:private_key, :public_key, :fingerprint, :passphrase)
+ expect(response[:private_key]).to start_with('-----BEGIN PGP PRIVATE KEY BLOCK-----')
+ expect(response[:public_key]).to start_with('-----BEGIN PGP PUBLIC KEY BLOCK-----')
+ expect(response[:fingerprint].length).to eq(40)
+ expect(response[:passphrase].length).to be > 10
+ end
+ end
+
+ context 'without a user' do
+ let(:user) { nil }
+
+ it 'raises an ArgumentError' do
+ expect { response }.to raise_error(ArgumentError, 'Please provide a user')
+ end
+ end
+end
diff --git a/spec/services/packages/debian/generate_distribution_service_spec.rb b/spec/services/packages/debian/generate_distribution_service_spec.rb
new file mode 100644
index 00000000000..0547d18c8bc
--- /dev/null
+++ b/spec/services/packages/debian/generate_distribution_service_spec.rb
@@ -0,0 +1,182 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::GenerateDistributionService do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+ let_it_be(:project_distribution) { create("debian_project_distribution", container: project, codename: 'unstable', valid_time_duration_seconds: 48.hours.to_i) }
+
+ let_it_be(:incoming) { create(:debian_incoming, project: project) }
+
+ before_all do
+ ::Packages::Debian::ProcessChangesService.new(incoming.package_files.last, nil).execute
+ end
+
+ let(:service) { described_class.new(distribution) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ shared_examples 'Generate Distribution' do |container_type|
+ context "for #{container_type}" do
+ if container_type == :group
+ let_it_be(:container) { group }
+ let_it_be(:distribution, reload: true) { create('debian_group_distribution', container: group, codename: 'unstable', valid_time_duration_seconds: 48.hours.to_i) }
+ else
+ let_it_be(:container) { project }
+ let_it_be(:distribution, reload: true) { project_distribution }
+ end
+
+ context 'with components and architectures' do
+ let_it_be(:component_main ) { create("debian_#{container_type}_component", distribution: distribution, name: 'main') }
+ let_it_be(:component_contrib) { create("debian_#{container_type}_component", distribution: distribution, name: 'contrib') }
+
+ let_it_be(:architecture_all ) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'all') }
+ let_it_be(:architecture_amd64) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'amd64') }
+ let_it_be(:architecture_arm64) { create("debian_#{container_type}_architecture", distribution: distribution, name: 'arm64') }
+
+ let_it_be(:component_file1) { create("debian_#{container_type}_component_file", component: component_main, architecture: architecture_all, created_at: '2020-01-24T09:00:00.000Z') } # destroyed
+ let_it_be(:component_file2) { create("debian_#{container_type}_component_file", component: component_main, architecture: architecture_amd64, created_at: '2020-01-24T10:29:59.000Z') } # destroyed
+ let_it_be(:component_file3) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_all, created_at: '2020-01-24T10:30:00.000Z') } # kept
+ let_it_be(:component_file4) { create("debian_#{container_type}_component_file", component: component_contrib, architecture: architecture_amd64, created_at: '2020-01-24T11:30:00.000Z') } # kept
+
+ def check_component_file(component_name, component_file_type, architecture_name, expected_content)
+ component_file = distribution
+ .component_files
+ .with_component_name(component_name)
+ .with_file_type(component_file_type)
+ .with_architecture_name(architecture_name)
+ .last
+
+ expect(component_file).not_to be_nil
+ expect(component_file.file.exists?).to eq(!expected_content.nil?)
+
+ unless expected_content.nil?
+ component_file.file.use_file do |file_path|
+ expect(File.read(file_path)).to eq(expected_content)
+ end
+ end
+ end
+
+ it 'updates distribution and component files', :aggregate_failures do
+ travel_to(Time.utc(2020, 01, 25, 15, 17, 18, 123456)) do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and change { distribution.component_files.count }.from(4).to(2 + 6)
+
+ expected_main_amd64_content = <<~EOF
+ Package: libsample0
+ Source: sample
+ Version: 1.2.3~alpha2
+ Installed-Size: 7
+ Maintainer: John Doe <john.doe@example.com>
+ Architecture: amd64
+ Description: Some mostly empty lib
+ Used in GitLab tests.
+ .
+ Testing another paragraph.
+ Multi-Arch: same
+ Homepage: https://gitlab.com/
+ Section: libs
+ Priority: optional
+ Filename: pool/unstable/#{project.id}/s/sample/libsample0_1.2.3~alpha2_amd64.deb
+ Size: 409600
+ MD5sum: fb0842b21adc44207996296fe14439dd
+ SHA256: 1c383a525bfcba619c7305ccd106d61db501a6bbaf0003bf8d0c429fbdb7fcc1
+
+ Package: sample-dev
+ Source: sample (1.2.3~alpha2)
+ Version: 1.2.3~binary
+ Installed-Size: 7
+ Maintainer: John Doe <john.doe@example.com>
+ Architecture: amd64
+ Depends: libsample0 (= 1.2.3~binary)
+ Description: Some mostly empty developpement files
+ Used in GitLab tests.
+ .
+ Testing another paragraph.
+ Multi-Arch: same
+ Homepage: https://gitlab.com/
+ Section: libdevel
+ Priority: optional
+ Filename: pool/unstable/#{project.id}/s/sample/sample-dev_1.2.3~binary_amd64.deb
+ Size: 409600
+ MD5sum: d2afbd28e4d74430d22f9504e18bfdf5
+ SHA256: 9fbeee2191ce4dab5288fad5ecac1bd369f58fef9a992a880eadf0caf25f086d
+ EOF
+
+ check_component_file('main', :packages, 'all', nil)
+ check_component_file('main', :packages, 'amd64', expected_main_amd64_content)
+ check_component_file('main', :packages, 'arm64', nil)
+
+ check_component_file('contrib', :packages, 'all', nil)
+ check_component_file('contrib', :packages, 'amd64', nil)
+ check_component_file('contrib', :packages, 'arm64', nil)
+
+ size = expected_main_amd64_content.length
+ md5sum = Digest::MD5.hexdigest(expected_main_amd64_content)
+ sha256 = Digest::SHA256.hexdigest(expected_main_amd64_content)
+
+ expected_release_content = <<~EOF
+ Codename: unstable
+ Date: Sat, 25 Jan 2020 15:17:18 +0000
+ Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
+ Architectures: all amd64 arm64
+ Components: contrib main
+ MD5Sum:
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-all/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-amd64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 contrib/binary-arm64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 main/binary-all/Packages
+ #{md5sum} #{size} main/binary-amd64/Packages
+ d41d8cd98f00b204e9800998ecf8427e 0 main/binary-arm64/Packages
+ SHA256:
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-all/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-amd64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 contrib/binary-arm64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-all/Packages
+ #{sha256} #{size} main/binary-amd64/Packages
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 main/binary-arm64/Packages
+ EOF
+
+ distribution.file.use_file do |file_path|
+ expect(File.read(file_path)).to eq(expected_release_content)
+ end
+ end
+ end
+ end
+
+ context 'without components and architectures' do
+ it 'updates the distribution file', :aggregate_failures do
+ travel_to(Time.utc(2020, 01, 25, 15, 17, 18, 123456)) do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and not_change { distribution.component_files.count }
+
+ expected_release_content = <<~EOF
+ Codename: unstable
+ Date: Sat, 25 Jan 2020 15:17:18 +0000
+ Valid-Until: Mon, 27 Jan 2020 15:17:18 +0000
+ MD5Sum:
+ SHA256:
+ EOF
+
+ distribution.file.use_file do |file_path|
+ expect(File.read(file_path)).to eq(expected_release_content)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ it_behaves_like 'Generate Distribution', :project
+ it_behaves_like 'Generate Distribution', :group
+ end
+end
diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb
index 98b531bde10..f23471659bc 100644
--- a/spec/services/packages/debian/process_changes_service_spec.rb
+++ b/spec/services/packages/debian/process_changes_service_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Packages::Debian::ProcessChangesService do
.to change { Packages::Package.count }.from(1).to(2)
.and not_change { Packages::PackageFile.count }
.and change { incoming.package_files.count }.from(7).to(0)
+ .and change { package_file.debian_file_metadatum&.reload&.file_type }.from('unknown').to('changes')
created_package = Packages::Package.last
expect(created_package.name).to eq 'sample'
diff --git a/spec/services/packages/generic/create_package_file_service_spec.rb b/spec/services/packages/generic/create_package_file_service_spec.rb
index 10c54369f26..1c9eb53cfc7 100644
--- a/spec/services/packages/generic/create_package_file_service_spec.rb
+++ b/spec/services/packages/generic/create_package_file_service_spec.rb
@@ -6,13 +6,16 @@ RSpec.describe Packages::Generic::CreatePackageFileService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:pipeline) { create(:ci_pipeline, user: user) }
+ let_it_be(:file_name) { 'myfile.tar.gz.1' }
+
let(:build) { double('build', pipeline: pipeline) }
describe '#execute' do
+ let_it_be(:package) { create(:generic_package, project: project) }
+
let(:sha256) { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
let(:temp_file) { Tempfile.new("test") }
let(:file) { UploadedFile.new(temp_file.path, sha256: sha256) }
- let(:package) { create(:generic_package, project: project) }
let(:package_service) { double }
let(:params) do
@@ -20,7 +23,7 @@ RSpec.describe Packages::Generic::CreatePackageFileService do
package_name: 'mypackage',
package_version: '0.0.1',
file: file,
- file_name: 'myfile.tar.gz.1',
+ file_name: file_name,
build: build
}
end
@@ -34,7 +37,7 @@ RSpec.describe Packages::Generic::CreatePackageFileService do
}
end
- subject { described_class.new(project, user, params).execute }
+ subject(:execute_service) { described_class.new(project, user, params).execute }
before do
FileUtils.touch(temp_file)
@@ -47,14 +50,14 @@ RSpec.describe Packages::Generic::CreatePackageFileService do
end
it 'creates package file', :aggregate_failures do
- expect { subject }.to change { package.package_files.count }.by(1)
+ expect { execute_service }.to change { package.package_files.count }.by(1)
.and change { Packages::PackageFileBuildInfo.count }.by(1)
package_file = package.package_files.last
aggregate_failures do
expect(package_file.package.status).to eq('default')
expect(package_file.package).to eq(package)
- expect(package_file.file_name).to eq('myfile.tar.gz.1')
+ expect(package_file.file_name).to eq(file_name)
expect(package_file.size).to eq(file.size)
expect(package_file.file_sha256).to eq(sha256)
end
@@ -65,7 +68,7 @@ RSpec.describe Packages::Generic::CreatePackageFileService do
let(:package_params) { super().merge(status: 'hidden') }
it 'updates the status of an existing package' do
- expect { subject }.to change { package.package_files.count }.by(1)
+ expect { execute_service }.to change { package.package_files.count }.by(1)
.and change { Packages::PackageFileBuildInfo.count }.by(1)
package_file = package.package_files.last
@@ -76,5 +79,32 @@ RSpec.describe Packages::Generic::CreatePackageFileService do
end
it_behaves_like 'assigns build to package file'
+
+ context 'with existing package' do
+ before do
+ create(:package_file, package: package, file_name: file_name)
+ end
+
+ it { expect { execute_service }.to change { project.package_files.count }.by(1) }
+
+ context 'when duplicates are not allowed' do
+ before do
+ package.project.namespace.package_settings.update!(generic_duplicates_allowed: false)
+ end
+
+ it 'does not allow duplicates' do
+ expect { execute_service }.to raise_error(::Packages::DuplicatePackageError)
+ .and change { project.package_files.count }.by(0)
+ end
+
+ context 'when the package name matches the exception regex' do
+ before do
+ package.project.namespace.package_settings.update!(generic_duplicate_exception_regex: '.*')
+ end
+
+ it { expect { execute_service }.to change { project.package_files.count }.by(1) }
+ end
+ end
+ end
end
end
diff --git a/spec/services/packages/maven/find_or_create_package_service_spec.rb b/spec/services/packages/maven/find_or_create_package_service_spec.rb
index 2543ab0c669..803371af4bf 100644
--- a/spec/services/packages/maven/find_or_create_package_service_spec.rb
+++ b/spec/services/packages/maven/find_or_create_package_service_spec.rb
@@ -130,7 +130,15 @@ RSpec.describe Packages::Maven::FindOrCreatePackageService do
context 'when the package name matches the exception regex' do
before do
- package_settings.update!(maven_duplicate_exception_regex: '.*')
+ package_settings.update!(maven_duplicate_exception_regex: existing_package.name)
+ end
+
+ it_behaves_like 'reuse existing package'
+ end
+
+ context 'when the package version matches the exception regex' do
+ before do
+ package_settings.update!(maven_duplicate_exception_regex: existing_package.version)
end
it_behaves_like 'reuse existing package'
diff --git a/spec/services/packages/nuget/search_service_spec.rb b/spec/services/packages/nuget/search_service_spec.rb
index db758dc6672..1838065c5be 100644
--- a/spec/services/packages/nuget/search_service_spec.rb
+++ b/spec/services/packages/nuget/search_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Packages::Nuget::SearchService do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:project) { create(:project, namespace: subgroup) }
- let_it_be(:package_a) { create(:nuget_package, project: project, name: 'DummyPackageA') }
+ let_it_be_with_refind(:package_a) { create(:nuget_package, project: project, name: 'DummyPackageA') }
let_it_be(:packages_b) { create_list(:nuget_package, 5, project: project, name: 'DummyPackageB') }
let_it_be(:packages_c) { create_list(:nuget_package, 5, project: project, name: 'DummyPackageC') }
let_it_be(:package_d) { create(:nuget_package, project: project, name: 'FooBarD') }
@@ -79,6 +79,16 @@ RSpec.describe Packages::Nuget::SearchService do
it { expect_search_results 4, package_a, packages_b, packages_c, package_d }
end
+ context 'with non-displayable packages' do
+ let(:search_term) { '' }
+
+ before do
+ package_a.update_column(:status, 1)
+ end
+
+ it { expect_search_results 3, packages_b, packages_c, package_d }
+ end
+
context 'with prefix search term' do
let(:search_term) { 'dummy' }
diff --git a/spec/services/packages/rubygems/dependency_resolver_service_spec.rb b/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
index 206bffe53f8..78abfc96ed5 100644
--- a/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
+++ b/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Packages::Rubygems::DependencyResolverService do
]
}]
- expect(subject.payload).to eq(expected_result)
+ expect(subject.payload).to match_array(expected_result)
end
end
end
diff --git a/spec/services/packages/rubygems/process_gem_service_spec.rb b/spec/services/packages/rubygems/process_gem_service_spec.rb
index 83e868d9579..64deb39c6d8 100644
--- a/spec/services/packages/rubygems/process_gem_service_spec.rb
+++ b/spec/services/packages/rubygems/process_gem_service_spec.rb
@@ -16,12 +16,11 @@ RSpec.describe Packages::Rubygems::ProcessGemService do
describe '#execute' do
subject { service.execute }
- context 'no gem file', :aggregate_failures do
+ context 'no gem file' do
let(:package_file) { nil }
it 'returns an error' do
- expect(subject.error?).to be(true)
- expect(subject.message).to eq('Gem was not processed')
+ expect { subject }.to raise_error(::Packages::Rubygems::ProcessGemService::ExtractionError, 'Gem was not processed - package_file is not set')
end
end
diff --git a/spec/services/packages/terraform_module/create_package_service_spec.rb b/spec/services/packages/terraform_module/create_package_service_spec.rb
new file mode 100644
index 00000000000..f911bb5b82c
--- /dev/null
+++ b/spec/services/packages/terraform_module/create_package_service_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::TerraformModule::CreatePackageService do
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:project) { create(:project, namespace: namespace) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:sha256) { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ let_it_be(:temp_file) { Tempfile.new('test') }
+ let_it_be(:file) { UploadedFile.new(temp_file.path, sha256: sha256) }
+
+ let(:overrides) { {} }
+
+ let(:params) do
+ {
+ module_name: 'foo',
+ module_system: 'bar',
+ module_version: '1.0.1',
+ file: file,
+ file_name: 'foo-bar-1.0.1.tgz'
+ }.merge(overrides)
+ end
+
+ subject { described_class.new(project, user, params).execute }
+
+ describe '#execute' do
+ context 'valid package' do
+ it 'creates a package' do
+ expect { subject }
+ .to change { ::Packages::Package.count }.by(1)
+ .and change { ::Packages::Package.terraform_module.count }.by(1)
+ end
+ end
+
+ context 'package already exists elsewhere' do
+ let(:project2) { create(:project, namespace: namespace) }
+ let!(:existing_package) { create(:terraform_module_package, project: project2, name: 'foo/bar', version: '1.0.0') }
+
+ it { expect(subject[:http_status]).to eq 403 }
+ it { expect(subject[:message]).to eq 'Package already exists.' }
+ end
+
+ context 'version already exists' do
+ let!(:existing_version) { create(:terraform_module_package, project: project, name: 'foo/bar', version: '1.0.1') }
+
+ it { expect(subject[:http_status]).to eq 403 }
+ it { expect(subject[:message]).to eq 'Package version already exists.' }
+ end
+
+ context 'with empty version' do
+ let(:overrides) { { module_version: '' } }
+
+ it { expect(subject[:http_status]).to eq 400 }
+ it { expect(subject[:message]).to eq 'Version is empty.' }
+ end
+ end
+end
diff --git a/spec/services/post_receive_service_spec.rb b/spec/services/post_receive_service_spec.rb
index 033194972c7..2a78dc454c7 100644
--- a/spec/services/post_receive_service_spec.rb
+++ b/spec/services/post_receive_service_spec.rb
@@ -264,7 +264,7 @@ RSpec.describe PostReceiveService do
context "project path matches" do
before do
- allow(project).to receive(:full_path).and_return("/company/sekrit-project")
+ allow(project).to receive(:full_path).and_return("company/sekrit-project")
end
it "does output the latest scoped broadcast message" do
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index c272ce13132..feae8f3967c 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -3,77 +3,49 @@
require 'spec_helper'
RSpec.describe Projects::Alerting::NotifyService do
- let_it_be_with_reload(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:project) { create(:project) }
+
+ let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
+ let(:payload_raw) { {} }
+
+ let(:service) { described_class.new(project, payload) }
before do
- allow(ProjectServiceWorker).to receive(:perform_async)
+ stub_licensed_features(oncall_schedules: false, generic_alert_fingerprinting: false)
end
describe '#execute' do
- let(:token) { 'invalid-token' }
- let(:starts_at) { Time.current.change(usec: 0) }
- let(:fingerprint) { 'testing' }
- let(:service) { described_class.new(project, payload) }
- let_it_be(:environment) { create(:environment, project: project) }
- let(:environment) { create(:environment, project: project) }
- let(:ended_at) { nil }
- let(:payload_raw) do
- {
- title: 'alert title',
- start_time: starts_at.rfc3339,
- end_time: ended_at&.rfc3339,
- severity: 'low',
- monitoring_tool: 'GitLab RSpec',
- service: 'GitLab Test Suite',
- description: 'Very detailed description',
- hosts: ['1.1.1.1', '2.2.2.2'],
- fingerprint: fingerprint,
- gitlab_environment_name: environment.name
- }.with_indifferent_access
- end
+ include_context 'incident management settings enabled'
- let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
+ subject { service.execute(token, integration) }
- subject { service.execute(token, nil) }
+ context 'with HTTP integration' do
+ let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project) }
- shared_examples 'notifications are handled correctly' do
context 'with valid token' do
let(:token) { integration.token }
- let(:incident_management_setting) { double(send_email?: email_enabled, create_issue?: issue_enabled, auto_close_incident?: auto_close_enabled) }
- let(:email_enabled) { false }
- let(:issue_enabled) { false }
- let(:auto_close_enabled) { false }
-
- before do
- allow(service)
- .to receive(:incident_management_setting)
- .and_return(incident_management_setting)
- end
context 'with valid payload' do
- shared_examples 'assigns the alert properties' do
- it 'ensure that created alert has all data properly assigned' do
- subject
- expect(last_alert_attributes).to match(
- project_id: project.id,
- title: payload_raw.fetch(:title),
- started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
- severity: payload_raw.fetch(:severity),
- status: AlertManagement::Alert.status_value(:triggered),
- events: 1,
- domain: 'operations',
- hosts: payload_raw.fetch(:hosts),
- payload: payload_raw.with_indifferent_access,
- issue_id: nil,
- description: payload_raw.fetch(:description),
- monitoring_tool: payload_raw.fetch(:monitoring_tool),
- service: payload_raw.fetch(:service),
- fingerprint: Digest::SHA1.hexdigest(fingerprint),
- environment_id: environment.id,
- ended_at: nil,
- prometheus_alert_id: nil
- )
- end
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:fingerprint) { 'testing' }
+ let_it_be(:source) { 'GitLab RSpec' }
+ let_it_be(:starts_at) { Time.current.change(usec: 0) }
+
+ let(:ended_at) { nil }
+ let(:domain) { 'operations' }
+ let(:payload_raw) do
+ {
+ title: 'alert title',
+ start_time: starts_at.rfc3339,
+ end_time: ended_at&.rfc3339,
+ severity: 'low',
+ monitoring_tool: source,
+ service: 'GitLab Test Suite',
+ description: 'Very detailed description',
+ hosts: ['1.1.1.1', '2.2.2.2'],
+ fingerprint: fingerprint,
+ gitlab_environment_name: environment.name
+ }.with_indifferent_access
end
let(:last_alert_attributes) do
@@ -82,8 +54,8 @@ RSpec.describe Projects::Alerting::NotifyService do
.with_indifferent_access
end
- it_behaves_like 'creates an alert management alert'
- it_behaves_like 'assigns the alert properties'
+ it_behaves_like 'processes new firing alert'
+ it_behaves_like 'properly assigns the alert properties'
it 'passes the integration to alert processing' do
expect(Gitlab::AlertManagement::Payload)
@@ -94,101 +66,18 @@ RSpec.describe Projects::Alerting::NotifyService do
subject
end
- it 'creates a system note corresponding to alert creation' do
- expect { subject }.to change(Note, :count).by(1)
- expect(Note.last.note).to include(payload_raw.fetch(:monitoring_tool))
- end
-
- context 'existing alert with same fingerprint' do
- let(:fingerprint_sha) { Digest::SHA1.hexdigest(fingerprint) }
- let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint_sha) }
-
- it_behaves_like 'adds an alert management alert event'
-
- context 'end time given' do
- let(:ended_at) { Time.current.change(nsec: 0) }
-
- it 'does not resolve the alert' do
- expect { subject }.not_to change { alert.reload.status }
- end
-
- it 'does not set the ended at' do
- subject
-
- expect(alert.reload.ended_at).to be_nil
- end
-
- it_behaves_like 'does not an create alert management alert'
- it_behaves_like 'creates single system note based on the source of the alert'
-
- context 'auto_close_enabled setting enabled' do
- let(:auto_close_enabled) { true }
-
- it 'resolves the alert and sets the end time', :aggregate_failures do
- subject
- alert.reload
-
- expect(alert.resolved?).to eq(true)
- expect(alert.ended_at).to eql(ended_at)
- end
-
- it_behaves_like 'creates status-change system note for an auto-resolved alert'
-
- context 'related issue exists' do
- let(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint_sha) }
- let(:issue) { alert.issue }
-
- it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') }
- it { expect { subject }.to change(ResourceStateEvent, :count).by(1) }
- end
-
- context 'with issue enabled' do
- let(:issue_enabled) { true }
-
- it_behaves_like 'does not process incident issues'
- end
- end
- end
-
- context 'existing alert is resolved' do
- let!(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: fingerprint_sha) }
-
- it_behaves_like 'creates an alert management alert'
- it_behaves_like 'assigns the alert properties'
- end
-
- context 'existing alert is ignored' do
- let!(:alert) { create(:alert_management_alert, :ignored, project: project, fingerprint: fingerprint_sha) }
-
- it_behaves_like 'adds an alert management alert event'
- end
-
- context 'two existing alerts, one resolved one open' do
- let!(:resolved_existing_alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: fingerprint_sha) }
- let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint_sha) }
-
- it_behaves_like 'adds an alert management alert event'
- end
- end
-
- context 'end time given' do
- let(:ended_at) { Time.current }
-
- it_behaves_like 'creates an alert management alert'
- it_behaves_like 'assigns the alert properties'
- end
-
- context 'with a minimal payload' do
- let(:payload_raw) do
+ context 'with partial payload' do
+ let_it_be(:source) { integration.name }
+ let_it_be(:payload_raw) do
{
title: 'alert title',
start_time: starts_at.rfc3339
}
end
- it_behaves_like 'creates an alert management alert'
+ include_examples 'processes never-before-seen alert'
- it 'created alert has all data properly assigned' do
+ it 'assigns the alert properties' do
subject
expect(last_alert_attributes).to match(
@@ -212,7 +101,19 @@ RSpec.describe Projects::Alerting::NotifyService do
)
end
- it_behaves_like 'creates single system note based on the source of the alert'
+ context 'with existing alert with matching payload' do
+ let_it_be(:fingerprint) { payload_raw.except(:start_time).stringify_keys }
+ let_it_be(:gitlab_fingerprint) { Gitlab::AlertManagement::Fingerprint.generate(fingerprint) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, fingerprint: gitlab_fingerprint) }
+
+ include_examples 'processes never-before-seen alert'
+ end
+ end
+
+ context 'with resolving payload' do
+ let(:ended_at) { Time.current.change(usec: 0) }
+
+ it_behaves_like 'processes recovery alert'
end
end
@@ -223,63 +124,30 @@ RSpec.describe Projects::Alerting::NotifyService do
allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
end
- it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
- it_behaves_like 'does not an create alert management alert'
+ it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request
end
- it_behaves_like 'does not process incident issues'
-
- context 'issue enabled' do
- let(:issue_enabled) { true }
-
- it_behaves_like 'processes incident issues'
-
- context 'when alert already exists' do
- let(:fingerprint_sha) { Digest::SHA1.hexdigest(fingerprint) }
- let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint_sha) }
-
- context 'when existing alert does not have an associated issue' do
- it_behaves_like 'processes incident issues'
- end
-
- context 'when existing alert has an associated issue' do
- let!(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint_sha) }
-
- it_behaves_like 'does not process incident issues'
- end
+ context 'with inactive integration' do
+ before do
+ integration.update!(active: false)
end
- end
- context 'with emails turned on' do
- let(:email_enabled) { true }
-
- it_behaves_like 'Alert Notification Service sends notification email'
+ it_behaves_like 'alerts service responds with an error and takes no actions', :forbidden
end
end
context 'with invalid token' do
- it_behaves_like 'does not process incident issues due to error', http_status: :unauthorized
- it_behaves_like 'does not an create alert management alert'
- end
- end
-
- context 'with an HTTP Integration' do
- let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project) }
+ let(:token) { 'invalid-token' }
- subject { service.execute(token, integration) }
-
- it_behaves_like 'notifications are handled correctly' do
- let(:source) { integration.name }
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
end
+ end
- context 'with deactivated HTTP Integration' do
- before do
- integration.update!(active: false)
- end
+ context 'without HTTP integration' do
+ let(:integration) { nil }
+ let(:token) { nil }
- it_behaves_like 'does not process incident issues due to error', http_status: :forbidden
- it_behaves_like 'does not an create alert management alert'
- end
+ it_behaves_like 'alerts service responds with an error and takes no actions', :forbidden
end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index e0d6b9afcff..cd659bf5e60 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -273,16 +273,6 @@ RSpec.describe Projects::CreateService, '#execute' do
opts[:default_branch] = 'master'
expect(create_project(user, opts)).to eq(nil)
end
-
- it 'sets invalid service as inactive' do
- create(:service, type: 'JiraService', project: nil, template: true, active: true)
-
- project = create_project(user, opts)
- service = project.services.first
-
- expect(project).to be_persisted
- expect(service.active).to be false
- end
end
context 'wiki_enabled creates repository directory' do
@@ -574,18 +564,18 @@ RSpec.describe Projects::CreateService, '#execute' do
let!(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
it 'creates a service from the template' do
- expect(project.services.count).to eq(1)
- expect(project.services.first.api_url).to eq(template_integration.api_url)
- expect(project.services.first.inherit_from_id).to be_nil
+ expect(project.integrations.count).to eq(1)
+ expect(project.integrations.first.api_url).to eq(template_integration.api_url)
+ expect(project.integrations.first.inherit_from_id).to be_nil
end
context 'with an active instance-level integration' do
let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
it 'creates a service from the instance-level integration' do
- expect(project.services.count).to eq(1)
- expect(project.services.first.api_url).to eq(instance_integration.api_url)
- expect(project.services.first.inherit_from_id).to eq(instance_integration.id)
+ expect(project.integrations.count).to eq(1)
+ expect(project.integrations.first.api_url).to eq(instance_integration.api_url)
+ expect(project.integrations.first.inherit_from_id).to eq(instance_integration.id)
end
context 'with an active group-level integration' do
@@ -604,9 +594,9 @@ RSpec.describe Projects::CreateService, '#execute' do
end
it 'creates a service from the group-level integration' do
- expect(project.services.count).to eq(1)
- expect(project.services.first.api_url).to eq(group_integration.api_url)
- expect(project.services.first.inherit_from_id).to eq(group_integration.id)
+ expect(project.integrations.count).to eq(1)
+ expect(project.integrations.first.api_url).to eq(group_integration.api_url)
+ expect(project.integrations.first.inherit_from_id).to eq(group_integration.id)
end
context 'with an active subgroup' do
@@ -625,25 +615,14 @@ RSpec.describe Projects::CreateService, '#execute' do
end
it 'creates a service from the subgroup-level integration' do
- expect(project.services.count).to eq(1)
- expect(project.services.first.api_url).to eq(subgroup_integration.api_url)
- expect(project.services.first.inherit_from_id).to eq(subgroup_integration.id)
+ expect(project.integrations.count).to eq(1)
+ expect(project.integrations.first.api_url).to eq(subgroup_integration.api_url)
+ expect(project.integrations.first.inherit_from_id).to eq(subgroup_integration.id)
end
end
end
end
end
-
- context 'when there is an invalid integration' do
- before do
- create(:service, :template, type: 'DroneCiService', active: true)
- end
-
- it 'creates an inactive service' do
- expect(project).to be_persisted
- expect(project.services.first.active).to be false
- end
- end
end
context 'when skip_disk_validation is used' do
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index b2a68bbd0aa..ff582279d71 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -418,6 +418,54 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
end
end
+ context 'when project has webhooks' do
+ let!(:web_hook1) { create(:project_hook, project: project) }
+ let!(:web_hook2) { create(:project_hook, project: project) }
+ let!(:another_project_web_hook) { create(:project_hook) }
+ let!(:web_hook_log) { create(:web_hook_log, web_hook: web_hook1) }
+
+ it 'deletes webhooks and logs related to project' do
+ expect_next_instance_of(WebHooks::DestroyService, user) do |instance|
+ expect(instance).to receive(:sync_destroy).with(web_hook1).and_call_original
+ end
+ expect_next_instance_of(WebHooks::DestroyService, user) do |instance|
+ expect(instance).to receive(:sync_destroy).with(web_hook2).and_call_original
+ end
+
+ expect do
+ destroy_project(project, user)
+ end.to change(WebHook, :count).by(-2)
+ .and change(WebHookLog, :count).by(-1)
+ end
+
+ context 'when an error is raised deleting webhooks' do
+ before do
+ allow_next_instance_of(WebHooks::DestroyService) do |instance|
+ allow(instance).to receive(:sync_destroy).and_return(message: 'foo', status: :error)
+ end
+ end
+
+ it_behaves_like 'handles errors thrown during async destroy', "Failed to remove webhooks"
+ end
+
+ context 'when "destroy_webhooks_before_the_project" flag is disabled' do
+ before do
+ stub_feature_flags(destroy_webhooks_before_the_project: false)
+ end
+
+ it 'does not call WebHooks::DestroyService' do
+ expect(WebHooks::DestroyService).not_to receive(:new)
+
+ expect do
+ destroy_project(project, user)
+ end.to change(WebHook, :count).by(-2)
+ .and change(WebHookLog, :count).by(-1)
+
+ expect(another_project_web_hook.reload).to be
+ end
+ end
+ end
+
context 'error while destroying', :sidekiq_inline do
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:builds) { create_list(:ci_build, 2, :artifacts, pipeline: pipeline) }
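The webhook-deletion examples above rely on GitLab's expect_next_instance_of spec helper. Roughly, it intercepts the next call to .new on the given class so expectations can be placed on the instance it produces; a simplified sketch under that assumption (the real helper also verifies constructor arguments and supports multiple instances):

def expect_next_instance_of(klass)
  allow(klass).to receive(:new).and_wrap_original do |original, *args, &block|
    original.call(*args, &block).tap { |instance| yield(instance) }
  end
end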
diff --git a/spec/services/projects/housekeeping_service_spec.rb b/spec/services/projects/housekeeping_service_spec.rb
deleted file mode 100644
index 0aa4a1cd312..00000000000
--- a/spec/services/projects/housekeeping_service_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# This is a compatibility class to avoid calling a non-existent
-# class from sidekiq during deployment.
-#
-# We're deploying the name of the referenced class in 13.9. Nevertheless,
-# we cannot remove the class entirely because there can be jobs
-# referencing it. We still need this specs to ensure that the old
-# class still has the old behavior.
-#
-# We can get rid of this class in 13.10
-# https://gitlab.com/gitlab-org/gitlab/-/issues/297580
-#
-RSpec.describe Projects::HousekeepingService do
- it_behaves_like 'housekeeps repository' do
- let_it_be(:resource) { create(:project, :repository) }
- end
-end
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index e196220eabe..bfc8225b654 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -6,25 +6,26 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
include PrometheusHelpers
using RSpec::Parameterized::TableSyntax
- let_it_be(:project, reload: true) { create(:project) }
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be_with_refind(:setting) do
+ create(:project_incident_management_setting, project: project, send_email: true, create_issue: true)
+ end
let(:service) { described_class.new(project, payload) }
let(:token_input) { 'token' }
- let!(:setting) do
- create(:project_incident_management_setting, project: project, send_email: true, create_issue: true)
- end
-
- let(:subject) { service.execute(token_input) }
+ subject { service.execute(token_input) }
context 'with valid payload' do
let_it_be(:alert_firing) { create(:prometheus_alert, project: project) }
let_it_be(:alert_resolved) { create(:prometheus_alert, project: project) }
- let_it_be(:cluster) { create(:cluster, :provided_by_user, projects: [project]) }
+ let_it_be(:cluster, reload: true) { create(:cluster, :provided_by_user, projects: [project]) }
+
let(:payload_raw) { prometheus_alert_payload(firing: [alert_firing], resolved: [alert_resolved]) }
let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
let(:payload_alert_firing) { payload_raw['alerts'].first }
let(:token) { 'token' }
+ let(:source) { 'Prometheus' }
context 'with environment specific clusters' do
let(:prd_cluster) do
@@ -53,15 +54,15 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
context 'without token' do
let(:token_input) { nil }
- it_behaves_like 'Alert Notification Service sends notification email'
+ include_examples 'processes one firing and one resolved prometheus alerts'
end
context 'with token' do
- it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
end
end
- context 'with project specific cluster' do
+ context 'with project specific cluster using prometheus application' do
where(:cluster_enabled, :status, :configured_token, :token_input, :result) do
true | :installed | token | token | :success
true | :installed | nil | nil | :success
@@ -87,9 +88,43 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
case result = params[:result]
when :success
- it_behaves_like 'Alert Notification Service sends notification email'
+ include_examples 'processes one firing and one resolved prometheus alerts'
+ when :failure
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
+ else
+ raise "invalid result: #{result.inspect}"
+ end
+ end
+ end
+
+ context 'with project specific cluster using prometheus integration' do
+ where(:cluster_enabled, :integration_enabled, :configured_token, :token_input, :result) do
+ true | true | token | token | :success
+ true | true | nil | nil | :success
+ true | true | token | 'x' | :failure
+ true | true | token | nil | :failure
+ true | false | token | token | :failure
+ false | true | token | token | :failure
+ false | nil | nil | token | :failure
+ end
+
+ with_them do
+ before do
+ cluster.update!(enabled: cluster_enabled)
+
+ unless integration_enabled.nil?
+ create(:clusters_integrations_prometheus,
+ cluster: cluster,
+ enabled: integration_enabled,
+ alert_manager_token: configured_token)
+ end
+ end
+
+ case result = params[:result]
+ when :success
+ include_examples 'processes one firing and one resolved prometheus alerts'
when :failure
- it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
else
raise "invalid result: #{result.inspect}"
end
@@ -97,9 +132,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
context 'without project specific cluster' do
- let!(:cluster) { create(:cluster, enabled: true) }
+ let_it_be(:cluster) { create(:cluster, enabled: true) }
- it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
end
context 'with manual prometheus installation' do
@@ -126,9 +161,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
case result = params[:result]
when :success
- it_behaves_like 'Alert Notification Service sends notification email'
+ it_behaves_like 'processes one firing and one resolved prometheus alerts'
when :failure
- it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
else
raise "invalid result: #{result.inspect}"
end
@@ -150,50 +185,53 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
let(:token_input) { public_send(token) if token }
let(:integration) { create(:alert_management_http_integration, active, project: project) if active }
- let(:subject) { service.execute(token_input, integration) }
+ subject { service.execute(token_input, integration) }
case result = params[:result]
when :success
- it_behaves_like 'Alert Notification Service sends notification email'
+ it_behaves_like 'processes one firing and one resolved prometheus alerts'
when :failure
- it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
else
raise "invalid result: #{result.inspect}"
end
end
end
- context 'alert emails' do
+ context 'incident settings' do
before do
create(:prometheus_service, project: project)
create(:project_alerting_setting, project: project, token: token)
end
- context 'when incident_management_setting does not exist' do
- let!(:setting) { nil }
+ it_behaves_like 'processes one firing and one resolved prometheus alerts'
- it 'does not send notification email', :sidekiq_might_not_need_inline do
- expect_any_instance_of(NotificationService)
- .not_to receive(:async)
-
- expect(subject).to be_success
+ context 'when incident_management_setting does not exist' do
+ before do
+ setting.destroy!
end
- end
- context 'when incident_management_setting.send_email is true' do
- it_behaves_like 'Alert Notification Service sends notification email'
+ it { is_expected.to be_success }
+ include_examples 'does not send alert notification emails'
+ include_examples 'does not process incident issues'
end
context 'incident_management_setting.send_email is false' do
- let!(:setting) do
- create(:project_incident_management_setting, send_email: false, project: project)
+ before do
+ setting.update!(send_email: false)
end
- it 'does not send notification' do
- expect(NotificationService).not_to receive(:new)
+ it { is_expected.to be_success }
+ include_examples 'does not send alert notification emails'
+ end
- expect(subject).to be_success
+ context 'incident_management_setting.create_issue is false' do
+ before do
+ setting.update!(create_issue: false)
end
+
+ it { is_expected.to be_success }
+ include_examples 'does not process incident issues'
end
end
@@ -233,7 +271,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
.and_return(false)
end
- it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unprocessable_entity
+ it_behaves_like 'alerts service responds with an error and takes no actions', :unprocessable_entity
end
context 'when the payload is too big' do
@@ -244,14 +282,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
end
- it_behaves_like 'Alert Notification Service sends no notifications', http_status: :bad_request
-
- it 'does not process Prometheus alerts' do
- expect(AlertManagement::ProcessPrometheusAlertService)
- .not_to receive(:new)
-
- subject
- end
+ it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request
end
end
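Several of the new contexts above use RSpec::Parameterized::TableSyntax, where each where row becomes its own example group and the columns are exposed as lets. A self-contained illustration of the mechanism, unrelated to any GitLab code:

require 'rspec-parameterized'

RSpec.describe 'table syntax illustration' do
  using RSpec::Parameterized::TableSyntax

  # Each row is expanded into a nested context; :a, :b and :expected_sum become lets.
  where(:a, :b, :expected_sum) do
    1 | 2 | 3
    2 | 2 | 4
  end

  with_them do
    it 'adds the operands' do
      expect(a + b).to eq(expected_sum)
    end
  end
end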
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 5f41ec1d610..8498b752610 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -130,7 +130,7 @@ RSpec.describe Projects::TransferService do
execute_transfer
expect(project.slack_service.webhook).to eq(group_integration.webhook)
- expect(Service.count).to eq(3)
+ expect(Integration.count).to eq(3)
end
end
diff --git a/spec/services/projects/unlink_fork_service_spec.rb b/spec/services/projects/unlink_fork_service_spec.rb
index 90def365fca..d939a79b7e9 100644
--- a/spec/services/projects/unlink_fork_service_spec.rb
+++ b/spec/services/projects/unlink_fork_service_spec.rb
@@ -16,11 +16,11 @@ RSpec.describe Projects::UnlinkForkService, :use_clean_rails_memory_store_cachin
let(:merge_request2) { create(:merge_request, source_project: forked_project, target_project: fork_project(project)) }
let(:merge_request_in_fork) { create(:merge_request, source_project: forked_project, target_project: forked_project) }
- let(:mr_close_service) { MergeRequests::CloseService.new(forked_project, user) }
+ let(:mr_close_service) { MergeRequests::CloseService.new(project: forked_project, current_user: user) }
before do
allow(MergeRequests::CloseService).to receive(:new)
- .with(forked_project, user)
+ .with(project: forked_project, current_user: user)
.and_return(mr_close_service)
end
@@ -79,11 +79,11 @@ RSpec.describe Projects::UnlinkForkService, :use_clean_rails_memory_store_cachin
let!(:merge_request2) { create(:merge_request, source_project: project, target_project: fork_project(project)) }
let!(:merge_request_in_fork) { create(:merge_request, source_project: forked_project, target_project: forked_project) }
- let(:mr_close_service) { MergeRequests::CloseService.new(project, user) }
+ let(:mr_close_service) { MergeRequests::CloseService.new(project: project, current_user: user) }
before do
allow(MergeRequests::CloseService).to receive(:new)
- .with(project, user)
+ .with(project: project, current_user: user)
.and_return(mr_close_service)
end
@@ -142,11 +142,11 @@ RSpec.describe Projects::UnlinkForkService, :use_clean_rails_memory_store_cachin
let!(:mr_from_child) { create(:merge_request, source_project: fork_of_fork, target_project: forked_project) }
let!(:merge_request_in_fork) { create(:merge_request, source_project: forked_project, target_project: forked_project) }
- let(:mr_close_service) { MergeRequests::CloseService.new(forked_project, user) }
+ let(:mr_close_service) { MergeRequests::CloseService.new(project: forked_project, current_user: user) }
before do
allow(MergeRequests::CloseService).to receive(:new)
- .with(forked_project, user)
+ .with(project: forked_project, current_user: user)
.and_return(mr_close_service)
end
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index 828667fdfc2..5b15b7d5f34 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
end
context 'when the move succeeds' do
- it 'moves the repository to the new storage and unmarks the repository as read only' do
+ it 'moves the repository to the new storage and unmarks the repository as read-only' do
old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
project.repository.path_to_repo
end
diff --git a/spec/services/projects/update_statistics_service_spec.rb b/spec/services/projects/update_statistics_service_spec.rb
index 92e97186be3..6987185b549 100644
--- a/spec/services/projects/update_statistics_service_spec.rb
+++ b/spec/services/projects/update_statistics_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::UpdateStatisticsService do
+ using RSpec::Parameterized::TableSyntax
+
let(:service) { described_class.new(project, nil, statistics: statistics)}
let(:statistics) { %w(repository_size) }
@@ -18,12 +20,46 @@ RSpec.describe Projects::UpdateStatisticsService do
end
context 'with an existing project' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+
+ where(:statistics, :method_caches) do
+ [] | %i(size commit_count)
+ ['repository_size'] | [:size]
+ [:repository_size] | [:size]
+ [:lfs_objects_size] | nil
+ [:commit_count] | [:commit_count] # rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
+ [:repository_size, :commit_count] | %i(size commit_count)
+ [:repository_size, :commit_count, :lfs_objects_size] | %i(size commit_count)
+ end
+
+ with_them do
+ it 'refreshes the project statistics' do
+ expect(project.statistics).to receive(:refresh!).with(only: statistics.map(&:to_sym)).and_call_original
+
+ service.execute
+ end
+
+ it 'invalidates the method caches after a refresh' do
+ expect(project.wiki.repository).not_to receive(:expire_method_caches)
+
+ if method_caches.present?
+ expect(project.repository).to receive(:expire_method_caches).with(method_caches).and_call_original
+ else
+ expect(project.repository).not_to receive(:expire_method_caches)
+ end
+
+ service.execute
+ end
+ end
+ end
+
+ context 'with an existing project with a Wiki' do
+ let(:project) { create(:project, :repository, :wiki_enabled) }
+ let(:statistics) { [:wiki_size] }
- it 'refreshes the project statistics' do
- expect_any_instance_of(ProjectStatistics).to receive(:refresh!)
- .with(only: statistics.map(&:to_sym))
- .and_call_original
+ it 'invalidates and refreshes Wiki size' do
+ expect(project.statistics).to receive(:refresh!).with(only: statistics).and_call_original
+ expect(project.wiki.repository).to receive(:expire_method_caches).with(%i(size)).and_call_original
service.execute
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 9df238c6dac..f3ad69bae13 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -360,25 +360,29 @@ RSpec.describe QuickActions::InterpretService do
shared_examples 'spend command' do
it 'populates spend_time: 3600 if content contains /spend 1h' do
- _, updates, _ = service.execute(content, issuable)
+ freeze_time do
+ _, updates, _ = service.execute(content, issuable)
- expect(updates).to eq(spend_time: {
- duration: 3600,
- user_id: developer.id,
- spent_at: DateTime.current.to_date
- })
+ expect(updates).to eq(spend_time: {
+ duration: 3600,
+ user_id: developer.id,
+ spent_at: DateTime.current
+ })
+ end
end
end
shared_examples 'spend command with negative time' do
it 'populates spend_time: -7200 if content contains -120m' do
- _, updates, _ = service.execute(content, issuable)
+ freeze_time do
+ _, updates, _ = service.execute(content, issuable)
- expect(updates).to eq(spend_time: {
- duration: -7200,
- user_id: developer.id,
- spent_at: DateTime.current.to_date
- })
+ expect(updates).to eq(spend_time: {
+ duration: -7200,
+ user_id: developer.id,
+ spent_at: DateTime.current
+ })
+ end
end
it 'returns the spend_time message including the formatted duration and verb' do
diff --git a/spec/services/security/ci_configuration/sast_create_service_spec.rb b/spec/services/security/ci_configuration/sast_create_service_spec.rb
index ff7ab614e08..44f8f07a5be 100644
--- a/spec/services/security/ci_configuration/sast_create_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_create_service_spec.rb
@@ -3,67 +3,24 @@
require 'spec_helper'
RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow do
- describe '#execute' do
- let_it_be(:project) { create(:project, :repository) }
- let_it_be(:user) { create(:user) }
- let(:params) { {} }
+ subject(:result) { described_class.new(project, user, params).execute }
- subject(:result) { described_class.new(project, user, params).execute }
+ let(:branch_name) { 'set-sast-config-1' }
- context 'user does not belong to project' do
- it 'returns an error status' do
- expect(result[:status]).to eq(:error)
- expect(result[:success_path]).to be_nil
- end
-
- it 'does not track a snowplow event' do
- subject
-
- expect_no_snowplow_event
- end
- end
-
- context 'user belongs to project' do
- before do
- project.add_developer(user)
- end
-
- it 'does track the snowplow event' do
- subject
-
- expect_snowplow_event(
- category: 'Security::CiConfiguration::SastCreateService',
- action: 'create',
- label: 'false'
- )
- end
-
- it 'raises exception if the user does not have permission to create a new branch' do
- allow(project).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "You are not allowed to create protected branches on this project.")
-
- expect { subject }.to raise_error(Gitlab::Git::PreReceiveError)
- end
-
- context 'with no parameters' do
- it 'returns the path to create a new merge request' do
- expect(result[:status]).to eq(:success)
- expect(result[:success_path]).to match(/#{Gitlab::Routing.url_helpers.project_new_merge_request_url(project, {})}(.*)description(.*)source_branch/)
- end
- end
-
- context 'with parameters' do
- let(:params) do
- { 'stage' => 'security',
- 'SEARCH_MAX_DEPTH' => 1,
- 'SECURE_ANALYZERS_PREFIX' => 'new_registry',
- 'SAST_EXCLUDED_PATHS' => 'spec,docs' }
- end
+ let(:non_empty_params) do
+ { 'stage' => 'security',
+ 'SEARCH_MAX_DEPTH' => 1,
+ 'SECURE_ANALYZERS_PREFIX' => 'new_registry',
+ 'SAST_EXCLUDED_PATHS' => 'spec,docs' }
+ end
- it 'returns the path to create a new merge request' do
- expect(result[:status]).to eq(:success)
- expect(result[:success_path]).to match(/#{Gitlab::Routing.url_helpers.project_new_merge_request_url(project, {})}(.*)description(.*)source_branch/)
- end
- end
- end
+ let(:snowplow_event) do
+ {
+ category: 'Security::CiConfiguration::SastCreateService',
+ action: 'create',
+ label: 'false'
+ }
end
+
+ include_examples 'services security ci configuration create service'
end
diff --git a/spec/services/security/ci_configuration/secret_detection_create_service_spec.rb b/spec/services/security/ci_configuration/secret_detection_create_service_spec.rb
new file mode 100644
index 00000000000..c1df3ebdca5
--- /dev/null
+++ b/spec/services/security/ci_configuration/secret_detection_create_service_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Security::CiConfiguration::SecretDetectionCreateService, :snowplow do
+ subject(:result) { described_class.new(project, user).execute }
+
+ let(:branch_name) { 'set-secret-detection-config-1' }
+
+ let(:snowplow_event) do
+ {
+ category: 'Security::CiConfiguration::SecretDetectionCreateService',
+ action: 'create',
+ label: ''
+ }
+ end
+
+ include_examples 'services security ci configuration create service', true
+end
diff --git a/spec/services/snippets/update_repository_storage_service_spec.rb b/spec/services/snippets/update_repository_storage_service_spec.rb
index 6ba09a9dca9..50b28a5a125 100644
--- a/spec/services/snippets/update_repository_storage_service_spec.rb
+++ b/spec/services/snippets/update_repository_storage_service_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Snippets::UpdateRepositoryStorageService do
end
context 'when the move succeeds' do
- it 'moves the repository to the new storage and unmarks the repository as read only' do
+ it 'moves the repository to the new storage and unmarks the repository as read-only' do
old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
snippet.repository.path_to_repo
end
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index e8ac826df1c..9ca52b92267 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -9,11 +9,11 @@ RSpec.describe Spam::SpamActionService do
let(:issue) { create(:issue, project: project, author: user) }
let(:fake_ip) { '1.2.3.4' }
let(:fake_user_agent) { 'fake-user-agent' }
- let(:fake_referrer) { 'fake-http-referrer' }
+ let(:fake_referer) { 'fake-http-referer' }
let(:env) do
{ 'action_dispatch.remote_ip' => fake_ip,
'HTTP_USER_AGENT' => fake_user_agent,
- 'HTTP_REFERRER' => fake_referrer }
+ 'HTTP_REFERER' => fake_referer }
end
let_it_be(:project) { create(:project, :public) }
@@ -80,7 +80,7 @@ RSpec.describe Spam::SpamActionService do
{
ip_address: fake_ip,
user_agent: fake_user_agent,
- referrer: fake_referrer
+ referer: fake_referer
}
end
@@ -222,6 +222,38 @@ RSpec.describe Spam::SpamActionService do
end
end
+ context 'spam verdict service advises to block the user' do
+ before do
+ allow(fake_verdict_service).to receive(:execute).and_return(BLOCK_USER)
+ end
+
+ context 'when allow_possible_spam feature flag is false' do
+ before do
+ stub_feature_flags(allow_possible_spam: false)
+ end
+
+ it_behaves_like 'only checks for spam if a request is provided'
+
+ it 'marks as spam' do
+ response = subject
+
+ expect(response.message).to match(expected_service_check_response_message)
+ expect(issue).to be_spam
+ end
+ end
+
+ context 'when allow_possible_spam feature flag is true' do
+ it_behaves_like 'only checks for spam if a request is provided'
+
+ it 'does not mark as spam' do
+ response = subject
+
+ expect(response.message).to match(expected_service_check_response_message)
+ expect(issue).not_to be_spam
+ end
+ end
+ end
+
context 'when spam verdict service conditionally allows' do
before do
allow(fake_verdict_service).to receive(:execute).and_return(CONDITIONAL_ALLOW)
@@ -281,6 +313,22 @@ RSpec.describe Spam::SpamActionService do
end
end
+ context 'when spam verdict service returns noop' do
+ before do
+ allow(fake_verdict_service).to receive(:execute).and_return(NOOP)
+ end
+
+ it 'does not create a spam log' do
+ expect { subject }.not_to change(SpamLog, :count)
+ end
+
+ it 'clears spam flags' do
+ expect(issue).to receive(:clear_spam_flags!)
+
+ subject
+ end
+ end
+
context 'with spam verdict service options' do
before do
allow(fake_verdict_service).to receive(:execute).and_return(ALLOW)
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index 14b788e3a86..215df81de63 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -7,28 +7,33 @@ RSpec.describe Spam::SpamVerdictService do
let(:fake_ip) { '1.2.3.4' }
let(:fake_user_agent) { 'fake-user-agent' }
- let(:fake_referrer) { 'fake-http-referrer' }
+ let(:fake_referer) { 'fake-http-referer' }
let(:env) do
{ 'action_dispatch.remote_ip' => fake_ip,
'HTTP_USER_AGENT' => fake_user_agent,
- 'HTTP_REFERRER' => fake_referrer }
+ 'HTTP_REFERER' => fake_referer }
end
let(:request) { double(:request, env: env) }
let(:check_for_spam) { true }
let_it_be(:user) { create(:user) }
- let(:issue) { build(:issue, author: user) }
+ let_it_be(:issue) { create(:issue, author: user) }
let(:service) do
described_class.new(user: user, target: issue, request: request, options: {})
end
+ let(:attribs) do
+ extra_attributes = { "monitorMode" => "false" }
+ extra_attributes
+ end
+
describe '#execute' do
subject { service.execute }
before do
allow(service).to receive(:akismet_verdict).and_return(nil)
- allow(service).to receive(:external_verdict).and_return(nil)
+ allow(service).to receive(:spamcheck_verdict).and_return([nil, attribs])
end
context 'if all services return nil' do
@@ -63,7 +68,7 @@ RSpec.describe Spam::SpamVerdictService do
context 'and they are supported' do
before do
allow(service).to receive(:akismet_verdict).and_return(DISALLOW)
- allow(service).to receive(:external_verdict).and_return(BLOCK_USER)
+ allow(service).to receive(:spamcheck_verdict).and_return([BLOCK_USER, attribs])
end
it 'renders the more restrictive verdict' do
@@ -74,7 +79,7 @@ RSpec.describe Spam::SpamVerdictService do
context 'and one is supported' do
before do
allow(service).to receive(:akismet_verdict).and_return('nonsense')
- allow(service).to receive(:external_verdict).and_return(BLOCK_USER)
+ allow(service).to receive(:spamcheck_verdict).and_return([BLOCK_USER, attribs])
end
it 'renders the more restrictive verdict' do
@@ -85,13 +90,56 @@ RSpec.describe Spam::SpamVerdictService do
context 'and none are supported' do
before do
allow(service).to receive(:akismet_verdict).and_return('nonsense')
- allow(service).to receive(:external_verdict).and_return('rubbish')
+ allow(service).to receive(:spamcheck_verdict).and_return(['rubbish', attribs])
end
it 'renders the more restrictive verdict' do
expect(subject).to eq ALLOW
end
end
+
+ context 'and attribs - monitorMode is true' do
+ let(:attribs) do
+ extra_attributes = { "monitorMode" => "true" }
+ extra_attributes
+ end
+
+ before do
+ allow(service).to receive(:akismet_verdict).and_return(DISALLOW)
+ allow(service).to receive(:spamcheck_verdict).and_return([BLOCK_USER, attribs])
+ end
+
+ it 'renders the more restrictive verdict' do
+ expect(subject).to eq(DISALLOW)
+ end
+ end
+ end
+
+ context 'records metrics' do
+ let(:histogram) { instance_double(Prometheus::Client::Histogram) }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:verdict, :error, :label) do
+ Spam::SpamConstants::ALLOW | false | 'ALLOW'
+ Spam::SpamConstants::ALLOW | true | 'ERROR'
+ Spam::SpamConstants::CONDITIONAL_ALLOW | false | 'CONDITIONAL_ALLOW'
+ Spam::SpamConstants::BLOCK_USER | false | 'BLOCK'
+ Spam::SpamConstants::DISALLOW | false | 'DISALLOW'
+ Spam::SpamConstants::NOOP | false | 'NOOP'
+ end
+
+ with_them do
+ before do
+ allow(Gitlab::Metrics).to receive(:histogram).with(:gitlab_spamcheck_request_duration_seconds, anything).and_return(histogram)
+ allow(service).to receive(:spamcheck_verdict).and_return([verdict, attribs, error])
+ end
+
+ it 'records duration with labels' do
+ expect(histogram).to receive(:observe).with(a_hash_including(result: label), anything)
+ subject
+ end
+ end
end
end
@@ -150,48 +198,113 @@ RSpec.describe Spam::SpamVerdictService do
end
end
- describe '#external_verdict' do
- subject { service.send(:external_verdict) }
+ describe '#spamcheck_verdict' do
+ subject { service.send(:spamcheck_verdict) }
context 'if a Spam Check endpoint enabled and set to a URL' do
let(:spam_check_body) { {} }
- let(:spam_check_http_status) { nil }
+ let(:endpoint_url) { "grpc://www.spamcheckurl.com/spam_check" }
+
+ let(:spam_client) do
+ Gitlab::Spamcheck::Client.new
+ end
before do
stub_application_setting(spam_check_endpoint_enabled: true)
- stub_application_setting(spam_check_endpoint_url: "http://www.spamcheckurl.com/spam_check")
- stub_request(:post, /.*spamcheckurl.com.*/).to_return( body: spam_check_body.to_json, status: spam_check_http_status )
+ stub_application_setting(spam_check_endpoint_url: endpoint_url)
end
context 'if the endpoint is accessible' do
- let(:spam_check_http_status) { 200 }
- let(:error) { nil }
+ let(:error) { '' }
let(:verdict) { nil }
- let(:spam_check_body) do
- { verdict: verdict, error: error }
+
+ let(:attribs) do
+ extra_attributes = { "monitorMode" => "false" }
+ extra_attributes
+ end
+
+ before do
+ allow(service).to receive(:spamcheck_client).and_return(spam_client)
+ allow(spam_client).to receive(:issue_spam?).and_return([verdict, attribs, error])
+ end
+
+ context 'if the result is a NOOP verdict' do
+ let(:verdict) { NOOP }
+
+ it 'returns the verdict' do
+ expect(subject).to eq([NOOP, attribs])
+ end
+ end
+
+ context 'if attribs - monitorMode is true' do
+ let(:attribs) do
+ extra_attributes = { "monitorMode" => "true" }
+ extra_attributes
+ end
+
+ let(:verdict) { ALLOW }
+
+ it 'returns the verdict' do
+ expect(subject).to eq([ALLOW, attribs])
+ end
end
context 'the result is a valid verdict' do
- let(:verdict) { 'allow' }
+ let(:verdict) { ALLOW }
it 'returns the verdict' do
- expect(subject).to eq ALLOW
+ expect(subject).to eq([ALLOW, attribs])
end
end
- context 'the verdict is an unexpected string' do
- let(:verdict) { 'this is fine' }
+ context 'when recaptcha is enabled' do
+ before do
+ allow(Gitlab::Recaptcha).to receive(:enabled?).and_return(true)
+ end
- it 'returns the string' do
- expect(subject).to eq verdict
+ using RSpec::Parameterized::TableSyntax
+
+ # rubocop: disable Lint/BinaryOperatorWithIdenticalOperands
+ where(:verdict_value, :expected) do
+ ::Spam::SpamConstants::ALLOW | ::Spam::SpamConstants::ALLOW
+ ::Spam::SpamConstants::CONDITIONAL_ALLOW | ::Spam::SpamConstants::CONDITIONAL_ALLOW
+ ::Spam::SpamConstants::DISALLOW | ::Spam::SpamConstants::CONDITIONAL_ALLOW
+ ::Spam::SpamConstants::BLOCK_USER | ::Spam::SpamConstants::CONDITIONAL_ALLOW
+ end
+ # rubocop: enable Lint/BinaryOperatorWithIdenticalOperands
+
+ with_them do
+ let(:verdict) { verdict_value }
+
+ it "returns expected spam constant" do
+ expect(subject).to eq([expected, attribs])
+ end
end
end
- context 'the JSON is malformed' do
- let(:spam_check_body) { 'this is fine' }
+ context 'when recaptcha is disabled' do
+ before do
+ allow(Gitlab::Recaptcha).to receive(:enabled?).and_return(false)
+ end
+
+ [::Spam::SpamConstants::ALLOW,
+ ::Spam::SpamConstants::CONDITIONAL_ALLOW,
+ ::Spam::SpamConstants::DISALLOW,
+ ::Spam::SpamConstants::BLOCK_USER].each do |verdict_value|
+ let(:verdict) { verdict_value }
+ let(:expected) { [verdict_value, attribs] }
- it 'returns allow' do
- expect(subject).to eq ALLOW
+ it "returns expected spam constant" do
+ expect(subject).to eq(expected)
+ end
+ end
+ end
+
+ context 'the verdict is an unexpected value' do
+ let(:verdict) { :this_is_fine }
+
+ it 'returns the unexpected value' do
+ expect(subject).to eq([verdict, attribs])
end
end
@@ -199,7 +312,7 @@ RSpec.describe Spam::SpamVerdictService do
let(:verdict) { '' }
it 'returns nil' do
- expect(subject).to eq verdict
+ expect(subject).to eq([verdict, attribs])
end
end
@@ -207,7 +320,7 @@ RSpec.describe Spam::SpamVerdictService do
let(:verdict) { nil }
it 'returns nil' do
- expect(subject).to be_nil
+ expect(subject).to eq([nil, attribs])
end
end
@@ -215,15 +328,19 @@ RSpec.describe Spam::SpamVerdictService do
let(:error) { "Sorry Dave, I can't do that" }
it 'returns nil' do
- expect(subject).to be_nil
+ expect(subject).to eq([nil, attribs])
end
end
- context 'the HTTP status is not 200' do
- let(:spam_check_http_status) { 500 }
+ context 'the request is aborted' do
+ let(:attribs) { nil }
+
+ before do
+ allow(spam_client).to receive(:issue_spam?).and_raise(GRPC::Aborted)
+ end
it 'returns nil' do
- expect(subject).to be_nil
+ expect(subject).to eq([ALLOW, attribs, true])
end
end
@@ -232,18 +349,20 @@ RSpec.describe Spam::SpamVerdictService do
let(:error) { 'oh noes!' }
it 'renders the verdict' do
- expect(subject).to eq DISALLOW
+ expect(subject).to eq [DISALLOW, attribs]
end
end
end
context 'if the endpoint times out' do
+ let(:attribs) { nil }
+
before do
- stub_request(:post, /.*spamcheckurl.com.*/).to_timeout
+ allow(spam_client).to receive(:issue_spam?).and_raise(GRPC::DeadlineExceeded)
end
it 'returns nil' do
- expect(subject).to be_nil
+ expect(subject).to eq([ALLOW, attribs, true])
end
end
end
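The spec changes above replace the HTTP-based external_verdict with a gRPC-backed spamcheck_verdict whose client returns a [verdict, attributes, error] tuple. A hedged reconstruction of the control flow those expectations imply — names such as spamcheck_client are assumptions, not the actual implementation:

def spamcheck_verdict
  verdict, attribs, _error = spamcheck_client.issue_spam?(target, user)

  # With reCAPTCHA enabled, stricter verdicts are softened to CONDITIONAL_ALLOW
  # so the submitter can solve a challenge instead of being rejected outright.
  if Gitlab::Recaptcha.enabled? && [DISALLOW, BLOCK_USER].include?(verdict)
    verdict = CONDITIONAL_ALLOW
  end

  [verdict, attribs]
rescue GRPC::Aborted, GRPC::DeadlineExceeded
  # Transport failures fail open, flagging the error for metrics.
  [ALLOW, nil, true]
end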
diff --git a/spec/services/submit_usage_ping_service_spec.rb b/spec/services/submit_usage_ping_service_spec.rb
index 53cc33afcff..a9f1b2c2b2d 100644
--- a/spec/services/submit_usage_ping_service_spec.rb
+++ b/spec/services/submit_usage_ping_service_spec.rb
@@ -217,7 +217,7 @@ RSpec.describe SubmitUsagePingService do
end
def stub_response(body:, status: 201)
- stub_full_request(SubmitUsagePingService::STAGING_URL, method: :post)
+ stub_full_request(subject.send(:url), method: :post)
.to_return(
headers: { 'Content-Type' => 'application/json' },
body: body.to_json,
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index 77d0e892725..9cf794cde7e 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -67,11 +67,13 @@ RSpec.describe Suggestions::ApplyService do
apply(suggestions)
commit = project.repository.commit
+ author = suggestions.first.note.author
expect(user.commit_email).not_to eq(user.email)
- expect(commit.author_email).to eq(user.commit_email)
+ expect(commit.author_email).to eq(author.commit_email)
expect(commit.committer_email).to eq(user.commit_email)
- expect(commit.author_name).to eq(user.name)
+ expect(commit.author_name).to eq(author.name)
+ expect(commit.committer_name).to eq(user.name)
end
it 'tracks apply suggestion event' do
@@ -319,6 +321,73 @@ RSpec.describe Suggestions::ApplyService do
end
end
+ context 'single suggestion' do
+ let(:author) { suggestions.first.note.author }
+ let(:commit) { project.repository.commit }
+
+ context 'author of suggestion applies suggestion' do
+ before do
+ suggestion.note.update!(author_id: user.id)
+
+ apply(suggestions)
+ end
+
+ it 'creates the commit with the same author and committer' do
+ expect(user.commit_email).to eq(author.commit_email)
+ expect(author).to eq(user)
+ expect(commit.author_email).to eq(author.commit_email)
+ expect(commit.committer_email).to eq(user.commit_email)
+ expect(commit.author_name).to eq(author.name)
+ expect(commit.committer_name).to eq(user.name)
+ end
+ end
+
+ context 'another user applies suggestion' do
+ before do
+ apply(suggestions)
+ end
+
+ it "creates the commit with the author's and the committer's info" do
+ expect(user.commit_email).not_to eq(user.email)
+ expect(author).not_to eq(user)
+ expect(commit.author_email).to eq(author.commit_email)
+ expect(commit.committer_email).to eq(user.commit_email)
+ expect(commit.author_name).to eq(author.name)
+ expect(commit.committer_name).to eq(user.name)
+ end
+ end
+ end
+
+ context 'multiple suggestions' do
+ let(:author_emails) { suggestions.map { |s| s.note.author.commit_email } }
+ let(:first_author) { suggestion.note.author }
+ let(:commit) { project.repository.commit }
+
+ context 'when all the same author' do
+ before do
+ apply(suggestions)
+ end
+
+ it "uses the first author's information" do
+ expect(author_emails).to include(first_author.commit_email).exactly(3)
+ expect(commit.author_email).to eq(first_author.commit_email)
+ end
+ end
+
+ context 'when all different authors' do
+ before do
+ suggestion2.note.update!(author_id: create(:user).id)
+ suggestion3.note.update!(author_id: create(:user).id)
+ apply(suggestions)
+ end
+
+ it "uses the committer's information" do
+ expect(commit.author_email).to eq(user.commit_email)
+ expect(commit.committer_email).to eq(user.commit_email)
+ end
+ end
+ end
+
context 'multiple suggestions applied sequentially' do
def apply_suggestion(suggestion)
suggestion.reload
@@ -329,7 +398,7 @@ RSpec.describe Suggestions::ApplyService do
suggestion.reload
expect(result[:status]).to eq(:success)
- refresh = MergeRequests::RefreshService.new(project, user)
+ refresh = MergeRequests::RefreshService.new(project: project, current_user: user)
refresh.execute(merge_request.diff_head_sha,
suggestion.commit_id,
merge_request.source_branch_ref)
diff --git a/spec/services/system_hooks_service_spec.rb b/spec/services/system_hooks_service_spec.rb
index d8435c72896..5d60b6e0487 100644
--- a/spec/services/system_hooks_service_spec.rb
+++ b/spec/services/system_hooks_service_spec.rb
@@ -3,133 +3,68 @@
require 'spec_helper'
RSpec.describe SystemHooksService do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:project_member) { create(:project_member) }
- let(:key) { create(:key, user: user) }
- let(:deploy_key) { create(:key) }
- let(:group) { create(:group) }
- let(:group_member) { create(:group_member) }
-
- context 'event data' do
- it { expect(event_data(user, :create)).to include(:event_name, :name, :created_at, :updated_at, :email, :user_id, :username) }
- it { expect(event_data(user, :destroy)).to include(:event_name, :name, :created_at, :updated_at, :email, :user_id, :username) }
- it { expect(event_data(project, :create)).to include(:event_name, :name, :created_at, :updated_at, :path, :project_id, :owner_name, :owner_email, :project_visibility) }
- it { expect(event_data(project, :update)).to include(:event_name, :name, :created_at, :updated_at, :path, :project_id, :owner_name, :owner_email, :project_visibility) }
- it { expect(event_data(project, :destroy)).to include(:event_name, :name, :created_at, :updated_at, :path, :project_id, :owner_name, :owner_email, :project_visibility) }
- it { expect(event_data(project_member, :create)).to include(:event_name, :created_at, :updated_at, :project_name, :project_path, :project_path_with_namespace, :project_id, :user_name, :user_username, :user_email, :user_id, :access_level, :project_visibility) }
- it { expect(event_data(project_member, :destroy)).to include(:event_name, :created_at, :updated_at, :project_name, :project_path, :project_path_with_namespace, :project_id, :user_name, :user_username, :user_email, :user_id, :access_level, :project_visibility) }
- it { expect(event_data(project_member, :update)).to include(:event_name, :created_at, :updated_at, :project_name, :project_path, :project_path_with_namespace, :project_id, :user_name, :user_username, :user_email, :user_id, :access_level, :project_visibility) }
- it { expect(event_data(key, :create)).to include(:username, :key, :id) }
- it { expect(event_data(key, :destroy)).to include(:username, :key, :id) }
- it { expect(event_data(deploy_key, :create)).to include(:key, :id) }
- it { expect(event_data(deploy_key, :destroy)).to include(:key, :id) }
-
- it do
- project.old_path_with_namespace = 'renamed_from_path'
- expect(event_data(project, :rename)).to include(
- :event_name, :name, :created_at, :updated_at, :path, :project_id,
- :owner_name, :owner_email, :project_visibility,
- :old_path_with_namespace
- )
+ describe '#execute_hooks_for' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:group_member) { create(:group_member, source: group, user: user) }
+ let_it_be(:project_member) { create(:project_member, source: project, user: user) }
+ let_it_be(:key) { create(:key, user: user) }
+ let_it_be(:deploy_key) { create(:key) }
+
+ let(:event) { :create }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:model_name, :builder_class) do
+ :group_member | Gitlab::HookData::GroupMemberBuilder
+ :group | Gitlab::HookData::GroupBuilder
+ :project_member | Gitlab::HookData::ProjectMemberBuilder
+ :user | Gitlab::HookData::UserBuilder
+ :project | Gitlab::HookData::ProjectBuilder
+ :key | Gitlab::HookData::KeyBuilder
+ :deploy_key | Gitlab::HookData::KeyBuilder
end
- it do
- project.old_path_with_namespace = 'transferred_from_path'
- expect(event_data(project, :transfer)).to include(
- :event_name, :name, :created_at, :updated_at, :path, :project_id,
- :owner_name, :owner_email, :project_visibility,
- :old_path_with_namespace
- )
- end
-
- it do
- expect(event_data(group, :create)).to include(
- :event_name, :name, :created_at, :updated_at, :path, :group_id
- )
- end
+ with_them do
+ it 'builds the data with the relevant builder class and then calls #execute_hooks with the obtained data' do
+ data = double
+ model = public_send(model_name)
- it do
- expect(event_data(group, :destroy)).to include(
- :event_name, :name, :created_at, :updated_at, :path, :group_id
- )
- end
+ expect_next_instance_of(builder_class, model) do |builder|
+ expect(builder).to receive(:build).with(event).and_return(data)
+ end
- it do
- expect(event_data(group_member, :create)).to include(
- :event_name, :created_at, :updated_at, :group_name, :group_path,
- :group_id, :user_id, :user_username, :user_name, :user_email, :group_access
- )
- end
+ service = described_class.new
- it do
- expect(event_data(group_member, :destroy)).to include(
- :event_name, :created_at, :updated_at, :group_name, :group_path,
- :group_id, :user_id, :user_username, :user_name, :user_email, :group_access
- )
- end
-
- it do
- expect(event_data(group_member, :update)).to include(
- :event_name, :created_at, :updated_at, :group_name, :group_path,
- :group_id, :user_id, :user_username, :user_name, :user_email, :group_access
- )
- end
-
- it 'includes the correct project visibility level' do
- data = event_data(project, :create)
-
- expect(data[:project_visibility]).to eq('private')
- end
+ expect_next_instance_of(SystemHooksService) do |system_hook_service|
+ expect(system_hook_service).to receive(:execute_hooks).with(data)
+ end
- it 'handles nil datetime columns' do
- user.update!(created_at: nil, updated_at: nil)
- data = event_data(user, :destroy)
-
- expect(data[:created_at]).to be(nil)
- expect(data[:updated_at]).to be(nil)
+ service.execute_hooks_for(model, event)
+ end
end
+ end
- context 'group_rename' do
- it 'contains old and new path' do
- allow(group).to receive(:path_before_last_save).and_return('old-path')
+ describe '#execute_hooks' do
+ let(:data) { { key: :value } }
- data = event_data(group, :rename)
+ subject { described_class.new.execute_hooks(data) }
- expect(data).to include(:event_name, :name, :created_at, :updated_at, :full_path, :path, :group_id, :old_path, :old_full_path)
- expect(data[:path]).to eq(group.path)
- expect(data[:full_path]).to eq(group.path)
- expect(data[:old_path]).to eq(group.path_before_last_save)
- expect(data[:old_full_path]).to eq(group.path_before_last_save)
- end
+ it 'executes system hooks with the given data' do
+ hook = create(:system_hook)
- it 'contains old and new full_path for subgroup' do
- subgroup = create(:group, parent: group)
- allow(subgroup).to receive(:path_before_last_save).and_return('old-path')
+ allow(SystemHook).to receive_message_chain(:hooks_for, :find_each).and_yield(hook)
- data = event_data(subgroup, :rename)
+ expect(hook).to receive(:async_execute).with(data, 'system_hooks')
- expect(data[:full_path]).to eq(subgroup.full_path)
- expect(data[:old_path]).to eq('old-path')
- end
+ subject
end
- end
- context 'event names' do
- it { expect(event_name(project, :create)).to eq "project_create" }
- it { expect(event_name(project, :destroy)).to eq "project_destroy" }
- it { expect(event_name(project, :rename)).to eq "project_rename" }
- it { expect(event_name(project, :transfer)).to eq "project_transfer" }
- it { expect(event_name(project, :update)).to eq "project_update" }
- it { expect(event_name(key, :create)).to eq 'key_create' }
- it { expect(event_name(key, :destroy)).to eq 'key_destroy' }
- end
+ it 'executes FileHook with the given data' do
+ expect(Gitlab::FileHook).to receive(:execute_all_async).with(data)
- def event_data(*args)
- SystemHooksService.new.send :build_event_data, *args
- end
-
- def event_name(*args)
- SystemHooksService.new.send :build_event_name, *args
+ subject
+ end
end
end
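The rewritten spec above describes SystemHooksService as: build a payload with the builder class that matches the model, then hand it to #execute_hooks, which fans out to system hooks and file hooks. A rough sketch of that shape inferred from the expectations — builder_for and the hooks_for scope argument are assumptions:

def execute_hooks_for(model, event)
  builder = builder_for(model) # e.g. Gitlab::HookData::KeyBuilder for keys and deploy keys
  execute_hooks(builder.new(model).build(event))
end

def execute_hooks(data, hooks_scope = :all)
  SystemHook.hooks_for(hooks_scope).find_each do |hook|
    hook.async_execute(data, 'system_hooks')
  end

  Gitlab::FileHook.execute_all_async(data)
end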
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 59f936509df..6a8e6dc8970 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -345,17 +345,10 @@ RSpec.describe TodoService do
describe '#destroy_target' do
it 'refreshes the todos count cache for users with todos on the target' do
- create(:todo, state: :pending, target: issue, user: john_doe, author: john_doe, project: issue.project)
+ create(:todo, state: :pending, target: issue, user: author, author: author, project: issue.project)
+ create(:todo, state: :done, target: issue, user: assignee, author: assignee, project: issue.project)
- expect_next(Users::UpdateTodoCountCacheService, [john_doe]).to receive(:execute)
-
- service.destroy_target(issue) { issue.destroy! }
- end
-
- it 'does not refresh the todos count cache for users with only done todos on the target' do
- create(:todo, :done, target: issue, user: john_doe, author: john_doe, project: issue.project)
-
- expect(Users::UpdateTodoCountCacheService).not_to receive(:new)
+ expect_next(Users::UpdateTodoCountCacheService, [author.id, assignee.id]).to receive(:execute)
service.destroy_target(issue) { issue.destroy! }
end
@@ -1101,7 +1094,7 @@ RSpec.describe TodoService do
it 'updates cached counts when a todo is created' do
issue = create(:issue, project: project, assignees: [john_doe], author: author)
- expect_next(Users::UpdateTodoCountCacheService, [john_doe]).to receive(:execute)
+ expect_next(Users::UpdateTodoCountCacheService, [john_doe.id]).to receive(:execute)
service.new_issue(issue, author)
end
diff --git a/spec/services/users/ban_service_spec.rb b/spec/services/users/ban_service_spec.rb
new file mode 100644
index 00000000000..0e6ac615da5
--- /dev/null
+++ b/spec/services/users/ban_service_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::BanService do
+ let(:current_user) { create(:admin) }
+
+ subject(:service) { described_class.new(current_user) }
+
+ describe '#execute' do
+ subject(:operation) { service.execute(user) }
+
+ context 'when successful' do
+ let(:user) { create(:user) }
+
+ it { is_expected.to eq(status: :success) }
+
+ it "bans the user" do
+ expect { operation }.to change { user.state }.to('banned')
+ end
+
+ it "blocks the user" do
+ expect { operation }.to change { user.blocked? }.from(false).to(true)
+ end
+
+ it 'logs ban in application logs' do
+ allow(Gitlab::AppLogger).to receive(:info)
+
+ operation
+
+ expect(Gitlab::AppLogger).to have_received(:info).with(message: "User banned", user: "#{user.username}", email: "#{user.email}", banned_by: "#{current_user.username}", ip_address: "#{current_user.current_sign_in_ip}")
+ end
+ end
+
+ context 'when failed' do
+ let(:user) { create(:user, :blocked) }
+
+ it 'returns error result' do
+ aggregate_failures 'error result' do
+ expect(operation[:status]).to eq(:error)
+ expect(operation[:message]).to match(/State cannot transition/)
+ end
+ end
+
+ it "does not change the user's state" do
+ expect { operation }.not_to change { user.state }
+ end
+ end
+ end
+end
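The new spec file above pins down the observable behaviour of Users::BanService. A hedged sketch of a service that would satisfy it — everything beyond the logged fields and the result hashes is an assumption:

module Users
  class BanService
    def initialize(current_user)
      @current_user = current_user
    end

    def execute(user)
      # `ban` is assumed to be the state-machine event that moves the user to 'banned'.
      return { status: :error, message: user.errors.full_messages.to_sentence } unless user.ban

      Gitlab::AppLogger.info(
        message: 'User banned',
        user: user.username,
        email: user.email,
        banned_by: @current_user.username,
        ip_address: @current_user.current_sign_in_ip.to_s
      )

      { status: :success }
    end
  end
end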
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index b2a7d349ce6..e8786c677d1 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -6,105 +6,76 @@ RSpec.describe Users::BuildService do
using RSpec::Parameterized::TableSyntax
describe '#execute' do
- let(:params) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
-
- context 'with an admin user' do
- let(:params) { build_stubbed(:user).slice(:name, :username, :email, :password) }
+ let_it_be(:current_user) { nil }
- let(:admin_user) { create(:admin) }
- let(:service) { described_class.new(admin_user, ActionController::Parameters.new(params).permit!) }
+ let(:params) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
+ let(:service) { described_class.new(current_user, params) }
- it 'returns a valid user' do
- expect(service.execute).to be_valid
- end
+ shared_examples_for 'common build items' do
+ it { is_expected.to be_valid }
it 'sets the created_by_id' do
- expect(service.execute.created_by_id).to eq(admin_user.id)
+ expect(user.created_by_id).to eq(current_user&.id)
end
- context 'calls the UpdateCanonicalEmailService' do
- specify do
- expect(Users::UpdateCanonicalEmailService).to receive(:new).and_call_original
+ it 'calls UpdateCanonicalEmailService' do
+ expect(Users::UpdateCanonicalEmailService).to receive(:new).and_call_original
- service.execute
- end
+ user
end
- context 'allowed params' do
- let(:params) do
- {
- access_level: 1,
- admin: 1,
- avatar: anything,
- bio: 1,
- can_create_group: 1,
- color_scheme_id: 1,
- email: 1,
- external: 1,
- force_random_password: 1,
- hide_no_password: 1,
- hide_no_ssh_key: 1,
- linkedin: 1,
- name: 1,
- password: 1,
- password_automatically_set: 1,
- password_expires_at: 1,
- projects_limit: 1,
- remember_me: 1,
- skip_confirmation: 1,
- skype: 1,
- theme_id: 1,
- twitter: 1,
- username: 1,
- website_url: 1,
- private_profile: 1,
- organization: 1,
- location: 1,
- public_email: 1
- }
- end
+ context 'when user_type is provided' do
+ context 'when project_bot' do
+ before do
+ params.merge!({ user_type: :project_bot })
+ end
- it 'sets all allowed attributes' do
- admin_user # call first so the admin gets created before setting `expect`
+ it { expect(user.project_bot?).to be true }
+ end
- expect(User).to receive(:new).with(hash_including(params)).and_call_original
+ context 'when not a project_bot' do
+ before do
+ params.merge!({ user_type: :alert_bot })
+ end
- service.execute
+ it { expect(user).to be_human }
end
end
+ end
+ shared_examples_for 'current user not admin' do
context 'with "user_default_external" application setting' do
where(:user_default_external, :external, :email, :user_default_internal_regex, :result) do
true | nil | 'fl@example.com' | nil | true
true | true | 'fl@example.com' | nil | true
- true | false | 'fl@example.com' | nil | false
+ true | false | 'fl@example.com' | nil | true # admin difference
true | nil | 'fl@example.com' | '' | true
true | true | 'fl@example.com' | '' | true
- true | false | 'fl@example.com' | '' | false
+ true | false | 'fl@example.com' | '' | true # admin difference
true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
- true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
true | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
true | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
- true | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ true | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true # admin difference
false | nil | 'fl@example.com' | nil | false
- false | true | 'fl@example.com' | nil | true
+ false | true | 'fl@example.com' | nil | false # admin difference
false | false | 'fl@example.com' | nil | false
false | nil | 'fl@example.com' | '' | false
- false | true | 'fl@example.com' | '' | true
+ false | true | 'fl@example.com' | '' | false # admin difference
false | false | 'fl@example.com' | '' | false
false | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
- false | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ false | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
false | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
false | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
- false | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ false | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
false | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
end
@@ -116,40 +87,11 @@ RSpec.describe Users::BuildService do
params.merge!({ external: external, email: email }.compact)
end
- subject(:user) { service.execute }
-
- it 'correctly sets user.external' do
+ it 'sets the value of Gitlab::CurrentSettings.user_default_external' do
expect(user.external).to eq(result)
end
end
end
- end
-
- context 'with non admin user' do
- let(:user) { create(:user) }
- let(:service) { described_class.new(user, params) }
-
- it 'raises AccessDeniedError exception' do
- expect { service.execute }.to raise_error Gitlab::Access::AccessDeniedError
- end
-
- context 'when authorization is skipped' do
- subject(:built_user) { service.execute(skip_authorization: true) }
-
- it { is_expected.to be_valid }
-
- it 'sets the created_by_id' do
- expect(built_user.created_by_id).to eq(user.id)
- end
- end
- end
-
- context 'with nil user' do
- let(:service) { described_class.new(nil, params) }
-
- it 'returns a valid user' do
- expect(service.execute).to be_valid
- end
context 'when "send_user_confirmation_email" application setting is true' do
before do
@@ -157,7 +99,7 @@ RSpec.describe Users::BuildService do
end
it 'does not confirm the user' do
- expect(service.execute).not_to be_confirmed
+ expect(user).not_to be_confirmed
end
end
@@ -167,27 +109,103 @@ RSpec.describe Users::BuildService do
end
it 'confirms the user' do
- expect(service.execute).to be_confirmed
+ expect(user).to be_confirmed
end
end
- context 'when user_type is provided' do
- subject(:user) { service.execute }
+ context 'with allowed params' do
+ let(:params) do
+ {
+ email: 1,
+ name: 1,
+ password: 1,
+ password_automatically_set: 1,
+ username: 1,
+ user_type: 'project_bot'
+ }
+ end
- context 'when project_bot' do
- before do
- params.merge!({ user_type: :project_bot })
- end
+ it 'sets all allowed attributes' do
+ expect(User).to receive(:new).with(hash_including(params)).and_call_original
- it { expect(user.project_bot?).to be true}
+ user
end
+ end
+ end
- context 'when not a project_bot' do
- before do
- params.merge!({ user_type: :alert_bot })
- end
+ context 'with nil current_user' do
+ subject(:user) { service.execute }
+
+ it_behaves_like 'common build items'
+ it_behaves_like 'current user not admin'
+ end
+
+ context 'with non admin current_user' do
+ let_it_be(:current_user) { create(:user) }
+
+ let(:service) { described_class.new(current_user, params) }
+
+ subject(:user) { service.execute(skip_authorization: true) }
+
+ it 'raises AccessDeniedError exception when authorization is not skipped' do
+ expect { service.execute }.to raise_error Gitlab::Access::AccessDeniedError
+ end
- it { expect(user.user_type).to be nil }
+ it_behaves_like 'common build items'
+ it_behaves_like 'current user not admin'
+ end
+
+ context 'with an admin current_user' do
+ let_it_be(:current_user) { create(:admin) }
+
+ let(:params) { build_stubbed(:user).slice(:name, :username, :email, :password) }
+ let(:service) { described_class.new(current_user, ActionController::Parameters.new(params).permit!) }
+
+ subject(:user) { service.execute }
+
+ it_behaves_like 'common build items'
+
+ context 'with allowed params' do
+ let(:params) do
+ {
+ access_level: 1,
+ admin: 1,
+ avatar: anything,
+ bio: 1,
+ can_create_group: 1,
+ color_scheme_id: 1,
+ email: 1,
+ external: 1,
+ force_random_password: 1,
+ hide_no_password: 1,
+ hide_no_ssh_key: 1,
+ linkedin: 1,
+ name: 1,
+ password: 1,
+ password_automatically_set: 1,
+ password_expires_at: 1,
+ projects_limit: 1,
+ remember_me: 1,
+ skip_confirmation: 1,
+ skype: 1,
+ theme_id: 1,
+ twitter: 1,
+ username: 1,
+ website_url: 1,
+ private_profile: 1,
+ organization: 1,
+ location: 1,
+ public_email: 1,
+ user_type: 'project_bot',
+ note: 1,
+ view_diffs_file_by_file: 1
+ }
+ end
+
+ it 'sets all allowed attributes' do
+ expect(User).to receive(:new).with(hash_including(params)).and_call_original
+
+ service.execute
end
end
@@ -195,34 +213,34 @@ RSpec.describe Users::BuildService do
where(:user_default_external, :external, :email, :user_default_internal_regex, :result) do
true | nil | 'fl@example.com' | nil | true
true | true | 'fl@example.com' | nil | true
- true | false | 'fl@example.com' | nil | true
+ true | false | 'fl@example.com' | nil | false # admin difference
true | nil | 'fl@example.com' | '' | true
true | true | 'fl@example.com' | '' | true
- true | false | 'fl@example.com' | '' | true
+ true | false | 'fl@example.com' | '' | false # admin difference
true | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
- true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ true | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true # admin difference
true | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
true | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
true | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
- true | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true
+ true | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false # admin difference
false | nil | 'fl@example.com' | nil | false
- false | true | 'fl@example.com' | nil | false
+ false | true | 'fl@example.com' | nil | true # admin difference
false | false | 'fl@example.com' | nil | false
false | nil | 'fl@example.com' | '' | false
- false | true | 'fl@example.com' | '' | false
+ false | true | 'fl@example.com' | '' | true # admin difference
false | false | 'fl@example.com' | '' | false
false | nil | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
- false | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ false | true | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | true # admin difference
false | false | 'fl@example.com' | '^(?:(?!\.ext@).)*$\r?' | false
false | nil | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
- false | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
+ false | true | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | true # admin difference
false | false | 'tester.ext@domain.com' | '^(?:(?!\.ext@).)*$\r?' | false
end
@@ -234,8 +252,6 @@ RSpec.describe Users::BuildService do
params.merge!({ external: external, email: email }.compact)
end
- subject(:user) { service.execute }
-
       it 'sets user.external based on the application settings' do
expect(user.external).to eq(result)
end
diff --git a/spec/services/users/registrations_build_service_spec.rb b/spec/services/users/registrations_build_service_spec.rb
new file mode 100644
index 00000000000..bc3718dbdb2
--- /dev/null
+++ b/spec/services/users/registrations_build_service_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::RegistrationsBuildService do
+ describe '#execute' do
+ let(:base_params) { build_stubbed(:user).slice(:first_name, :last_name, :username, :email, :password) }
+ let(:skip_param) { {} }
+ let(:params) { base_params.merge(skip_param) }
+
+ subject(:service) { described_class.new(nil, params) }
+
+ before do
+ stub_application_setting(signup_enabled?: true)
+ end
+
+ context 'when automatic user confirmation is not enabled' do
+ before do
+ stub_application_setting(send_user_confirmation_email: true)
+ end
+
+ context 'when skip_confirmation is true' do
+ let(:skip_param) { { skip_confirmation: true } }
+
+ it 'confirms the user' do
+ expect(service.execute).to be_confirmed
+ end
+ end
+
+ context 'when skip_confirmation is not set' do
+ it 'does not confirm the user' do
+ expect(service.execute).not_to be_confirmed
+ end
+ end
+
+ context 'when skip_confirmation is false' do
+ let(:skip_param) { { skip_confirmation: false } }
+
+ it 'does not confirm the user' do
+ expect(service.execute).not_to be_confirmed
+ end
+ end
+ end
+
+ context 'when automatic user confirmation is enabled' do
+ before do
+ stub_application_setting(send_user_confirmation_email: false)
+ end
+
+ context 'when skip_confirmation is true' do
+ let(:skip_param) { { skip_confirmation: true } }
+
+ it 'confirms the user' do
+ expect(service.execute).to be_confirmed
+ end
+ end
+
+    context 'when skip_confirmation is not set, the application setting takes precedence' do
+ it 'confirms the user' do
+ expect(service.execute).to be_confirmed
+ end
+ end
+
+    context 'when skip_confirmation is false, the application setting takes precedence' do
+ let(:skip_param) { { skip_confirmation: false } }
+
+ it 'confirms the user' do
+ expect(service.execute).to be_confirmed
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/users/update_assigned_open_issue_count_service_spec.rb b/spec/services/users/update_assigned_open_issue_count_service_spec.rb
new file mode 100644
index 00000000000..55fc60a7893
--- /dev/null
+++ b/spec/services/users/update_assigned_open_issue_count_service_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UpdateAssignedOpenIssueCountService do
+ let_it_be(:user) { create(:user) }
+
+ describe '#initialize' do
+    context 'when incorrect arguments are provided' do
+      it 'raises an error if a valid target user is not provided' do
+ expect { described_class.new(target_user: nil) }.to raise_error(ArgumentError, /Please provide a target user/)
+ expect { described_class.new(target_user: "nonsense") }.to raise_error(ArgumentError, /Please provide a target user/)
+ end
+ end
+
+    context 'when correct arguments are provided' do
+ it 'is successful' do
+ expect { described_class.new(target_user: user) }.not_to raise_error
+ end
+ end
+ end
+
+ describe "#execute", :clean_gitlab_redis_cache do
+ let(:fake_update_service) { double }
+ let(:fake_issue_count_service) { double }
+ let(:provided_value) { nil }
+
+ subject { described_class.new(target_user: user).execute }
+
+ context 'successful' do
+ it 'returns a success response' do
+ expect(subject).to be_success
+ end
+
+ it 'writes the cache with the new value' do
+ expect(Rails.cache).to receive(:write).with(['users', user.id, 'assigned_open_issues_count'], 0, expires_in: User::COUNT_CACHE_VALIDITY_PERIOD)
+
+ subject
+ end
+
+ it 'calls the issues finder to get the latest value' do
+ expect(IssuesFinder).to receive(:new).with(user, assignee_id: user.id, state: 'opened', non_archived: true).and_return(fake_issue_count_service)
+ expect(fake_issue_count_service).to receive(:execute)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/services/users/update_todo_count_cache_service_spec.rb b/spec/services/users/update_todo_count_cache_service_spec.rb
index 3e3618b1291..3d96af928df 100644
--- a/spec/services/users/update_todo_count_cache_service_spec.rb
+++ b/spec/services/users/update_todo_count_cache_service_spec.rb
@@ -14,13 +14,21 @@ RSpec.describe Users::UpdateTodoCountCacheService do
let_it_be(:todo5) { create(:todo, user: user2, state: :pending) }
let_it_be(:todo6) { create(:todo, user: user2, state: :pending) }
+ def execute_all
+ described_class.new([user1.id, user2.id]).execute
+ end
+
+ def execute_single
+ described_class.new([user1.id]).execute
+ end
+
it 'updates the todos_counts for users', :use_clean_rails_memory_store_caching do
Rails.cache.write(['users', user1.id, 'todos_done_count'], 0)
Rails.cache.write(['users', user1.id, 'todos_pending_count'], 0)
Rails.cache.write(['users', user2.id, 'todos_done_count'], 0)
Rails.cache.write(['users', user2.id, 'todos_pending_count'], 0)
- expect { described_class.new([user1, user2]).execute }
+ expect { execute_all }
.to change(user1, :todos_done_count).from(0).to(2)
.and change(user1, :todos_pending_count).from(0).to(1)
.and change(user2, :todos_done_count).from(0).to(1)
@@ -28,7 +36,7 @@ RSpec.describe Users::UpdateTodoCountCacheService do
Todo.delete_all
- expect { described_class.new([user1, user2]).execute }
+ expect { execute_all }
.to change(user1, :todos_done_count).from(2).to(0)
.and change(user1, :todos_pending_count).from(1).to(0)
.and change(user2, :todos_done_count).from(1).to(0)
@@ -36,26 +44,24 @@ RSpec.describe Users::UpdateTodoCountCacheService do
end
it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new { described_class.new([user1]).execute }.count
+ control_count = ActiveRecord::QueryRecorder.new { execute_single }.count
- expect { described_class.new([user1, user2]).execute }.not_to exceed_query_limit(control_count)
+ expect { execute_all }.not_to exceed_query_limit(control_count)
end
it 'executes one query per batch of users' do
stub_const("#{described_class}::QUERY_BATCH_SIZE", 1)
- expect(ActiveRecord::QueryRecorder.new { described_class.new([user1]).execute }.count).to eq(1)
- expect(ActiveRecord::QueryRecorder.new { described_class.new([user1, user2]).execute }.count).to eq(2)
+ expect(ActiveRecord::QueryRecorder.new { execute_single }.count).to eq(1)
+ expect(ActiveRecord::QueryRecorder.new { execute_all }.count).to eq(2)
end
- it 'sets the cache expire time to the users count_cache_validity_period' do
- allow(user1).to receive(:count_cache_validity_period).and_return(1.minute)
- allow(user2).to receive(:count_cache_validity_period).and_return(1.hour)
-
- expect(Rails.cache).to receive(:write).with(['users', user1.id, anything], anything, expires_in: 1.minute).twice
- expect(Rails.cache).to receive(:write).with(['users', user2.id, anything], anything, expires_in: 1.hour).twice
+ it 'sets the correct cache expire time' do
+ expect(Rails.cache).to receive(:write)
+ .with(['users', user1.id, anything], anything, expires_in: User::COUNT_CACHE_VALIDITY_PERIOD)
+ .twice
- described_class.new([user1, user2]).execute
+ execute_single
end
end
end
diff --git a/spec/services/users/upsert_credit_card_validation_service_spec.rb b/spec/services/users/upsert_credit_card_validation_service_spec.rb
new file mode 100644
index 00000000000..148638fe5e7
--- /dev/null
+++ b/spec/services/users/upsert_credit_card_validation_service_spec.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UpsertCreditCardValidationService do
+ let_it_be(:user) { create(:user) }
+
+ let(:user_id) { user.id }
+ let(:credit_card_validated_time) { Time.utc(2020, 1, 1) }
+ let(:params) { { user_id: user_id, credit_card_validated_at: credit_card_validated_time } }
+
+ describe '#execute' do
+ subject(:service) { described_class.new(params) }
+
+    context 'when the credit card validation record is successfully set for the user' do
+ context 'when user does not have credit card validation record' do
+ it 'creates the credit card validation and returns a success' do
+ expect(user.credit_card_validated_at).to be nil
+
+ result = service.execute
+
+ expect(result.status).to eq(:success)
+ expect(user.reload.credit_card_validated_at).to eq(credit_card_validated_time)
+ end
+ end
+
+ context 'when user has credit card validation record' do
+ let(:old_time) { Time.utc(1999, 2, 2) }
+
+ before do
+ create(:credit_card_validation, user: user, credit_card_validated_at: old_time)
+ end
+
+ it 'updates the credit card validation and returns a success' do
+ expect(user.credit_card_validated_at).to eq(old_time)
+
+ result = service.execute
+
+ expect(result.status).to eq(:success)
+ expect(user.reload.credit_card_validated_at).to eq(credit_card_validated_time)
+ end
+ end
+ end
+
+ shared_examples 'returns an error without tracking the exception' do
+ it do
+ expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ result = service.execute
+
+ expect(result.status).to eq(:error)
+ end
+ end
+
+ context 'when user id does not exist' do
+ let(:user_id) { non_existing_record_id }
+
+ it_behaves_like 'returns an error without tracking the exception'
+ end
+
+ context 'when missing credit_card_validated_at' do
+ let(:params) { { user_id: user_id } }
+
+ it_behaves_like 'returns an error without tracking the exception'
+ end
+
+ context 'when missing user id' do
+ let(:params) { { credit_card_validated_at: credit_card_validated_time } }
+
+ it_behaves_like 'returns an error without tracking the exception'
+ end
+
+    context 'when an unexpected exception happens' do
+ it 'tracks the exception and returns an error' do
+ expect(::Users::CreditCardValidation).to receive(:upsert).and_raise(e = StandardError.new('My exception!'))
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(e, class: described_class.to_s, params: params)
+
+ result = service.execute
+
+ expect(result.status).to eq(:error)
+ end
+ end
+ end
+end
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 2fe72ab31c2..b3fd4e33640 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -5,8 +5,9 @@ require 'spec_helper'
RSpec.describe WebHookService do
include StubRequests
- let(:project) { create(:project) }
- let(:project_hook) { create(:project_hook) }
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:project_hook) { create(:project_hook, project: project) }
+
let(:headers) do
{
'Content-Type' => 'application/json',
@@ -21,6 +22,10 @@ RSpec.describe WebHookService do
let(:service_instance) { described_class.new(project_hook, data, :push_hooks) }
+ around do |example|
+ travel_to(Time.current) { example.run }
+ end
+
describe '#initialize' do
before do
stub_application_setting(setting_name => setting)
@@ -56,12 +61,8 @@ RSpec.describe WebHookService do
end
describe '#execute' do
- before do
- project.hooks << [project_hook]
- end
-
context 'when token is defined' do
- let(:project_hook) { create(:project_hook, :token) }
+ let_it_be(:project_hook) { create(:project_hook, :token) }
it 'POSTs to the webhook URL' do
stub_full_request(project_hook.url, method: :post)
@@ -85,8 +86,8 @@ RSpec.describe WebHookService do
end
context 'when auth credentials are present' do
- let(:url) {'https://example.org'}
- let(:project_hook) { create(:project_hook, url: 'https://demo:demo@example.org/') }
+ let_it_be(:url) {'https://example.org'}
+ let_it_be(:project_hook) { create(:project_hook, url: 'https://demo:demo@example.org/') }
it 'uses the credentials' do
stub_full_request(url, method: :post)
@@ -100,8 +101,8 @@ RSpec.describe WebHookService do
end
context 'when auth credentials are partial present' do
- let(:url) {'https://example.org'}
- let(:project_hook) { create(:project_hook, url: 'https://demo@example.org/') }
+ let_it_be(:url) {'https://example.org'}
+ let_it_be(:project_hook) { create(:project_hook, url: 'https://demo@example.org/') }
it 'uses the credentials anyways' do
stub_full_request(url, method: :post)
@@ -120,10 +121,21 @@ RSpec.describe WebHookService do
expect { service_instance.execute }.to raise_error(StandardError)
end
+ it 'does not execute disabled hooks' do
+ project_hook.update!(recent_failures: 4)
+
+ expect(service_instance.execute).to eq({ status: :error, message: 'Hook disabled' })
+ end
+
it 'handles exceptions' do
- exceptions = [SocketError, OpenSSL::SSL::SSLError, Errno::ECONNRESET, Errno::ECONNREFUSED, Errno::EHOSTUNREACH, Net::OpenTimeout, Net::ReadTimeout, Gitlab::HTTP::BlockedUrlError, Gitlab::HTTP::RedirectionTooDeep]
+ exceptions = [
+ SocketError, OpenSSL::SSL::SSLError, Errno::ECONNRESET, Errno::ECONNREFUSED,
+ Errno::EHOSTUNREACH, Net::OpenTimeout, Net::ReadTimeout,
+ Gitlab::HTTP::BlockedUrlError, Gitlab::HTTP::RedirectionTooDeep
+ ]
exceptions.each do |exception_class|
exception = exception_class.new('Exception message')
+ project_hook.enable!
stub_full_request(project_hook.url, method: :post).to_raise(exception)
expect(service_instance.execute).to eq({ status: :error, message: exception.to_s })
@@ -132,7 +144,7 @@ RSpec.describe WebHookService do
end
context 'when url is not encoded' do
- let(:project_hook) { create(:project_hook, url: 'http://server.com/my path/') }
+ let_it_be(:project_hook) { create(:project_hook, url: 'http://server.com/my path/') }
it 'handles exceptions' do
expect(service_instance.execute).to eq(status: :error, message: 'bad URI(is not URI?): "http://server.com/my path/"')
@@ -166,10 +178,11 @@ RSpec.describe WebHookService do
context 'with success' do
before do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
- service_instance.execute
end
     it 'logs successful execution' do
+ service_instance.execute
+
expect(hook_log.trigger).to eq('push_hooks')
expect(hook_log.url).to eq(project_hook.url)
expect(hook_log.request_headers).to eq(headers)
@@ -178,15 +191,81 @@ RSpec.describe WebHookService do
expect(hook_log.execution_duration).to be > 0
expect(hook_log.internal_error_message).to be_nil
end
+
+ it 'does not increment the failure count' do
+ expect { service_instance.execute }.not_to change(project_hook, :recent_failures)
+ end
+
+ it 'does not change the disabled_until attribute' do
+ expect { service_instance.execute }.not_to change(project_hook, :disabled_until)
+ end
+
+ context 'when the hook had previously failed' do
+ before do
+ project_hook.update!(recent_failures: 2)
+ end
+
+ it 'resets the failure count' do
+ expect { service_instance.execute }.to change(project_hook, :recent_failures).to(0)
+ end
+ end
+ end
+
+ context 'with bad request' do
+ before do
+ stub_full_request(project_hook.url, method: :post).to_return(status: 400, body: 'Bad request')
+ end
+
+ it 'logs failed execution' do
+ service_instance.execute
+
+ expect(hook_log).to have_attributes(
+ trigger: eq('push_hooks'),
+ url: eq(project_hook.url),
+ request_headers: eq(headers),
+ response_body: eq('Bad request'),
+ response_status: eq('400'),
+ execution_duration: be > 0,
+ internal_error_message: be_nil
+ )
+ end
+
+ it 'increments the failure count' do
+ expect { service_instance.execute }.to change(project_hook, :recent_failures).by(1)
+ end
+
+ it 'does not change the disabled_until attribute' do
+ expect { service_instance.execute }.not_to change(project_hook, :disabled_until)
+ end
+
+ it 'does not allow the failure count to overflow' do
+ project_hook.update!(recent_failures: 32767)
+
+ expect { service_instance.execute }.not_to change(project_hook, :recent_failures)
+ end
+
+ context 'when the web_hooks_disable_failed FF is disabled' do
+ before do
+          # The hook will only be executed if the flag is disabled.
+ stub_feature_flags(web_hooks_disable_failed: false)
+ end
+
+ it 'does not allow the failure count to overflow' do
+ project_hook.update!(recent_failures: 32767)
+
+ expect { service_instance.execute }.not_to change(project_hook, :recent_failures)
+ end
+ end
end
context 'with exception' do
before do
stub_full_request(project_hook.url, method: :post).to_raise(SocketError.new('Some HTTP Post error'))
- service_instance.execute
end
     it 'logs failed execution' do
+ service_instance.execute
+
expect(hook_log.trigger).to eq('push_hooks')
expect(hook_log.url).to eq(project_hook.url)
expect(hook_log.request_headers).to eq(headers)
@@ -195,6 +274,47 @@ RSpec.describe WebHookService do
expect(hook_log.execution_duration).to be > 0
expect(hook_log.internal_error_message).to eq('Some HTTP Post error')
end
+
+ it 'does not increment the failure count' do
+ expect { service_instance.execute }.not_to change(project_hook, :recent_failures)
+ end
+
+ it 'sets the disabled_until attribute' do
+ expect { service_instance.execute }
+ .to change(project_hook, :disabled_until).to(project_hook.next_backoff.from_now)
+ end
+
+ it 'increases the backoff count' do
+ expect { service_instance.execute }.to change(project_hook, :backoff_count).by(1)
+ end
+
+ context 'when the previous cool-off was near the maximum' do
+ before do
+ project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 8)
+ end
+
+ it 'sets the disabled_until attribute' do
+ expect { service_instance.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
+ end
+
+        it 'increases the backoff count' do
+ expect { service_instance.execute }.to change(project_hook, :backoff_count).by(1)
+ end
+ end
+
+      context 'when we have backed off many times' do
+ before do
+ project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 365)
+ end
+
+ it 'sets the disabled_until attribute' do
+ expect { service_instance.execute }.to change(project_hook, :disabled_until).to(1.day.from_now)
+ end
+
+        it 'increases the backoff count' do
+ expect { service_instance.execute }.to change(project_hook, :backoff_count).by(1)
+ end
+ end
end
context 'with unsafe response body' do
@@ -217,12 +337,98 @@ RSpec.describe WebHookService do
end
describe '#async_execute' do
- let(:system_hook) { create(:system_hook) }
+ def expect_to_perform_worker(hook)
+ expect(WebHookWorker).to receive(:perform_async).with(hook.id, data, 'push_hooks')
+ end
+
+ def expect_to_rate_limit(hook, threshold:, throttled: false)
+ expect(Gitlab::ApplicationRateLimiter).to receive(:throttled?)
+ .with(:web_hook_calls, scope: [hook], threshold: threshold)
+ .and_return(throttled)
+ end
+
+ context 'when rate limiting is not configured' do
+ it 'queues a worker without tracking the call' do
+ expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+ expect_to_perform_worker(project_hook)
+
+ service_instance.async_execute
+ end
+ end
+
+ context 'when rate limiting is configured' do
+ let_it_be(:threshold) { 3 }
+ let_it_be(:plan_limits) { create(:plan_limits, :default_plan, web_hook_calls: threshold) }
+
+ it 'queues a worker and tracks the call' do
+ expect_to_rate_limit(project_hook, threshold: threshold)
+ expect_to_perform_worker(project_hook)
+
+ service_instance.async_execute
+ end
+
+ context 'when the hook is throttled (via mock)' do
+ before do
+ expect_to_rate_limit(project_hook, threshold: threshold, throttled: true)
+ end
+
+ it 'does not queue a worker and logs an error' do
+ expect(WebHookWorker).not_to receive(:perform_async)
- it 'enqueue WebHookWorker' do
- expect(WebHookWorker).to receive(:perform_async).with(project_hook.id, data, 'push_hooks')
+ payload = {
+ message: 'Webhook rate limit exceeded',
+ hook_id: project_hook.id,
+ hook_type: 'ProjectHook',
+ hook_name: 'push_hooks'
+ }
- described_class.new(project_hook, data, 'push_hooks').async_execute
+ expect(Gitlab::AuthLogger).to receive(:error).with(payload)
+ expect(Gitlab::AppLogger).to receive(:error).with(payload)
+
+ service_instance.async_execute
+ end
+ end
+
+ context 'when the hook is throttled (via Redis)', :clean_gitlab_redis_cache do
+ before do
+ # Set a high interval to avoid intermittent failures in CI
+ allow(Gitlab::ApplicationRateLimiter).to receive(:rate_limits).and_return(
+ web_hook_calls: { interval: 1.day }
+ )
+
+ expect_to_perform_worker(project_hook).exactly(threshold).times
+
+ threshold.times { service_instance.async_execute }
+ end
+
+ it 'stops queueing workers and logs errors' do
+ expect(Gitlab::AuthLogger).to receive(:error).twice
+ expect(Gitlab::AppLogger).to receive(:error).twice
+
+ 2.times { service_instance.async_execute }
+ end
+
+ it 'still queues workers for other hooks' do
+ other_hook = create(:project_hook)
+
+ expect_to_perform_worker(other_hook)
+
+ described_class.new(other_hook, data, :push_hooks).async_execute
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(web_hooks_rate_limit: false)
+ end
+
+ it 'queues a worker without tracking the call' do
+ expect(Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+ expect_to_perform_worker(project_hook)
+
+ service_instance.async_execute
+ end
+ end
end
end
end
diff --git a/spec/services/web_hooks/destroy_service_spec.rb b/spec/services/web_hooks/destroy_service_spec.rb
index fda40eb01e2..5269fe08ac0 100644
--- a/spec/services/web_hooks/destroy_service_spec.rb
+++ b/spec/services/web_hooks/destroy_service_spec.rb
@@ -41,15 +41,15 @@ RSpec.describe WebHooks::DestroyService do
end
context 'with system hook' do
- let_it_be(:hook) { create(:system_hook, url: "http://example.com") }
- let_it_be(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+ let!(:hook) { create(:system_hook, url: "http://example.com") }
+ let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
it_behaves_like 'batched destroys'
end
context 'with project hook' do
- let_it_be(:hook) { create(:project_hook) }
- let_it_be(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+ let!(:hook) { create(:project_hook) }
+ let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
it_behaves_like 'batched destroys'
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 4179e6f7e91..c59daa6c919 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -253,6 +253,9 @@ RSpec.configure do |config|
# tests, until we introduce it in user settings
stub_feature_flags(forti_token_cloud: false)
+    # This feature flag is disabled by default and is used in disaster recovery mode
+ stub_feature_flags(ci_queueing_disaster_recovery: false)
+
enable_rugged = example.metadata[:enable_rugged].present?
# Disable Rugged features by default
@@ -265,16 +268,33 @@ RSpec.configure do |config|
stub_feature_flags(file_identifier_hash: false)
stub_feature_flags(unified_diff_components: false)
+ stub_feature_flags(diffs_virtual_scrolling: false)
- # The following `vue_issues_list` stub can be removed once the
- # Vue issues page has feature parity with the current Haml page
+ # The following `vue_issues_list`/`vue_issuables_list` stubs can be removed
+ # once the Vue issues page has feature parity with the current Haml page
stub_feature_flags(vue_issues_list: false)
+ stub_feature_flags(vue_issuables_list: false)
# Disable `refactor_blob_viewer` as we refactor
     # the blob viewer. See the following epic for more:
# https://gitlab.com/groups/gitlab-org/-/epics/5531
stub_feature_flags(refactor_blob_viewer: false)
+ # Disable `main_branch_over_master` as we migrate
+    # from `master` to `main` across our codebase.
+    # This is done to keep tests consistent.
+    # Once we're ready to change `master` usages to `main`, enable it.
+ stub_feature_flags(main_branch_over_master: false)
+
+ # Selectively disable by actor https://docs.gitlab.com/ee/development/feature_flags/#selectively-disable-by-actor
+ stub_feature_flags(remove_description_html_in_release_api_override: false)
+
+    # Disable issue repositioning to avoid heavy load on the database when importing big projects.
+    # It is only turned on when the app is handling heavy project imports.
+ # Can be removed when we find a better way to deal with the problem.
+ # For more information check https://gitlab.com/gitlab-com/gl-infra/production/-/issues/4321
+ stub_feature_flags(block_issue_repositioning: false)
+
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags
diff --git a/spec/support/atlassian/jira_connect/schemata.rb b/spec/support/atlassian/jira_connect/schemata.rb
index d056c7cacf3..61e8aa8e15c 100644
--- a/spec/support/atlassian/jira_connect/schemata.rb
+++ b/spec/support/atlassian/jira_connect/schemata.rb
@@ -74,7 +74,7 @@ module Atlassian
'deploymentSequenceNumber' => { 'type' => 'integer' },
'updateSequenceNumber' => { 'type' => 'integer' },
'associations' => {
- 'type' => 'array',
+ 'type' => %w(array),
'items' => association_type,
'minItems' => 1
},
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index be2b41d6997..f9a28c8e40b 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -157,7 +157,7 @@ RSpec.configure do |config|
unless session.current_window.size == CAPYBARA_WINDOW_SIZE
begin
session.current_window.resize_to(*CAPYBARA_WINDOW_SIZE)
- rescue # ?
+ rescue StandardError # ?
end
end
end
@@ -170,14 +170,16 @@ RSpec.configure do |config|
Capybara.raise_server_errors = false
example.run
+ ensure
+ Capybara.raise_server_errors = true
+ end
+ config.append_after do |example|
if example.metadata[:screenshot]
screenshot = example.metadata[:screenshot][:image] || example.metadata[:screenshot][:html]
+ screenshot&.delete_prefix!(ENV.fetch('CI_PROJECT_DIR', ''))
example.metadata[:stdout] = %{[[ATTACHMENT|#{screenshot}]]}
end
-
- ensure
- Capybara.raise_server_errors = true
end
config.after(:example, :js) do |example|
diff --git a/spec/support/db_cleaner.rb b/spec/support/db_cleaner.rb
index 77e1f6bcaa3..ff913ebf22b 100644
--- a/spec/support/db_cleaner.rb
+++ b/spec/support/db_cleaner.rb
@@ -16,4 +16,4 @@ module DbCleaner
end
end
-DbCleaner.prepend_if_ee('EE::DbCleaner')
+DbCleaner.prepend_mod_with('DbCleaner')
diff --git a/spec/support/factory_bot.rb b/spec/support/factory_bot.rb
index 5761e05d541..5f22fa11e9e 100644
--- a/spec/support/factory_bot.rb
+++ b/spec/support/factory_bot.rb
@@ -2,6 +2,14 @@
FactoryBot::SyntaxRunner.class_eval do
include RSpec::Mocks::ExampleMethods
+
+  # FactoryBot doesn't yet allow adding a helper that can be used in factories,
+  # even though the fixture_file_upload helper would be reasonable to use there:
+ #
+ # https://github.com/thoughtbot/factory_bot/issues/564#issuecomment-389491577
+ def fixture_file_upload(*args, **kwargs)
+ Rack::Test::UploadedFile.new(*args, **kwargs)
+ end
end
# Patching FactoryBot to allow stubbing non AR models
diff --git a/spec/support/factory_default.rb b/spec/support/factory_default.rb
index e116c28f132..31af022f6c0 100644
--- a/spec/support/factory_default.rb
+++ b/spec/support/factory_default.rb
@@ -1,5 +1,17 @@
# frozen_string_literal: true
+module Gitlab
+ module FreezeFactoryDefault
+ def set_factory_default(name, obj, preserve_traits: nil)
+ obj.freeze unless obj.frozen?
+
+ super
+ end
+ end
+end
+
+TestProf::FactoryDefault::DefaultSyntax.prepend Gitlab::FreezeFactoryDefault
+
RSpec.configure do |config|
config.after do |ex|
TestProf::FactoryDefault.reset unless ex.metadata[:factory_default] == :keep
diff --git a/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb b/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb
new file mode 100644
index 00000000000..c9ff566e94c
--- /dev/null
+++ b/spec/support/gitlab/usage/metrics_instrumentation_shared_examples.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a correct instrumented metric value' do |options, expected_value|
+ let(:time_frame) { options[:time_frame] }
+
+ before do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ end
+
+ it 'has correct value' do
+ expect(described_class.new(time_frame: time_frame).value).to eq(expected_value)
+ end
+end
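Not part of the patch: a minimal sketch of how a spec might include the shared example above. The metric class, factory, and expected value are illustrative assumptions, not code from this changeset.

  # Hypothetical usage of 'a correct instrumented metric value':
  # the described class is any instrumented metric that accepts `time_frame:`.
  RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountIssuesMetric do
    let_it_be(:issue) { create(:issue) }

    it_behaves_like 'a correct instrumented metric value', { time_frame: 'all' }, 1
  end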
diff --git a/spec/support/gitlab_experiment.rb b/spec/support/gitlab_experiment.rb
index bd0c88f8049..b84adf82d29 100644
--- a/spec/support/gitlab_experiment.rb
+++ b/spec/support/gitlab_experiment.rb
@@ -12,10 +12,6 @@ class ApplicationExperiment # rubocop:disable Gitlab/NamespacedClass
super(...)
Feature.persist_used!(feature_flag_name)
end
-
- def should_track?
- true
- end
end
RSpec.configure do |config|
diff --git a/spec/support/helpers/board_helpers.rb b/spec/support/helpers/board_helpers.rb
index 6e145fed733..c4e69d06f52 100644
--- a/spec/support/helpers/board_helpers.rb
+++ b/spec/support/helpers/board_helpers.rb
@@ -4,6 +4,23 @@ module BoardHelpers
def click_card(card)
within card do
first('.board-card-number').click
+ wait_for_requests
+ end
+ end
+
+ def load_board(board_path)
+ visit board_path
+
+ wait_for_requests
+ end
+
+ def click_card_and_edit_label
+ click_card(card)
+
+ page.within(labels_select) do
+ click_button 'Edit'
+
+ wait_for_requests
end
end
end
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index 9e62eef14de..5510284b30d 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -3,6 +3,38 @@
module CycleAnalyticsHelpers
include GitHelpers
+ def toggle_value_stream_dropdown
+ page.find('[data-testid="dropdown-value-streams"]').click
+ end
+
+ def add_custom_stage_to_form
+ page.find_button(s_('CreateValueStreamForm|Add another stage')).click
+
+ index = page.all('[data-testid="value-stream-stage-fields"]').length
+ last_stage = page.all('[data-testid="value-stream-stage-fields"]').last
+
+ within last_stage do
+ find('[name*="custom-stage-name-"]').fill_in with: "Cool custom stage - name #{index}"
+ select_dropdown_option_by_value "custom-stage-start-event-", :merge_request_created
+ select_dropdown_option_by_value "custom-stage-end-event-", :merge_request_merged
+ end
+ end
+
+ def save_value_stream(custom_value_stream_name)
+ fill_in 'create-value-stream-name', with: custom_value_stream_name
+
+ page.find_button(s_('CreateValueStreamForm|Create Value Stream')).click
+ wait_for_requests
+ end
+
+ def create_custom_value_stream(custom_value_stream_name)
+ toggle_value_stream_dropdown
+ page.find_button(_('Create new Value Stream')).click
+
+ add_custom_stage_to_form
+ save_value_stream(custom_value_stream_name)
+ end
+
def wait_for_stages_to_load(selector = '.js-path-navigation')
expect(page).to have_selector selector
wait_for_requests
@@ -93,17 +125,17 @@ module CycleAnalyticsHelpers
target_branch: 'master'
}
- mr = MergeRequests::CreateService.new(project, user, opts).execute
+ mr = MergeRequests::CreateService.new(project: project, current_user: user, params: opts).execute
NewMergeRequestWorker.new.perform(mr, user)
mr
end
def merge_merge_requests_closing_issue(user, project, issue)
merge_requests = Issues::ReferencedMergeRequestsService
- .new(project, user)
+ .new(project: project, current_user: user)
.closed_by_merge_requests(issue)
- merge_requests.each { |merge_request| MergeRequests::MergeService.new(project, user, sha: merge_request.diff_head_sha).execute(merge_request) }
+ merge_requests.each { |merge_request| MergeRequests::MergeService.new(project: project, current_user: user, params: { sha: merge_request.diff_head_sha }).execute(merge_request) }
end
def deploy_master(user, project, environment: 'production')
diff --git a/spec/support/helpers/dns_helpers.rb b/spec/support/helpers/dns_helpers.rb
index 1795b0a9ac3..ba32ccbb6f1 100644
--- a/spec/support/helpers/dns_helpers.rb
+++ b/spec/support/helpers/dns_helpers.rb
@@ -18,7 +18,7 @@ module DnsHelpers
def stub_invalid_dns!
allow(Addrinfo).to receive(:getaddrinfo).with(/\Afoobar\.\w|(\d{1,3}\.){4,}\d{1,3}\z/i, anything, nil, :STREAM) do
- raise SocketError.new("getaddrinfo: Name or service not known")
+ raise SocketError, "getaddrinfo: Name or service not known"
end
end
diff --git a/spec/support/helpers/features/invite_members_modal_helper.rb b/spec/support/helpers/features/invite_members_modal_helper.rb
new file mode 100644
index 00000000000..1127c817656
--- /dev/null
+++ b/spec/support/helpers/features/invite_members_modal_helper.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module Spec
+ module Support
+ module Helpers
+ module Features
+ module InviteMembersModalHelper
+ def invite_member(name, role: 'Guest', expires_at: nil)
+ click_on 'Invite members'
+
+ page.within '#invite-members-modal' do
+ fill_in 'Select members or type email addresses', with: name
+
+ wait_for_requests
+ click_button name
+ choose_options(role, expires_at)
+
+ click_button 'Invite'
+
+ page.refresh
+ end
+ end
+
+ def invite_group(name, role: 'Guest', expires_at: nil)
+ click_on 'Invite a group'
+
+ click_on 'Select a group'
+ wait_for_requests
+ click_button name
+ choose_options(role, expires_at)
+
+ click_button 'Invite'
+
+ page.refresh
+ end
+
+ def choose_options(role, expires_at)
+ unless role == 'Guest'
+ click_button 'Guest'
+ wait_for_requests
+ click_button role
+ end
+
+ fill_in 'YYYY-MM-DD', with: expires_at.try(:strftime, '%Y-%m-%d')
+ end
+ end
+ end
+ end
+ end
+end
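Not part of the patch: an illustrative call into the helper above from a feature spec. The member name, group name, role, and expiry are assumptions.

  # Assumes the spec includes Spec::Support::Helpers::Features::InviteMembersModalHelper
  # and is already on a members page where the invite modal is available.
  invite_member('Jane Doe', role: 'Developer', expires_at: 5.days.from_now)
  invite_group('My Group', role: 'Reporter')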
diff --git a/spec/support/helpers/features/members_table_helpers.rb b/spec/support/helpers/features/members_table_helpers.rb
index 4a0e218ed3e..2e86e014a1b 100644
--- a/spec/support/helpers/features/members_table_helpers.rb
+++ b/spec/support/helpers/features/members_table_helpers.rb
@@ -27,10 +27,6 @@ module Spec
all_rows[2]
end
- def invite_users_form
- page.find('[data-testid="invite-users-form"]')
- end
-
def find_row(name)
page.within(members_table) do
page.find('tbody > tr', text: name)
diff --git a/spec/support/helpers/gitaly_setup.rb b/spec/support/helpers/gitaly_setup.rb
new file mode 100644
index 00000000000..2ce4bcfa943
--- /dev/null
+++ b/spec/support/helpers/gitaly_setup.rb
@@ -0,0 +1,195 @@
+# frozen_string_literal: true
+
+# This file contains environment settings for gitaly when it's running
+# as part of the gitlab-ce/ee test suite.
+#
+# Please be careful when modifying this file. Your changes must work
+# both for local development rspec runs, and in CI.
+
+require 'securerandom'
+require 'socket'
+require 'logger'
+
+module GitalySetup
+ LOGGER = begin
+ default_name = ENV['CI'] ? 'DEBUG' : 'WARN'
+ level_name = ENV['GITLAB_TESTING_LOG_LEVEL']&.upcase
+ level = Logger.const_get(level_name || default_name, true) # rubocop: disable Gitlab/ConstGetInheritFalse
+ Logger.new(STDOUT, level: level, formatter: ->(_, _, _, msg) { msg })
+ end
+
+ def tmp_tests_gitaly_dir
+ File.expand_path('../../../tmp/tests/gitaly', __dir__)
+ end
+
+ def tmp_tests_gitaly_bin_dir
+ File.join(tmp_tests_gitaly_dir, '_build', 'bin')
+ end
+
+ def tmp_tests_gitlab_shell_dir
+ File.expand_path('../../../tmp/tests/gitlab-shell', __dir__)
+ end
+
+ def rails_gitlab_shell_secret
+ File.expand_path('../../../.gitlab_shell_secret', __dir__)
+ end
+
+ def gemfile
+ File.join(tmp_tests_gitaly_dir, 'ruby', 'Gemfile')
+ end
+
+ def gemfile_dir
+ File.dirname(gemfile)
+ end
+
+ def gitlab_shell_secret_file
+ File.join(tmp_tests_gitlab_shell_dir, '.gitlab_shell_secret')
+ end
+
+ def env
+ {
+ 'HOME' => File.expand_path('tmp/tests'),
+ 'GEM_PATH' => Gem.path.join(':'),
+ 'BUNDLE_APP_CONFIG' => File.join(gemfile_dir, '.bundle'),
+ 'BUNDLE_INSTALL_FLAGS' => nil,
+ 'BUNDLE_GEMFILE' => gemfile,
+ 'RUBYOPT' => nil,
+
+ # Git hooks can't run during tests as the internal API is not running.
+ 'GITALY_TESTING_NO_GIT_HOOKS' => "1",
+ 'GITALY_TESTING_ENABLE_ALL_FEATURE_FLAGS' => "true"
+ }
+ end
+
+ # rubocop:disable GitlabSecurity/SystemCommandInjection
+ def set_bundler_config
+ system('bundle config set --local jobs 4', chdir: gemfile_dir)
+ system('bundle config set --local retry 3', chdir: gemfile_dir)
+
+ if ENV['CI']
+ bundle_path = File.expand_path('../../../vendor/gitaly-ruby', __dir__)
+ system('bundle', 'config', 'set', '--local', 'path', bundle_path, chdir: gemfile_dir)
+ end
+ end
+ # rubocop:enable GitlabSecurity/SystemCommandInjection
+
+ def config_path(service)
+ case service
+ when :gitaly
+ File.join(tmp_tests_gitaly_dir, 'config.toml')
+ when :gitaly2
+ File.join(tmp_tests_gitaly_dir, 'gitaly2.config.toml')
+ when :praefect
+ File.join(tmp_tests_gitaly_dir, 'praefect.config.toml')
+ end
+ end
+
+ def service_binary(service)
+ case service
+ when :gitaly, :gitaly2
+ 'gitaly'
+ when :praefect
+ 'praefect'
+ end
+ end
+
+ def install_gitaly_gems
+ system(env, "make #{tmp_tests_gitaly_dir}/.ruby-bundle", chdir: tmp_tests_gitaly_dir) # rubocop:disable GitlabSecurity/SystemCommandInjection
+ end
+
+ def build_gitaly
+ system(env, 'make', chdir: tmp_tests_gitaly_dir) # rubocop:disable GitlabSecurity/SystemCommandInjection
+ end
+
+ def start_gitaly
+ start(:gitaly)
+ end
+
+ def start_gitaly2
+ start(:gitaly2)
+ end
+
+ def start_praefect
+ start(:praefect)
+ end
+
+ def start(service)
+ args = ["#{tmp_tests_gitaly_bin_dir}/#{service_binary(service)}"]
+ args.push("-config") if service == :praefect
+ args.push(config_path(service))
+ pid = spawn(env, *args, [:out, :err] => "log/#{service}-test.log")
+
+ begin
+ try_connect!(service)
+ rescue StandardError
+ Process.kill('TERM', pid)
+ raise
+ end
+
+ pid
+ end
+
+ # Taken from Gitlab::Shell.generate_and_link_secret_token
+ def ensure_gitlab_shell_secret!
+ secret_file = rails_gitlab_shell_secret
+ shell_link = gitlab_shell_secret_file
+
+ unless File.size?(secret_file)
+ File.write(secret_file, SecureRandom.hex(16))
+ end
+
+ unless File.exist?(shell_link)
+ FileUtils.ln_s(secret_file, shell_link)
+ end
+ end
+
+ def check_gitaly_config!
+ LOGGER.debug "Checking gitaly-ruby Gemfile...\n"
+
+ unless File.exist?(gemfile)
+ message = "#{gemfile} does not exist."
+ message += "\n\nThis might have happened if the CI artifacts for this build were destroyed." if ENV['CI']
+ abort message
+ end
+
+ LOGGER.debug "Checking gitaly-ruby bundle...\n"
+ out = ENV['CI'] ? STDOUT : '/dev/null'
+ abort 'bundle check failed' unless system(env, 'bundle', 'check', out: out, chdir: File.dirname(gemfile))
+ end
+
+ def read_socket_path(service)
+ # This code needs to work in an environment where we cannot use bundler,
+ # so we cannot easily use the toml-rb gem. This ad-hoc parser should be
+ # good enough.
+ config_text = IO.read(config_path(service))
+
+ config_text.lines.each do |line|
+ match_data = line.match(/^\s*socket_path\s*=\s*"([^"]*)"$/)
+
+ return match_data[1] if match_data
+ end
+
+ raise "failed to find socket_path in #{config_path(service)}"
+ end
+
+ def try_connect!(service)
+ LOGGER.debug "Trying to connect to #{service}: "
+ timeout = 20
+ delay = 0.1
+ socket = read_socket_path(service)
+
+ Integer(timeout / delay).times do
+ UNIXSocket.new(socket)
+ LOGGER.debug " OK\n"
+
+ return
+ rescue Errno::ENOENT, Errno::ECONNREFUSED
+ LOGGER.debug '.'
+ sleep delay
+ end
+
+ LOGGER.warn " FAILED to connect to #{service}\n"
+
+ raise "could not connect to #{socket}"
+ end
+end
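Not part of the patch: a rough sketch of how the helpers above could be driven from a test bootstrap script, assuming Gitaly has already been built. Only methods defined in GitalySetup are used.

  include GitalySetup

  # Prepare secrets and verify the gitaly-ruby bundle before starting services.
  ensure_gitlab_shell_secret!
  check_gitaly_config!

  gitaly_pid = start_gitaly
  praefect_pid = start_praefect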
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 9d6c6ab93e4..5dc6945ec5e 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -142,9 +142,9 @@ module GraphqlHelpers
Class.new(::Types::BaseObject) { graphql_name name }
end
- def resolver_instance(resolver_class, obj: nil, ctx: {}, field: nil, schema: GitlabSchema)
+ def resolver_instance(resolver_class, obj: nil, ctx: {}, field: nil, schema: GitlabSchema, subscription_update: false)
if ctx.is_a?(Hash)
- q = double('Query', schema: schema)
+ q = double('Query', schema: schema, subscription_update?: subscription_update)
ctx = GraphQL::Query::Context.new(query: q, object: obj, values: ctx)
end
diff --git a/spec/support/helpers/ldap_helpers.rb b/spec/support/helpers/ldap_helpers.rb
index 8154e3a4fc9..2f5f8be518c 100644
--- a/spec/support/helpers/ldap_helpers.rb
+++ b/spec/support/helpers/ldap_helpers.rb
@@ -71,4 +71,4 @@ module LdapHelpers
end
end
-LdapHelpers.include_if_ee('EE::LdapHelpers')
+LdapHelpers.include_mod_with('LdapHelpers')
diff --git a/spec/support/helpers/license_helper.rb b/spec/support/helpers/license_helper.rb
index a1defba9ccb..61afb2171f0 100644
--- a/spec/support/helpers/license_helper.rb
+++ b/spec/support/helpers/license_helper.rb
@@ -7,4 +7,4 @@ module LicenseHelpers
end
end
-LicenseHelpers.prepend_if_ee('EE::LicenseHelpers')
+LicenseHelpers.prepend_mod_with('LicenseHelpers')
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 86022e16d71..fc3eb976276 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -237,4 +237,4 @@ module LoginHelpers
end
end
-LoginHelpers.prepend_if_ee('EE::LoginHelpers')
+LoginHelpers.prepend_mod_with('LoginHelpers')
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index 2c31a608b35..fa50b234bd5 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -172,4 +172,4 @@ module MigrationsHelpers
end
end
-MigrationsHelpers.prepend_if_ee('EE::MigrationsHelpers')
+MigrationsHelpers.prepend_mod_with('MigrationsHelpers')
diff --git a/spec/support/helpers/next_found_instance_of.rb b/spec/support/helpers/next_found_instance_of.rb
index feb63f90211..c8cdbaf2c5d 100644
--- a/spec/support/helpers/next_found_instance_of.rb
+++ b/spec/support/helpers/next_found_instance_of.rb
@@ -22,7 +22,7 @@ module NextFoundInstanceOf
private
def check_if_active_record!(klass)
- raise ArgumentError.new(ERROR_MESSAGE) unless klass < ActiveRecord::Base
+ raise ArgumentError, ERROR_MESSAGE unless klass < ActiveRecord::Base
end
def stub_allocate(target, klass)
diff --git a/spec/support/helpers/query_recorder.rb b/spec/support/helpers/query_recorder.rb
index 2d880c7a8fe..05afbc336da 100644
--- a/spec/support/helpers/query_recorder.rb
+++ b/spec/support/helpers/query_recorder.rb
@@ -13,6 +13,10 @@ module ActiveRecord
@skip_cached = skip_cached
@query_recorder_debug = ENV['QUERY_RECORDER_DEBUG'] || query_recorder_debug
@log_file = log_file
+ record(&block) if block_given?
+ end
+
+ def record(&block)
# force replacement of bind parameters to give tests the ability to check for ids
ActiveRecord::Base.connection.unprepared_statement do
ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
diff --git a/spec/support/helpers/redis_without_keys.rb b/spec/support/helpers/redis_without_keys.rb
index e030f1028f7..ff64a3cf08e 100644
--- a/spec/support/helpers/redis_without_keys.rb
+++ b/spec/support/helpers/redis_without_keys.rb
@@ -4,7 +4,7 @@ class Redis
ForbiddenCommand = Class.new(StandardError)
def keys(*args)
- raise ForbiddenCommand.new("Don't use `Redis#keys` as it iterates over all "\
- "keys in redis. Use `Redis#scan_each` instead.")
+ raise ForbiddenCommand, "Don't use `Redis#keys` as it iterates over all "\
+ "keys in redis. Use `Redis#scan_each` instead."
end
end
diff --git a/spec/support/helpers/reload_helpers.rb b/spec/support/helpers/reload_helpers.rb
index 60811e4604f..368ebaaba8a 100644
--- a/spec/support/helpers/reload_helpers.rb
+++ b/spec/support/helpers/reload_helpers.rb
@@ -2,7 +2,7 @@
module ReloadHelpers
def reload_models(*models)
- models.map(&:reload)
+ models.compact.map(&:reload)
end
def subject_and_reload(*models)
diff --git a/spec/support/helpers/require_migration.rb b/spec/support/helpers/require_migration.rb
index c2902aa4ec7..8de71d3073f 100644
--- a/spec/support/helpers/require_migration.rb
+++ b/spec/support/helpers/require_migration.rb
@@ -20,7 +20,7 @@ class RequireMigration
class << self
def require_migration!(file_name)
file_paths = search_migration_file(file_name)
- raise AutoLoadError.new(file_name) unless file_paths.first
+ raise AutoLoadError, file_name unless file_paths.first
require file_paths.first
end
@@ -41,7 +41,7 @@ class RequireMigration
end
end
-RequireMigration.prepend_if_ee('EE::RequireMigration')
+RequireMigration.prepend_mod_with('RequireMigration')
def require_migration!(file_name = nil)
location_info = caller_locations.first.path.match(RequireMigration::SPEC_FILE_PATTERN)
diff --git a/spec/support/helpers/snowplow_helpers.rb b/spec/support/helpers/snowplow_helpers.rb
index 70a4eadd8de..553739b5d30 100644
--- a/spec/support/helpers/snowplow_helpers.rb
+++ b/spec/support/helpers/snowplow_helpers.rb
@@ -60,6 +60,10 @@ module SnowplowHelpers
.with(category, action, **kwargs).at_least(:once)
end
+ def match_snowplow_context_schema(schema_path:, context:)
+ expect(context).to match_snowplow_schema(schema_path)
+ end
+
# Asserts that no call to `Gitlab::Tracking#event` was made.
#
# Example:
@@ -71,7 +75,11 @@ module SnowplowHelpers
# expect_no_snowplow_event
# end
# end
- def expect_no_snowplow_event
- expect(Gitlab::Tracking).not_to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking
+ def expect_no_snowplow_event(category: nil, action: nil, **kwargs)
+ if category && action
+ expect(Gitlab::Tracking).not_to have_received(:event).with(category, action, **kwargs) # rubocop:disable RSpec/ExpectGitlabTracking
+ else
+ expect(Gitlab::Tracking).not_to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking
+ end
end
end
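Not part of the patch: a hedged example of the extended matcher above. The category and action values are made up, and the example assumes Gitlab::Tracking has been stubbed as usual for Snowplow specs.

  it 'does not track the ignored action' do
    subject

    expect_no_snowplow_event(category: 'Projects', action: 'create')
  end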
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index 9851a3de9e9..8c60dc30cdb 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -163,4 +163,4 @@ end
require_relative '../../../ee/spec/support/helpers/ee/stub_configuration' if
Dir.exist?("#{__dir__}/../../../ee")
-StubConfiguration.prepend_if_ee('EE::StubConfiguration')
+StubConfiguration.prepend_mod_with('StubConfiguration')
diff --git a/spec/support/helpers/stub_gitlab_calls.rb b/spec/support/helpers/stub_gitlab_calls.rb
index 4da8f760056..3824ff2b68d 100644
--- a/spec/support/helpers/stub_gitlab_calls.rb
+++ b/spec/support/helpers/stub_gitlab_calls.rb
@@ -153,4 +153,4 @@ module StubGitlabCalls
end
end
-StubGitlabCalls.prepend_if_ee('EE::StubGitlabCalls')
+StubGitlabCalls.prepend_mod_with('StubGitlabCalls')
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 7ba15a9c00b..40a3dbfbf25 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -52,7 +52,7 @@ module TestEnv
'wip' => 'b9238ee',
'csv' => '3dd0896',
'v1.1.0' => 'b83d6e3',
- 'add-ipython-files' => '93ee732',
+ 'add-ipython-files' => 'f6b7a70',
'add-pdf-file' => 'e774ebd',
'squash-large-files' => '54cec52',
'add-pdf-text-binary' => '79faa7b',
@@ -266,7 +266,7 @@ module TestEnv
Integer(sleep_time / sleep_interval).times do
Socket.unix(socket)
return
- rescue
+ rescue StandardError
sleep sleep_interval
end
@@ -612,5 +612,5 @@ end
require_relative('../../../ee/spec/support/helpers/ee/test_env') if Gitlab.ee?
-::TestEnv.prepend_if_ee('::EE::TestEnv')
-::TestEnv.extend_if_ee('::EE::TestEnv')
+::TestEnv.prepend_mod_with('TestEnv')
+::TestEnv.extend_mod_with('TestEnv')
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index d05676a649e..c6176b5bcbc 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -98,7 +98,6 @@ module UsageDataHelpers
projects_prometheus_active
projects_with_repositories_enabled
projects_with_error_tracking_enabled
- projects_with_alerts_service_enabled
projects_with_enabled_alert_integrations
projects_with_prometheus_alerts
projects_with_tracing_enabled
diff --git a/spec/support/matchers/access_matchers_generic.rb b/spec/support/matchers/access_matchers_generic.rb
index 13955750f4f..a38a83a2547 100644
--- a/spec/support/matchers/access_matchers_generic.rb
+++ b/spec/support/matchers/access_matchers_generic.rb
@@ -35,7 +35,7 @@ module AccessMatchersGeneric
run_matcher(action, role, @membership, @owned_objects) do |action|
action.call
- rescue => e
+ rescue StandardError => e
@error = e
raise unless e.is_a?(ERROR_CLASS)
end
diff --git a/spec/support/matchers/markdown_matchers.rb b/spec/support/matchers/markdown_matchers.rb
index 47cffad8c41..dfdb5bc01ae 100644
--- a/spec/support/matchers/markdown_matchers.rb
+++ b/spec/support/matchers/markdown_matchers.rb
@@ -291,4 +291,4 @@ module RSpec::Matchers::DSL::Macros
end
end
-MarkdownMatchers.prepend_if_ee('EE::MarkdownMatchers')
+MarkdownMatchers.prepend_mod_with('MarkdownMatchers')
diff --git a/spec/support/matchers/schema_matcher.rb b/spec/support/matchers/schema_matcher.rb
index f0e7a52c51e..94e4359b1dd 100644
--- a/spec/support/matchers/schema_matcher.rb
+++ b/spec/support/matchers/schema_matcher.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
module SchemaPath
+ @schema_cache = {}
+
def self.expand(schema, dir = nil)
return schema unless schema.is_a?(String)
@@ -12,38 +14,56 @@ module SchemaPath
Rails.root.join(dir.to_s, 'spec', "fixtures/api/schemas/#{schema}.json").to_s
end
+
+ def self.validator(schema_path)
+ unless @schema_cache.key?(schema_path)
+ @schema_cache[schema_path] = JSONSchemer.schema(schema_path, ref_resolver: SchemaPath.file_ref_resolver)
+ end
+
+ @schema_cache[schema_path]
+ end
+
+ def self.file_ref_resolver
+ proc do |uri|
+ file = Rails.root.join(uri.path)
+ raise StandardError, "Ref file #{uri.path} must be json" unless uri.path.ends_with?('.json')
+ raise StandardError, "File #{file.to_path} doesn't exists" unless file.exist?
+
+ Gitlab::Json.parse(File.read(file))
+ end
+ end
end
RSpec::Matchers.define :match_response_schema do |schema, dir: nil, **options|
match do |response|
- @errors = JSON::Validator.fully_validate(
- SchemaPath.expand(schema, dir), response.body, options)
+ schema_path = Pathname.new(SchemaPath.expand(schema, dir))
+ validator = SchemaPath.validator(schema_path)
- @errors.empty?
- end
+ data = Gitlab::Json.parse(response.body)
- failure_message do |response|
- "didn't match the schema defined by #{SchemaPath.expand(schema, dir)}" \
- " The validation errors were:\n#{@errors.join("\n")}"
+ validator.valid?(data)
end
end
-RSpec::Matchers.define :match_schema do |schema, dir: nil, **options|
+RSpec::Matchers.define :match_snowplow_schema do |schema, dir: nil, **options|
match do |data|
- @errors = JSON::Validator.fully_validate(
- SchemaPath.expand(schema, dir), data, options)
-
- @errors.empty?
- end
+ schema_path = Pathname.new(Rails.root.join(dir.to_s, 'spec', "fixtures/product_intelligence/#{schema}.json").to_s)
+ validator = SchemaPath.validator(schema_path)
- failure_message do |response|
- "didn't match the schema defined by #{schema_name(schema, dir)}" \
- " The validation errors were:\n#{@errors.join("\n")}"
+ validator.valid?(data.stringify_keys)
end
+end
- def schema_name(schema, dir)
- return 'provided schema' unless schema.is_a?(String)
+RSpec::Matchers.define :match_schema do |schema, dir: nil, **options|
+ match do |data|
+ schema = SchemaPath.expand(schema, dir)
+ schema = Pathname.new(schema) if schema.is_a?(String)
+ validator = SchemaPath.validator(schema)
- SchemaPath.expand(schema, dir)
+ if data.is_a?(String)
+ validator.valid?(Gitlab::Json.parse(data))
+ else
+ validator.valid?(data)
+ end
end
end
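A minimal usage sketch (not part of the patch) of the cached validator introduced above; the fixture name 'entities/example' and the hash are illustrative assumptions.

# Hypothetical spec exercising the match_schema matcher defined above.
RSpec.describe 'schema matching example' do
  it 'validates a plain hash against a JSON schema fixture' do
    data = { 'id' => 1, 'title' => 'example' }

    # SchemaPath.expand resolves spec/fixtures/api/schemas/entities/example.json;
    # SchemaPath.validator builds the JSONSchemer validator once and reuses it from @schema_cache.
    expect(data).to match_schema('entities/example')
  end
end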
diff --git a/spec/support/renameable_upload.rb b/spec/support/renameable_upload.rb
index f7f00181605..08e8d07ed4d 100644
--- a/spec/support/renameable_upload.rb
+++ b/spec/support/renameable_upload.rb
@@ -5,7 +5,7 @@ class RenameableUpload < SimpleDelegator
# Get a fixture file with a new unique name, and the same extension
def self.unique_file(name)
- upload = new(fixture_file_upload("spec/fixtures/#{name}"))
+ upload = new(Rack::Test::UploadedFile.new("spec/fixtures/#{name}"))
ext = File.extname(name)
new_name = File.basename(FactoryBot.generate(:filename), '.*')
upload.original_filename = new_name + ext
diff --git a/spec/support/services/issuable_update_service_shared_examples.rb b/spec/support/services/issuable_update_service_shared_examples.rb
index 8867a1e3a90..4d2843af1c4 100644
--- a/spec/support/services/issuable_update_service_shared_examples.rb
+++ b/spec/support/services/issuable_update_service_shared_examples.rb
@@ -12,13 +12,13 @@ RSpec.shared_examples 'issuable update service' do
context 'to reopened' do
it 'executes hooks only once' do
- described_class.new(project, user, state_event: 'reopen').execute(closed_issuable)
+ described_class.new(project: project, current_user: user, params: { state_event: 'reopen' }).execute(closed_issuable)
end
end
context 'to closed' do
it 'executes hooks only once' do
- described_class.new(project, user, state_event: 'close').execute(open_issuable)
+ described_class.new(project: project, current_user: user, params: { state_event: 'close' }).execute(open_issuable)
end
end
end
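For reference, a hedged sketch of the keyword-argument constructor the shared example now calls, assuming an Issues::UpdateService receiver; project, user, and issuable are supplied by the including spec.

# Sketch only: the positional form (project, user, opts) is replaced by keywords.
Issues::UpdateService
  .new(project: project, current_user: user, params: { state_event: 'close' })
  .execute(issuable)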
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 78d14ecb880..4f8e88ae9da 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -24,16 +24,56 @@ RSpec.shared_context 'project navbar structure' do
}
end
+ let(:monitor_nav_item) do
+ {
+ nav_item: _('Operations'),
+ nav_sub_items: monitor_menu_items
+ }
+ end
+
+ let(:monitor_menu_items) do
+ [
+ _('Metrics'),
+ _('Logs'),
+ _('Tracing'),
+ _('Error Tracking'),
+ _('Alerts'),
+ _('Incidents'),
+ _('Serverless'),
+ _('Terraform'),
+ _('Kubernetes'),
+ _('Environments'),
+ _('Feature Flags'),
+ _('Product Analytics')
+ ]
+ end
+
+ let(:project_information_nav_item) do
+ {
+ nav_item: _('Project overview'),
+ nav_sub_items: [
+ _('Details'),
+ _('Activity'),
+ _('Releases')
+ ]
+ }
+ end
+
+ let(:settings_menu_items) do
+ [
+ _('General'),
+ _('Integrations'),
+ _('Webhooks'),
+ _('Access Tokens'),
+ _('Repository'),
+ _('CI/CD'),
+ _('Operations')
+ ]
+ end
+
let(:structure) do
[
- {
- nav_item: _('Project overview'),
- nav_sub_items: [
- _('Details'),
- _('Activity'),
- _('Releases')
- ]
- },
+ project_information_nav_item,
{
nav_item: _('Repository'),
nav_sub_items: [
@@ -52,7 +92,6 @@ RSpec.shared_context 'project navbar structure' do
nav_sub_items: [
_('List'),
_('Boards'),
- _('Labels'),
_('Service Desk'),
_('Milestones'),
(_('Iterations') if Gitlab.ee?)
@@ -73,23 +112,7 @@ RSpec.shared_context 'project navbar structure' do
]
},
security_and_compliance_nav_item,
- {
- nav_item: _('Operations'),
- nav_sub_items: [
- _('Metrics'),
- _('Logs'),
- _('Tracing'),
- _('Error Tracking'),
- _('Alerts'),
- _('Incidents'),
- _('Serverless'),
- _('Terraform'),
- _('Kubernetes'),
- _('Environments'),
- _('Feature Flags'),
- _('Product Analytics')
- ]
- },
+ monitor_nav_item,
analytics_nav_item,
{
nav_item: _('Wiki'),
@@ -100,20 +123,8 @@ RSpec.shared_context 'project navbar structure' do
nav_sub_items: []
},
{
- nav_item: _('Members'),
- nav_sub_items: []
- },
- {
nav_item: _('Settings'),
- nav_sub_items: [
- _('General'),
- _('Integrations'),
- _('Webhooks'),
- _('Access Tokens'),
- _('Repository'),
- _('CI/CD'),
- _('Operations')
- ].compact
+ nav_sub_items: settings_menu_items
}
].compact
end
@@ -124,8 +135,7 @@ RSpec.shared_context 'group navbar structure' do
{
nav_item: _('Analytics'),
nav_sub_items: [
- _('Contribution'),
- _('DevOps Adoption')
+ _('Contribution')
]
}
end
@@ -171,23 +181,31 @@ RSpec.shared_context 'group navbar structure' do
}
end
+ let(:group_information_nav_item) do
+ {
+ nav_item: _('Group information'),
+ nav_sub_items: [
+ _('Activity'),
+ _('Labels'),
+ _('Members')
+ ]
+ }
+ end
+
+ let(:issues_nav_items) do
+ [
+ _('List'),
+ _('Board'),
+ _('Milestones')
+ ]
+ end
+
let(:structure) do
[
- {
- nav_item: _('Group overview'),
- nav_sub_items: [
- _('Details'),
- _('Activity')
- ]
- },
+ group_information_nav_item,
{
nav_item: _('Issues'),
- nav_sub_items: [
- _('List'),
- _('Board'),
- _('Labels'),
- _('Milestones')
- ]
+ nav_sub_items: issues_nav_items
},
{
nav_item: _('Merge requests'),
@@ -199,11 +217,7 @@ RSpec.shared_context 'group navbar structure' do
nav_item: _('Kubernetes'),
nav_sub_items: []
},
- (analytics_nav_item if Gitlab.ee?),
- {
- nav_item: _('Members'),
- nav_sub_items: []
- }
+ (analytics_nav_item if Gitlab.ee?)
]
end
end
diff --git a/spec/support/shared_contexts/policies/project_policy_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
index 266c8d5ee84..35dc709b5d9 100644
--- a/spec/support/shared_contexts/policies/project_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
@@ -15,7 +15,7 @@ RSpec.shared_context 'ProjectPolicy context' do
let(:base_guest_permissions) do
%i[
- award_emoji create_issue create_merge_request_in create_note
+ award_emoji create_issue create_incident create_merge_request_in create_note
create_project read_issue_board read_issue read_issue_iid read_issue_link
read_label read_issue_board_list read_milestone read_note read_project
read_project_for_iids read_project_member read_release read_snippet
diff --git a/spec/support/shared_contexts/project_service_shared_context.rb b/spec/support/shared_contexts/project_service_shared_context.rb
index a8e75c624e8..0e3540a3e15 100644
--- a/spec/support/shared_contexts/project_service_shared_context.rb
+++ b/spec/support/shared_contexts/project_service_shared_context.rb
@@ -22,7 +22,7 @@ RSpec.shared_context 'project service activation' do
end
def click_active_checkbox
- find('input[name="service[active]"]').click
+ find('label', text: 'Active').click
end
def click_save_integration
diff --git a/spec/support/shared_contexts/services_shared_context.rb b/spec/support/shared_contexts/services_shared_context.rb
index f250632ff51..34c92367efa 100644
--- a/spec/support/shared_contexts/services_shared_context.rb
+++ b/spec/support/shared_contexts/services_shared_context.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-Service.available_services_names.each do |service|
+Integration.available_services_names.each do |service|
RSpec.shared_context service do
include JiraServiceHelper if service == 'jira'
let(:dashed_service) { service.dasherize }
let(:service_method) { "#{service}_service".to_sym }
- let(:service_klass) { "#{service}_service".classify.constantize }
+ let(:service_klass) { Integration.service_name_to_model(service) }
let(:service_instance) { service_klass.new }
let(:service_fields) { service_instance.fields }
let(:service_attrs_list) { service_fields.inject([]) {|arr, hash| arr << hash[:name].to_sym } }
@@ -30,6 +30,8 @@ Service.available_services_names.each do |service|
hash.merge!(k => '1,2,3')
elsif service == 'emails_on_push' && k == :recipients
hash.merge!(k => 'foo@bar.com')
+ elsif (service == 'slack' || service == 'mattermost') && k == :labels_to_be_notified_behavior
+ hash.merge!(k => "match_any")
else
hash.merge!(k => "someword")
end
@@ -47,8 +49,9 @@ Service.available_services_names.each do |service|
stub_jira_service_test if service == 'jira'
end
- def initialize_service(service)
+ def initialize_service(service, attrs = {})
service_item = project.find_or_initialize_service(service)
+ service_item.attributes = attrs
service_item.properties = service_attrs
service_item.save!
service_item
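A short sketch of the extended helper; the integration name and attribute below are illustrative, not taken from the patch.

# attrs are assigned to the record before properties, so model-level columns
# (for example `active`) can now be set when building the service under test.
service = initialize_service('mattermost', active: true)
service.active? # => true (assuming `active` is a column on the integration)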
diff --git a/spec/support/shared_contexts/spam_constants.rb b/spec/support/shared_contexts/spam_constants.rb
index 813f9d00123..e88a7c1b0df 100644
--- a/spec/support/shared_contexts/spam_constants.rb
+++ b/spec/support/shared_contexts/spam_constants.rb
@@ -6,5 +6,6 @@ RSpec.shared_context 'includes Spam constants' do
stub_const('DISALLOW', Spam::SpamConstants::DISALLOW)
stub_const('ALLOW', Spam::SpamConstants::ALLOW)
stub_const('BLOCK_USER', Spam::SpamConstants::BLOCK_USER)
+ stub_const('NOOP', Spam::SpamConstants::NOOP)
end
end
diff --git a/spec/support/shared_examples/alert_notification_service_shared_examples.rb b/spec/support/shared_examples/alert_notification_service_shared_examples.rb
deleted file mode 100644
index fc935effe0e..00000000000
--- a/spec/support/shared_examples/alert_notification_service_shared_examples.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'Alert Notification Service sends notification email' do
- let(:notification_service) { spy }
-
- it 'sends a notification' do
- expect(NotificationService)
- .to receive(:new)
- .and_return(notification_service)
-
- expect(notification_service)
- .to receive_message_chain(:async, :prometheus_alerts_fired)
-
- expect(subject).to be_success
- end
-end
-
-RSpec.shared_examples 'Alert Notification Service sends no notifications' do |http_status: nil|
- it 'does not notify' do
- expect(NotificationService).not_to receive(:new)
-
- if http_status.present?
- expect(subject).to be_error
- expect(subject.http_status).to eq(http_status)
- else
- expect(subject).to be_success
- end
- end
-end
-
-RSpec.shared_examples 'creates status-change system note for an auto-resolved alert' do
- it 'has 2 new system notes' do
- expect { subject }.to change(Note, :count).by(2)
- expect(Note.last.note).to include('Resolved')
- end
-end
-
-# Requires `source` to be defined
-RSpec.shared_examples 'creates single system note based on the source of the alert' do
- it 'has one new system note' do
- expect { subject }.to change(Note, :count).by(1)
- expect(Note.last.note).to include(source)
- end
-end
diff --git a/spec/support/shared_examples/boards/lists/update_service_shared_examples.rb b/spec/support/shared_examples/boards/lists/update_service_shared_examples.rb
index d8a74f2582d..1fab31cd513 100644
--- a/spec/support/shared_examples/boards/lists/update_service_shared_examples.rb
+++ b/spec/support/shared_examples/boards/lists/update_service_shared_examples.rb
@@ -2,14 +2,30 @@
RSpec.shared_examples 'moving list' do
context 'when user can admin list' do
- it 'calls Lists::MoveService to update list position' do
+ before do
board.resource_parent.add_developer(user)
+ end
+
+ context 'when the new position is valid' do
+ it 'calls Lists::MoveService to update list position' do
+ expect_next_instance_of(Boards::Lists::MoveService, board.resource_parent, user, params) do |move_service|
+ expect(move_service).to receive(:execute).with(list).and_call_original
+ end
- expect_next_instance_of(Boards::Lists::MoveService, board.resource_parent, user, params) do |move_service|
- expect(move_service).to receive(:execute).with(list).and_call_original
+ service.execute(list)
end
- service.execute(list)
+ it 'returns a success response' do
+ expect(service.execute(list)).to be_success
+ end
+ end
+
+ context 'when the new position is invalid' do
+ let(:params) { { position: 10 } }
+
+ it 'returns error response' do
+ expect(service.execute(list)).to be_error
+ end
end
end
@@ -19,6 +35,10 @@ RSpec.shared_examples 'moving list' do
service.execute(list)
end
+
+ it 'returns an error response' do
+ expect(service.execute(list)).to be_error
+ end
end
end
diff --git a/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb b/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb
index dd71107455f..70a684c12bf 100644
--- a/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/access_tokens_controller_shared_examples.rb
@@ -40,7 +40,7 @@ RSpec.shared_examples 'project access tokens available #create' do
it 'returns success message' do
subject
- expect(response.flash[:notice]).to match('Your new project access token has been created.')
+ expect(controller).to set_flash[:notice].to match('Your new project access token has been created.')
end
it 'creates project access token' do
@@ -88,7 +88,7 @@ RSpec.shared_examples 'project access tokens available #create' do
it 'shows a failure alert' do
subject
- expect(response.flash[:alert]).to match("Failed to create new project access token: Failed!")
+ expect(controller).to set_flash[:alert].to match("Failed to create new project access token: Failed!")
end
end
end
diff --git a/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb b/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
index 5ecc5c08bbd..a4eb6a839c0 100644
--- a/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/issuable_notes_filter_shared_examples.rb
@@ -35,7 +35,7 @@ RSpec.shared_examples 'issuable notes filter' do
get :discussions, params: params.merge(notes_filter: notes_filter)
end
- it 'does not set notes filter when database is in read only mode' do
+ it 'does not set notes filter when database is in read-only mode' do
allow(Gitlab::Database).to receive(:read_only?).and_return(true)
notes_filter = UserPreference::NOTES_FILTERS[:only_comments]
diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
index 0a040557ffe..cfee26a0d6a 100644
--- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
@@ -130,8 +130,8 @@ RSpec.shared_examples 'wiki controller actions' do
it_behaves_like 'fetching history', :ok do
let(:allow_read_wiki) { true }
- it 'assigns @page_versions' do
- expect(assigns(:page_versions)).to be_present
+ it 'assigns @commits' do
+ expect(assigns(:commits)).to be_present
end
end
diff --git a/spec/support/shared_examples/features/board_sidebar_labels_examples.rb b/spec/support/shared_examples/features/board_sidebar_labels_examples.rb
new file mode 100644
index 00000000000..520980c2615
--- /dev/null
+++ b/spec/support/shared_examples/features/board_sidebar_labels_examples.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'labels from nested groups and projects' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_label) { create(:group_label, group: group, name: 'Group label') }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:project_label) { create(:label, project: project, name: 'Project label') }
+
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subgroup_label) { create(:group_label, group: subgroup, name: 'Subgroup label') }
+ let_it_be(:subproject) { create(:project, group: subgroup) }
+ let_it_be(:subproject_label) { create(:label, project: subproject, name: 'Subproject label') }
+
+ let_it_be(:subgroup2) { create(:group, parent: group) }
+ let_it_be(:subgroup2_label) { create(:group_label, group: subgroup2, name: 'Subgroup2 label') }
+
+ let_it_be(:maintainer) { create(:user) }
+
+ let(:labels_select) { find("[data-testid='sidebar-labels']") }
+ let(:labels_dropdown) { labels_select.find('[data-testid="dropdown-content"]')}
+
+ before do
+ group.add_maintainer(maintainer)
+
+ sign_in(maintainer)
+ end
+end
+
+RSpec.shared_examples "an issue from a subgroup's project is selected" do
+ context 'when editing labels' do
+ before do
+ click_card_and_edit_label
+ end
+
+ it 'displays the label from the top-level group' do
+ expect(labels_dropdown).to have_content(group_label.name)
+ end
+
+ it 'displays the label from the subgroup' do
+ expect(labels_dropdown).to have_content(subgroup_label.name)
+ end
+
+ it 'displays the label from the project' do
+ expect(labels_dropdown).to have_content(subproject_label.name)
+ end
+
+ it "does not display labels from the subgroup's siblings (project or group)" do
+ aggregate_failures do
+ expect(labels_dropdown).not_to have_content(project_label.name)
+ expect(labels_dropdown).not_to have_content(subgroup2_label.name)
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'an issue from a direct descendant project is selected' do
+ context 'when editing labels' do
+ before do
+ click_card_and_edit_label
+ end
+
+ it 'displays the label from the top-level group' do
+ expect(labels_dropdown).to have_content(group_label.name)
+ end
+
+ it 'displays the label from the project' do
+ expect(labels_dropdown).to have_content(project_label.name)
+ end
+
+ it "does not display labels from the project's siblings or their descendents" do
+ aggregate_failures do
+ expect(labels_dropdown).not_to have_content(subgroup_label.name)
+ expect(labels_dropdown).not_to have_content(subproject_label.name)
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb b/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb
index 49c3674277d..736c353c2aa 100644
--- a/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb
+++ b/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb
@@ -22,32 +22,7 @@ RSpec.shared_examples 'issuable invite members experiments' do
end
end
- context 'when invite_members_version_b experiment is enabled' do
- before do
- stub_experiment_for_subject(invite_members_version_b: true)
- end
-
- it 'shows a link for inviting members and follows through to modal' do
- project.add_developer(user)
- visit issuable_path
-
- find('.block.assignee .edit-link').click
-
- wait_for_requests
-
- page.within '.dropdown-menu-user' do
- expect(page).to have_link('Invite Members', href: '#')
- expect(page).to have_selector('[data-track-event="click_invite_members_version_b"]')
- expect(page).to have_selector('[data-track-label="edit_assignee"]')
- end
-
- click_link 'Invite Members'
-
- expect(page).to have_content("Oops, this feature isn't ready yet")
- end
- end
-
- context 'when invite_members_version_b experiment is disabled' do
+ context 'when user cannot invite members in assignee dropdown' do
it 'shows author in assignee dropdown and no invite link' do
project.add_developer(user)
visit issuable_path
diff --git a/spec/support/shared_examples/features/sidebar_shared_examples.rb b/spec/support/shared_examples/features/sidebar_shared_examples.rb
index 429efbe6ba0..c9508818f74 100644
--- a/spec/support/shared_examples/features/sidebar_shared_examples.rb
+++ b/spec/support/shared_examples/features/sidebar_shared_examples.rb
@@ -44,22 +44,24 @@ RSpec.shared_examples 'issue boards sidebar' do
context 'in notifications subscription' do
it 'displays notifications toggle', :aggregate_failures do
page.within('[data-testid="sidebar-notifications"]') do
- expect(page).to have_selector('[data-testid="notification-subscribe-toggle"]')
+ expect(page).to have_selector('[data-testid="subscription-toggle"]')
expect(page).to have_content('Notifications')
- expect(page).not_to have_content('Notifications have been disabled by the project or group owner')
+ expect(page).not_to have_content('Disabled by project owner')
end
end
it 'shows toggle as on then as off as user toggles to subscribe and unsubscribe', :aggregate_failures do
- toggle = find('[data-testid="notification-subscribe-toggle"]')
+ wait_for_requests
- toggle.click
+ click_button 'Notifications'
- expect(toggle).to have_css("button.is-checked")
+ expect(page).to have_button('Notifications', class: 'is-checked')
- toggle.click
+ click_button 'Notifications'
- expect(toggle).not_to have_css("button.is-checked")
+ wait_for_requests
+
+ expect(page).not_to have_button('Notifications', class: 'is-checked')
end
context 'when notifications have been disabled' do
@@ -71,9 +73,28 @@ RSpec.shared_examples 'issue boards sidebar' do
it 'displays a message that notifications have been disabled' do
page.within('[data-testid="sidebar-notifications"]') do
- expect(page).not_to have_selector('[data-testid="notification-subscribe-toggle"]')
- expect(page).to have_content('Notifications have been disabled by the project or group owner')
+ expect(page).to have_button('Notifications', class: 'is-disabled')
+ expect(page).to have_content('Disabled by project owner')
+ end
+ end
+ end
+ end
+
+ context 'confidentiality' do
+ it 'makes the issue confidential' do
+ page.within('.confidentiality') do
+ expect(page).to have_content('Not confidential')
+
+ click_button 'Edit'
+ expect(page).to have_css('.sidebar-item-warning-message')
+
+ within('.sidebar-item-warning-message') do
+ click_button 'Turn on'
end
+
+ wait_for_requests
+
+ expect(page).to have_content('This issue is confidential')
end
end
end
diff --git a/spec/support/shared_examples/features/variable_list_shared_examples.rb b/spec/support/shared_examples/features/variable_list_shared_examples.rb
index 2fd88b610e9..4b94411f009 100644
--- a/spec/support/shared_examples/features/variable_list_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_shared_examples.rb
@@ -8,7 +8,7 @@ RSpec.shared_examples 'variable list' do
end
it 'adds a new CI variable' do
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('key', 'key_value') do
click_button('Add variable')
@@ -22,7 +22,7 @@ RSpec.shared_examples 'variable list' do
end
it 'adds a new protected variable' do
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('key', 'key_value') do
click_button('Add variable')
@@ -37,7 +37,7 @@ RSpec.shared_examples 'variable list' do
end
it 'defaults to unmasked' do
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('key', 'key_value') do
click_button('Add variable')
@@ -149,7 +149,7 @@ RSpec.shared_examples 'variable list' do
end
it 'shows a validation error box about duplicate keys' do
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('key', 'key_value') do
click_button('Add variable')
@@ -157,7 +157,7 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('key', 'key_value') do
click_button('Add variable')
@@ -170,7 +170,7 @@ RSpec.shared_examples 'variable list' do
end
it 'prevents a variable to be added if no values are provided when a variable is set to masked' do
- click_button('Add Variable')
+ click_button('Add variable')
page.within('#add-ci-variable') do
find('[data-qa-selector="ci_variable_key_field"] input').set('empty_mask_key')
@@ -182,7 +182,7 @@ RSpec.shared_examples 'variable list' do
end
it 'shows validation error box about unmaskable values' do
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('empty_mask_key', '???', protected: true, masked: true) do
expect(page).to have_content('This variable can not be masked')
@@ -192,7 +192,7 @@ RSpec.shared_examples 'variable list' do
it 'handles multiple edits and a deletion' do
# Create two variables
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('akey', 'akeyvalue') do
click_button('Add variable')
@@ -200,7 +200,7 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('zkey', 'zkeyvalue') do
click_button('Add variable')
@@ -224,7 +224,7 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
# Add another variable
- click_button('Add Variable')
+ click_button('Add variable')
fill_variable('ckey', 'ckeyvalue') do
click_button('Add variable')
@@ -249,7 +249,7 @@ RSpec.shared_examples 'variable list' do
end
it 'defaults to protected' do
- click_button('Add Variable')
+ click_button('Add variable')
page.within('#add-ci-variable') do
expect(find('[data-testid="ci-variable-protected-checkbox"]')).to be_checked
@@ -269,7 +269,7 @@ RSpec.shared_examples 'variable list' do
end
it 'defaults to unprotected' do
- click_button('Add Variable')
+ click_button('Add variable')
page.within('#add-ci-variable') do
expect(find('[data-testid="ci-variable-protected-checkbox"]')).not_to be_checked
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
index 8a6d5d88ca6..f2576931642 100644
--- a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -24,8 +24,8 @@ RSpec.shared_examples 'User creates wiki page' do
page.within(".wiki-form") do
fill_in(:wiki_content, with: "")
- page.execute_script("window.onbeforeunload = null")
page.execute_script("document.querySelector('.wiki-form').submit()")
+ page.accept_alert # manually force form submit
end
expect(page).to have_content("The form contains the following error:").and have_content("Content can't be blank")
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index d185e9dd81c..db2a96d9649 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -93,8 +93,8 @@ RSpec.shared_examples 'User updates wiki page' do
it 'shows a validation error message if the form is force submitted', :js do
fill_in(:wiki_content, with: '')
- page.execute_script("window.onbeforeunload = null")
page.execute_script("document.querySelector('.wiki-form').submit()")
+ page.accept_alert # manually force form submit
expect(page).to have_selector('.wiki-form')
expect(page).to have_content('Edit Page')
@@ -117,14 +117,6 @@ RSpec.shared_examples 'User updates wiki page' do
expect(page).to have_selector('.atwho-view')
end
- it 'shows the error message', :js do
- wiki_page.update(content: 'Update') # rubocop:disable Rails/SaveBang
-
- click_button('Save changes')
-
- expect(page).to have_content('Someone edited the page the same time you did.')
- end
-
it 'updates a page', :js do
fill_in('Content', with: 'Updated Wiki Content')
click_on('Save changes')
@@ -145,6 +137,18 @@ RSpec.shared_examples 'User updates wiki page' do
end
it_behaves_like 'wiki file attachments'
+
+ context 'when multiple people edit the page at the same time' do
+ it 'preserves user changes in the wiki editor', :js do
+ wiki_page.update(content: 'Some Other Updates') # rubocop:disable Rails/SaveBang
+
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
+
+ expect(page).to have_content('Someone edited the page the same time you did.')
+ expect(find('textarea#wiki_content').value).to eq('Updated Wiki Content')
+ end
+ end
end
context 'when the page is in a subdir', :js do
diff --git a/spec/support/shared_examples/finders/packages_shared_examples.rb b/spec/support/shared_examples/finders/packages_shared_examples.rb
index 2d4e8d0df1f..b3ec2336cca 100644
--- a/spec/support/shared_examples/finders/packages_shared_examples.rb
+++ b/spec/support/shared_examples/finders/packages_shared_examples.rb
@@ -20,9 +20,11 @@ end
RSpec.shared_examples 'concerning package statuses' do
let_it_be(:hidden_package) { create(:maven_package, :hidden, project: project) }
+ let_it_be(:error_package) { create(:maven_package, :error, project: project) }
- context 'hidden packages' do
+ context 'displayable packages' do
it { is_expected.not_to include(hidden_package) }
+ it { is_expected.to include(error_package) }
end
context 'with status param' do
diff --git a/spec/support/shared_examples/graphql/mutations/boards/update_list_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/boards/update_list_shared_examples.rb
new file mode 100644
index 00000000000..4385cd519be
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/boards/update_list_shared_examples.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'update board list mutation' do
+ describe '#resolve' do
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+ let(:list_update_params) { { position: 1, collapsed: true } }
+
+ subject { mutation.resolve(list: list, **list_update_params) }
+
+ before_all do
+ group.add_reporter(reporter)
+ group.add_guest(guest)
+ list.update_preferences_for(reporter, collapsed: false)
+ end
+
+ context 'with permission to admin board lists' do
+ let(:current_user) { reporter }
+
+ it 'updates the list position and collapsed state as expected' do
+ subject
+
+ reloaded_list = list.reload
+ expect(reloaded_list.position).to eq(1)
+ expect(reloaded_list.collapsed?(current_user)).to eq(true)
+ end
+ end
+
+ context 'with permission to read board lists' do
+ let(:current_user) { guest }
+
+ it 'updates the list collapsed state but not the list position' do
+ subject
+
+ reloaded_list = list.reload
+ expect(reloaded_list.position).to eq(0)
+ expect(reloaded_list.collapsed?(current_user)).to eq(true)
+ end
+ end
+
+ context 'without permission to read board lists' do
+ let(:current_user) { create(:user) }
+
+ it 'raises Resource Not Found error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+end
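A hedged sketch of a spec that could include this new shared example; the mutation class and the lets (group, reporter, guest, list) are assumptions about what the caller must provide.

RSpec.describe Mutations::Boards::Lists::Update do
  let_it_be(:group)    { create(:group, :private) }
  let_it_be(:reporter) { create(:user) }
  let_it_be(:guest)    { create(:user) }
  let_it_be(:board)    { create(:board, group: group) }
  let_it_be(:list)     { create(:list, board: board, position: 0) }

  it_behaves_like 'update board list mutation'
end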
diff --git a/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb
new file mode 100644
index 00000000000..2bb3d807aa7
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/security/ci_configuration_shared_examples.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples_for 'graphql mutations security ci configuration' do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:branch) do
+ "set-secret-config"
+ end
+
+ let(:success_path) do
+ "http://127.0.0.1:3000/root/demo-historic-secrets/-/merge_requests/new?"
+ end
+
+ let(:service_response) do
+ ServiceResponse.success(payload: { branch: branch, success_path: success_path })
+ end
+
+ let(:error) { "An error occurred!" }
+
+ let(:service_error_response) do
+ ServiceResponse.error(message: error)
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:push_code) }
+
+ describe '#resolve' do
+ let(:result) { subject }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when user does not have enough permissions' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when user is a maintainer of a different project' do
+ before do
+ create(:project_empty_repo).add_maintainer(user)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when the user does not have permission to create a new branch' do
+ let(:error_message) { 'You are not allowed to create protected branches on this project.' }
+
+ before do
+ project.add_developer(user)
+
+ allow_next_instance_of(::Files::MultiService) do |multi_service|
+ allow(multi_service).to receive(:execute).and_raise(Gitlab::Git::PreReceiveError.new("GitLab: #{error_message}"))
+ end
+ end
+
+ it 'returns an array of errors' do
+ expect(result).to match(
+ branch: be_nil,
+ success_path: be_nil,
+ errors: match_array([error_message])
+ )
+ end
+ end
+
+ context 'when the user can create a merge request' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when service successfully generates a path to create a new merge request' do
+ before do
+ allow_next_instance_of(service) do |service|
+ allow(service).to receive(:execute).and_return(service_response)
+ end
+ end
+
+ it 'returns a success path' do
+ expect(result).to match(
+ branch: branch,
+ success_path: success_path,
+ errors: []
+ )
+ end
+ end
+
+ context 'when service can not generate any path to create a new merge request' do
+ before do
+ allow_next_instance_of(service) do |service|
+ allow(service).to receive(:execute).and_return(service_error_response)
+ end
+ end
+
+ it 'returns an array of errors' do
+ expect(result).to match(
+ branch: be_nil,
+ success_path: be_nil,
+ errors: match_array([error])
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb b/spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb
new file mode 100644
index 00000000000..3d6fec85490
--- /dev/null
+++ b/spec/support/shared_examples/graphql/resolvers/packages_resolvers_shared_examples.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'group and projects packages resolver' do
+ context 'without sort' do
+ let_it_be(:npm_package) { create(:package, project: project) }
+
+ it { is_expected.to contain_exactly(npm_package) }
+ end
+
+ context 'with sorting and filtering' do
+ let_it_be(:conan_package) do
+ create(:conan_package, name: 'bar', project: project, created_at: 1.day.ago, version: "1.0.0", status: 'default')
+ end
+
+ let_it_be(:maven_package) do
+ create(:maven_package, name: 'foo', project: project, created_at: 1.hour.ago, version: "2.0.0", status: 'error')
+ end
+
+ let_it_be(:repository3) do
+ create(:maven_package, name: 'baz', project: project, created_at: 1.minute.ago, version: nil)
+ end
+
+ [:created_desc, :name_desc, :version_desc, :type_asc].each do |order|
+ context "#{order}" do
+ let(:args) { { sort: order } }
+
+ it { is_expected.to eq([maven_package, conan_package]) }
+ end
+ end
+
+ [:created_asc, :name_asc, :version_asc, :type_desc].each do |order|
+ context "#{order}" do
+ let(:args) { { sort: order } }
+
+ it { is_expected.to eq([conan_package, maven_package]) }
+ end
+ end
+
+ context 'filter by package_name' do
+ let(:args) { { package_name: 'bar', sort: :created_desc } }
+
+ it { is_expected.to eq([conan_package]) }
+ end
+
+ context 'filter by package_type' do
+ let(:args) { { package_type: 'conan', sort: :created_desc } }
+
+ it { is_expected.to eq([conan_package]) }
+ end
+
+ context 'filter by status' do
+ let(:args) { { status: 'error', sort: :created_desc } }
+
+ it { is_expected.to eq([maven_package]) }
+ end
+
+ context 'include_versionless' do
+ let(:args) { { include_versionless: true, sort: :created_desc } }
+
+ it { is_expected.to include(repository3) }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
index c9e03ced0dd..1f7325df11a 100644
--- a/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
@@ -166,16 +166,6 @@ shared_examples_for 'sortable diff files' do
it 'returns sorted diff files' do
expect(raw_diff_files_paths).to eq(sorted_diff_files_paths)
end
-
- context 'when sort_diffs feature flag is disabled' do
- before do
- stub_feature_flags(sort_diffs: false)
- end
-
- it 'returns unsorted diff files' do
- expect(raw_diff_files_paths).to eq(unsorted_diff_files_paths)
- end
- end
end
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/jwt_token_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/jwt_token_shared_examples.rb
new file mode 100644
index 00000000000..5c92bb3b0d4
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/jwt_token_shared_examples.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a gitlab jwt token' do
+ let_it_be(:base_secret) { SecureRandom.base64(64) }
+
+ let(:jwt_secret) do
+ OpenSSL::HMAC.hexdigest(
+ 'SHA256',
+ base_secret,
+ described_class::HMAC_KEY
+ )
+ end
+
+ before do
+ allow(Settings).to receive(:attr_encrypted_db_key_base).and_return(base_secret)
+ end
+
+ describe '#secret' do
+ subject { described_class.secret }
+
+ it { is_expected.to eq(jwt_secret) }
+ end
+
+ describe '#decode' do
+ let(:encoded_jwt_token) { jwt_token.encoded }
+
+ subject(:decoded_jwt_token) { described_class.decode(encoded_jwt_token) }
+
+ context 'with a custom payload' do
+ let(:personal_access_token) { create(:personal_access_token) }
+ let(:jwt_token) { described_class.new.tap { |jwt_token| jwt_token['token'] = personal_access_token.token } }
+
+ it 'returns the correct token' do
+ expect(decoded_jwt_token['token']).to eq jwt_token['token']
+ end
+
+ it 'returns nil and logs the exception after expiration' do
+ travel_to((described_class::HMAC_EXPIRES_IN + 1.minute).ago) do
+ encoded_jwt_token
+ end
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(instance_of(JWT::ExpiredSignature))
+
+ expect(decoded_jwt_token).to be_nil
+ end
+ end
+ end
+end
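A hedged sketch of including the new JWT shared examples; Gitlab::JWTToken stands in for whichever token class defines HMAC_KEY and HMAC_EXPIRES_IN.

# The shared examples define `jwt_token` internally for the custom-payload case,
# so the including spec only needs the it_behaves_like call.
RSpec.describe Gitlab::JWTToken do
  it_behaves_like 'a gitlab jwt token'
end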
diff --git a/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
index aa6a51c3646..8d758ed1655 100644
--- a/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
@@ -21,7 +21,7 @@ RSpec.shared_examples 'SQL set operator' do |operator_keyword|
expect(set_operator.to_sql).to eq("(#{to_sql(relation_1)})\n#{operator_keyword}\n(#{to_sql(relation_2)})")
end
- it 'skips Model.none segements' do
+ it 'skips Model.none segments' do
empty_relation = User.none
set_operator = described_class.new([empty_relation, relation_1, relation_2])
diff --git a/spec/support/shared_examples/models/chat_service_shared_examples.rb b/spec/support/shared_examples/models/chat_service_shared_examples.rb
index 59e249bb865..4a47aad0957 100644
--- a/spec/support/shared_examples/models/chat_service_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_service_shared_examples.rb
@@ -163,7 +163,7 @@ RSpec.shared_examples "chat service" do |service_name|
context "with issue events" do
let(:opts) { { title: "Awesome issue", description: "please fix" } }
let(:sample_data) do
- service = Issues::CreateService.new(project, user, opts)
+ service = Issues::CreateService.new(project: project, current_user: user, params: opts)
issue = service.execute
service.hook_data(issue, "open")
end
@@ -182,7 +182,7 @@ RSpec.shared_examples "chat service" do |service_name|
end
let(:sample_data) do
- service = MergeRequests::CreateService.new(project, user, opts)
+ service = MergeRequests::CreateService.new(project: project, current_user: user, params: opts)
merge_request = service.execute
service.hook_data(merge_request, "open")
end
diff --git a/spec/support/shared_examples/models/chat_slash_commands_shared_examples.rb b/spec/support/shared_examples/models/chat_slash_commands_shared_examples.rb
index 0ee24dd93d7..49729afce61 100644
--- a/spec/support/shared_examples/models/chat_slash_commands_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_slash_commands_shared_examples.rb
@@ -81,7 +81,7 @@ RSpec.shared_examples 'chat slash commands service' do
end
context 'when the user is authenticated' do
- let!(:chat_name) { create(:chat_name, service: subject) }
+ let!(:chat_name) { create(:chat_name, integration: subject) }
let(:params) { { token: 'token', team_id: chat_name.team_id, user_id: chat_name.chat_id } }
subject do
diff --git a/spec/support/shared_examples/models/clusters/elastic_stack_client_shared.rb b/spec/support/shared_examples/models/clusters/elastic_stack_client_shared.rb
new file mode 100644
index 00000000000..d3ce916cd64
--- /dev/null
+++ b/spec/support/shared_examples/models/clusters/elastic_stack_client_shared.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+# Input
+# - factory: [:clusters_applications_elastic_stack, :clusters_integrations_elastic_stack]
+RSpec.shared_examples 'cluster-based #elasticsearch_client' do |factory|
+ describe '#elasticsearch_client' do
+ context 'cluster is nil' do
+ subject { build(factory, cluster: nil) }
+
+ it 'returns nil' do
+ expect(subject.cluster).to be_nil
+ expect(subject.elasticsearch_client).to be_nil
+ end
+ end
+
+ context "cluster doesn't have kubeclient" do
+ let(:cluster) { create(:cluster) }
+
+ subject { create(factory, cluster: cluster) }
+
+ it 'returns nil' do
+ expect(subject.elasticsearch_client).to be_nil
+ end
+ end
+
+ context 'cluster has kubeclient' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:kubernetes_url) { subject.cluster.platform_kubernetes.api_url }
+ let(:kube_client) { subject.cluster.kubeclient.core_client }
+
+ subject { create(factory, cluster: cluster) }
+
+ before do
+ subject.cluster.platform_kubernetes.namespace = 'a-namespace'
+ stub_kubeclient_discover(cluster.platform_kubernetes.api_url)
+
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ cluster_project: cluster.cluster_project,
+ project: cluster.cluster_project.project)
+ end
+
+ it 'creates proxy elasticsearch_client' do
+ expect(subject.elasticsearch_client).to be_instance_of(Elasticsearch::Transport::Client)
+ end
+
+ it 'copies proxy_url, options and headers from kube client to elasticsearch_client' do
+ expect(Elasticsearch::Client)
+ .to(receive(:new))
+ .with(url: a_valid_url)
+ .and_call_original
+
+ client = subject.elasticsearch_client
+ faraday_connection = client.transport.connections.first.connection
+
+ expect(faraday_connection.headers["Authorization"]).to eq(kube_client.headers[:Authorization])
+ expect(faraday_connection.ssl.cert_store).to be_instance_of(OpenSSL::X509::Store)
+ expect(faraday_connection.ssl.verify).to eq(1)
+ expect(faraday_connection.options.timeout).to be_nil
+ end
+
+ context 'when cluster is not reachable' do
+ before do
+ allow(kube_client).to receive(:proxy_url).and_raise(Kubeclient::HttpError.new(401, 'Unauthorized', nil))
+ end
+
+ it 'returns nil' do
+ expect(subject.elasticsearch_client).to be_nil
+ end
+ end
+
+ context 'when timeout is provided' do
+ it 'sets timeout in elasticsearch_client' do
+ client = subject.elasticsearch_client(timeout: 123)
+ faraday_connection = client.transport.connections.first.connection
+
+ expect(faraday_connection.options.timeout).to eq(123)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb b/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
index 3db5d7a8d7d..ec9756007f1 100644
--- a/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/bulk_insert_safe_shared_examples.rb
@@ -30,10 +30,6 @@ RSpec.shared_examples 'a BulkInsertSafe model' do |klass|
expect { target_class.set_callback(name) {} }.not_to raise_error
end
end
-
- it 'does not raise an error when the call is triggered by belongs_to' do
- expect { target_class.belongs_to(:other_record) }.not_to raise_error
- end
end
describe '.bulk_insert!' do
diff --git a/spec/support/shared_examples/models/concerns/cron_schedulable_shared_examples.rb b/spec/support/shared_examples/models/concerns/cron_schedulable_shared_examples.rb
new file mode 100644
index 00000000000..47a02a8fa49
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/cron_schedulable_shared_examples.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'handles set_next_run_at' do
+ context 'when schedule runs every minute' do
+ it "updates next_run_at to the worker's execution time" do
+ travel_to(1.day.ago) do
+ expect(schedule.next_run_at).to eq(cron_worker_next_run_at)
+ end
+ end
+ end
+
+ context 'when there are two different schedules in the same time zones' do
+ it 'sets the same next_run_at' do
+ expect(schedule_1.next_run_at).to eq(schedule_2.next_run_at)
+ end
+ end
+
+ context 'when cron is updated for existing schedules' do
+ it 'updates next_run_at automatically' do
+ expect { schedule.update!(cron: new_cron) }.to change { schedule.next_run_at }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
index 819cf6018fe..3f1588c46b3 100644
--- a/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/repository_storage_movable_shared_examples.rb
@@ -36,7 +36,7 @@ RSpec.shared_examples 'handles repository moves' do
container.set_repository_read_only!
expect(subject).not_to be_valid
- expect(subject.errors[error_key].first).to match(/is read only/)
+ expect(subject.errors[error_key].first).to match(/is read-only/)
end
end
end
diff --git a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
index 17948d648cb..d23f95b2e9e 100644
--- a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
+++ b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
@@ -58,6 +58,19 @@ RSpec.shared_examples 'value stream analytics stage' do
it { expect(stage).not_to be_valid }
end
+
+ # rubocop: disable Rails/SaveBang
+ describe '.by_value_stream' do
+ it 'finds stages by value stream' do
+ stage1 = create(factory)
+ create(factory) # other stage with different value stream
+
+ result = described_class.by_value_stream(stage1.value_stream)
+
+ expect(result).to eq([stage1])
+ end
+ end
+ # rubocop: enable Rails/SaveBang
end
describe '#subject_class' do
diff --git a/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb
index fbb94b4f5c1..33a04059491 100644
--- a/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.shared_examples 'Debian Distribution Architecture' do |factory, container, can_freeze|
- let_it_be_with_refind(:architecture) { create(factory) } # rubocop:disable Rails/SaveBang
- let_it_be(:architecture_same_distribution, freeze: can_freeze) { create(factory, distribution: architecture.distribution) }
+ let_it_be_with_refind(:architecture) { create(factory, name: 'name1') }
+ let_it_be(:architecture_same_distribution, freeze: can_freeze) { create(factory, distribution: architecture.distribution, name: 'name2') }
let_it_be(:architecture_same_name, freeze: can_freeze) { create(factory, name: architecture.name) }
subject { architecture }
@@ -30,20 +30,22 @@ RSpec.shared_examples 'Debian Distribution Architecture' do |factory, container,
end
describe 'scopes' do
+ describe '.ordered_by_name' do
+ subject { described_class.with_distribution(architecture.distribution).ordered_by_name }
+
+ it { expect(subject).to match_array([architecture, architecture_same_distribution]) }
+ end
+
describe '.with_distribution' do
subject { described_class.with_distribution(architecture.distribution) }
- it 'does not return other distributions' do
- expect(subject.to_a).to match_array([architecture, architecture_same_distribution])
- end
+ it { expect(subject).to match_array([architecture, architecture_same_distribution]) }
end
describe '.with_name' do
subject { described_class.with_name(architecture.name) }
- it 'does not return other distributions' do
- expect(subject.to_a).to match_array([architecture, architecture_same_name])
- end
+ it { expect(subject).to match_array([architecture, architecture_same_name]) }
end
end
end
diff --git a/spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb b/spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb
index 02ced49ee94..e6b16d5881d 100644
--- a/spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb
+++ b/spec/support/shared_examples/models/packages/debian/component_file_shared_example.rb
@@ -114,11 +114,7 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze|
subject { described_class.with_container(container2) }
it do
- queries = ActiveRecord::QueryRecorder.new do
- expect(subject.to_a).to contain_exactly(component_file_other_container)
- end
-
- expect(queries.count).to eq(1)
+ expect(subject.to_a).to contain_exactly(component_file_other_container)
end
end
@@ -126,11 +122,7 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze|
subject { described_class.with_codename_or_suite(distribution2.codename) }
it do
- queries = ActiveRecord::QueryRecorder.new do
- expect(subject.to_a).to contain_exactly(component_file_other_container)
- end
-
- expect(queries.count).to eq(1)
+ expect(subject.to_a).to contain_exactly(component_file_other_container)
end
end
@@ -138,11 +130,7 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze|
subject { described_class.with_component_name(component1_2.name) }
it do
- queries = ActiveRecord::QueryRecorder.new do
- expect(subject.to_a).to contain_exactly(component_file_other_component)
- end
-
- expect(queries.count).to eq(1)
+ expect(subject.to_a).to contain_exactly(component_file_other_component)
end
end
@@ -150,14 +138,7 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze|
subject { described_class.with_file_type(:source) }
it do
- # let_it_be_with_refind triggers a query
- component_file_with_file_type_source
-
- queries = ActiveRecord::QueryRecorder.new do
- expect(subject.to_a).to contain_exactly(component_file_with_file_type_source)
- end
-
- expect(queries.count).to eq(1)
+ expect(subject.to_a).to contain_exactly(component_file_with_file_type_source)
end
end
@@ -165,11 +146,7 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze|
subject { described_class.with_architecture_name(architecture1_2.name) }
it do
- queries = ActiveRecord::QueryRecorder.new do
- expect(subject.to_a).to contain_exactly(component_file_other_architecture)
- end
-
- expect(queries.count).to eq(1)
+ expect(subject.to_a).to contain_exactly(component_file_other_architecture)
end
end
@@ -177,11 +154,7 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze|
subject { described_class.with_compression_type(:xz) }
it do
- queries = ActiveRecord::QueryRecorder.new do
- expect(subject.to_a).to contain_exactly(component_file_other_compression_type)
- end
-
- expect(queries.count).to eq(1)
+ expect(subject.to_a).to contain_exactly(component_file_other_compression_type)
end
end
@@ -189,11 +162,19 @@ RSpec.shared_examples 'Debian Component File' do |container_type, can_freeze|
subject { described_class.with_file_sha256('other_sha256') }
it do
- queries = ActiveRecord::QueryRecorder.new do
- expect(subject.to_a).to contain_exactly(component_file_other_file_sha256)
- end
+ expect(subject.to_a).to contain_exactly(component_file_other_file_sha256)
+ end
+ end
+
+ describe '.created_before' do
+ let_it_be(:component_file1) { create("debian_#{container_type}_component_file", component: component1_1, architecture: architecture1_1, created_at: 4.hours.ago) }
+ let_it_be(:component_file2) { create("debian_#{container_type}_component_file", component: component1_1, architecture: architecture1_1, created_at: 3.hours.ago) }
+ let_it_be(:component_file3) { create("debian_#{container_type}_component_file", component: component1_1, architecture: architecture1_1, created_at: 1.hour.ago) }
- expect(queries.count).to eq(1)
+ subject { described_class.created_before(2.hours.ago) }
+
+ it do
+ expect(subject.to_a).to contain_exactly(component_file1, component_file2)
end
end
end
diff --git a/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb
index 23e76d32fb0..635d45f40e5 100644
--- a/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.shared_examples 'Debian Distribution Component' do |factory, container, can_freeze|
- let_it_be_with_refind(:component) { create(factory) } # rubocop:disable Rails/SaveBang
- let_it_be(:component_same_distribution, freeze: can_freeze) { create(factory, distribution: component.distribution) }
+ let_it_be_with_refind(:component) { create(factory, name: 'name1') }
+ let_it_be(:component_same_distribution, freeze: can_freeze) { create(factory, distribution: component.distribution, name: 'name2') }
let_it_be(:component_same_name, freeze: can_freeze) { create(factory, name: component.name) }
subject { component }
@@ -32,6 +32,14 @@ RSpec.shared_examples 'Debian Distribution Component' do |factory, container, ca
end
describe 'scopes' do
+ describe '.ordered_by_name' do
+ subject { described_class.with_distribution(component.distribution).ordered_by_name }
+
+ it 'sorts by name' do
+ expect(subject.to_a).to eq([component, component_same_distribution])
+ end
+ end
+
describe '.with_distribution' do
subject { described_class.with_distribution(component.distribution) }
diff --git a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
index 9eacacf725f..8693d6868e9 100644
--- a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
@@ -19,11 +19,6 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
it { is_expected.to have_many(:components).class_name("Packages::Debian::#{container.capitalize}Component").inverse_of(:distribution) }
it { is_expected.to have_many(:architectures).class_name("Packages::Debian::#{container.capitalize}Architecture").inverse_of(:distribution) }
-
- if container != :group
- it { is_expected.to have_many(:publications).class_name('Packages::Debian::Publication').inverse_of(:distribution).with_foreign_key(:distribution_id) }
- it { is_expected.to have_many(:packages).class_name('Packages::Package').through(:publications) }
- end
end
describe 'validations' do
@@ -228,4 +223,44 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
end
end
end
+
+ if container == :project
+ describe 'project distribution specifics' do
+ describe 'relationships' do
+ it { is_expected.to have_many(:publications).class_name('Packages::Debian::Publication').inverse_of(:distribution).with_foreign_key(:distribution_id) }
+ it { is_expected.to have_many(:packages).class_name('Packages::Package').through(:publications) }
+ it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile').through(:packages) }
+ end
+ end
+ else
+ describe 'group distribution specifics' do
+ let_it_be(:public_project) { create(:project, :public, group: distribution_with_suite.container)}
+ let_it_be(:public_distribution_with_same_codename) { create(:debian_project_distribution, container: public_project, codename: distribution_with_suite.codename) }
+ let_it_be(:public_package_with_same_codename) { create(:debian_package, project: public_project, published_in: public_distribution_with_same_codename)}
+ let_it_be(:public_distribution_with_same_suite) { create(:debian_project_distribution, container: public_project, suite: distribution_with_suite.suite) }
+ let_it_be(:public_package_with_same_suite) { create(:debian_package, project: public_project, published_in: public_distribution_with_same_suite)}
+
+ let_it_be(:private_project) { create(:project, :private, group: distribution_with_suite.container)}
+ let_it_be(:private_distribution_with_same_codename) { create(:debian_project_distribution, container: private_project, codename: distribution_with_suite.codename) }
+ let_it_be(:private_package_with_same_codename) { create(:debian_package, project: private_project, published_in: private_distribution_with_same_codename)}
+ let_it_be(:private_distribution_with_same_suite) { create(:debian_project_distribution, container: private_project, suite: distribution_with_suite.suite) }
+ let_it_be(:private_package_with_same_suite) { create(:debian_package, project: private_project, published_in: private_distribution_with_same_suite) }
+
+ describe '#packages' do
+ subject { distribution_with_suite.packages }
+
+ it 'returns only public packages with same codename' do
+ expect(subject.to_a).to contain_exactly(public_package_with_same_codename)
+ end
+ end
+
+ describe '#package_files' do
+ subject { distribution_with_suite.package_files }
+
+ it 'returns only files from public packages with same codename' do
+ expect(subject.to_a).to contain_exactly(*public_package_with_same_codename.package_files)
+ end
+ end
+ end
+ end
end
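# Illustrative usage sketch: a model spec would typically pull in the 'Debian Distribution'
# shared examples above roughly as below. The describe target, the spec wiring and the
# trailing `true` can_freeze flag are assumptions for illustration, not taken from the patch.
require 'spec_helper'

RSpec.describe Packages::Debian::ProjectDistribution, type: :model do
  it_behaves_like 'Debian Distribution', :debian_project_distribution, :project, true
end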
diff --git a/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb b/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb
index 71a76121d38..09b7d1be704 100644
--- a/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb
+++ b/spec/support/shared_examples/models/slack_mattermost_notifications_shared_examples.rb
@@ -201,7 +201,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do |service_name|
context 'deployment events' do
let_it_be(:deployment) { create(:deployment) }
- let(:data) { Gitlab::DataBuilder::Deployment.build(deployment) }
+ let(:data) { Gitlab::DataBuilder::Deployment.build(deployment, Time.current) }
it_behaves_like 'calls the service API with the event message', /Deploy to (.*?) created/
end
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index 6b243aef3e6..2498bf35a09 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -469,34 +469,30 @@ RSpec.shared_examples 'wiki model' do
end
describe '#delete_page' do
- shared_examples 'delete_page operations' do
- let(:page) { create(:wiki_page, wiki: wiki) }
+ let(:page) { create(:wiki_page, wiki: wiki) }
- it 'deletes the page' do
- subject.delete_page(page)
+ it 'deletes the page' do
+ subject.delete_page(page)
- expect(subject.list_pages.count).to eq(0)
- end
+ expect(subject.list_pages.count).to eq(0)
+ end
- it 'sets the correct commit email' do
- subject.delete_page(page)
+ it 'sets the correct commit email' do
+ subject.delete_page(page)
- expect(user.commit_email).not_to eq(user.email)
- expect(commit.author_email).to eq(user.commit_email)
- expect(commit.committer_email).to eq(user.commit_email)
- end
+ expect(user.commit_email).not_to eq(user.email)
+ expect(commit.author_email).to eq(user.commit_email)
+ expect(commit.committer_email).to eq(user.commit_email)
+ end
- it 'runs after_wiki_activity callbacks' do
- page
+ it 'runs after_wiki_activity callbacks' do
+ page
- expect(subject).to receive(:after_wiki_activity)
+ expect(subject).to receive(:after_wiki_activity)
- subject.delete_page(page)
- end
+ subject.delete_page(page)
end
- it_behaves_like 'delete_page operations'
-
context 'when an error is raised' do
it 'logs the error and returns false' do
page = build(:wiki_page, wiki: wiki)
@@ -509,14 +505,6 @@ RSpec.shared_examples 'wiki model' do
expect(subject.delete_page(page)).to be_falsey
end
end
-
- context 'when feature flag :gitaly_replace_wiki_delete_page is disabled' do
- before do
- stub_feature_flags(gitaly_replace_wiki_delete_page: false)
- end
-
- it_behaves_like 'delete_page operations'
- end
end
describe '#ensure_repository' do
diff --git a/spec/support/shared_examples/namespaces/namespace_traversal_examples.rb b/spec/support/shared_examples/namespaces/traversal_examples.rb
index 36e5808fa28..77a1705627e 100644
--- a/spec/support/shared_examples/namespaces/namespace_traversal_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_examples.rb
@@ -39,16 +39,17 @@ RSpec.shared_examples 'namespace traversal' do
end
describe '#ancestors' do
- let(:group) { create(:group) }
- let(:nested_group) { create(:group, parent: group) }
- let(:deep_nested_group) { create(:group, parent: nested_group) }
- let(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:nested_group) { create(:group, parent: group) }
+ let_it_be(:deep_nested_group) { create(:group, parent: nested_group) }
+ let_it_be(:very_deep_nested_group) { create(:group, parent: deep_nested_group) }
it 'returns the correct ancestors' do
- expect(very_deep_nested_group.ancestors).to include(group, nested_group, deep_nested_group)
- expect(deep_nested_group.ancestors).to include(group, nested_group)
- expect(nested_group.ancestors).to include(group)
- expect(group.ancestors).to eq([])
+ # #reload is called to make sure traversal_ids are reloaded
+ expect(very_deep_nested_group.reload.ancestors).to contain_exactly(group, nested_group, deep_nested_group)
+ expect(deep_nested_group.reload.ancestors).to contain_exactly(group, nested_group)
+ expect(nested_group.reload.ancestors).to contain_exactly(group)
+ expect(group.reload.ancestors).to eq([])
end
describe '#recursive_ancestors' do
diff --git a/spec/support/shared_examples/policies/project_policy_shared_examples.rb b/spec/support/shared_examples/policies/project_policy_shared_examples.rb
index d05e5eb9120..013c9b61b99 100644
--- a/spec/support/shared_examples/policies/project_policy_shared_examples.rb
+++ b/spec/support/shared_examples/policies/project_policy_shared_examples.rb
@@ -57,7 +57,7 @@ RSpec.shared_examples 'project policies as anonymous' do
context 'when a project has pending invites' do
let(:group) { create(:group, :public) }
let(:project) { create(:project, :public, namespace: group) }
- let(:user_permissions) { [:create_merge_request_in, :create_project, :create_issue, :create_note, :upload_file, :award_emoji] }
+ let(:user_permissions) { [:create_merge_request_in, :create_project, :create_issue, :create_note, :upload_file, :award_emoji, :create_incident] }
let(:anonymous_permissions) { guest_permissions - user_permissions }
let(:current_user) { anonymous }
diff --git a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
index 87aaac673c1..c938c6432fe 100644
--- a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
@@ -106,7 +106,7 @@ RSpec.shared_examples 'conan authenticate endpoint' do
expect(payload['user_id']).to eq(personal_access_token.user_id)
duration = payload['exp'] - payload['iat']
- expect(duration).to eq(1.hour)
+ expect(duration).to eq(::Gitlab::ConanToken::CONAN_TOKEN_EXPIRE_TIME)
end
end
end
@@ -661,7 +661,7 @@ RSpec.shared_examples 'workhorse package file upload endpoint' do
end
RSpec.shared_examples 'creates build_info when there is a job' do
- context 'with job token' do
+ context 'with job token', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/294047' do
let(:jwt) { build_jwt_from_job(job) }
it 'creates a build_info record' do
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
index acaa0d8c2bc..dfd19167dcd 100644
--- a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -1,20 +1,16 @@
# frozen_string_literal: true
-RSpec.shared_context 'Debian repository shared context' do |object_type|
+RSpec.shared_context 'Debian repository shared context' do |container_type, can_freeze|
include_context 'workhorse headers'
before do
stub_feature_flags(debian_packages: true)
end
- if object_type == :project
- let(:project) { create(:project, :public) }
- elsif object_type == :group
- let(:group) { create(:group, :public) }
- end
-
- let(:user) { create(:user) }
- let(:personal_access_token) { create(:personal_access_token, user: user) }
+ let_it_be(:private_container, freeze: can_freeze) { create(container_type, :private) }
+ let_it_be(:public_container, freeze: can_freeze) { create(container_type, :public) }
+ let_it_be(:user, freeze: true) { create(:user) }
+ let_it_be(:personal_access_token, freeze: true) { create(:personal_access_token, user: user) }
let(:distribution) { 'bullseye' }
let(:component) { 'main' }
@@ -36,7 +32,7 @@ RSpec.shared_context 'Debian repository shared context' do |object_type|
end
end
- let(:params) { workhorse_params }
+ let(:api_params) { workhorse_params }
let(:auth_headers) { {} }
let(:wh_headers) do
@@ -57,12 +53,12 @@ RSpec.shared_context 'Debian repository shared context' do |object_type|
api(url),
method: method,
file_key: :file,
- params: params,
+ params: api_params,
headers: headers,
send_rewritten_field: send_rewritten_field
)
else
- send method, api(url), headers: headers, params: params
+ send method, api(url), headers: headers, params: api_params
end
end
end
@@ -81,289 +77,190 @@ RSpec.shared_context 'Debian repository auth headers' do |user_role, user_token,
end
end
-RSpec.shared_context 'Debian repository project access' do |project_visibility_level, user_role, user_token, auth_method|
+RSpec.shared_context 'Debian repository access' do |visibility_level, user_role, add_member, user_token, auth_method|
include_context 'Debian repository auth headers', user_role, user_token, auth_method do
+ let(:containers) { { private: private_container, public: public_container } }
+ let(:container) { containers[visibility_level] }
+
before do
- project.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ container.send("add_#{user_role}", user) if add_member && user_role != :anonymous
end
end
end
-RSpec.shared_examples 'Debian project repository GET request' do |user_role, add_member, status, body|
- context "for user type #{user_role}" do
- before do
- project.send("add_#{user_role}", user) if add_member && user_role != :anonymous
- end
+RSpec.shared_examples 'Debian repository GET request' do |status, body = nil|
+ and_body = body.nil? ? '' : ' and expected body'
- and_body = body.nil? ? '' : ' and expected body'
+ it "returns #{status}#{and_body}" do
+ subject
- it "returns #{status}#{and_body}" do
- subject
+ expect(response).to have_gitlab_http_status(status)
- expect(response).to have_gitlab_http_status(status)
-
- unless body.nil?
- expect(response.body).to eq(body)
- end
+ unless body.nil?
+ expect(response.body).to eq(body)
end
end
end
-RSpec.shared_examples 'Debian project repository PUT request' do |user_role, add_member, status, body|
- context "for user type #{user_role}" do
- before do
- project.send("add_#{user_role}", user) if add_member && user_role != :anonymous
- end
+RSpec.shared_examples 'Debian repository upload request' do |status, body = nil|
+ and_body = body.nil? ? '' : ' and expected body'
- and_body = body.nil? ? '' : ' and expected body'
+ if status == :created
+ it 'creates package files', :aggregate_failures do
+ pending "Debian package creation not implemented"
- if status == :created
- it 'creates package files', :aggregate_failures do
- pending "Debian package creation not implemented"
- expect { subject }
- .to change { project.packages.debian.count }.by(1)
+ expect { subject }
+ .to change { container.packages.debian.count }.by(1)
- expect(response).to have_gitlab_http_status(status)
- expect(response.media_type).to eq('text/plain')
+ expect(response).to have_gitlab_http_status(status)
+ expect(response.media_type).to eq('text/plain')
- unless body.nil?
- expect(response.body).to eq(body)
- end
+ unless body.nil?
+ expect(response.body).to eq(body)
end
- it_behaves_like 'a package tracking event', described_class.name, 'push_package'
- else
- it "returns #{status}#{and_body}", :aggregate_failures do
- subject
+ end
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
+ else
+ it "returns #{status}#{and_body}", :aggregate_failures do
+ subject
- expect(response).to have_gitlab_http_status(status)
+ expect(response).to have_gitlab_http_status(status)
- unless body.nil?
- expect(response.body).to eq(body)
- end
+ unless body.nil?
+ expect(response.body).to eq(body)
end
end
end
end
-RSpec.shared_examples 'Debian project repository PUT authorize request' do |user_role, add_member, status, body, is_authorize|
- context "for user type #{user_role}" do
- before do
- project.send("add_#{user_role}", user) if add_member && user_role != :anonymous
- end
-
- and_body = body.nil? ? '' : ' and expected body'
+RSpec.shared_examples 'Debian repository upload authorize request' do |status, body = nil|
+ and_body = body.nil? ? '' : ' and expected body'
- if status == :created
- it 'authorizes package file upload', :aggregate_failures do
- subject
+ if status == :created
+ it 'authorizes package file upload', :aggregate_failures do
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
- expect(json_response['TempPath']).to eq(Packages::PackageFileUploader.workhorse_local_upload_path)
- expect(json_response['RemoteObject']).to be_nil
- expect(json_response['MaximumSize']).to be_nil
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(Packages::PackageFileUploader.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ expect(json_response['MaximumSize']).to be_nil
+ end
- context 'without a valid token' do
- let(:workhorse_token) { 'invalid' }
+ context 'without a valid token' do
+ let(:workhorse_token) { 'invalid' }
- it 'rejects request' do
- subject
+ it 'rejects request' do
+ subject
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
end
+ end
- context 'bypassing gitlab-workhorse' do
- let(:workhorse_headers) { {} }
+ context 'bypassing gitlab-workhorse' do
+ let(:workhorse_headers) { {} }
- it 'rejects request' do
- subject
+ it 'rejects request' do
+ subject
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
end
- else
- it "returns #{status}#{and_body}", :aggregate_failures do
- subject
+ end
+ else
+ it "returns #{status}#{and_body}", :aggregate_failures do
+ subject
- expect(response).to have_gitlab_http_status(status)
+ expect(response).to have_gitlab_http_status(status)
- unless body.nil?
- expect(response.body).to eq(body)
- end
+ unless body.nil?
+ expect(response.body).to eq(body)
end
end
end
end
-RSpec.shared_examples 'rejects Debian access with unknown project id' do
- context 'with an unknown project' do
- let(:project) { double(id: non_existing_record_id) }
+RSpec.shared_examples 'rejects Debian access with unknown container id' do
+ context 'with an unknown container' do
+ let(:container) { double(id: non_existing_record_id) }
context 'as anonymous' do
- it_behaves_like 'Debian project repository GET request', :anonymous, true, :unauthorized, nil
+ it_behaves_like 'Debian repository GET request', :unauthorized, nil
end
context 'as authenticated user' do
subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
- it_behaves_like 'Debian project repository GET request', :anonymous, true, :not_found, nil
+ it_behaves_like 'Debian repository GET request', :not_found, nil
end
end
end
-RSpec.shared_examples 'Debian project repository GET endpoint' do |success_status, success_body|
- context 'with valid project' do
+RSpec.shared_examples 'Debian repository read endpoint' do |desired_behavior, success_status, success_body|
+ context 'with valid container' do
using RSpec::Parameterized::TableSyntax
- where(:project_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
- 'PUBLIC' | :developer | true | true | success_status | success_body
- 'PUBLIC' | :guest | true | true | success_status | success_body
- 'PUBLIC' | :developer | true | false | success_status | success_body
- 'PUBLIC' | :guest | true | false | success_status | success_body
- 'PUBLIC' | :developer | false | true | success_status | success_body
- 'PUBLIC' | :guest | false | true | success_status | success_body
- 'PUBLIC' | :developer | false | false | success_status | success_body
- 'PUBLIC' | :guest | false | false | success_status | success_body
- 'PUBLIC' | :anonymous | false | true | success_status | success_body
- 'PRIVATE' | :developer | true | true | success_status | success_body
- 'PRIVATE' | :guest | true | true | :forbidden | nil
- 'PRIVATE' | :developer | true | false | :unauthorized | nil
- 'PRIVATE' | :guest | true | false | :unauthorized | nil
- 'PRIVATE' | :developer | false | true | :not_found | nil
- 'PRIVATE' | :guest | false | true | :not_found | nil
- 'PRIVATE' | :developer | false | false | :unauthorized | nil
- 'PRIVATE' | :guest | false | false | :unauthorized | nil
- 'PRIVATE' | :anonymous | false | true | :unauthorized | nil
+ where(:visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
+ :public | :developer | true | true | success_status | success_body
+ :public | :guest | true | true | success_status | success_body
+ :public | :developer | true | false | success_status | success_body
+ :public | :guest | true | false | success_status | success_body
+ :public | :developer | false | true | success_status | success_body
+ :public | :guest | false | true | success_status | success_body
+ :public | :developer | false | false | success_status | success_body
+ :public | :guest | false | false | success_status | success_body
+ :public | :anonymous | false | true | success_status | success_body
+ :private | :developer | true | true | success_status | success_body
+ :private | :guest | true | true | :forbidden | nil
+ :private | :developer | true | false | :unauthorized | nil
+ :private | :guest | true | false | :unauthorized | nil
+ :private | :developer | false | true | :not_found | nil
+ :private | :guest | false | true | :not_found | nil
+ :private | :developer | false | false | :unauthorized | nil
+ :private | :guest | false | false | :unauthorized | nil
+ :private | :anonymous | false | true | :unauthorized | nil
end
with_them do
- include_context 'Debian repository project access', params[:project_visibility_level], params[:user_role], params[:user_token], :basic do
- it_behaves_like 'Debian project repository GET request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ include_context 'Debian repository access', params[:visibility_level], params[:user_role], params[:member], params[:user_token], :basic do
+ it_behaves_like "Debian repository #{desired_behavior}", params[:expected_status], params[:expected_body]
end
end
end
- it_behaves_like 'rejects Debian access with unknown project id'
-end
-
-RSpec.shared_examples 'Debian project repository PUT endpoint' do |success_status, success_body, is_authorize = false|
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
- 'PUBLIC' | :developer | true | true | success_status | nil
- 'PUBLIC' | :guest | true | true | :forbidden | nil
- 'PUBLIC' | :developer | true | false | :unauthorized | nil
- 'PUBLIC' | :guest | true | false | :unauthorized | nil
- 'PUBLIC' | :developer | false | true | :forbidden | nil
- 'PUBLIC' | :guest | false | true | :forbidden | nil
- 'PUBLIC' | :developer | false | false | :unauthorized | nil
- 'PUBLIC' | :guest | false | false | :unauthorized | nil
- 'PUBLIC' | :anonymous | false | true | :unauthorized | nil
- 'PRIVATE' | :developer | true | true | success_status | nil
- 'PRIVATE' | :guest | true | true | :forbidden | nil
- 'PRIVATE' | :developer | true | false | :unauthorized | nil
- 'PRIVATE' | :guest | true | false | :unauthorized | nil
- 'PRIVATE' | :developer | false | true | :not_found | nil
- 'PRIVATE' | :guest | false | true | :not_found | nil
- 'PRIVATE' | :developer | false | false | :unauthorized | nil
- 'PRIVATE' | :guest | false | false | :unauthorized | nil
- 'PRIVATE' | :anonymous | false | true | :unauthorized | nil
- end
-
- with_them do
- include_context 'Debian repository project access', params[:project_visibility_level], params[:user_role], params[:user_token], :basic do
- desired_behavior = if is_authorize
- 'Debian project repository PUT authorize request'
- else
- 'Debian project repository PUT request'
- end
-
- it_behaves_like desired_behavior, params[:user_role], params[:member], params[:expected_status], params[:expected_body]
- end
- end
- end
-
- it_behaves_like 'rejects Debian access with unknown project id'
-end
-
-RSpec.shared_context 'Debian repository group access' do |group_visibility_level, user_role, user_token, auth_method|
- include_context 'Debian repository auth headers', user_role, user_token, auth_method do
- before do
- group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility_level, false))
- end
- end
-end
-
-RSpec.shared_examples 'Debian group repository GET request' do |user_role, add_member, status, body|
- context "for user type #{user_role}" do
- before do
- group.send("add_#{user_role}", user) if add_member && user_role != :anonymous
- end
-
- and_body = body.nil? ? '' : ' and expected body'
-
- it "returns #{status}#{and_body}" do
- subject
-
- expect(response).to have_gitlab_http_status(status)
-
- unless body.nil?
- expect(response.body).to eq(body)
- end
- end
- end
-end
-
-RSpec.shared_examples 'rejects Debian access with unknown group id' do
- context 'with an unknown group' do
- let(:group) { double(id: non_existing_record_id) }
-
- context 'as anonymous' do
- it_behaves_like 'Debian group repository GET request', :anonymous, true, :unauthorized, nil
- end
-
- context 'as authenticated user' do
- subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
-
- it_behaves_like 'Debian group repository GET request', :anonymous, true, :not_found, nil
- end
- end
+ it_behaves_like 'rejects Debian access with unknown container id'
end
-RSpec.shared_examples 'Debian group repository GET endpoint' do |success_status, success_body|
- context 'with valid group' do
+RSpec.shared_examples 'Debian repository write endpoint' do |desired_behavior, success_status, success_body|
+ context 'with valid container' do
using RSpec::Parameterized::TableSyntax
- where(:group_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
- 'PUBLIC' | :developer | true | true | success_status | success_body
- 'PUBLIC' | :guest | true | true | success_status | success_body
- 'PUBLIC' | :developer | true | false | success_status | success_body
- 'PUBLIC' | :guest | true | false | success_status | success_body
- 'PUBLIC' | :developer | false | true | success_status | success_body
- 'PUBLIC' | :guest | false | true | success_status | success_body
- 'PUBLIC' | :developer | false | false | success_status | success_body
- 'PUBLIC' | :guest | false | false | success_status | success_body
- 'PUBLIC' | :anonymous | false | true | success_status | success_body
- 'PRIVATE' | :developer | true | true | success_status | success_body
- 'PRIVATE' | :guest | true | true | :forbidden | nil
- 'PRIVATE' | :developer | true | false | :unauthorized | nil
- 'PRIVATE' | :guest | true | false | :unauthorized | nil
- 'PRIVATE' | :developer | false | true | :not_found | nil
- 'PRIVATE' | :guest | false | true | :not_found | nil
- 'PRIVATE' | :developer | false | false | :unauthorized | nil
- 'PRIVATE' | :guest | false | false | :unauthorized | nil
- 'PRIVATE' | :anonymous | false | true | :unauthorized | nil
+ where(:visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
+ :public | :developer | true | true | success_status | success_body
+ :public | :guest | true | true | :forbidden | nil
+ :public | :developer | true | false | :unauthorized | nil
+ :public | :guest | true | false | :unauthorized | nil
+ :public | :developer | false | true | :forbidden | nil
+ :public | :guest | false | true | :forbidden | nil
+ :public | :developer | false | false | :unauthorized | nil
+ :public | :guest | false | false | :unauthorized | nil
+ :public | :anonymous | false | true | :unauthorized | nil
+ :private | :developer | true | true | success_status | success_body
+ :private | :guest | true | true | :forbidden | nil
+ :private | :developer | true | false | :unauthorized | nil
+ :private | :guest | true | false | :unauthorized | nil
+ :private | :developer | false | true | :not_found | nil
+ :private | :guest | false | true | :not_found | nil
+ :private | :developer | false | false | :unauthorized | nil
+ :private | :guest | false | false | :unauthorized | nil
+ :private | :anonymous | false | true | :unauthorized | nil
end
with_them do
- include_context 'Debian repository group access', params[:group_visibility_level], params[:user_role], params[:user_token], :basic do
- it_behaves_like 'Debian group repository GET request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ include_context 'Debian repository access', params[:visibility_level], params[:user_role], params[:member], params[:user_token], :basic do
+ it_behaves_like "Debian repository #{desired_behavior}", params[:expected_status], params[:expected_body]
end
end
end
- it_behaves_like 'rejects Debian access with unknown group id'
+ it_behaves_like 'rejects Debian access with unknown container id'
end
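# Illustrative usage sketch: with the refactor above, a request spec includes the
# container-based shared context once and then drives the read/write endpoint examples
# per route, roughly as below. The describe target, URL and expected body are assumptions;
# the request plumbing (`subject`, `headers`, `container`, `distribution`) is assumed to
# come from the shared context and auth-header contexts defined above.
require 'spec_helper'

RSpec.describe API::DebianProjectPackages do
  include_context 'Debian repository shared context', :project, false do
    describe 'GET projects/:id/packages/debian/dists/*distribution/Release' do
      let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution}/Release" }

      it_behaves_like 'Debian repository read endpoint', 'GET request', :success, 'TODO Release'
    end
  end
end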
diff --git a/spec/support/shared_examples/requests/api/graphql/mutations/boards/update_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/mutations/boards/update_list_shared_examples.rb
new file mode 100644
index 00000000000..9b55b0f061f
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/mutations/boards/update_list_shared_examples.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a GraphQL request to update board list' do
+ context 'the user is not allowed to read board lists' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ before do
+ list.update_preferences_for(current_user, collapsed: false)
+ end
+
+ context 'when user has permissions to admin board lists' do
+ before do
+ group.add_reporter(current_user)
+ end
+
+ it 'updates the list position and collapsed state' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['list']).to include(
+ 'position' => 1,
+ 'collapsed' => true
+ )
+ end
+ end
+
+ context 'when user has permissions to read board lists' do
+ before do
+ group.add_guest(current_user)
+ end
+
+ it 'updates the list collapsed state but not the list position' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['list']).to include(
+ 'position' => 0,
+ 'collapsed' => true
+ )
+ end
+ end
+end
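# Illustrative usage sketch: a mutation request spec can reuse the shared examples above
# roughly as below. The factory names, the :update_board_list mutation name and the
# GraphqlHelpers wiring are assumptions for illustration.
require 'spec_helper'

RSpec.describe 'Updating a board list' do
  include GraphqlHelpers

  let_it_be(:group) { create(:group, :private) }
  let_it_be(:board) { create(:board, group: group) }
  let_it_be(:list) { create(:list, board: board, position: 0) }
  let_it_be(:current_user) { create(:user) }

  let(:mutation) do
    graphql_mutation(:update_board_list, list_id: list.to_global_id.to_s, position: 1, collapsed: true)
  end

  let(:mutation_response) { graphql_mutation_response(:update_board_list) }

  it_behaves_like 'a GraphQL request to update board list'
end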
diff --git a/spec/support/shared_examples/requests/api/graphql/mutations/destroy_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/mutations/destroy_list_shared_examples.rb
new file mode 100644
index 00000000000..0cec67ff541
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/mutations/destroy_list_shared_examples.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'board lists destroy request' do
+ include GraphqlHelpers
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ shared_examples 'does not destroy the list and returns an error' do
+ it 'does not destroy the list' do
+ expect { subject }.not_to change { klass.count }
+ end
+
+ it 'returns an error and a non-nil list' do
+ subject
+
+ expect(mutation_response['errors']).not_to be_empty
+ expect(mutation_response['list']).not_to be_nil
+ end
+ end
+
+ context 'when the user does not have permission' do
+ it 'does not destroy the list' do
+ expect { subject }.not_to change { klass.count }
+ end
+
+ it 'returns an error' do
+ subject
+
+ expect(graphql_errors.first['message']).to include("The resource that you are attempting to access does not exist or you don't have permission to perform this action")
+ end
+ end
+
+ context 'when the user has permission' do
+ before do
+ group.add_maintainer(current_user)
+ end
+
+ context 'when given id is not for a list' do
+ # could be any non-list thing
+ let_it_be(:list) { group }
+
+ it 'returns an error' do
+ subject
+
+ expect(graphql_errors.first['message']).to include('does not represent an instance of')
+ end
+ end
+
+ context 'when list does not exist' do
+ let(:variables) do
+ {
+ list_id: "gid://gitlab/#{klass}/#{non_existing_record_id}"
+ }
+ end
+
+ it 'returns a top level error' do
+ subject
+
+ expect(graphql_errors.first['message']).to include('No object found for')
+ end
+ end
+
+ context 'when everything is ok' do
+ it 'destroys the list' do
+ expect { subject }.to change { klass.count }.by(-1)
+ end
+
+ it 'returns an empty list' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to have_key('list')
+ expect(mutation_response['list']).to be_nil
+ expect(mutation_response['errors']).to be_empty
+ end
+ end
+
+ context 'when the list is not destroyable' do
+ before do
+ list.update!(list_type: :backlog)
+ end
+
+ it_behaves_like 'does not destroy the list and returns an error'
+ end
+ end
+end
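# Illustrative usage sketch: wiring for the destroy shared examples above could look
# roughly like this; the :destroy_board_list mutation name, the factories and `klass`
# are assumptions for illustration.
require 'spec_helper'

RSpec.describe 'Destroying a board list' do
  include GraphqlHelpers

  let_it_be(:group) { create(:group, :private) }
  let_it_be(:board) { create(:board, group: group) }
  let_it_be(:current_user) { create(:user) }

  let!(:list) { create(:list, board: board) }
  let(:klass) { List }
  let(:variables) { { list_id: list.to_global_id.to_s } }

  let(:mutation) { graphql_mutation(:destroy_board_list, variables) }
  let(:mutation_response) { graphql_mutation_response(:destroy_board_list) }

  it_behaves_like 'board lists destroy request'
end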
diff --git a/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
index 66fbfa798b0..af4c9286e7c 100644
--- a/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/packages/group_and_project_packages_list_shared_examples.rb
@@ -3,6 +3,38 @@
RSpec.shared_examples 'group and project packages query' do
include GraphqlHelpers
+ let_it_be(:versionaless_package) { create(:maven_package, project: project1, version: nil) }
+ let_it_be(:maven_package) { create(:maven_package, project: project1, name: 'tab', version: '4.0.0', created_at: 5.days.ago) }
+ let_it_be(:package) { create(:npm_package, project: project1, name: 'uab', version: '5.0.0', created_at: 4.days.ago) }
+ let_it_be(:composer_package) { create(:composer_package, project: project2, name: 'vab', version: '6.0.0', created_at: 3.days.ago) }
+ let_it_be(:debian_package) { create(:debian_package, project: project2, name: 'zab', version: '7.0.0', created_at: 2.days.ago) }
+ let_it_be(:composer_metadatum) do
+ create(:composer_metadatum, package: composer_package,
+ target_sha: 'afdeh',
+ composer_json: { name: 'x', type: 'y', license: 'z', version: 1 })
+ end
+
+ let(:package_names) { graphql_data_at(resource_type, :packages, :nodes, :name) }
+ let(:target_shas) { graphql_data_at(resource_type, :packages, :nodes, :metadata, :target_sha) }
+ let(:packages) { graphql_data_at(resource_type, :packages, :nodes) }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ #{all_graphql_fields_for('packages'.classify, excluded: ['project'])}
+ metadata { #{query_graphql_fragment('ComposerMetadata')} }
+ }
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ resource_type,
+ { 'fullPath' => resource.full_path },
+ query_graphql_field('packages', {}, fields)
+ )
+ end
+
context 'when user has access to the resource' do
before do
resource.add_reporter(current_user)
@@ -48,4 +80,101 @@ RSpec.shared_examples 'group and project packages query' do
expect(packages).to be_nil
end
end
+
+ describe 'sorting and pagination' do
+ let_it_be(:ascending_packages) { [maven_package, package, composer_package, debian_package].map { |package| global_id_of(package)} }
+
+ let(:data_path) { [resource_type, :packages] }
+
+ before do
+ resource.add_reporter(current_user)
+ end
+
+ [:CREATED_ASC, :NAME_ASC, :VERSION_ASC, :TYPE_ASC].each do |order|
+ context "#{order}" do
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { order }
+ let(:first_param) { 4 }
+ let(:expected_results) { ascending_packages }
+ end
+ end
+ end
+
+ [:CREATED_DESC, :NAME_DESC, :VERSION_DESC, :TYPE_DESC].each do |order|
+ context "#{order}" do
+ it_behaves_like 'sorted paginated query' do
+ let(:sort_param) { order }
+ let(:first_param) { 4 }
+ let(:expected_results) { ascending_packages.reverse }
+ end
+ end
+ end
+
+ context 'with an invalid sort' do
+ let(:query) do
+ graphql_query_for(
+ resource_type,
+ { 'fullPath' => resource.full_path },
+ query_nodes(:packages, :name, args: { sort: :WRONG_ORDER })
+ )
+ end
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'throws an error' do
+ expect_graphql_errors_to_include(/Argument \'sort\' on Field \'packages\' has an invalid value/)
+ end
+ end
+
+ def pagination_query(params)
+ graphql_query_for(resource_type, { 'fullPath' => resource.full_path },
+ query_nodes(:packages, :id, include_pagination_info: true, args: params)
+ )
+ end
+ end
+
+ describe 'filtering' do
+ subject { packages }
+
+ let(:query) do
+ graphql_query_for(
+ resource_type,
+ { 'fullPath' => resource.full_path },
+ query_nodes(:packages, :name, args: params)
+ )
+ end
+
+ before do
+ resource.add_reporter(current_user)
+ post_graphql(query, current_user: current_user)
+ end
+
+ context 'package_name' do
+ let(:params) { { package_name: maven_package.name } }
+
+ it { is_expected.to contain_exactly({ "name" => maven_package.name }) }
+ end
+
+ context 'package_type' do
+ let(:params) { { package_type: :COMPOSER } }
+
+ it { is_expected.to contain_exactly({ "name" => composer_package.name }) }
+ end
+
+ context 'status' do
+ let_it_be(:errored_package) { create(:maven_package, project: project1, status: 'error') }
+
+ let(:params) { { status: :ERROR } }
+
+ it { is_expected.to contain_exactly({ "name" => errored_package.name }) }
+ end
+
+ context 'include_versionless' do
+ let(:params) { { include_versionless: true } }
+
+ it { is_expected.to include({ "name" => versionaless_package.name }) }
+ end
+ end
end
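# Illustrative usage sketch: the project-level packages query spec could satisfy the lets
# referenced by the shared examples above roughly as below. Aliasing project1 and project2
# to the same project for the project case is an assumption for illustration.
require 'spec_helper'

RSpec.describe 'getting a package list for a project' do
  include GraphqlHelpers

  let_it_be(:resource) { create(:project) }
  let_it_be(:current_user) { create(:user) }
  let_it_be(:project1) { resource }
  let_it_be(:project2) { resource }

  let(:resource_type) { :project }

  it_behaves_like 'group and project packages query'
end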
diff --git a/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb b/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb
index ded381fd402..a3378d4619b 100644
--- a/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/issuable_update_shared_examples.rb
@@ -3,7 +3,7 @@
RSpec.shared_examples 'issuable update endpoint' do
let(:area) { entity.class.name.underscore.pluralize }
- describe 'PUT /projects/:id/issues/:issue_id' do
+ describe 'PUT /projects/:id/issues/:issue_iid' do
let(:url) { "/projects/#{project.id}/#{area}/#{entity.iid}" }
it 'clears labels when labels param is nil' do
diff --git a/spec/support/shared_examples/requests/api/multiple_and_scoped_issue_boards_shared_examples.rb b/spec/support/shared_examples/requests/api/multiple_and_scoped_issue_boards_shared_examples.rb
index 54aa9d47dd8..fa111ca5811 100644
--- a/spec/support/shared_examples/requests/api/multiple_and_scoped_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/multiple_and_scoped_issue_boards_shared_examples.rb
@@ -14,7 +14,6 @@ RSpec.shared_examples 'multiple and scoped issue boards' do |route_definition|
post api(root_url, user), params: { name: "new board" }
expect(response).to have_gitlab_http_status(:created)
-
expect(response).to match_response_schema('public_api/v4/board', dir: "ee")
end
end
diff --git a/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb
new file mode 100644
index 00000000000..70cc9b1e6b5
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/terraform/modules/v1/packages_shared_examples.rb
@@ -0,0 +1,251 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'when package feature is disabled' do
+ before do
+ stub_config(packages: { enabled: false })
+ end
+
+ it_behaves_like 'returning response status', :not_found
+end
+
+RSpec.shared_examples 'without authentication' do
+ it_behaves_like 'returning response status', :unauthorized
+end
+
+RSpec.shared_examples 'with authentication' do
+ where(:user_role, :token_header, :token_type, :valid_token, :status) do
+ :guest | 'PRIVATE-TOKEN' | :personal_access_token | true | :not_found
+ :guest | 'PRIVATE-TOKEN' | :personal_access_token | false | :unauthorized
+ :guest | 'DEPLOY-TOKEN' | :deploy_token | true | :not_found
+ :guest | 'DEPLOY-TOKEN' | :deploy_token | false | :unauthorized
+ :guest | 'JOB-TOKEN' | :job_token | true | :not_found
+ :guest | 'JOB-TOKEN' | :job_token | false | :unauthorized
+ :reporter | 'PRIVATE-TOKEN' | :personal_access_token | true | :not_found
+ :reporter | 'PRIVATE-TOKEN' | :personal_access_token | false | :unauthorized
+ :reporter | 'DEPLOY-TOKEN' | :deploy_token | true | :not_found
+ :reporter | 'DEPLOY-TOKEN' | :deploy_token | false | :unauthorized
+ :reporter | 'JOB-TOKEN' | :job_token | true | :not_found
+ :reporter | 'JOB-TOKEN' | :job_token | false | :unauthorized
+ :developer | 'PRIVATE-TOKEN' | :personal_access_token | true | :not_found
+ :developer | 'PRIVATE-TOKEN' | :personal_access_token | false | :unauthorized
+ :developer | 'DEPLOY-TOKEN' | :deploy_token | true | :not_found
+ :developer | 'DEPLOY-TOKEN' | :deploy_token | false | :unauthorized
+ :developer | 'JOB-TOKEN' | :job_token | true | :not_found
+ :developer | 'JOB-TOKEN' | :job_token | false | :unauthorized
+ end
+
+ with_them do
+ before do
+ project.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ let(:token) { valid_token ? tokens[token_type] : 'invalid-token123' }
+ let(:headers) { { token_header => token } }
+
+ it_behaves_like 'returning response status', params[:status]
+ end
+end
+
+RSpec.shared_examples 'an unimplemented route' do
+ it_behaves_like 'without authentication'
+ it_behaves_like 'with authentication'
+ it_behaves_like 'when package feature is disabled'
+end
+
+RSpec.shared_examples 'grants terraform module download' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'returns a valid response' do
+ subject
+
+ expect(response.headers).to include 'X-Terraform-Get'
+ end
+ end
+end
+
+RSpec.shared_examples 'returns terraform module packages' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'returns a valid response' do
+ subject
+
+ expect(json_response).to match_schema('public_api/v4/packages/terraform/modules/v1/versions')
+ end
+ end
+end
+
+RSpec.shared_examples 'returns no terraform module packages' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ group.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'returns a response with no versions' do
+ subject
+
+ expect(json_response['modules'][0]['versions'].size).to eq(0)
+ end
+ end
+end
+
+RSpec.shared_examples 'grants terraform module packages access' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+ end
+end
+
+RSpec.shared_examples 'grants terraform module package file access' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
+ end
+end
+
+RSpec.shared_examples 'rejects terraform module packages access' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+ end
+end
+
+RSpec.shared_examples 'process terraform module workhorse authorization' do |user_type, status, add_member = true|
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'has the proper content type' do
+ subject
+
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ end
+
+ context 'with a request that bypassed gitlab-workhorse' do
+ let(:headers) do
+ { 'HTTP_PRIVATE_TOKEN' => personal_access_token.token }
+ .merge(workhorse_headers)
+ .tap { |h| h.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER) }
+ end
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'returning response status', :forbidden
+ end
+ end
+end
+
+RSpec.shared_examples 'process terraform module upload' do |user_type, status, add_member = true|
+ RSpec.shared_examples 'creates terraform module package files' do
+ it 'creates package files', :aggregate_failures do
+ expect { subject }
+ .to change { project.packages.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+ expect(response).to have_gitlab_http_status(status)
+
+ package_file = project.packages.last.package_files.reload.last
+ expect(package_file.file_name).to eq('mymodule-mysystem-1.0.0.tgz')
+ end
+ end
+
+ context "for user type #{user_type}" do
+ before do
+ project.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ context 'with object storage disabled' do
+ before do
+ stub_package_file_object_storage(enabled: false)
+ end
+
+ context 'without a file from workhorse' do
+ let(:send_rewritten_field) { false }
+
+ it_behaves_like 'returning response status', :bad_request
+ end
+
+ context 'with correct params' do
+ it_behaves_like 'package workhorse uploads'
+ it_behaves_like 'creates terraform module package files'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
+ end
+ end
+
+ context 'with object storage enabled' do
+ let(:tmp_object) do
+ fog_connection.directories.new(key: 'packages').files.create( # rubocop:disable Rails/SaveBang
+ key: "tmp/uploads/#{file_name}",
+ body: 'content'
+ )
+ end
+
+ let(:fog_file) { fog_to_uploaded_file(tmp_object) }
+ let(:params) { { file: fog_file, 'file.remote_id' => file_name } }
+
+ context 'and direct upload enabled' do
+ let(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: true)
+ end
+
+ it_behaves_like 'creates terraform module package files'
+
+ ['123123', '../../123123'].each do |remote_id|
+ context "with invalid remote_id: #{remote_id}" do
+ let(:params) do
+ {
+ file: fog_file,
+ 'file.remote_id' => remote_id
+ }
+ end
+
+ it_behaves_like 'returning response status', :forbidden
+ end
+ end
+ end
+
+ context 'and direct upload disabled' do
+ context 'and background upload disabled' do
+ let(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: false, background_upload: false)
+ end
+
+ it_behaves_like 'creates terraform module package files'
+ end
+
+ context 'and background upload enabled' do
+ let(:fog_connection) do
+ stub_package_file_object_storage(direct_upload: false, background_upload: true)
+ end
+
+ it_behaves_like 'creates terraform module package files'
+ end
+ end
+ end
+ end
+end
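# Illustrative usage sketch: a request spec could drive the download shared example above
# roughly as below. The endpoint path, factory names and the token setup are assumptions
# for illustration, not taken from the patch.
require 'spec_helper'

RSpec.describe API::Terraform::Modules::V1::Packages do
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, namespace: group) }
  let_it_be(:package) { create(:terraform_module_package, project: project) }
  let_it_be(:user) { create(:user) }
  let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }

  describe 'GET */packages/terraform/modules/v1/:namespace/:name/:system/:version/download' do
    let(:headers) { { 'PRIVATE-TOKEN' => personal_access_token.token } }
    let(:url) { api("/packages/terraform/modules/v1/#{group.path}/#{package.name}/#{package.version}/download") }

    subject { get(url, headers: headers) }

    it_behaves_like 'grants terraform module download', :developer, :success
  end
end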
diff --git a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
index fb6d6603beb..afc902dd184 100644
--- a/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/time_tracking_shared_examples.rb
@@ -125,6 +125,22 @@ RSpec.shared_examples 'time tracking endpoints' do |issuable_name|
expect(json_response['message']['base'].first).to eq(_('Time to subtract exceeds the total time spent'))
end
end
+
+ if issuable_name == 'merge_request'
+ it 'calls update service with :use_specialized_service param' do
+ expect(::MergeRequests::UpdateService).to receive(:new).with(project: project, current_user: user, params: hash_including(use_specialized_service: true))
+
+ post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '2h' }
+ end
+ end
+
+ if issuable_name == 'issue'
+ it 'calls update service without :use_specialized_service param' do
+ expect(::Issues::UpdateService).to receive(:new).with(project: project, current_user: user, params: hash_not_including(use_specialized_service: true))
+
+ post api("/projects/#{project.id}/#{issuable_collection_name}/#{issuable.iid}/add_spent_time", user), params: { duration: '2h' }
+ end
+ end
end
describe "POST /projects/:id/#{issuable_collection_name}/:#{issuable_name}_id/reset_spent_time" do
diff --git a/spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb b/spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb
index 490c7d12115..91fdcbd9b1d 100644
--- a/spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb
+++ b/spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples '#create_or_update action' do
let(:params) do
- { integration: { application_type: Clusters::Applications::Prometheus.application_name, enabled: true } }
+ { integration: { application_type: 'prometheus', enabled: true } }
end
let(:path) { raise NotImplementedError }
diff --git a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
index 926da827e75..95817624658 100644
--- a/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
+++ b/spec/support/shared_examples/requests/rack_attack_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
#
# Requires let variables:
-# * throttle_setting_prefix: "throttle_authenticated_api", "throttle_authenticated_web", "throttle_protected_paths"
+# * throttle_setting_prefix: "throttle_authenticated_api", "throttle_authenticated_web", "throttle_protected_paths", "throttle_authenticated_packages_api"
# * request_method
# * request_args
# * other_user_request_args
@@ -13,7 +13,8 @@ RSpec.shared_examples 'rate-limited token-authenticated requests' do
{
"throttle_protected_paths" => "throttle_authenticated_protected_paths_api",
"throttle_authenticated_api" => "throttle_authenticated_api",
- "throttle_authenticated_web" => "throttle_authenticated_web"
+ "throttle_authenticated_web" => "throttle_authenticated_web",
+ "throttle_authenticated_packages_api" => "throttle_authenticated_packages_api"
}
end
diff --git a/spec/support/shared_examples/row_lock_shared_examples.rb b/spec/support/shared_examples/row_lock_shared_examples.rb
new file mode 100644
index 00000000000..5e003172215
--- /dev/null
+++ b/spec/support/shared_examples/row_lock_shared_examples.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+# Ensure that a SQL command to lock the row(s) was requested.
+# Ensure a transaction also occurred.
+# Be careful! This style of spec is not foolproof, but it is better than nothing.
+
+RSpec.shared_examples 'locked row' do
+ it "has locked row" do
+ table_name = row.class.table_name
+ ids_regex = /SELECT.*FROM.*#{table_name}.*"#{table_name}"."id" = #{row.id}.+FOR UPDATE/m
+
+ expect(recorded_queries.log).to include a_string_matching 'SAVEPOINT'
+ expect(recorded_queries.log).to include a_string_matching ids_regex
+ end
+end
+
+RSpec.shared_examples 'locked rows' do
+ it "has locked rows" do
+ table_name = rows.first.class.table_name
+
+ row_ids = rows.map(&:id).join(', ')
+ ids_regex = /SELECT.+FROM.+"#{table_name}".+"#{table_name}"."id" IN \(#{row_ids}\).+FOR UPDATE/m
+
+ expect(recorded_queries.log).to include a_string_matching 'SAVEPOINT'
+ expect(recorded_queries.log).to include a_string_matching ids_regex
+ end
+end
diff --git a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
index 00146335ef7..9d7ae6bcb3d 100644
--- a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
+++ b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
@@ -20,9 +20,27 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do
expect { serialize(grouping: false) }.not_to exceed_query_limit(control.count)
end
- def serialize(grouping:)
+ it 'does not preload for environments that do not exist in the page', :request_store do
+ create_environment_with_associations(project)
+
+ first_page_query = ActiveRecord::QueryRecorder.new do
+ serialize(grouping: false, query: { page: 1, per_page: 1 })
+ end
+
+ second_page_query = ActiveRecord::QueryRecorder.new do
+ serialize(grouping: false, query: { page: 2, per_page: 1 })
+ end
+
+ expect(second_page_query.count).to be < first_page_query.count
+ end
+
+ def serialize(grouping:, query: nil)
+ query ||= { page: 1, per_page: 1 }
+ request = double(url: "#{Gitlab.config.gitlab.url}:8080/api/v4/projects?#{query.to_query}", query_parameters: query)
+
EnvironmentSerializer.new(current_user: user, project: project).yield_self do |serializer|
serializer.within_folders if grouping
+ serializer.with_pagination(request, spy('response'))
serializer.represent(Environment.where(project: project))
end
end
diff --git a/spec/support/shared_examples/serializers/pipeline_artifacts_shared_example.rb b/spec/support/shared_examples/serializers/pipeline_artifacts_shared_example.rb
deleted file mode 100644
index d5ffd5e7510..00000000000
--- a/spec/support/shared_examples/serializers/pipeline_artifacts_shared_example.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-RSpec.shared_examples 'public artifacts' do
- let_it_be(:project) { create(:project, :public) }
- let(:pipeline) { create(:ci_empty_pipeline, status: :success, project: project) }
-
- context 'that has artifacts' do
- let!(:build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
-
- it 'contains information about artifacts' do
- expect(subject[:details][:artifacts].length).to eq(1)
- end
- end
-
- context 'that has non public artifacts' do
- let!(:build) { create(:ci_build, :success, :artifacts, :non_public_artifacts, pipeline: pipeline) }
-
- it 'does not contain information about artifacts' do
- expect(subject[:details][:artifacts].length).to eq(0)
- end
- end
-end
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb
new file mode 100644
index 00000000000..218a3462c35
--- /dev/null
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb
@@ -0,0 +1,161 @@
+# frozen_string_literal: true
+
+# This shared_example requires the following variables:
+# - `service`, the service which includes AlertManagement::AlertProcessing
+RSpec.shared_examples 'creates an alert management alert or errors' do
+ it { is_expected.to be_success }
+
+ it 'creates AlertManagement::Alert' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
+
+ expect { subject }.to change(AlertManagement::Alert, :count).by(1)
+ end
+
+ it 'executes the alert service hooks' do
+ expect_next_instance_of(AlertManagement::Alert) do |alert|
+ expect(alert).to receive(:execute_services)
+ end
+
+ subject
+ end
+
+ context 'and fails to save' do
+ let(:errors) { double(messages: { hosts: ['hosts array is over 255 chars'] })}
+
+ before do
+ allow(service).to receive(:alert).and_call_original
+ allow(service).to receive_message_chain(:alert, :save).and_return(false)
+ allow(service).to receive_message_chain(:alert, :errors).and_return(errors)
+ end
+
+ it_behaves_like 'alerts service responds with an error', :bad_request
+
+ it 'writes a warning to the log' do
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: "Unable to create AlertManagement::Alert from #{source}",
+ project_id: project.id,
+ alert_errors: { hosts: ['hosts array is over 255 chars'] }
+ )
+
+ subject
+ end
+ end
+end
+
+# This shared_example requires the following variables:
+# - last_alert_attributes, attributes of the most recently created alert
+# - project, the project the alert was created in
+# - payload_raw, hash representation of payload
+# - environment, project's environment
+# - fingerprint, fingerprint hash
+RSpec.shared_examples 'properly assigns the alert properties' do
+ specify do
+ subject
+
+ expect(last_alert_attributes).to match({
+ project_id: project.id,
+ title: payload_raw.fetch(:title),
+ started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
+ severity: payload_raw.fetch(:severity, nil),
+ status: AlertManagement::Alert.status_value(:triggered),
+ events: 1,
+ domain: domain,
+ hosts: payload_raw.fetch(:hosts, nil),
+ payload: payload_raw.with_indifferent_access,
+ issue_id: nil,
+ description: payload_raw.fetch(:description, nil),
+ monitoring_tool: payload_raw.fetch(:monitoring_tool, nil),
+ service: payload_raw.fetch(:service, nil),
+ fingerprint: Digest::SHA1.hexdigest(fingerprint),
+ environment_id: environment.id,
+ ended_at: nil,
+ prometheus_alert_id: nil
+ }.with_indifferent_access)
+ end
+end
+
+RSpec.shared_examples 'does not create an alert management alert' do
+ specify do
+ expect { subject }.not_to change(AlertManagement::Alert, :count)
+ end
+end
+
+# This shared_example requires the following variables:
+# - `alert`, the alert for which events should be incremented
+RSpec.shared_examples 'adds an alert management alert event' do
+ specify do
+ expect(alert).not_to receive(:execute_services)
+
+ expect { subject }.to change { alert.reload.events }.by(1)
+
+ expect(subject).to be_success
+ end
+
+ it_behaves_like 'does not create an alert management alert'
+end
+
+# This shared_example requires the following variables:
+# - `alert`, the alert for which events should not be incremented
+RSpec.shared_examples 'does not add an alert management alert event' do
+ specify do
+ expect { subject }.not_to change { alert.reload.events }
+ end
+end
+
+RSpec.shared_examples 'processes new firing alert' do
+ include_examples 'processes never-before-seen alert'
+
+ context 'for an existing alert with the same fingerprint' do
+ let_it_be(:gitlab_fingerprint) { Digest::SHA1.hexdigest(fingerprint) }
+
+ context 'which is triggered' do
+ let_it_be(:alert) { create(:alert_management_alert, :triggered, fingerprint: gitlab_fingerprint, project: project) }
+
+ it_behaves_like 'adds an alert management alert event'
+ it_behaves_like 'sends alert notification emails if enabled'
+ it_behaves_like 'processes incident issues if enabled', with_issue: true
+
+ it_behaves_like 'does not create an alert management alert'
+ it_behaves_like 'does not create a system note for alert'
+
+ context 'with an existing resolved alert as well' do
+ let_it_be(:resolved_alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: gitlab_fingerprint) }
+
+ it_behaves_like 'adds an alert management alert event'
+ it_behaves_like 'sends alert notification emails if enabled'
+ it_behaves_like 'processes incident issues if enabled', with_issue: true
+
+ it_behaves_like 'does not create an alert management alert'
+ it_behaves_like 'does not create a system note for alert'
+ end
+ end
+
+ context 'which is acknowledged' do
+ let_it_be(:alert) { create(:alert_management_alert, :acknowledged, fingerprint: gitlab_fingerprint, project: project) }
+
+ it_behaves_like 'adds an alert management alert event'
+ it_behaves_like 'processes incident issues if enabled', with_issue: true
+
+ it_behaves_like 'does not create an alert management alert'
+ it_behaves_like 'does not create a system note for alert'
+ it_behaves_like 'does not send alert notification emails'
+ end
+
+ context 'which is ignored' do
+ let_it_be(:alert) { create(:alert_management_alert, :ignored, fingerprint: gitlab_fingerprint, project: project) }
+
+ it_behaves_like 'adds an alert management alert event'
+ it_behaves_like 'processes incident issues if enabled', with_issue: true
+
+ it_behaves_like 'does not create an alert management alert'
+ it_behaves_like 'does not create a system note for alert'
+ it_behaves_like 'does not send alert notification emails'
+ end
+
+ context 'which is resolved' do
+ let_it_be(:alert) { create(:alert_management_alert, :resolved, fingerprint: gitlab_fingerprint, project: project) }
+
+ include_examples 'processes never-before-seen alert'
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb
new file mode 100644
index 00000000000..86e7da5bcbe
--- /dev/null
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+# This shared_example requires the following variables:
+# - `alert`, the alert to be resolved
+RSpec.shared_examples 'resolves an existing alert management alert' do
+ it 'sets the end time and status' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
+
+ expect { subject }
+ .to change { alert.reload.resolved? }.to(true)
+ .and change { alert.ended_at.present? }.to(true)
+
+ expect(subject).to be_success
+ end
+end
+
+# This shared_example requires the following variables:
+# - `alert`, the alert not to be updated
+RSpec.shared_examples 'does not change the alert end time' do
+ specify do
+ expect { subject }.not_to change { alert.reload.ended_at }
+ end
+end
+
+# This shared_example requires the following variables:
+# - `project`, expected project for an incoming alert
+# - `service`, a service which includes AlertManagement::AlertProcessing
+# - `alert` (optional), the alert which should fail to resolve. If not
+# included, the log is expected to correspond to a new alert
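+#
+# Illustrative usage (hypothetical service construction); an including
+# service spec might provide:
+#
+#   let(:service) { described_class.new(project, payload) }
+#   let(:alert) { create(:alert_management_alert, project: project) }
+#
+#   it_behaves_like 'writes a warning to the log for a failed alert status update'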
+RSpec.shared_examples 'writes a warning to the log for a failed alert status update' do
+ before do
+ allow(service).to receive(:alert).and_call_original
+ allow(service).to receive_message_chain(:alert, :resolve).and_return(false)
+ end
+
+ specify do
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Unable to update AlertManagement::Alert status to resolved',
+ project_id: project.id,
+ alert_id: alert ? alert.id : (last_alert_id + 1)
+ )
+
+ # Failure to resolve a recovery alert is not a critical failure
+ expect(subject).to be_success
+ end
+
+ private
+
+ def last_alert_id
+ AlertManagement::Alert.connection
+ .select_value("SELECT nextval('#{AlertManagement::Alert.sequence_name}')")
+ end
+end
+
+RSpec.shared_examples 'processes recovery alert' do
+ context 'seen for the first time' do
+ let(:alert) { AlertManagement::Alert.last }
+
+ include_examples 'processes never-before-seen recovery alert'
+ end
+
+ context 'for an existing alert with the same fingerprint' do
+ let_it_be(:gitlab_fingerprint) { Digest::SHA1.hexdigest(fingerprint) }
+
+ context 'which is triggered' do
+ let_it_be(:alert) { create(:alert_management_alert, :triggered, project: project, fingerprint: gitlab_fingerprint, monitoring_tool: source) }
+
+ it_behaves_like 'resolves an existing alert management alert'
+ it_behaves_like 'creates expected system notes for alert', :recovery_alert, :resolve_alert
+ it_behaves_like 'sends alert notification emails if enabled'
+ it_behaves_like 'closes related incident if enabled'
+ it_behaves_like 'writes a warning to the log for a failed alert status update'
+
+ it_behaves_like 'does not create an alert management alert'
+ it_behaves_like 'does not process incident issues'
+ it_behaves_like 'does not add an alert management alert event'
+ end
+
+ context 'which is ignored' do
+ let_it_be(:alert) { create(:alert_management_alert, :ignored, project: project, fingerprint: gitlab_fingerprint, monitoring_tool: source) }
+
+ it_behaves_like 'resolves an existing alert management alert'
+ it_behaves_like 'creates expected system notes for alert', :recovery_alert, :resolve_alert
+ it_behaves_like 'sends alert notification emails if enabled'
+ it_behaves_like 'closes related incident if enabled'
+ it_behaves_like 'writes a warning to the log for a failed alert status update'
+
+ it_behaves_like 'does not create an alert management alert'
+ it_behaves_like 'does not process incident issues'
+ it_behaves_like 'does not add an alert management alert event'
+ end
+
+ context 'which is acknowledged' do
+ let_it_be(:alert) { create(:alert_management_alert, :acknowledged, project: project, fingerprint: gitlab_fingerprint, monitoring_tool: source) }
+
+ it_behaves_like 'resolves an existing alert management alert'
+ it_behaves_like 'creates expected system notes for alert', :recovery_alert, :resolve_alert
+ it_behaves_like 'sends alert notification emails if enabled'
+ it_behaves_like 'closes related incident if enabled'
+ it_behaves_like 'writes a warning to the log for a failed alert status update'
+
+ it_behaves_like 'does not create an alert management alert'
+ it_behaves_like 'does not process incident issues'
+ it_behaves_like 'does not add an alert management alert event'
+ end
+
+ context 'which is resolved' do
+ let_it_be(:alert) { create(:alert_management_alert, :resolved, project: project, fingerprint: gitlab_fingerprint, monitoring_tool: source) }
+
+ include_examples 'processes never-before-seen recovery alert'
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb
new file mode 100644
index 00000000000..c6ac07b6dd5
--- /dev/null
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+# Expects usage of the 'incident management settings enabled' context.
+#
+# This shared_example includes the following option:
+# - with_issue: includes a test for when the defined `alert` has an associated issue
+#
+# This shared_example requires the following variables:
+# - `alert`, required if :with_issue is true
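+#
+# Illustrative usage (placeholder names), alongside the settings context:
+#
+#   include_context 'incident management settings enabled'
+#
+#   it_behaves_like 'processes incident issues if enabled', with_issue: true do
+#     let(:alert) { create(:alert_management_alert, project: project) }
+#   end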
+RSpec.shared_examples 'processes incident issues if enabled' do |with_issue: false|
+ include_examples 'processes incident issues', with_issue
+
+ context 'with incident setting disabled' do
+ let(:create_issue) { false }
+
+ it_behaves_like 'does not process incident issues'
+ end
+end
+
+RSpec.shared_examples 'processes incident issues' do |with_issue: false|
+ before do
+ allow_next_instance_of(AlertManagement::Alert) do |alert|
+ allow(alert).to receive(:execute_services)
+ end
+ end
+
+ specify do
+ expect(IncidentManagement::ProcessAlertWorkerV2)
+ .to receive(:perform_async)
+ .with(kind_of(Integer))
+
+ Sidekiq::Testing.inline! do
+ expect(subject).to be_success
+ end
+ end
+
+ context 'with issue', if: with_issue do
+ before do
+ alert.update!(issue: create(:issue, project: project))
+ end
+
+ it_behaves_like 'does not process incident issues'
+ end
+end
+
+RSpec.shared_examples 'does not process incident issues' do
+ specify do
+ expect(IncidentManagement::ProcessAlertWorkerV2).not_to receive(:perform_async)
+
+ subject
+ end
+end
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb
new file mode 100644
index 00000000000..132f1e0422e
--- /dev/null
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+# Expects usage of the 'incident management settings enabled' context.
+#
+# This shared_example requires the following variables:
+# - `alert`, alert for which related incidents should be closed
+# - `project`, project of the alert
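+#
+# Illustrative usage (placeholder names):
+#
+#   include_context 'incident management settings enabled'
+#
+#   let_it_be(:project) { create(:project) }
+#   let(:alert) { create(:alert_management_alert, project: project) }
+#
+#   it_behaves_like 'closes related incident if enabled'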
+RSpec.shared_examples 'closes related incident if enabled' do
+ context 'with issue' do
+ before do
+ alert.update!(issue: create(:issue, project: project))
+ end
+
+ it { expect { subject }.to change { alert.issue.reload.closed? }.from(false).to(true) }
+ it { expect { subject }.to change(ResourceStateEvent, :count).by(1) }
+ end
+
+ context 'without issue' do
+ it { expect { subject }.not_to change { alert.reload.issue } }
+ it { expect { subject }.not_to change(ResourceStateEvent, :count) }
+ end
+
+ context 'with incident setting disabled' do
+ let(:auto_close_incident) { false }
+
+ it_behaves_like 'does not close related incident'
+ end
+end
+
+RSpec.shared_examples 'does not close related incident' do
+ context 'with issue' do
+ before do
+ alert.update!(issue: create(:issue, project: project))
+ end
+
+ it { expect { subject }.not_to change { alert.issue.reload.state } }
+ it { expect { subject }.not_to change(ResourceStateEvent, :count) }
+ end
+
+ context 'without issue' do
+ it { expect { subject }.not_to change { alert.reload.issue } }
+ it { expect { subject }.not_to change(ResourceStateEvent, :count) }
+ end
+end
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb
new file mode 100644
index 00000000000..5f30b58176b
--- /dev/null
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+# Expects usage of the 'incident management settings enabled' context.
+#
+# This shared_example includes the following option:
+# - count: number of notifications expected to be sent
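+#
+# Illustrative usage; `count` defaults to 1 and may be overridden:
+#
+#   include_context 'incident management settings enabled'
+#
+#   it_behaves_like 'sends alert notification emails if enabled'
+#   it_behaves_like 'sends alert notification emails', count: 2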
+RSpec.shared_examples 'sends alert notification emails if enabled' do |count: 1|
+ include_examples 'sends alert notification emails', count
+
+ context 'with email setting disabled' do
+ let(:send_email) { false }
+
+ it_behaves_like 'does not send alert notification emails'
+ end
+end
+
+RSpec.shared_examples 'sends alert notification emails' do |count: 1|
+ let(:notification_async) { double(NotificationService::Async) }
+
+ specify do
+ allow(NotificationService).to receive_message_chain(:new, :async).and_return(notification_async)
+ expect(notification_async).to receive(:prometheus_alerts_fired).exactly(count).times
+
+ subject
+ end
+end
+
+RSpec.shared_examples 'does not send alert notification emails' do
+ specify do
+ expect(NotificationService).not_to receive(:new)
+
+ subject
+ end
+end
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/system_notes_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/system_notes_shared_examples.rb
new file mode 100644
index 00000000000..57d598c0259
--- /dev/null
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/system_notes_shared_examples.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+# This shared_example includes the following option:
+# - notes: any of [:new_alert, :recovery_alert, :resolve_alert].
+# Represents which notes are expected to be created.
+#
+# This shared_example requires the following variables:
+# - `source` (optional), the monitoring tool or integration name
+# expected in the applicable system notes
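+#
+# Illustrative usage (the 'Prometheus' value is only an example source):
+#
+#   let(:source) { 'Prometheus' }
+#
+#   it_behaves_like 'creates expected system notes for alert', :new_alert, :resolve_alert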
+RSpec.shared_examples 'creates expected system notes for alert' do |*notes|
+ let(:expected_note_count) { expected_notes.length }
+ let(:new_notes) { Note.last(expected_note_count).pluck(:note) }
+ let(:expected_notes) do
+ {
+ new_alert: source,
+ recovery_alert: source,
+ resolve_alert: 'Resolved'
+ }.slice(*notes)
+ end
+
+ it "for #{notes.join(', ')}" do
+ expect { subject }.to change(Note, :count).by(expected_note_count)
+
+ expected_notes.each_value.with_index do |value, index|
+ expect(new_notes[index]).to include(value)
+ end
+ end
+end
+
+RSpec.shared_examples 'does not create a system note for alert' do
+ specify do
+ expect { subject }.not_to change(Note, :count)
+ end
+end
diff --git a/spec/support/shared_examples/services/alert_management_shared_examples.rb b/spec/support/shared_examples/services/alert_management_shared_examples.rb
index d9f28a97a0f..827ae42f970 100644
--- a/spec/support/shared_examples/services/alert_management_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management_shared_examples.rb
@@ -1,111 +1,77 @@
# frozen_string_literal: true
-RSpec.shared_examples 'creates an alert management alert' do
- it { is_expected.to be_success }
+RSpec.shared_examples 'alerts service responds with an error and takes no actions' do |http_status|
+ include_examples 'alerts service responds with an error', http_status
- it 'creates AlertManagement::Alert' do
- expect { subject }.to change(AlertManagement::Alert, :count).by(1)
- end
-
- it 'executes the alert service hooks' do
- expect_next_instance_of(AlertManagement::Alert) do |alert|
- expect(alert).to receive(:execute_services)
- end
+ it_behaves_like 'does not create an alert management alert'
+ it_behaves_like 'does not create a system note for alert'
+ it_behaves_like 'does not process incident issues'
+ it_behaves_like 'does not send alert notification emails'
+end
- subject
+RSpec.shared_examples 'alerts service responds with an error' do |http_status|
+ specify do
+ expect(subject).to be_error
+ expect(subject.http_status).to eq(http_status)
end
end
# This shared_example requires the following variables:
-# - last_alert_attributes, last created alert
-# - project, project that alert created
-# - payload_raw, hash representation of payload
-# - environment, project's environment
-# - fingerprint, fingerprint hash
-RSpec.shared_examples 'assigns the alert properties' do
- it 'ensures that created alert has all data properly assigned' do
- subject
-
- expect(last_alert_attributes).to match(
- project_id: project.id,
- title: payload_raw.fetch(:title),
- started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
- severity: payload_raw.fetch(:severity),
- status: AlertManagement::Alert.status_value(:triggered),
- events: 1,
- domain: domain,
- hosts: payload_raw.fetch(:hosts),
- payload: payload_raw.with_indifferent_access,
- issue_id: nil,
- description: payload_raw.fetch(:description),
- monitoring_tool: payload_raw.fetch(:monitoring_tool),
- service: payload_raw.fetch(:service),
- fingerprint: Digest::SHA1.hexdigest(fingerprint),
- environment_id: environment.id,
- ended_at: nil,
- prometheus_alert_id: nil
+# - `service`, a service which includes ::IncidentManagement::Settings
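+#
+# Illustrative usage from a service spec (hypothetical service construction):
+#
+#   include_context 'incident management settings enabled'
+#
+#   let(:service) { described_class.new(project, payload) }
+#
+# The `auto_close_incident`, `create_issue`, and `send_email` lets defined in
+# this context can be overridden per example to simulate a disabled setting.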
+RSpec.shared_context 'incident management settings enabled' do
+ let(:auto_close_incident) { true }
+ let(:create_issue) { true }
+ let(:send_email) { true }
+
+ let(:incident_management_setting) do
+ double(
+ auto_close_incident?: auto_close_incident,
+ create_issue?: create_issue,
+ send_email?: send_email
)
end
-end
-RSpec.shared_examples 'does not an create alert management alert' do
- it 'does not create alert' do
- expect { subject }.not_to change(AlertManagement::Alert, :count)
+ before do
+ allow(ProjectServiceWorker).to receive(:perform_async)
+ allow(service)
+ .to receive(:incident_management_setting)
+ .and_return(incident_management_setting)
end
end
-RSpec.shared_examples 'adds an alert management alert event' do
- it { is_expected.to be_success }
-
- it 'does not create an alert' do
- expect { subject }.not_to change(AlertManagement::Alert, :count)
- end
-
- it 'increases alert events count' do
- expect { subject }.to change { alert.reload.events }.by(1)
- end
-
- it 'does not executes the alert service hooks' do
- expect(alert).not_to receive(:execute_services)
-
- subject
- end
+RSpec.shared_examples 'processes never-before-seen alert' do
+ it_behaves_like 'creates an alert management alert or errors'
+ it_behaves_like 'creates expected system notes for alert', :new_alert
+ it_behaves_like 'processes incident issues if enabled'
+ it_behaves_like 'sends alert notification emails if enabled'
end
-RSpec.shared_examples 'processes incident issues' do
- let(:create_incident_service) { spy }
-
- before do
- allow_any_instance_of(AlertManagement::Alert).to receive(:execute_services)
+RSpec.shared_examples 'processes never-before-seen recovery alert' do
+ it_behaves_like 'creates an alert management alert or errors'
+ it_behaves_like 'creates expected system notes for alert', :new_alert, :recovery_alert, :resolve_alert
+ it_behaves_like 'sends alert notification emails if enabled'
+ it_behaves_like 'does not process incident issues'
+ it_behaves_like 'writes a warning to the log for a failed alert status update' do
+ let(:alert) { nil } # Ensure the next alert id is used
end
- it 'processes issues' do
- expect(IncidentManagement::ProcessAlertWorker)
- .to receive(:perform_async)
- .with(nil, nil, kind_of(Integer))
- .once
+ it 'resolves the alert' do
+ subject
- Sidekiq::Testing.inline! do
- expect(subject).to be_success
- end
+ expect(AlertManagement::Alert.last.ended_at).to be_present
+ expect(AlertManagement::Alert.last.resolved?).to be(true)
end
end
-RSpec.shared_examples 'does not process incident issues' do
- it 'does not process issues' do
- expect(IncidentManagement::ProcessAlertWorker)
- .not_to receive(:perform_async)
+RSpec.shared_examples 'processes one firing and one resolved prometheus alerts' do
+ it 'creates AlertManagement::Alert' do
+ expect(Gitlab::AppLogger).not_to receive(:warn)
- expect(subject).to be_success
+ expect { subject }
+ .to change(AlertManagement::Alert, :count).by(2)
+ .and change(Note, :count).by(4)
end
-end
-
-RSpec.shared_examples 'does not process incident issues due to error' do |http_status:|
- it 'does not process issues' do
- expect(IncidentManagement::ProcessAlertWorker)
- .not_to receive(:perform_async)
- expect(subject).to be_error
- expect(subject.http_status).to eq(http_status)
- end
+ it_behaves_like 'processes incident issues'
+ it_behaves_like 'sends alert notification emails', count: 2
end
diff --git a/spec/support/shared_examples/services/boards/boards_recent_visit_shared_examples.rb b/spec/support/shared_examples/services/boards/boards_recent_visit_shared_examples.rb
new file mode 100644
index 00000000000..68ea460dabc
--- /dev/null
+++ b/spec/support/shared_examples/services/boards/boards_recent_visit_shared_examples.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'boards recent visit' do
+ let_it_be(:user) { create(:user) }
+
+ describe '#visited' do
+ it 'creates a visit if one does not exist' do
+ expect { described_class.visited!(user, board) }.to change(described_class, :count).by(1)
+ end
+
+ shared_examples 'was visited previously' do
+ let_it_be(:visit) do
+ create(visit_relation,
+ board_parent_relation => board_parent,
+ board_relation => board,
+ user: user,
+ updated_at: 7.days.ago
+ )
+ end
+
+ it 'updates the timestamp' do
+ freeze_time do
+ described_class.visited!(user, board)
+
+ expect(described_class.count).to eq 1
+ expect(described_class.first.updated_at).to be_like_time(Time.zone.now)
+ end
+ end
+ end
+
+ it_behaves_like 'was visited previously'
+
+ context 'when we try to create a visit that is not unique' do
+ before do
+ expect(described_class).to receive(:find_or_create_by).and_raise(ActiveRecord::RecordNotUnique, 'record not unique')
+ expect(described_class).to receive(:find_or_create_by).and_return(visit)
+ end
+
+ it_behaves_like 'was visited previously'
+ end
+ end
+
+ describe '#latest' do
+ def create_visit(time)
+ create(visit_relation, board_parent_relation => board_parent, user: user, updated_at: time)
+ end
+
+ it 'returns the most recent visit' do
+ create_visit(7.days.ago)
+ create_visit(5.days.ago)
+ recent = create_visit(1.day.ago)
+
+ expect(described_class.latest(user, board_parent)).to eq recent
+ end
+
+ it 'returns last 3 visited boards' do
+ create_visit(7.days.ago)
+ visit1 = create_visit(3.days.ago)
+ visit2 = create_visit(2.days.ago)
+ visit3 = create_visit(5.days.ago)
+
+ expect(described_class.latest(user, board_parent, count: 3)).to eq([visit2, visit1, visit3])
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/boards/create_service_shared_examples.rb b/spec/support/shared_examples/services/boards/create_service_shared_examples.rb
new file mode 100644
index 00000000000..63b5e3a5a84
--- /dev/null
+++ b/spec/support/shared_examples/services/boards/create_service_shared_examples.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'boards recent visit create service' do
+ let_it_be(:user) { create(:user) }
+
+ subject(:service) { described_class.new(board.resource_parent, user) }
+
+ it 'returns nil when there is no user' do
+ service.current_user = nil
+
+ expect(service.execute(board)).to be_nil
+ end
+
+ it 'returns nil when database is read-only' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect(service.execute(board)).to be_nil
+ end
+
+ it 'records the visit' do
+ expect(model).to receive(:visited!).once
+
+ service.execute(board)
+ end
+end
diff --git a/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
index 4aa5d7d890b..7d4fbeea0dc 100644
--- a/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/issues_move_service_shared_examples.rb
@@ -146,7 +146,7 @@ RSpec.shared_examples 'issues move service' do |group|
params.merge!(move_after_id: issue1.id, move_before_id: issue2.id)
match_params = { move_between_ids: [issue1.id, issue2.id], board_group_id: parent.id }
- expect(Issues::UpdateService).to receive(:new).with(issue.project, user, match_params).and_return(double(execute: build(:issue)))
+ expect(Issues::UpdateService).to receive(:new).with(project: issue.project, current_user: user, params: match_params).and_return(double(execute: build(:issue)))
described_class.new(parent, user, params).execute(issue)
end
diff --git a/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb b/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb
index 94da405e491..af88644ced7 100644
--- a/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb
@@ -3,30 +3,27 @@
RSpec.shared_examples 'lists destroy service' do
context 'when list type is label' do
it 'removes list from board' do
- list = create(:list, board: board)
service = described_class.new(parent, user)
expect { service.execute(list) }.to change(board.lists, :count).by(-1)
end
it 'decrements position of higher lists' do
- development = create(:list, board: board, position: 0)
- review = create(:list, board: board, position: 1)
- staging = create(:list, board: board, position: 2)
- closed = board.lists.closed.first
+ development = create(list_type, params.merge(position: 0))
+ review = create(list_type, params.merge(position: 1))
+ staging = create(list_type, params.merge(position: 2))
described_class.new(parent, user).execute(development)
expect(review.reload.position).to eq 0
expect(staging.reload.position).to eq 1
- expect(closed.reload.position).to be_nil
+ expect(closed_list.reload.position).to be_nil
end
end
it 'does not remove list from board when list type is closed' do
- list = board.lists.closed.first
service = described_class.new(parent, user)
- expect { service.execute(list) }.not_to change(board.lists, :count)
+ expect { service.execute(closed_list) }.not_to change(board.lists, :count)
end
end
diff --git a/spec/support/shared_examples/services/common_system_notes_shared_examples.rb b/spec/support/shared_examples/services/common_system_notes_shared_examples.rb
index 7b277d4bede..ce412ef55de 100644
--- a/spec/support/shared_examples/services/common_system_notes_shared_examples.rb
+++ b/spec/support/shared_examples/services/common_system_notes_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.shared_examples 'system note creation' do |update_params, note_text|
- subject { described_class.new(project, user).execute(issuable, old_labels: []) }
+ subject { described_class.new(project: project, current_user: user).execute(issuable, old_labels: []) }
before do
issuable.assign_attributes(update_params)
@@ -18,7 +18,7 @@ RSpec.shared_examples 'system note creation' do |update_params, note_text|
end
RSpec.shared_examples 'draft notes creation' do |action|
- subject { described_class.new(project, user).execute(issuable, old_labels: []) }
+ subject { described_class.new(project: project, current_user: user).execute(issuable, old_labels: []) }
it 'creates Draft toggle and title change notes' do
expect { subject }.to change { Note.count }.from(0).to(2)
diff --git a/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb b/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb
new file mode 100644
index 00000000000..d2b52468c25
--- /dev/null
+++ b/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples_for 'service deleting label links of an issuable' do
+ let_it_be(:label_link) { create(:label_link, target: target) }
+
+ def execute
+ described_class.new(target.id, target.class.name).execute
+ end
+
+ it 'deletes label links for specified target ID and type' do
+ control_count = ActiveRecord::QueryRecorder.new { execute }.count
+
+ # Create more label links for the target
+ create(:label_link, target: target)
+ create(:label_link, target: target)
+
+ expect { execute }.not_to exceed_query_limit(control_count)
+ expect(target.reload.label_links.count).to eq(0)
+ end
+end
diff --git a/spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb b/spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb
index ccc287c10de..e776c098fa0 100644
--- a/spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb
@@ -1,10 +1,6 @@
# frozen_string_literal: true
shared_examples_for 'service deleting todos' do
- before do
- stub_feature_flags(destroy_issuable_todos_async: group)
- end
-
it 'destroys associated todos asynchronously' do
expect(TodosDestroyer::DestroyedIssuableWorker)
.to receive(:perform_async)
@@ -12,20 +8,14 @@ shared_examples_for 'service deleting todos' do
subject.execute(issuable)
end
+end
- context 'when destroy_issuable_todos_async feature is disabled for group' do
- before do
- stub_feature_flags(destroy_issuable_todos_async: false)
- end
-
- it 'destroy associated todos synchronously' do
- expect_next_instance_of(TodosDestroyer::DestroyedIssuableWorker) do |worker|
- expect(worker)
- .to receive(:perform)
- .with(issuable.id, issuable.class.name)
- end
+shared_examples_for 'service deleting label links' do
+ it 'destroys associated label links asynchronously' do
+ expect(Issuable::LabelLinksDestroyWorker)
+ .to receive(:perform_async)
+ .with(issuable.id, issuable.class.name)
- subject.execute(issuable)
- end
+ subject.execute(issuable)
end
end
diff --git a/spec/support/shared_examples/services/issuable_shared_examples.rb b/spec/support/shared_examples/services/issuable_shared_examples.rb
index 5b3e0f9e0b9..a50a386afe1 100644
--- a/spec/support/shared_examples/services/issuable_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable_shared_examples.rb
@@ -4,14 +4,14 @@ RSpec.shared_examples 'cache counters invalidator' do
it 'invalidates counter cache for assignees' do
expect_any_instance_of(User).to receive(:invalidate_merge_request_cache_counts)
- described_class.new(project, user, {}).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
end
RSpec.shared_examples 'updating a single task' do
def update_issuable(opts)
issuable = try(:issue) || try(:merge_request)
- described_class.new(project, user, opts).execute(issuable)
+ described_class.new(project: project, current_user: user, params: opts).execute(issuable)
end
before do
diff --git a/spec/support/shared_examples/services/merge_request_shared_examples.rb b/spec/support/shared_examples/services/merge_request_shared_examples.rb
index 178b6bc47e1..d2595b92cbc 100644
--- a/spec/support/shared_examples/services/merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/services/merge_request_shared_examples.rb
@@ -70,7 +70,7 @@ RSpec.shared_examples 'merge request reviewers cache counters invalidator' do
it 'invalidates counter cache for reviewers' do
expect(merge_request.reviewers).to all(receive(:invalidate_merge_request_cache_counts))
- described_class.new(project, user, {}).execute(merge_request)
+ described_class.new(project: project, current_user: user).execute(merge_request)
end
end
@@ -86,7 +86,7 @@ RSpec.shared_examples_for 'a service that can create a merge request' do
context 'when project has been forked', :sidekiq_might_not_need_inline do
let(:forked_project) { fork_project(project, user1, repository: true) }
- let(:service) { described_class.new(forked_project, user1, changes, push_options) }
+ let(:service) { described_class.new(project: forked_project, current_user: user1, changes: changes, push_options: push_options) }
before do
allow(forked_project).to receive(:empty_repo?).and_return(false)
diff --git a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
index b6c33eac7b4..4df12f7849b 100644
--- a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
+++ b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
@@ -12,13 +12,22 @@ RSpec.shared_examples 'misconfigured dashboard service response' do |status_code
end
RSpec.shared_examples 'valid dashboard service response for schema' do
+ file_ref_resolver = proc do |uri|
+ file = Rails.root.join(uri.path)
+ raise StandardError, "Ref file #{uri.path} must be json" unless uri.path.ends_with?('.json')
+ raise StandardError, "File #{file.to_path} doesn't exists" unless file.exist?
+
+ Gitlab::Json.parse(File.read(file))
+ end
+
it 'returns a json representation of the dashboard' do
result = service_call
expect(result.keys).to contain_exactly(:dashboard, :status)
expect(result[:status]).to eq(:success)
- expect(JSON::Validator.fully_validate(dashboard_schema, result[:dashboard])).to be_empty
+ validator = JSONSchemer.schema(dashboard_schema, ref_resolver: file_ref_resolver)
+ expect(validator.valid?(result[:dashboard].with_indifferent_access)).to be true
end
end
diff --git a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
index 8398dd3c453..f7a6bd3676a 100644
--- a/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
+++ b/spec/support/shared_examples/services/namespace_package_settings_shared_examples.rb
@@ -7,6 +7,8 @@ RSpec.shared_examples 'updating the namespace package setting attributes' do |fr
expect { subject }
.to change { namespace.package_settings.reload.maven_duplicates_allowed }.from(from[:maven_duplicates_allowed]).to(to[:maven_duplicates_allowed])
.and change { namespace.package_settings.reload.maven_duplicate_exception_regex }.from(from[:maven_duplicate_exception_regex]).to(to[:maven_duplicate_exception_regex])
+ .and change { namespace.package_settings.reload.generic_duplicates_allowed }.from(from[:generic_duplicates_allowed]).to(to[:generic_duplicates_allowed])
+ .and change { namespace.package_settings.reload.generic_duplicate_exception_regex }.from(from[:generic_duplicate_exception_regex]).to(to[:generic_duplicate_exception_regex])
end
end
@@ -26,6 +28,8 @@ RSpec.shared_examples 'creating the namespace package setting' do
expect(namespace.package_setting_relation.maven_duplicates_allowed).to eq(package_settings[:maven_duplicates_allowed])
expect(namespace.package_setting_relation.maven_duplicate_exception_regex).to eq(package_settings[:maven_duplicate_exception_regex])
+ expect(namespace.package_setting_relation.generic_duplicates_allowed).to eq(package_settings[:generic_duplicates_allowed])
+ expect(namespace.package_setting_relation.generic_duplicate_exception_regex).to eq(package_settings[:generic_duplicate_exception_regex])
end
it_behaves_like 'returning a success'
diff --git a/spec/support/shared_examples/services/packages_shared_examples.rb b/spec/support/shared_examples/services/packages_shared_examples.rb
index 4e34c191306..72878e925dc 100644
--- a/spec/support/shared_examples/services/packages_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages_shared_examples.rb
@@ -203,7 +203,9 @@ RSpec.shared_examples 'filters on each package_type' do |is_project: false|
let_it_be(:package7) { create(:generic_package, project: project) }
let_it_be(:package8) { create(:golang_package, project: project) }
let_it_be(:package9) { create(:debian_package, project: project) }
- let_it_be(:package9) { create(:rubygems_package, project: project) }
+ let_it_be(:package10) { create(:rubygems_package, project: project) }
+ let_it_be(:package11) { create(:helm_package, project: project) }
+ let_it_be(:package12) { create(:terraform_module_package, project: project) }
Packages::Package.package_types.keys.each do |package_type|
context "for package type #{package_type}" do
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
index 1fb1b9f79b2..275ddebc18c 100644
--- a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -47,7 +47,7 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
expect(original_repository_double).to receive(:remove)
end
- it "moves the project and its #{repository_type} repository to the new storage and unmarks the repository as read only" do
+ it "moves the project and its #{repository_type} repository to the new storage and unmarks the repository as read-only" do
old_project_repository_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
project.repository.path_to_repo
end
diff --git a/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb b/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb
index e67fc4ab04a..97304680316 100644
--- a/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb
+++ b/spec/support/shared_examples/services/schedule_bulk_repository_shard_moves_shared_examples.rb
@@ -27,7 +27,7 @@ RSpec.shared_examples 'moves repository shard in bulk' do
container.set_repository_read_only!
expect(subject).to receive(:log_info)
- .with(/Container #{container.full_path} \(#{container.id}\) was skipped: #{container.class} is read only/)
+ .with(/Container #{container.full_path} \(#{container.id}\) was skipped: #{container.class} is read-only/)
expect { subject.execute(source_storage_name, destination_storage_name) }
.to change(move_service_klass, :count).by(0)
end
diff --git a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
new file mode 100644
index 00000000000..538fd2bb513
--- /dev/null
+++ b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples_for 'services security ci configuration create service' do |skip_w_params|
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ describe '#execute' do
+ let(:params) { {} }
+
+ context 'user does not belong to project' do
+ it 'returns an error status' do
+ expect(result.status).to eq(:error)
+ expect(result.payload[:success_path]).to be_nil
+ end
+
+ it 'does not track a snowplow event' do
+ subject
+
+ expect_no_snowplow_event
+ end
+ end
+
+ context 'user belongs to project' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'does track the snowplow event' do
+ subject
+
+ expect_snowplow_event(**snowplow_event)
+ end
+
+ it 'raises exception if the user does not have permission to create a new branch' do
+ allow(project).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "You are not allowed to create protected branches on this project.")
+
+ expect { subject }.to raise_error(Gitlab::Git::PreReceiveError)
+ end
+
+ context 'when exception is raised' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ before do
+ allow(project.repository).to receive(:add_branch).and_raise(StandardError, "The unexpected happened!")
+ end
+
+ context 'when branch was created' do
+ before do
+ allow(project.repository).to receive(:branch_exists?).and_return(true)
+ end
+
+ it 'tries to rm branch' do
+ expect(project.repository).to receive(:rm_branch).with(user, branch_name)
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+
+ context 'when branch was not created' do
+ before do
+ allow(project.repository).to receive(:branch_exists?).and_return(false)
+ end
+
+ it 'does not try to rm branch' do
+ expect(project.repository).not_to receive(:rm_branch)
+ expect { subject }.to raise_error(StandardError)
+ end
+ end
+ end
+
+ context 'with no parameters' do
+ it 'returns the path to create a new merge request' do
+ expect(result.status).to eq(:success)
+ expect(result.payload[:success_path]).to match(/#{Gitlab::Routing.url_helpers.project_new_merge_request_url(project, {})}(.*)description(.*)source_branch/)
+ end
+ end
+
+ unless skip_w_params
+ context 'with parameters' do
+ let(:params) { non_empty_params }
+
+ it 'returns the path to create a new merge request' do
+ expect(result.status).to eq(:success)
+ expect(result.payload[:success_path]).to match(/#{Gitlab::Routing.url_helpers.project_new_merge_request_url(project, {})}(.*)description(.*)source_branch/)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/updating_mentions_shared_examples.rb b/spec/support/shared_examples/services/updating_mentions_shared_examples.rb
index 84f6c4d136a..13a2aa9ddac 100644
--- a/spec/support/shared_examples/services/updating_mentions_shared_examples.rb
+++ b/spec/support/shared_examples/services/updating_mentions_shared_examples.rb
@@ -15,7 +15,7 @@ RSpec.shared_examples 'updating mentions' do |service_class|
def update_mentionable(opts)
perform_enqueued_jobs do
- service_class.new(project, user, opts).execute(mentionable)
+ service_class.new(project: project, current_user: user, params: opts).execute(mentionable)
end
mentionable.reload
diff --git a/spec/support/stored_repositories.rb b/spec/support/stored_repositories.rb
index 95f0f971787..84396c675b9 100644
--- a/spec/support/stored_repositories.rb
+++ b/spec/support/stored_repositories.rb
@@ -3,7 +3,7 @@
RSpec.configure do |config|
config.before(:each, :broken_storage) do
allow(Gitlab::GitalyClient).to receive(:call) do
- raise GRPC::Unavailable.new('Gitaly broken in this spec')
+ raise GRPC::Unavailable, 'Gitaly broken in this spec'
end
end
end
diff --git a/spec/support/stub_languages_translation_percentage.rb b/spec/support/stub_languages_translation_percentage.rb
new file mode 100644
index 00000000000..a93316288b9
--- /dev/null
+++ b/spec/support/stub_languages_translation_percentage.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module StubLanguagesTranslationPercentage
+ # Stubs the translation percentage of the i18n languages
+ # - When a `blank?` list is given, no stubbing is done.
+ # - When the list is not empty, the languages in the list
+ # are stubbed with the given values; any other language
+ # will have its translation percentage set to 0.
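+ #
+ # Example (hypothetical percentages):
+ #
+ #   stub_languages_translation_percentage(es: 65, fr: 90)
+ #   Gitlab::I18n.percentage_translated_for('es') # => 65
+ #   Gitlab::I18n.percentage_translated_for('de') # => 0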
+ def stub_languages_translation_percentage(list = {})
+ return if list.blank?
+
+ expect(Gitlab::I18n)
+ .to receive(:percentage_translated_for)
+ .at_least(:once)
+ .and_wrap_original do |_original, code|
+ list.with_indifferent_access[code].to_i
+ end
+ end
+end
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index d1f4a12d8fc..c4623061944 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -298,15 +298,15 @@ RSpec.describe 'gitlab:db namespace rake task' do
end
describe '#migrate_with_instrumentation' do
- subject { run_rake_task('gitlab:db:migration_testing', "[#{filename}]") }
+ subject { run_rake_task('gitlab:db:migration_testing') }
let(:ctx) { double('ctx', migrations: all_migrations, schema_migration: double, get_all_versions: existing_versions) }
let(:instrumentation) { instance_double(Gitlab::Database::Migrations::Instrumentation, observations: observations) }
let(:existing_versions) { [1] }
let(:all_migrations) { [double('migration1', version: 1), pending_migration] }
let(:pending_migration) { double('migration2', version: 2) }
- let(:filename) { 'results-file.json'}
- let(:buffer) { StringIO.new }
+ let(:filename) { Gitlab::Database::Migrations::Instrumentation::STATS_FILENAME }
+ let!(:directory) { Dir.mktmpdir }
let(:observations) { %w[some data] }
before do
@@ -316,17 +316,19 @@ RSpec.describe 'gitlab:db namespace rake task' do
allow(instrumentation).to receive(:observe).and_yield
- allow(File).to receive(:open).with(filename, 'wb+').and_yield(buffer)
+ allow(Dir).to receive(:mkdir)
+ allow(File).to receive(:exist?).with(directory).and_return(false)
+ stub_const('Gitlab::Database::Migrations::Instrumentation::RESULT_DIR', directory)
end
- it 'fails when given no filename argument' do
- expect { run_rake_task('gitlab:db:migration_testing') }.to raise_error(/specify result_file/)
+ after do
+ FileUtils.rm_rf([directory])
end
- it 'fails when the given file already exists' do
- expect(File).to receive(:exist?).with(filename).and_return(true)
+ it 'fails when the directory already exists' do
+ expect(File).to receive(:exist?).with(directory).and_return(true)
- expect { subject }.to raise_error(/File exists/)
+ expect { subject }.to raise_error(/Directory exists/)
end
it 'instruments the pending migration' do
@@ -344,7 +346,27 @@ RSpec.describe 'gitlab:db namespace rake task' do
it 'writes observations out to JSON file' do
subject
- expect(buffer.string).to eq(observations.to_json)
+ expect(File.read(File.join(directory, filename))).to eq(observations.to_json)
+ end
+ end
+
+ describe '#execute_batched_migrations' do
+ subject { run_rake_task('gitlab:db:execute_batched_migrations') }
+
+ let(:migrations) { create_list(:batched_background_migration, 2) }
+ let(:runner) { instance_double('Gitlab::Database::BackgroundMigration::BatchedMigrationRunner') }
+
+ before do
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive_message_chain(:active, :queue_order).and_return(migrations)
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner).to receive(:new).and_return(runner)
+ end
+
+ it 'executes all migrations' do
+ migrations.each do |migration|
+ expect(runner).to receive(:run_entire_migration).with(migration)
+ end
+
+ subject
end
end
diff --git a/spec/tasks/gitlab/sidekiq_rake_spec.rb b/spec/tasks/gitlab/sidekiq_rake_spec.rb
new file mode 100644
index 00000000000..61a8aecfa61
--- /dev/null
+++ b/spec/tasks/gitlab/sidekiq_rake_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'sidekiq.rake', :aggregate_failures do
+ before do
+ Rake.application.rake_require 'tasks/gitlab/sidekiq'
+
+ stub_warn_user_is_not_gitlab
+ end
+
+ shared_examples 'migration rake task' do
+ it 'runs the migrator with a mapping of workers to queues' do
+ test_routes = [
+ ['urgency=high', 'default'],
+ ['*', nil]
+ ]
+
+ test_router = ::Gitlab::SidekiqConfig::WorkerRouter.new(test_routes)
+ migrator = ::Gitlab::SidekiqMigrateJobs.new(sidekiq_set, logger: Logger.new($stdout))
+
+ allow(::Gitlab::SidekiqConfig::WorkerRouter)
+ .to receive(:global).and_return(test_router)
+
+ expect(::Gitlab::SidekiqMigrateJobs)
+ .to receive(:new).with(sidekiq_set, logger: an_instance_of(Logger)).and_return(migrator)
+
+ expect(migrator)
+ .to receive(:execute)
+ .with(a_hash_including('PostReceive' => 'default',
+ 'MergeWorker' => 'default',
+ 'DeleteDiffFilesWorker' => 'delete_diff_files'))
+ .and_call_original
+
+ run_rake_task("gitlab:sidekiq:migrate_jobs:#{sidekiq_set}")
+
+ expect($stdout.string).to include("Processing #{sidekiq_set}")
+ expect($stdout.string).to include('Done')
+ end
+ end
+
+ describe 'gitlab:sidekiq:migrate_jobs:schedule rake task' do
+ let(:sidekiq_set) { 'schedule' }
+
+ it_behaves_like 'migration rake task'
+ end
+
+ describe 'gitlab:sidekiq:migrate_jobs:retry rake task' do
+ let(:sidekiq_set) { 'retry' }
+
+ it_behaves_like 'migration rake task'
+ end
+end
diff --git a/spec/tooling/danger/changelog_spec.rb b/spec/tooling/danger/changelog_spec.rb
index 7ea2288fd45..0a6af3ea798 100644
--- a/spec/tooling/danger/changelog_spec.rb
+++ b/spec/tooling/danger/changelog_spec.rb
@@ -18,6 +18,204 @@ RSpec.describe Tooling::Danger::Changelog do
allow(changelog).to receive(:project_helper).and_return(fake_project_helper)
end
+ describe '#check_changelog_trailer' do
+ subject { changelog.check_changelog_trailer(commit) }
+
+ context "when commit doesn't include a changelog trailer" do
+ let(:commit) { double('commit', message: "Hello world") }
+
+ it { is_expected.to be_nil }
+ end
+
+ context "when commit include a changelog trailer with no category" do
+ let(:commit) { double('commit', message: "Hello world\n\nChangelog:") }
+
+ it { is_expected.to be_nil }
+ end
+
+ context "when commit include a changelog trailer with an unknown category" do
+ let(:commit) { double('commit', message: "Hello world\n\nChangelog: foo", sha: "abc123") }
+
+ it { is_expected.to have_attributes(errors: ["Commit #{commit.sha} uses an invalid changelog category: foo"]) }
+ end
+
+ described_class::CATEGORIES.each do |category|
+ context "when commit include a changelog trailer with category set to '#{category}'" do
+ let(:commit) { double('commit', message: "Hello world\n\nChangelog: #{category}", sha: "abc123") }
+
+ it { is_expected.to have_attributes(errors: []) }
+ end
+ end
+ end
+
+ describe '#check_changelog_yaml' do
+ let(:changelog_path) { 'ee/changelogs/unreleased/entry.yml' }
+ let(:changes) { changes_class.new([change_class.new(changelog_path, :added, :changelog)]) }
+ let(:yaml_title) { 'Fix changelog Dangerfile to convert MR IID to a string before comparison' }
+ let(:yaml_merge_request) { 60899 }
+ let(:mr_iid) { '60899' }
+ let(:yaml_type) { 'fixed' }
+ let(:yaml) do
+ <<~YAML
+ ---
+ title: #{yaml_title}
+ merge_request: #{yaml_merge_request}
+ author:
+ type: #{yaml_type}
+ YAML
+ end
+
+ before do
+ allow(changelog).to receive(:present?).and_return(true)
+ allow(changelog).to receive(:changelog_path).and_return(changelog_path)
+ allow(changelog).to receive(:read_file).with(changelog_path).and_return(yaml)
+ allow(fake_helper).to receive(:security_mr?).and_return(false)
+ allow(fake_helper).to receive(:mr_iid).and_return(mr_iid)
+ allow(fake_helper).to receive(:cherry_pick_mr?).and_return(false)
+ allow(fake_helper).to receive(:stable_branch?).and_return(false)
+ allow(fake_helper).to receive(:html_link).with(changelog_path).and_return(changelog_path)
+ end
+
+ subject { changelog.check_changelog_yaml }
+
+ context "when changelog is not present" do
+ before do
+ allow(changelog).to receive(:present?).and_return(false)
+ end
+
+ it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
+ end
+
+ context "when YAML is invalid" do
+ let(:yaml) { '{ foo bar]' }
+
+ it { is_expected.to have_attributes(errors: ["#{changelog_path} isn't valid YAML! #{described_class::SEE_DOC}"]) }
+ end
+
+ context "when a StandardError is raised" do
+ before do
+ allow(changelog).to receive(:read_file).and_raise(StandardError, "Fail!")
+ end
+
+ it { is_expected.to have_attributes(warnings: ["There was a problem trying to check the Changelog. Exception: StandardError - Fail!"]) }
+ end
+
+ context "when YAML title is nil" do
+ let(:yaml_title) { '' }
+
+ it { is_expected.to have_attributes(errors: ["`title` should be set, in #{changelog_path}! #{described_class::SEE_DOC}"]) }
+ end
+
+ context "when YAML type is nil" do
+ let(:yaml_type) { '' }
+
+ it { is_expected.to have_attributes(errors: ["`type` should be set, in #{changelog_path}! #{described_class::SEE_DOC}"]) }
+ end
+
+ context "when on a security MR" do
+ let(:yaml_merge_request) { '' }
+
+ before do
+ allow(fake_helper).to receive(:security_mr?).and_return(true)
+ end
+
+ it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
+ end
+
+ context "when MR IID is empty" do
+ before do
+ allow(fake_helper).to receive(:mr_iid).and_return("")
+ end
+
+ it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
+ end
+
+ context "when YAML MR IID is empty" do
+ let(:yaml_merge_request) { '' }
+
+ context "and YAML includes a merge_request: line" do
+ it { is_expected.to have_attributes(markdowns: [{ msg: format(described_class::SUGGEST_MR_COMMENT, mr_iid: fake_helper.mr_iid), file: changelog_path, line: 3 }]) }
+ end
+
+ context "and YAML does not include a merge_request: line" do
+ let(:yaml) do
+ <<~YAML
+ ---
+ title: #{yaml_title}
+ author:
+ type: #{yaml_type}
+ YAML
+ end
+
+ it { is_expected.to have_attributes(messages: ["Consider setting `merge_request` to #{mr_iid} in #{changelog_path}. #{described_class::SEE_DOC}"]) }
+ end
+ end
+ end
+
+ describe '#check_changelog_path' do
+ let(:changelog_path) { 'changelog-path.yml' }
+ let(:foss_change) { nil }
+ let(:ee_change) { nil }
+ let(:changelog_change) { nil }
+ let(:changes) { changes_class.new([foss_change, ee_change, changelog_change].compact) }
+
+ before do
+ allow(changelog).to receive(:present?).and_return(true)
+ end
+
+ subject { changelog.check_changelog_path }
+
+ context "when changelog is not present" do
+ before do
+ allow(changelog).to receive(:present?).and_return(false)
+ end
+
+ it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
+ end
+
+ context "with EE changes" do
+ let(:ee_change) { change_class.new('ee/app/models/foo.rb', :added, :backend) }
+
+ context "and a non-EE changelog, and changelog not required" do
+ let(:changelog_change) { change_class.new('changelogs/unreleased/entry.yml', :added, :changelog) }
+
+ before do
+ allow(changelog).to receive(:required?).and_return(false)
+ end
+
+ it { is_expected.to have_attributes(warnings: ["This MR has a Changelog file outside `ee/`, but code changes in `ee/`. Consider moving the Changelog file into `ee/`."]) }
+ end
+
+ context "and a EE changelog" do
+ let(:changelog_change) { change_class.new('ee/changelogs/unreleased/entry.yml', :added, :changelog) }
+
+ it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
+
+ context "and there are DB changes" do
+ let(:foss_change) { change_class.new('db/migrate/foo.rb', :added, :migration) }
+
+ it { is_expected.to have_attributes(warnings: ["This MR has a Changelog file inside `ee/`, but there are database changes which [requires](https://docs.gitlab.com/ee/development/changelog.html#what-warrants-a-changelog-entry) the Changelog placement to be outside of `ee/`. Consider moving the Changelog file outside `ee/`."]) }
+ end
+ end
+ end
+
+ context "with no EE changes" do
+ let(:foss_change) { change_class.new('app/models/foo.rb', :added, :backend) }
+
+ context "and a non-EE changelog" do
+ let(:changelog_change) { change_class.new('changelogs/unreleased/entry.yml', :added, :changelog) }
+
+ it { is_expected.to have_attributes(errors: [], warnings: [], markdowns: [], messages: []) }
+ end
+
+ context "and a EE changelog" do
+ let(:changelog_change) { change_class.new('ee/changelogs/unreleased/entry.yml', :added, :changelog) }
+
+ it { is_expected.to have_attributes(warnings: ["This MR has a Changelog file in `ee/`, but no code changes in `ee/`. Consider moving the Changelog file outside `ee/`."]) }
+ end
+ end
+ end
+
describe '#required_reasons' do
subject { changelog.required_reasons }
@@ -126,8 +324,8 @@ RSpec.describe Tooling::Danger::Changelog do
end
end
- describe '#found' do
- subject { changelog.found }
+ describe '#present?' do
+ subject { changelog.present? }
context 'added files contain a changelog' do
let(:changes) { changes_class.new([change_class.new('foo', :added, :changelog)]) }
@@ -138,7 +336,7 @@ RSpec.describe Tooling::Danger::Changelog do
context 'added files do not contain a changelog' do
let(:changes) { changes_class.new([change_class.new('foo', :added, :backend)]) }
- it { is_expected.to eq(nil) }
+ it { is_expected.to be_falsy }
end
end
@@ -158,6 +356,22 @@ RSpec.describe Tooling::Danger::Changelog do
end
end
+ describe '#changelog_path' do
+ subject { changelog.changelog_path }
+
+ context 'added files contain a changelog' do
+ let(:changes) { changes_class.new([change_class.new('foo', :added, :changelog)]) }
+
+ it { is_expected.to eq('foo') }
+ end
+
+ context 'added files do not contain a changelog' do
+ let(:changes) { changes_class.new([change_class.new('foo', :added, :backend)]) }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe '#modified_text' do
subject { changelog.modified_text }
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index 5d106f08402..1d2ea0f5ba3 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -220,7 +220,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
describe '.local_warning_message' do
it 'returns an informational message with rules that can run' do
- expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changelog, changes_size, commit_messages, database, datateam, documentation, duplicate_yarn_dependencies, eslint, karma, pajamas, pipeline, prettier, product_intelligence, utility_css')
+ expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changelog, commit_messages, database, datateam, documentation, duplicate_yarn_dependencies, eslint, karma, pajamas, pipeline, prettier, product_intelligence, utility_css')
end
end
@@ -256,7 +256,9 @@ RSpec.describe Tooling::Danger::ProjectHelper do
subject { project_helper.all_ee_changes }
it 'returns all changed files starting with ee/' do
- expect(fake_helper).to receive(:all_changed_files).and_return(%w[fr/ee/beer.rb ee/wine.rb ee/lib/ido.rb ee.k])
+ changes = double
+ expect(project_helper).to receive(:changes).and_return(changes)
+ expect(changes).to receive(:files).and_return(%w[fr/ee/beer.rb ee/wine.rb ee/lib/ido.rb ee.k])
is_expected.to match_array(%w[ee/wine.rb ee/lib/ido.rb])
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index b454b0ad8f8..a1d8695a8c9 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -441,22 +441,6 @@ RSpec.describe ObjectStorage do
end
end
- shared_examples 'extracts base filename' do
- it "returns true for ExtractsBase" do
- expect(subject[:FeatureFlagExtractBase]).to be true
- end
-
- context 'when workhorse_extract_filename_base is disabled' do
- before do
- stub_feature_flags(workhorse_extract_filename_base: false)
- end
-
- it "returns false for ExtractsBase" do
- expect(subject[:FeatureFlagExtractBase]).to be false
- end
- end
- end
-
shared_examples 'uses local storage' do
it_behaves_like 'returns the maximum size given' do
it "returns temporary path" do
@@ -518,7 +502,6 @@ RSpec.describe ObjectStorage do
end
it_behaves_like 'uses local storage'
- it_behaves_like 'extracts base filename'
end
context 'when object storage is enabled' do
@@ -526,8 +509,6 @@ RSpec.describe ObjectStorage do
allow(Gitlab.config.uploads.object_store).to receive(:enabled) { true }
end
- it_behaves_like 'extracts base filename'
-
context 'when direct upload is enabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { true }
diff --git a/spec/validators/addressable_url_validator_spec.rb b/spec/validators/addressable_url_validator_spec.rb
index 394ffc7bbea..ec3ee9aa500 100644
--- a/spec/validators/addressable_url_validator_spec.rb
+++ b/spec/validators/addressable_url_validator_spec.rb
@@ -19,18 +19,20 @@ RSpec.describe AddressableUrlValidator do
it 'returns error when url is nil' do
expect(validator.validate_each(badge, :link_url, nil)).to be_falsey
- expect(badge.errors.first[1]).to eq validator.options.fetch(:message)
+ expect(badge.errors.added?(:link_url, validator.options.fetch(:message))).to be true
end
it 'returns error when url is empty' do
expect(validator.validate_each(badge, :link_url, '')).to be_falsey
- expect(badge.errors.first[1]).to eq validator.options.fetch(:message)
+ expect(badge.errors.added?(:link_url, validator.options.fetch(:message))).to be true
end
it 'does not allow urls with CR or LF characters' do
aggregate_failures do
urls_with_CRLF.each do |url|
- expect(validator.validate_each(badge, :link_url, url)[0]).to eq 'is blocked: URI is invalid'
+ validator.validate_each(badge, :link_url, url)
+
+ expect(badge.errors.added?(:link_url, 'is blocked: URI is invalid')).to be true
end
end
end
@@ -113,7 +115,7 @@ RSpec.describe AddressableUrlValidator do
it 'does block nil url with provided error message' do
expect(validator.validate_each(badge, :link_url, nil)).to be_falsey
- expect(badge.errors.first[1]).to eq message
+ expect(badge.errors.added?(:link_url, message)).to be true
end
end
@@ -126,7 +128,7 @@ RSpec.describe AddressableUrlValidator do
subject
- expect(badge.errors.first[1]).to eq 'is not allowed due to: Only allowed schemes are http, https'
+ expect(badge.errors.added?(:link_url, 'is not allowed due to: Only allowed schemes are http, https')).to be true
end
end
diff --git a/spec/validators/array_members_validator_spec.rb b/spec/validators/array_members_validator_spec.rb
index ff8f0da7651..c6960925487 100644
--- a/spec/validators/array_members_validator_spec.rb
+++ b/spec/validators/array_members_validator_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe ArrayMembersValidator do
object = test_class.new(children: [])
expect(object.valid?).to be_falsey
- expect(object.errors.messages).to eql(children: ['should be an array of children objects'])
+ expect(object.errors.messages).to eq(children: ['should be an array of children objects'])
end
end
@@ -62,7 +62,7 @@ RSpec.describe ArrayMembersValidator do
object = test_class.new(children: [])
expect(object.valid?).to be_falsey
- expect(object.errors.messages).to eql(children: ['should be an array of test objects'])
+ expect(object.errors.messages).to eq(children: ['should be an array of test objects'])
end
end
end
diff --git a/spec/validators/devise_email_validator_spec.rb b/spec/validators/devise_email_validator_spec.rb
index 29a008f858a..64d11d4d963 100644
--- a/spec/validators/devise_email_validator_spec.rb
+++ b/spec/validators/devise_email_validator_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe DeviseEmailValidator do
subject
expect(user.errors).to be_present
- expect(user.errors.first[1]).to eq 'is invalid'
+ expect(user.errors.added?(:public_email)).to be true
end
it 'returns error when email is nil' do
@@ -40,7 +40,7 @@ RSpec.describe DeviseEmailValidator do
subject
expect(user.errors).to be_present
- expect(user.errors.first[1]).to eq 'is invalid'
+ expect(user.errors.added?(:public_email)).to be true
end
end
end
diff --git a/spec/validators/gitlab/utils/zoom_url_validator_spec.rb b/spec/validators/gitlab/utils/zoom_url_validator_spec.rb
index bc8236a2f5c..392d8b3a2fe 100644
--- a/spec/validators/gitlab/utils/zoom_url_validator_spec.rb
+++ b/spec/validators/gitlab/utils/zoom_url_validator_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::Utils::ZoomUrlValidator do
expect(zoom_meeting.valid?).to eq(false)
expect(zoom_meeting.errors).to be_present
- expect(zoom_meeting.errors.first[1]).to eq 'must contain one valid Zoom URL'
+ expect(zoom_meeting.errors.added?(:url, 'must contain one valid Zoom URL')).to be true
end
end
diff --git a/spec/validators/qualified_domain_array_validator_spec.rb b/spec/validators/qualified_domain_array_validator_spec.rb
index 865ecffe05a..b2b13d358c4 100644
--- a/spec/validators/qualified_domain_array_validator_spec.rb
+++ b/spec/validators/qualified_domain_array_validator_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe QualifiedDomainArrayValidator do
subject
expect(record.errors).to be_present
- expect(record.errors.first[1]).to eq('entries cannot be nil')
+ expect(record.errors.added?(:domain_array, "entries cannot be nil")).to be true
end
it 'allows when domain is valid' do
@@ -67,7 +67,7 @@ RSpec.describe QualifiedDomainArrayValidator do
subject
expect(record.errors).to be_present
- expect(record.errors.first[1]).to eq 'unicode domains should use IDNA encoding'
+ expect(record.errors.added?(:domain_array, 'unicode domains should use IDNA encoding')).to be true
end
it 'returns error when entry is larger than 255 chars' do
@@ -76,7 +76,7 @@ RSpec.describe QualifiedDomainArrayValidator do
subject
expect(record.errors).to be_present
- expect(record.errors.first[1]).to eq 'entries cannot be larger than 255 characters'
+ expect(record.errors.added?(:domain_array, 'entries cannot be larger than 255 characters')).to be true
end
it 'returns error when entry contains HTML tags' do
@@ -85,7 +85,7 @@ RSpec.describe QualifiedDomainArrayValidator do
subject
expect(record.errors).to be_present
- expect(record.errors.first[1]).to eq 'entries cannot contain HTML tags'
+ expect(record.errors.added?(:domain_array, 'entries cannot contain HTML tags')).to be true
end
end
diff --git a/spec/views/admin/application_settings/repository.html.haml_spec.rb b/spec/views/admin/application_settings/repository.html.haml_spec.rb
index b110bc277ac..47cadd29e33 100644
--- a/spec/views/admin/application_settings/repository.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/repository.html.haml_spec.rb
@@ -12,35 +12,17 @@ RSpec.describe 'admin/application_settings/repository.html.haml' do
end
describe 'default initial branch name' do
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(global_default_branch_name: false)
- end
+ it 'has the setting section' do
+ render
- it 'does not show the setting section' do
- render
-
- expect(rendered).not_to have_css("#js-default-branch-name")
- end
+ expect(rendered).to have_css("#js-default-branch-name")
end
- context 'when the feature flag is enabled' do
- before do
- stub_feature_flags(global_default_branch_name: true)
- end
-
- it 'has the setting section' do
- render
-
- expect(rendered).to have_css("#js-default-branch-name")
- end
-
- it 'renders the correct setting section content' do
- render
+ it 'renders the correct setting section content' do
+ render
- expect(rendered).to have_content("Default initial branch name")
- expect(rendered).to have_content("Set the default name of the initial branch when creating new repositories through the user interface.")
- end
+ expect(rendered).to have_content("Default initial branch name")
+ expect(rendered).to have_content("Set the default name of the initial branch when creating new repositories through the user interface.")
end
end
end
diff --git a/spec/views/devise/shared/_signup_box.html.haml_spec.rb b/spec/views/devise/shared/_signup_box.html.haml_spec.rb
new file mode 100644
index 00000000000..b73e32fa765
--- /dev/null
+++ b/spec/views/devise/shared/_signup_box.html.haml_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'devise/shared/_signup_box' do
+ before do
+ stub_devise
+ allow(view).to receive(:show_omniauth_providers).and_return(false)
+ allow(view).to receive(:url).and_return('_url_')
+ allow(view).to receive(:terms_path).and_return('_terms_path_')
+ allow(view).to receive(:button_text).and_return('_button_text_')
+ allow(view).to receive(:suggestion_path).and_return('_suggestion_path_')
+ stub_template 'devise/shared/_error_messages.html.haml' => ''
+ end
+
+ context 'when terms are enforced' do
+ before do
+ allow(Gitlab::CurrentSettings.current_application_settings).to receive(:enforce_terms?).and_return(true)
+ end
+
+ it 'shows expected text with placeholders' do
+ render
+
+ expect(rendered).to have_content('By clicking _button_text_')
+ expect(rendered).to have_link('Terms of Use and Privacy Policy')
+ end
+
+ context 'when on .com' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ end
+
+ it 'shows expected GitLab text' do
+ render
+
+ expect(rendered).to have_content('I have read and accepted the GitLab Terms')
+ end
+ end
+
+ context 'when not on .com' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(false)
+ end
+
+ it 'shows expected text without GitLab' do
+ render
+
+ expect(rendered).to have_content('I have read and accepted the Terms')
+ end
+ end
+ end
+
+ context 'when terms are not enforced' do
+ before do
+ allow(Gitlab::CurrentSettings.current_application_settings).to receive(:enforce_terms?).and_return(false)
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return(true)
+ end
+
+ it 'shows expected text with placeholders' do
+ render
+
+ expect(rendered).not_to have_content('By clicking')
+ end
+ end
+
+ def stub_devise
+ allow(view).to receive(:devise_mapping).and_return(Devise.mappings[:user])
+ allow(view).to receive(:resource).and_return(spy)
+ allow(view).to receive(:resource_name).and_return(:user)
+ end
+end
diff --git a/spec/views/groups/show.html.haml_spec.rb b/spec/views/groups/show.html.haml_spec.rb
new file mode 100644
index 00000000000..f40b03fda2a
--- /dev/null
+++ b/spec/views/groups/show.html.haml_spec.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'groups/edit.html.haml' do
+ include Devise::Test::ControllerHelpers
+
+ describe '"Share with group lock" setting' do
+ let(:root_owner) { create(:user) }
+ let(:root_group) { create(:group) }
+
+ before do
+ root_group.add_owner(root_owner)
+ end
+
+ shared_examples_for '"Share with group lock" setting' do |checkbox_options|
+ it 'has the correct label, help text, and checkbox options' do
+ assign(:group, test_group)
+ allow(view).to receive(:can?).with(test_user, :admin_group, test_group).and_return(true)
+ allow(view).to receive(:can_change_group_visibility_level?).and_return(false)
+ allow(view).to receive(:current_user).and_return(test_user)
+ expect(view).to receive(:can_change_share_with_group_lock?).and_return(!checkbox_options[:disabled])
+ expect(view).to receive(:share_with_group_lock_help_text).and_return('help text here')
+
+ render
+
+ expect(rendered).to have_content("Prevent sharing a project within #{test_group.name} with other groups")
+ expect(rendered).to have_css('.js-descr', text: 'help text here')
+ expect(rendered).to have_field('group_share_with_group_lock', **checkbox_options)
+ end
+ end
+
+ context 'for a root group' do
+ let(:test_group) { root_group }
+ let(:test_user) { root_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: false, checked: false }
+ end
+
+ context 'for a subgroup' do
+ let!(:subgroup) { create(:group, parent: root_group) }
+ let(:sub_owner) { create(:user) }
+ let(:test_group) { subgroup }
+
+ context 'when the root_group has "Share with group lock" disabled' do
+ context 'when the subgroup has "Share with group lock" disabled' do
+ context 'as the root_owner' do
+ let(:test_user) { root_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: false, checked: false }
+ end
+
+ context 'as the sub_owner' do
+ let(:test_user) { sub_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: false, checked: false }
+ end
+ end
+
+ context 'when the subgroup has "Share with group lock" enabled' do
+ before do
+ subgroup.update_column(:share_with_group_lock, true)
+ end
+
+ context 'as the root_owner' do
+ let(:test_user) { root_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: false, checked: true }
+ end
+
+ context 'as the sub_owner' do
+ let(:test_user) { sub_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: false, checked: true }
+ end
+ end
+ end
+
+ context 'when the root_group has "Share with group lock" enabled' do
+ before do
+ root_group.update_column(:share_with_group_lock, true)
+ end
+
+ context 'when the subgroup has "Share with group lock" disabled (parent overridden)' do
+ context 'as the root_owner' do
+ let(:test_user) { root_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: false, checked: false }
+ end
+
+ context 'as the sub_owner' do
+ let(:test_user) { sub_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: false, checked: false }
+ end
+ end
+
+ context 'when the subgroup has "Share with group lock" enabled (same as parent)' do
+ before do
+ subgroup.update_column(:share_with_group_lock, true)
+ end
+
+ context 'as the root_owner' do
+ let(:test_user) { root_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: false, checked: true }
+ end
+
+ context 'as the sub_owner' do
+ let(:test_user) { sub_owner }
+
+ it_behaves_like '"Share with group lock" setting', { disabled: true, checked: true }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/views/help/index.html.haml_spec.rb b/spec/views/help/index.html.haml_spec.rb
index c59790a346e..600e431b7ef 100644
--- a/spec/views/help/index.html.haml_spec.rb
+++ b/spec/views/help/index.html.haml_spec.rb
@@ -21,6 +21,11 @@ RSpec.describe 'help/index' do
end
context 'when logged in' do
+ def version_link_regexp(path)
+ base_url = "#{view.source_host_url}/#{view.source_code_group}"
+ %r{#{Regexp.escape(base_url)}/(gitlab|gitlab\-foss)/#{Regexp.escape(path)}}
+ end
+
before do
stub_user
end
@@ -31,7 +36,7 @@ RSpec.describe 'help/index' do
render
expect(rendered).to match '8.0.2'
- expect(rendered).to have_link('8.0.2', href: %r{https://gitlab.com/gitlab-org/(gitlab|gitlab-foss)/-/tags/v8.0.2})
+ expect(rendered).to have_link('8.0.2', href: version_link_regexp('-/tags/v8.0.2'))
end
it 'shows a link to the commit for pre-releases' do
@@ -40,7 +45,7 @@ RSpec.describe 'help/index' do
render
expect(rendered).to match '8.0.2'
- expect(rendered).to have_link('abcdefg', href: %r{https://gitlab.com/gitlab-org/(gitlab|gitlab-foss)/-/commits/abcdefg})
+ expect(rendered).to have_link('abcdefg', href: version_link_regexp('-/commits/abcdefg'))
end
end
end
diff --git a/spec/views/layouts/header/_new_dropdown.haml_spec.rb b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
index cec095f93ad..bf81ab577f7 100644
--- a/spec/views/layouts/header/_new_dropdown.haml_spec.rb
+++ b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe 'layouts/header/_new_dropdown' do
end
it 'has a "New project" link' do
+ render('layouts/header/new_repo_experiment')
render
expect(rendered).to have_link('New project', href: new_project_path(namespace_id: group.id))
diff --git a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
index 640f463b45d..d96052d4c03 100644
--- a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'layouts/nav/sidebar/_group' do
- let(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
before do
assign(:group, group)
@@ -11,4 +11,70 @@ RSpec.describe 'layouts/nav/sidebar/_group' do
it_behaves_like 'has nav sidebar'
it_behaves_like 'sidebar includes snowplow attributes', 'render', 'groups_side_navigation', 'groups_side_navigation'
+
+ describe 'Group information' do
+ it 'has a link to the group path' do
+ render
+
+ expect(rendered).to have_link('Group information', href: group_path(group))
+ end
+
+ it 'does not have a link to the details menu item' do
+ render
+
+ expect(rendered).not_to have_link('Details', href: details_group_path(group))
+ end
+
+ it 'has a link to the members page' do
+ render
+
+ expect(rendered).to have_selector('.sidebar-top-level-items > li.home a[title="Members"]')
+ expect(rendered).to have_link('Members', href: group_group_members_path(group))
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it 'has a link to the group path with the "Group overview" title' do
+ render
+
+ expect(rendered).to have_link('Group overview', href: group_path(group))
+ end
+
+ it 'has a link to the details menu item' do
+ render
+
+ expect(rendered).to have_link('Details', href: details_group_path(group))
+ end
+
+ it 'does not have a link to the members page' do
+ render
+
+ expect(rendered).not_to have_selector('.sidebar-top-level-items > li.home a[title="Members"]')
+ end
+ end
+ end
+
+ describe 'Members' do
+ it 'does not have a Members menu' do
+ render
+
+ expect(rendered).not_to have_selector('.nav-item-name', text: 'Members')
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it 'has a Members menu' do
+ render
+
+ expect(rendered).to have_selector('.nav-item-name', text: 'Members')
+ expect(rendered).to have_link('Members', href: group_group_members_path(group))
+ end
+ end
+ end
end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index bc5b3b7bfc6..7cb49f635af 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -19,20 +19,41 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
it_behaves_like 'has nav sidebar'
- describe 'Project Overview' do
+ describe 'Project information' do
it 'has a link to the project path' do
render
- expect(rendered).to have_link('Project overview', href: project_path(project), class: %w(shortcuts-project rspec-project-link))
- expect(rendered).to have_selector('[aria-label="Project overview"]')
+ expect(rendered).to have_link('Project information', href: project_path(project), class: %w(shortcuts-project rspec-project-link))
+ expect(rendered).to have_selector('[aria-label="Project information"]')
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'has a link to the project path' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(rendered).to have_link('Project overview', href: project_path(project), class: %w(shortcuts-project rspec-project-link))
+ expect(rendered).to have_selector('[aria-label="Project overview"]')
+ end
end
describe 'Details' do
- it 'has a link to the projects path' do
+ it 'does not have a link to the details menu' do
render
- expect(rendered).to have_link('Details', href: project_path(project), class: 'shortcuts-project')
- expect(rendered).to have_selector('[aria-label="Project details"]')
+ expect(rendered).not_to have_link('Details', href: project_path(project))
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'has a link to the projects path' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(rendered).to have_link('Details', href: project_path(project), class: 'shortcuts-project')
+ expect(rendered).to have_selector('[aria-label="Project details"]')
+ end
end
end
@@ -45,10 +66,62 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
describe 'Releases' do
- it 'has a link to the project releases path' do
+ it 'does not have a link to the project releases path' do
+ render
+
+ expect(rendered).not_to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-project-releases')
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'has a link to the project releases path' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(rendered).to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-project-releases')
+ end
+ end
+ end
+
+ describe 'Labels' do
+ let(:page) { Nokogiri::HTML.parse(rendered) }
+
+ it 'has a link to the labels path' do
render
- expect(rendered).to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-project-releases')
+ expect(page.at_css('.shortcuts-project').parent.css('[aria-label="Labels"]')).not_to be_empty
+ expect(rendered).to have_link('Labels', href: project_labels_path(project))
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'does not have the labels menu item' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(page.at_css('.shortcuts-project').parent.css('[aria-label="Labels"]')).to be_empty
+ end
+ end
+ end
+
+ describe 'Members' do
+ let(:page) { Nokogiri::HTML.parse(rendered) }
+
+ it 'has a link to the members page' do
+ render
+
+ expect(page.at_css('.shortcuts-project').parent.css('[aria-label="Members"]')).not_to be_empty
+ expect(rendered).to have_link('Members', href: project_project_members_path(project))
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'does not have a link to the members page' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(page.at_css('.shortcuts-project').parent.css('[aria-label="Members"]')).to be_empty
+ end
end
end
end
@@ -56,6 +129,9 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
describe 'Learn GitLab' do
it 'has a link to the learn GitLab experiment' do
allow(view).to receive(:learn_gitlab_experiment_enabled?).and_return(true)
+ allow_next_instance_of(LearnGitlab::Onboarding) do |onboarding|
+ expect(onboarding).to receive(:completed_percentage).and_return(20)
+ end
render
@@ -127,145 +203,797 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'issue boards' do
- it 'has board tab' do
+ describe 'Issues' do
+ it 'has a link to the issue list path' do
+ render
+
+ expect(rendered).to have_link('Issues', href: project_issues_path(project))
+ end
+
+ it 'shows pill with the number of open issues' do
render
- expect(rendered).to have_css('a[title="Boards"]')
+ expect(rendered).to have_css('span.badge.badge-pill.issue_counter')
+ end
+
+ describe 'Issue List' do
+ it 'has a link to the issue list path' do
+ render
+
+ expect(rendered).to have_link('List', href: project_issues_path(project))
+ end
+ end
+
+ describe 'Issue Boards' do
+ it 'has a link to the issue boards path' do
+ render
+
+ expect(rendered).to have_link('Boards', href: project_boards_path(project))
+ end
+ end
+
+ describe 'Labels' do
+ let(:page) { Nokogiri::HTML.parse(rendered) }
+
+ it 'does not have a link to the labels page' do
+ render
+
+ expect(page.at_css('.shortcuts-issues').parent.css('[aria-label="Labels"]')).to be_empty
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'has a link to the labels page' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(page.at_css('.shortcuts-issues').parent.css('[aria-label="Labels"]')).not_to be_empty
+ expect(rendered).to have_link('Labels', href: project_labels_path(project))
+ end
+ end
+ end
+
+ describe 'Service Desk' do
+ it 'has a link to the service desk path' do
+ render
+
+ expect(rendered).to have_link('Service Desk', href: service_desk_project_issues_path(project))
+ end
+ end
+
+ describe 'Milestones' do
+ it 'has a link to the milestones path' do
+ render
+
+ expect(rendered).to have_link('Milestones', href: project_milestones_path(project))
+ end
end
end
- describe 'packages tab' do
- before do
- stub_container_registry_config(enabled: true)
+ describe 'External Issue Tracker' do
+ let_it_be_with_refind(:project) { create(:project, has_external_issue_tracker: true) }
+
+ context 'with custom external issue tracker' do
+ let(:external_issue_tracker_url) { 'http://test.com' }
+
+ let!(:external_issue_tracker) do
+ create(:custom_issue_tracker_service, active: external_issue_tracker_active, project: project, project_url: external_issue_tracker_url)
+ end
+
+ context 'when external issue tracker is configured and active' do
+ let(:external_issue_tracker_active) { true }
+
+ it 'has a link to the external issue tracker' do
+ render
+
+ expect(rendered).to have_link(external_issue_tracker.title, href: external_issue_tracker_url)
+ end
+ end
+
+ context 'when external issue tracker is configured but not active' do
+ let(:external_issue_tracker_active) { false }
+
+ it 'does not have a link to the external issue tracker' do
+ render
+
+ expect(rendered).not_to have_link(external_issue_tracker.title)
+ end
+ end
+ end
+
+ context 'with Jira issue tracker' do
+ let_it_be(:jira) { create(:jira_service, project: project, issues_enabled: false) }
+
+ it 'has a link to the Jira issue tracker' do
+ render
- allow(controller).to receive(:controller_name)
- .and_return('repositories')
- allow(controller).to receive(:controller_path)
- .and_return('projects/registry/repositories')
+ expect(rendered).to have_link('Jira', href: project.external_issue_tracker.issue_tracker_path)
+ end
end
+ end
+
+ describe 'Labels' do
+ it 'does not show the labels menu' do
+ project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED)
- it 'highlights sidebar item and flyout' do
render
- expect(rendered).to have_css('.sidebar-top-level-items > li.active', count: 1)
- expect(rendered).to have_css('.sidebar-sub-level-items > li.fly-out-top-item.active', count: 1)
+ expect(rendered).not_to have_link('Labels', href: project_labels_path(project), class: 'shortcuts-labels')
end
- it 'highlights container registry tab' do
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ context 'when issues are not enabled' do
+ it 'has a link to the labels path' do
+ project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED)
+
+ render
+
+ expect(rendered).to have_link('Labels', href: project_labels_path(project), class: 'shortcuts-labels')
+ end
+ end
+
+ context 'when issues are enabled' do
+ it 'does not have a link to the labels path' do
+ render
+
+ expect(rendered).not_to have_link('Labels', href: project_labels_path(project), class: 'shortcuts-labels')
+ end
+ end
+ end
+ end
+
+ describe 'Merge Requests' do
+ it 'has a link to the merge request list path' do
+ render
+
+ expect(rendered).to have_link('Merge requests', href: project_merge_requests_path(project), class: 'shortcuts-merge_requests')
+ end
+
+ it 'shows pill with the number of merge requests' do
render
- expect(rendered).to have_css('.sidebar-sub-level-items > li:not(.fly-out-top-item).active', text: 'Container Registry')
+ expect(rendered).to have_css('span.badge.badge-pill.merge_counter.js-merge-counter')
end
end
- describe 'Packages' do
- let_it_be(:user) { create(:user) }
+ describe 'CI/CD' do
+ it 'has a link to pipelines page' do
+ render
- let_it_be(:package_menu_name) { 'Packages & Registries' }
- let_it_be(:package_entry_name) { 'Package Registry' }
+ expect(rendered).to have_link('CI/CD', href: project_pipelines_path(project))
+ end
- before do
- project.team.add_developer(user)
- sign_in(user)
- stub_container_registry_config(enabled: true)
+ describe 'Artifacts' do
+ it 'has a link to the artifacts page' do
+ render
+
+ expect(rendered).to have_link('Artifacts', href: project_artifacts_path(project))
+ end
end
- context 'when packages is enabled' do
- it 'packages link is visible' do
+ describe 'Jobs' do
+ it 'has a link to the jobs page' do
render
- expect(rendered).to have_link(package_menu_name, href: project_packages_path(project))
+ expect(rendered).to have_link('Jobs', href: project_jobs_path(project))
end
+ end
- it 'packages list link is visible' do
+ describe 'Pipeline Schedules' do
+ it 'has a link to the pipeline schedules page' do
render
- expect(rendered).to have_link(package_entry_name, href: project_packages_path(project))
+ expect(rendered).to have_link('Schedules', href: pipeline_schedules_path(project))
end
+ end
- it 'container registry link is visible' do
+ describe 'Pipelines' do
+ it 'has a link to the pipelines page' do
render
- expect(rendered).to have_link('Container Registry', href: project_container_registry_index_path(project))
+ expect(rendered).to have_link('Pipelines', href: project_pipelines_path(project))
+ end
+ end
+
+ describe 'Pipeline Editor' do
+ it 'has a link to the pipeline editor' do
+ render
+
+ expect(rendered).to have_link('Editor', href: project_ci_pipeline_editor_path(project))
+ end
+
+ context 'when user cannot access pipeline editor' do
+ it 'does not have a link to the pipeline editor' do
+ allow(view).to receive(:can_view_pipeline_editor?).and_return(false)
+
+ render
+
+ expect(rendered).not_to have_link('Editor', href: project_ci_pipeline_editor_path(project))
+ end
+ end
+ end
+ end
+
+ describe 'Security and Compliance' do
+ describe 'when user does not have permissions' do
+ before do
+ allow(view).to receive(:current_user).and_return(nil)
+ end
+
+ it 'top level navigation link is not visible' do
+ render
+
+ expect(rendered).not_to have_link('Security & Compliance')
end
end
- context 'when container registry is disabled' do
+ context 'when user has permissions' do
before do
- stub_container_registry_config(enabled: false)
+ allow(view).to receive(:current_user).and_return(user)
+
+ render
+ end
+
+ it 'top level navigation link is visible' do
+ expect(rendered).to have_link('Security & Compliance')
+ end
+
+ it 'security configuration link is visible' do
+ expect(rendered).to have_link('Configuration', href: project_security_configuration_path(project))
end
+ end
+ end
- it 'packages top level and list link are visible' do
+ describe 'Deployments' do
+ let(:page) { Nokogiri::HTML.parse(rendered) }
+
+ describe 'Feature Flags' do
+ it 'has a link to the feature flags page' do
render
- expect(rendered).to have_link(package_menu_name, href: project_packages_path(project))
- expect(rendered).to have_link(package_entry_name, href: project_packages_path(project))
+ expect(page.at_css('.shortcuts-deployments').parent.css('[aria-label="Feature Flags"]')).not_to be_empty
+ expect(rendered).to have_link('Feature Flags', href: project_feature_flags_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the feature flags page' do
+ render
+
+ expect(rendered).not_to have_link('Feature Flags')
+ end
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'does not have a Feature Flags menu item' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(rendered).not_to have_selector('.shortcuts-deployments')
+ end
end
+ end
- it 'container registry link is not visible' do
+ describe 'Environments' do
+ it 'has a link to the environments page' do
render
- expect(rendered).not_to have_link('Container Registry', href: project_container_registry_index_path(project))
+ expect(page.at_css('.shortcuts-deployments').parent.css('[aria-label="Environments"]')).not_to be_empty
+ expect(rendered).to have_link('Environments', href: project_environments_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the environments page' do
+ render
+
+ expect(rendered).not_to have_link('Environments')
+ end
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'does not have an Environments menu item' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(rendered).not_to have_selector('.shortcuts-deployments')
+ end
+ end
+ end
+
+ describe 'Releases' do
+ it 'has a link to the project releases path' do
+ render
+
+ expect(rendered).to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-deployments-releases')
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'does not have a link to the project releases path' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
+
+ expect(rendered).not_to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-deployments-releases')
+ end
end
end
end
- describe 'wiki entry tab' do
- let(:can_read_wiki) { true }
+ describe 'Monitor' do
+ it 'top level navigation link is visible for user with permissions' do
+ render
- before do
- allow(view).to receive(:can?).with(user, :read_wiki, project).and_return(can_read_wiki)
+ expect(rendered).to have_link('Monitor')
end
- describe 'when wiki is enabled' do
- it 'shows the wiki tab with the wiki internal link' do
+ describe 'Metrics Dashboard' do
+ it 'has a link to the metrics dashboard page' do
render
- expect(rendered).to have_link('Wiki', href: wiki_path(project.wiki))
+ expect(rendered).to have_link('Metrics', href: project_metrics_dashboard_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the metrics page' do
+ render
+
+ expect(rendered).not_to have_link('Metrics')
+ end
end
end
- describe 'when wiki is disabled' do
- let(:can_read_wiki) { false }
+ describe 'Logs' do
+ it 'has a link to the pod logs page' do
+ render
+
+ expect(rendered).to have_link('Logs', href: project_logs_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the pod logs page' do
+ render
+
+ expect(rendered).not_to have_link('Logs')
+ end
+ end
+ end
- it 'does not show the wiki tab' do
+ describe 'Tracing' do
+ it 'has a link to the tracing page' do
render
- expect(rendered).not_to have_link('Wiki')
+ expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
+ end
+
+ context 'without project.tracing_external_url' do
+ it 'has a link to the tracing page' do
+ render
+
+ expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
+ end
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the tracing page' do
+ render
+
+ expect(rendered).not_to have_text 'Tracing'
+ end
+ end
+ end
+
+ describe 'Error Tracking' do
+ it 'has a link to the error tracking page' do
+ render
+
+ expect(rendered).to have_link('Error Tracking', href: project_error_tracking_index_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the error tracking page' do
+ render
+
+ expect(rendered).not_to have_link('Error Tracking')
+ end
+ end
+ end
+
+ describe 'Alert Management' do
+ it 'has a link to the alert management page' do
+ render
+
+ expect(rendered).to have_link('Alerts', href: project_alert_management_index_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the alert management page' do
+ render
+
+ expect(rendered).not_to have_link('Alerts')
+ end
+ end
+ end
+
+ describe 'Incidents' do
+ it 'has a link to the incidents page' do
+ render
+
+ expect(rendered).to have_link('Incidents', href: project_incidents_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the incidents page' do
+ render
+
+ expect(rendered).not_to have_link('Incidents')
+ end
+ end
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ describe 'Serverless' do
+ it 'has a link to the serverless page' do
+ render
+
+ page = Nokogiri::HTML.parse(rendered)
+
+ expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Serverless"]')).not_to be_empty
+ expect(rendered).to have_link('Serverless', href: project_serverless_functions_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the serverless page' do
+ render
+
+ expect(rendered).not_to have_link('Serverless')
+ end
+ end
+ end
+
+ describe 'Terraform' do
+ it 'has a link to the terraform page' do
+ render
+
+ page = Nokogiri::HTML.parse(rendered)
+
+ expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Terraform"]')).not_to be_empty
+ expect(rendered).to have_link('Terraform', href: project_terraform_index_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the terraform page' do
+ render
+
+ expect(rendered).not_to have_link('Terraform')
+ end
+ end
+ end
+
+ describe 'Kubernetes' do
+ it 'has a link to the kubernetes page' do
+ render
+
+ page = Nokogiri::HTML.parse(rendered)
+
+ expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Kubernetes"]')).not_to be_empty
+ expect(rendered).to have_link('Kubernetes', href: project_clusters_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the kubernetes page' do
+ render
+
+ expect(rendered).not_to have_link('Kubernetes')
+ end
+ end
+ end
+ end
+
+ describe 'Environments' do
+ let(:page) { Nokogiri::HTML.parse(rendered) }
+
+ it 'does not have a link to the environments page' do
+ render
+
+ expect(page.at_css('.shortcuts-monitor').parent.css('[aria-label="Environments"]')).to be_empty
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it 'has a link to the environments page' do
+ render
+
+ expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Environments"]')).not_to be_empty
+ expect(rendered).to have_link('Environments', href: project_environments_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the environments page' do
+ render
+
+ expect(rendered).not_to have_link('Environments')
+ end
+ end
+ end
+ end
+
+ describe 'Feature Flags' do
+ let(:page) { Nokogiri::HTML.parse(rendered) }
+
+ it 'does not have a link to the feature flags page' do
+ render
+
+ expect(page.at_css('.shortcuts-monitor').parent.css('[aria-label="Feature Flags"]')).to be_empty
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
+ end
+
+ it 'has a link to the feature flags page' do
+ render
+
+ expect(page.at_css('.shortcuts-operations').parent.css('[aria-label="Feature Flags"]')).not_to be_empty
+ expect(rendered).to have_link('Feature Flags', href: project_feature_flags_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the feature flags page' do
+ render
+
+ expect(rendered).not_to have_link('Feature Flags')
+ end
+ end
+ end
+ end
+
+ describe 'Product Analytics' do
+ it 'has a link to the product analytics page' do
+ render
+
+ expect(rendered).to have_link('Product Analytics', href: project_product_analytics_path(project))
+ end
+
+ describe 'when feature flag :product_analytics is disabled' do
+ it 'does not have a link to the product analytics page' do
+ stub_feature_flags(product_analytics: false)
+
+ render
+
+ expect(rendered).not_to have_link('Product Analytics')
+ end
end
end
end
- describe 'external wiki entry tab' do
- let(:properties) { { 'external_wiki_url' => 'https://gitlab.com' } }
- let(:service_status) { true }
+ describe 'Infrastructure' do
+ describe 'Serverless platform' do
+ it 'has a link to the serverless page' do
+ render
+
+ expect(rendered).to have_link('Serverless platform', href: project_serverless_functions_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the serverless page' do
+ render
+
+ expect(rendered).not_to have_link('Serverless platform')
+ end
+ end
+ end
+
+ describe 'Terraform' do
+ it 'has a link to the terraform page' do
+ render
+
+ expect(rendered).to have_link('Terraform', href: project_terraform_index_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the terraform page' do
+ render
+
+ expect(rendered).not_to have_link('Terraform')
+ end
+ end
+ end
+
+ describe 'Kubernetes clusters' do
+ it 'has a link to the kubernetes page' do
+ render
+
+ expect(rendered).to have_link('Kubernetes clusters', href: project_clusters_path(project))
+ end
+
+ describe 'when the user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the kubernetes page' do
+ render
+
+ expect(rendered).not_to have_link('Kubernetes clusters')
+ end
+ end
+ end
+ end
+
+ describe 'Packages and Registries' do
+ let(:registry_enabled) { true }
+ let(:packages_enabled) { true }
before do
- project.create_external_wiki_service(active: service_status, properties: properties)
- project.reload
+ stub_container_registry_config(enabled: registry_enabled)
+ stub_config(packages: { enabled: packages_enabled })
end
- context 'when it is active' do
- it 'shows the external wiki tab with the external wiki service link' do
+ it 'top level navigation link is visible and points to package registry page' do
+ render
+
+ expect(rendered).to have_link('Packages & Registries', href: project_packages_path(project))
+ end
+
+ describe 'Packages Registry' do
+ it 'shows link to package registry page' do
render
- expect(rendered).to have_link('External wiki', href: properties['external_wiki_url'])
+ expect(rendered).to have_link('Package Registry', href: project_packages_path(project))
+ end
+
+ context 'when packages config setting is not enabled' do
+ let(:packages_enabled) { false }
+
+ it 'does not show link to package registry page' do
+ render
+
+ expect(rendered).not_to have_link('Package Registry', href: project_packages_path(project))
+ end
end
end
- context 'when it is disabled' do
- let(:service_status) { false }
+ describe 'Container Registry' do
+ it 'shows link to container registry page' do
+ render
- it 'does not show the external wiki tab' do
+ expect(rendered).to have_link('Container Registry', href: project_container_registry_index_path(project))
+ end
+
+ context 'when container config setting is not enabled' do
+ let(:registry_enabled) { false }
+
+ it 'does not show link to container registry page' do
+ render
+
+ expect(rendered).not_to have_link('Container Registry', href: project_container_registry_index_path(project))
+ end
+ end
+ end
+
+ describe 'Infrastructure Registry' do
+ it 'shows link to infrastructure registry page' do
render
- expect(rendered).not_to have_link('External wiki')
+ expect(rendered).to have_link('Infrastructure Registry', href: project_infrastructure_registry_index_path(project))
+ end
+
+ context 'when feature flag :infrastructure_registry_page is disabled' do
+ it 'does not show link to infrastructure registry page' do
+ stub_feature_flags(infrastructure_registry_page: false)
+
+ render
+
+ expect(rendered).not_to have_link('Infrastructure Registry', href: project_infrastructure_registry_index_path(project))
+ end
end
end
end
- describe 'confluence tab' do
+ describe 'Analytics' do
+ it 'top level navigation link is visible and points to the value stream page' do
+ render
+
+ expect(rendered).to have_link('Analytics', href: project_cycle_analytics_path(project))
+ end
+
+ describe 'CI/CD' do
+ it 'has a link to the CI/CD analytics page' do
+ render
+
+ expect(rendered).to have_link('CI/CD', href: charts_project_pipelines_path(project))
+ end
+
+ context 'when user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the CI/CD analytics page' do
+ render
+
+ expect(rendered).not_to have_link('CI/CD', href: charts_project_pipelines_path(project))
+ end
+ end
+ end
+
+ describe 'Repository' do
+ it 'has a link to the repository analytics page' do
+ render
+
+ expect(rendered).to have_link('Repository', href: charts_project_graph_path(project, 'master'))
+ end
+
+ context 'when user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the repository analytics page' do
+ render
+
+ expect(rendered).not_to have_link('Repository', href: charts_project_graph_path(project, 'master'))
+ end
+ end
+ end
+
+ describe 'Value Stream' do
+ it 'has a link to the value stream page' do
+ render
+
+ expect(rendered).to have_link('Value Stream', href: project_cycle_analytics_path(project))
+ end
+
+ context 'when user does not have access' do
+ let(:user) { nil }
+
+ it 'does not have a link to the value stream page' do
+ render
+
+ expect(rendered).not_to have_link('Value Stream', href: project_cycle_analytics_path(project))
+ end
+ end
+ end
+ end
+
+ describe 'Confluence' do
let!(:service) { create(:confluence_service, project: project, active: active) }
before do
@@ -275,11 +1003,11 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
context 'when the Confluence integration is active' do
let(:active) { true }
- it 'shows the Confluence tab' do
+ it 'shows the Confluence link' do
expect(rendered).to have_link('Confluence', href: project_wikis_confluence_path(project))
end
- it 'does not show the GitLab wiki tab' do
+ it 'does not show the GitLab wiki link' do
expect(rendered).not_to have_link('Wiki')
end
end
@@ -287,167 +1015,314 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
context 'when it is disabled' do
let(:active) { false }
- it 'does not show the Confluence tab' do
+ it 'does not show the Confluence link' do
expect(rendered).not_to have_link('Confluence')
end
- it 'shows the GitLab wiki tab' do
+ it 'shows the GitLab wiki link' do
expect(rendered).to have_link('Wiki', href: wiki_path(project.wiki))
end
end
end
- describe 'ci/cd settings tab' do
- before do
- project.update!(archived: project_archived)
+ describe 'Wiki' do
+ describe 'when wiki is enabled' do
+ it 'shows the wiki tab with the wiki internal link' do
+ render
+
+ expect(rendered).to have_link('Wiki', href: wiki_path(project.wiki))
+ end
end
- context 'when project is archived' do
- let(:project_archived) { true }
+ describe 'when wiki is disabled' do
+ let(:user) { nil }
- it 'does not show the ci/cd settings tab' do
+ it 'does not show the wiki link' do
render
- expect(rendered).not_to have_link('CI/CD', href: project_settings_ci_cd_path(project))
+ expect(rendered).not_to have_link('Wiki')
end
end
+ end
- context 'when project is active' do
- let(:project_archived) { false }
+ describe 'External Wiki' do
+ let(:properties) { { 'external_wiki_url' => 'https://gitlab.com' } }
+ let(:service_status) { true }
+
+ before do
+ project.create_external_wiki_service(active: service_status, properties: properties)
+ project.reload
+ end
- it 'shows the ci/cd settings tab' do
+ context 'when it is active' do
+ it 'shows the external wiki tab with the external wiki service link' do
render
- expect(rendered).to have_link('CI/CD', href: project_settings_ci_cd_path(project))
+ expect(rendered).to have_link('External wiki', href: properties['external_wiki_url'])
+ end
+ end
+
+ context 'when it is disabled' do
+ let(:service_status) { false }
+
+ it 'does not show the external wiki link' do
+ render
+
+ expect(rendered).not_to have_link('External wiki')
end
end
end
- describe 'pipeline editor link' do
- it 'shows the pipeline editor link' do
+ describe 'Snippets' do
+ before do
render
-
- expect(rendered).to have_link('Editor', href: project_ci_pipeline_editor_path(project))
end
- it 'does not show the pipeline editor link' do
- allow(view).to receive(:can_view_pipeline_editor?).and_return(false)
+ context 'when user can access snippets' do
+ it 'shows Snippets link' do
+ expect(rendered).to have_link('Snippets', href: project_snippets_path(project))
+ end
+ end
- render
+ context 'when user cannot access snippets' do
+ let(:user) { nil }
- expect(rendered).not_to have_link('Editor', href: project_ci_pipeline_editor_path(project))
+ it 'does not show Snippets link' do
+ expect(rendered).not_to have_link('Snippets')
+ end
end
end
- describe 'operations settings tab' do
- describe 'archive projects' do
- before do
- project.update!(archived: project_archived)
- end
+ describe 'Members' do
+ it 'does not show the Members menu item' do
+ expect(rendered).not_to have_selector('.sidebar-top-level-items > li > a[aria-label="Members"]')
+ end
- context 'when project is archived' do
- let(:project_archived) { true }
+ context 'when feature flag :sidebar_refactor is disabled' do
+ before do
+ stub_feature_flags(sidebar_refactor: false)
- it 'does not show the operations settings tab' do
- render
+ render
+ end
- expect(rendered).not_to have_link('Operations', href: project_settings_operations_path(project))
+ context 'when user can access members' do
+ it 'shows Members link' do
+ expect(rendered).to have_selector('.sidebar-top-level-items > li > a[aria-label="Members"]')
+ expect(rendered).to have_link('Members', href: project_project_members_path(project))
end
end
- context 'when project is active' do
- let(:project_archived) { false }
+ context 'when user cannot access members' do
+ let(:user) { nil }
- it 'shows the operations settings tab' do
- render
-
- expect(rendered).to have_link('Operations', href: project_settings_operations_path(project))
+ it 'does not show Members link' do
+ expect(rendered).not_to have_link('Members')
end
end
end
+ end
- describe 'Tracing' do
- it 'is not visible to unauthorized user' do
- allow(view).to receive(:can?).and_return(false)
+ describe 'Settings' do
+ describe 'General' do
+ it 'has a link to the General settings' do
+ render
+ expect(rendered).to have_link('General', href: edit_project_path(project))
+ end
+ end
+
+ describe 'Integrations' do
+ it 'has a link to the Integrations settings' do
render
- expect(rendered).not_to have_text 'Tracing'
+ expect(rendered).to have_link('Integrations', href: project_settings_integrations_path(project))
end
+ end
- it 'links to Tracing page' do
+ describe 'WebHooks' do
+ it 'has a link to the WebHooks settings' do
render
- expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
+ expect(rendered).to have_link('Webhooks', href: project_hooks_path(project))
end
+ end
- context 'without project.tracing_external_url' do
- it 'links to Tracing page' do
+ describe 'Access Tokens' do
+ context 'self-managed instance' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ it 'has a link to the Access Tokens settings' do
render
- expect(rendered).to have_link('Tracing', href: project_tracing_path(project))
+ expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
+ end
+ end
+
+ context 'gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'has a link to the Access Tokens settings' do
+ render
+
+ expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
end
end
end
- describe 'Alert Management' do
- it 'shows the Alerts sidebar entry' do
+ describe 'Repository' do
+ it 'has a link to the Repository settings' do
render
- expect(rendered).to have_css('a[title="Alerts"]')
+ expect(rendered).to have_link('Repository', href: project_settings_repository_path(project))
end
end
- end
- describe 'value stream analytics entry' do
- let(:read_cycle_analytics) { true }
+ describe 'CI/CD' do
+ context 'when project is archived' do
+ before do
+ project.update!(archived: true)
+ end
- before do
- allow(view).to receive(:can?).with(user, :read_cycle_analytics, project).and_return(read_cycle_analytics)
- end
+ it 'does not have a link to the CI/CD settings' do
+ render
- describe 'when value stream analytics is enabled' do
- it 'shows the value stream analytics entry' do
- render
+ expect(rendered).not_to have_link('CI/CD', href: project_settings_ci_cd_path(project))
+ end
+ end
- expect(rendered).to have_link('Value Stream', href: project_cycle_analytics_path(project))
+ context 'when project is not archived' do
+ it 'has a link to the CI/CD settings' do
+ render
+
+ expect(rendered).to have_link('CI/CD', href: project_settings_ci_cd_path(project))
+ end
end
end
- describe 'when value stream analytics is disabled' do
- let(:read_cycle_analytics) { false }
+ describe 'Monitor' do
+ context 'when project is archived' do
+ before do
+ project.update!(archived: true)
+ end
- it 'does not show the value stream analytics entry' do
- render
+ it 'does not have a link to the Monitor settings' do
+ render
- expect(rendered).not_to have_link('Value Stream', href: project_cycle_analytics_path(project))
+ expect(rendered).not_to have_link('Monitor', href: project_settings_operations_path(project))
+ end
+ end
+
+ context 'when project is not archived' do
+ it 'has a link to the Monitor settings' do
+ render
+
+ expect(rendered).to have_link('Monitor', href: project_settings_operations_path(project))
+ end
end
end
- end
- describe 'project access tokens' do
- context 'self-managed instance' do
+ describe 'Pages' do
before do
- allow(Gitlab).to receive(:com?).and_return(false)
+ stub_config(pages: { enabled: pages_enabled })
end
- it 'displays "Access Tokens" nav item' do
- render
+ context 'when pages are enabled' do
+ let(:pages_enabled) { true }
+
+ it 'has a link to the Pages settings' do
+ render
- expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
+ expect(rendered).to have_link('Pages', href: project_pages_path(project))
+ end
+ end
+
+ context 'when pages are not enabled' do
+ let(:pages_enabled) { false }
+
+ it 'does not have a link to the Pages settings' do
+ render
+
+ expect(rendered).not_to have_link('Pages', href: project_pages_path(project))
+ end
end
end
- context 'gitlab.com' do
+ describe 'Packages & Registries' do
before do
- allow(Gitlab).to receive(:com?).and_return(true)
+ stub_container_registry_config(enabled: registry_enabled)
end
- it 'displays "Access Tokens" nav item' do
- render
+ context 'when registry is enabled' do
+ let(:registry_enabled) { true }
+
+ it 'has a link to the Packages & Registries settings' do
+ render
+
+ expect(rendered).to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
+ end
+
+ context 'when feature flag :sidebar_refactor is disabled' do
+ it 'does not have a link to the Packages & Registries settings' do
+ stub_feature_flags(sidebar_refactor: false)
+
+ render
- expect(rendered).to have_link('Access Tokens', href: project_settings_access_tokens_path(project))
+ expect(rendered).not_to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
+ end
+ end
end
+
+ context 'when registry is not enabled' do
+ let(:registry_enabled) { false }
+
+ it 'does not have a link to the Packages & Registries settings' do
+ render
+
+ expect(rendered).not_to have_link('Packages & Registries', href: project_settings_packages_and_registries_path(project))
+ end
+ end
+ end
+ end
+
+ describe 'Hidden menus' do
+ it 'has a link to the Activity page' do
+ render
+
+ expect(rendered).to have_link('Activity', href: activity_project_path(project), class: 'shortcuts-project-activity', visible: false)
+ end
+
+ it 'has a link to the Graph page' do
+ render
+
+ expect(rendered).to have_link('Graph', href: project_network_path(project, current_ref), class: 'shortcuts-network', visible: false)
+ end
+
+ it 'has a link to the New Issue page' do
+ render
+
+ expect(rendered).to have_link('Create a new issue', href: new_project_issue_path(project), class: 'shortcuts-new-issue', visible: false)
+ end
+
+ it 'has a link to the Jobs page' do
+ render
+
+ expect(rendered).to have_link('Jobs', href: project_jobs_path(project), class: 'shortcuts-builds', visible: false)
+ end
+
+ it 'has a link to the Commits page' do
+ render
+
+ expect(rendered).to have_link('Commits', href: project_commits_path(project), class: 'shortcuts-commits', visible: false)
+ end
+
+ it 'has a link to the Issue Boards page' do
+ render
+
+ expect(rendered).to have_link('Issue Boards', href: project_boards_path(project), class: 'shortcuts-issue-boards', visible: false)
end
end
diff --git a/spec/views/layouts/nav/sidebar/_project_security_link.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project_security_link.html.haml_spec.rb
deleted file mode 100644
index d3fb35bff6d..00000000000
--- a/spec/views/layouts/nav/sidebar/_project_security_link.html.haml_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'layouts/nav/sidebar/_project_security_link' do
- let_it_be_with_reload(:project) { create(:project) }
- context 'on security configuration' do
- before do
- assign(:project, project)
- allow(controller).to receive(:controller_name).and_return('configuration')
- allow(controller).to receive(:controller_path).and_return('projects/security/configuration')
- allow(controller).to receive(:action_name).and_return('show')
- allow(view).to receive(:any_project_nav_tab?).and_return(true)
- allow(view).to receive(:project_nav_tab?).and_return(true)
- end
-
- it 'activates Security & Compliance tab' do
- render
-
- expect(rendered).to have_css('li.active', text: 'Security & Compliance')
- end
-
- it 'activates Configuration sub tab' do
- render
-
- expect(rendered).to have_css('.sidebar-sub-level-items > li.active', text: 'Configuration')
- end
- end
-end
diff --git a/spec/views/notify/change_in_merge_request_draft_status_email.html.haml_spec.rb b/spec/views/notify/change_in_merge_request_draft_status_email.html.haml_spec.rb
index 6c25eba03b9..6d56145144f 100644
--- a/spec/views/notify/change_in_merge_request_draft_status_email.html.haml_spec.rb
+++ b/spec/views/notify/change_in_merge_request_draft_status_email.html.haml_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'notify/change_in_merge_request_draft_status_email.html.haml' do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request) }
+ let(:merge_request_link) { merge_request_url(merge_request) }
before do
assign(:updated_by_user, user)
@@ -15,5 +16,7 @@ RSpec.describe 'notify/change_in_merge_request_draft_status_email.html.haml' do
render
expect(rendered).to have_content("#{user.name} changed the draft status of merge request #{merge_request.to_reference}")
+ expect(rendered).to have_link(user.name, href: user_url(user))
+ expect(rendered).to have_link(merge_request.to_reference, href: merge_request_link)
end
end
diff --git a/spec/views/profiles/keys/_form.html.haml_spec.rb b/spec/views/profiles/keys/_form.html.haml_spec.rb
index 62bb271bd9c..0f4d7ecc699 100644
--- a/spec/views/profiles/keys/_form.html.haml_spec.rb
+++ b/spec/views/profiles/keys/_form.html.haml_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe 'profiles/keys/_form.html.haml' do
it 'has the title field', :aggregate_failures do
expect(rendered).to have_field('Title', type: 'text', placeholder: 'e.g. My MacBook key')
- expect(rendered).to have_text('Give your individual key a title.')
+ expect(rendered).to have_text('Give your individual key a title. This will be publicly visible.')
end
it 'has the expires at field', :aggregate_failures do
diff --git a/spec/views/projects/pipelines/new.html.haml_spec.rb b/spec/views/projects/pipelines/new.html.haml_spec.rb
deleted file mode 100644
index 9c5e46b6a17..00000000000
--- a/spec/views/projects/pipelines/new.html.haml_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/pipelines/new' do
- include Devise::Test::ControllerHelpers
- let_it_be(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
- before do
- assign(:project, project)
- assign(:pipeline, pipeline)
-
- stub_feature_flags(new_pipeline_form: false)
- end
-
- describe 'warning messages' do
- let(:warning_messages) do
- [double(content: 'warning 1'), double(content: 'warning 2')]
- end
-
- before do
- allow(pipeline).to receive(:warning_messages).and_return(warning_messages)
- end
-
- it 'displays the warnings' do
- render
-
- expect(rendered).to have_css('div.bs-callout-warning')
- expect(rendered).to have_content('warning 1')
- expect(rendered).to have_content('warning 2')
- end
- end
-end
diff --git a/spec/views/projects/pipelines/show.html.haml_spec.rb b/spec/views/projects/pipelines/show.html.haml_spec.rb
index b998023b40e..5b5c05527de 100644
--- a/spec/views/projects/pipelines/show.html.haml_spec.rb
+++ b/spec/views/projects/pipelines/show.html.haml_spec.rb
@@ -12,8 +12,6 @@ RSpec.describe 'projects/pipelines/show' do
before do
assign(:project, project)
assign(:pipeline, presented_pipeline)
-
- stub_feature_flags(new_pipeline_form: false)
end
context 'when pipeline has errors' do
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index e6d53c526e2..ab868eb78b8 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe 'projects/settings/operations/show' do
render
expect(rendered).to have_content _('Error tracking')
- expect(rendered).to have_content _('To link Sentry to GitLab, enter your Sentry URL and Auth Token')
+ expect(rendered).to have_content _('Link Sentry to GitLab to discover and view the errors your application generates.')
end
end
end
diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb
index 18b42f98e0b..2702ab9e2a9 100644
--- a/spec/views/projects/tags/index.html.haml_spec.rb
+++ b/spec/views/projects/tags/index.html.haml_spec.rb
@@ -20,12 +20,6 @@ RSpec.describe 'projects/tags/index.html.haml' do
allow(view).to receive(:current_user).and_return(project.namespace.owner)
end
- it 'defaults sort dropdown toggle to last updated' do
- stub_feature_flags(gldropdown_tags: false)
- render
- expect(rendered).to have_button('Last updated')
- end
-
it 'renders links to the Releases page for tags associated with a release' do
render
expect(rendered).to have_link(release.name, href: project_releases_path(project, anchor: release.tag))
diff --git a/spec/views/registrations/welcome/show.html.haml_spec.rb b/spec/views/registrations/welcome/show.html.haml_spec.rb
index 639759ae095..ecdef7918de 100644
--- a/spec/views/registrations/welcome/show.html.haml_spec.rb
+++ b/spec/views/registrations/welcome/show.html.haml_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'registrations/welcome/show' do
let(:is_gitlab_com) { false }
- let_it_be(:user) { User.new }
+ let_it_be(:user) { create(:user) }
before do
allow(view).to receive(:current_user).and_return(user)
diff --git a/spec/views/shared/nav/_sidebar.html.haml_spec.rb b/spec/views/shared/nav/_sidebar.html.haml_spec.rb
index 268d2952683..cf9452ba68c 100644
--- a/spec/views/shared/nav/_sidebar.html.haml_spec.rb
+++ b/spec/views/shared/nav/_sidebar.html.haml_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'shared/nav/_sidebar.html.haml' do
let(:project) { build(:project, id: non_existing_record_id) }
- let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project)}
+ let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
let(:sidebar) { Sidebars::Projects::Panel.new(context) }
before do
@@ -32,7 +32,7 @@ RSpec.describe 'shared/nav/_sidebar.html.haml' do
context 'when sidebar has a custom scope menu partial defined' do
it 'renders the custom partial' do
allow(sidebar).to receive(:render_raw_scope_menu_partial).and_return(scope_menu_view)
- allow(sidebar).to receive(:scope_menu).and_return(nil)
+ allow(view).to receive(:scope_menu).and_return(nil)
stub_template(scope_menu_partial => content)
render
diff --git a/spec/views/shared/runners/show.html.haml_spec.rb b/spec/views/shared/runners/_runner_details.html.haml_spec.rb
index 91a6a31daae..f9f93c8160b 100644
--- a/spec/views/shared/runners/show.html.haml_spec.rb
+++ b/spec/views/shared/runners/_runner_details.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'shared/runners/show.html.haml' do
+RSpec.describe 'shared/runners/_runner_details.html.haml' do
include PageLayoutHelper
let(:runner) do
@@ -14,7 +14,7 @@ RSpec.describe 'shared/runners/show.html.haml' do
end
before do
- assign(:runner, runner)
+ allow(view).to receive(:runner) { runner }
end
subject do
@@ -24,7 +24,7 @@ RSpec.describe 'shared/runners/show.html.haml' do
describe 'Page title' do
before do
- expect_any_instance_of(PageLayoutHelper).to receive(:page_title).with("#{runner.description} ##{runner.id}", 'Runners')
+ expect(view).to receive(:page_title).with("##{runner.id} (#{runner.short_sha})")
end
it 'sets proper page title' do
@@ -147,7 +147,7 @@ RSpec.describe 'shared/runners/show.html.haml' do
context 'when runner have already contacted' do
let(:runner) { create(:ci_runner, contacted_at: DateTime.now - 6.days) }
- let(:expected_contacted_at) { I18n.localize(runner.contacted_at, format: "%b %d, %Y") }
+ let(:expected_contacted_at) { I18n.l(runner.contacted_at, format: "%b %d, %Y") }
it { is_expected.to have_content("Last contact #{expected_contacted_at}") }
end
diff --git a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
index 0501fc3b8cf..832d5afd957 100644
--- a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker do
it_behaves_like 'worker with data consistency',
described_class,
- feature_flag: :periodic_project_authorization_update_via_replica,
+ feature_flag: :delayed_consistency_for_user_refresh_over_range_worker,
data_consistency: :delayed
describe '#perform' do
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 5aca5d68677..3434980341b 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -22,7 +22,6 @@ RSpec.describe BuildFinishedWorker do
end
expect(BuildHooksWorker).to receive(:perform_async)
- expect(ExpirePipelineCacheWorker).to receive(:perform_async)
expect(ChatNotificationWorker).not_to receive(:perform_async)
expect(ArchiveTraceWorker).to receive(:perform_in)
diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb
index 7e469958a84..8395d8fb0e7 100644
--- a/spec/workers/build_hooks_worker_spec.rb
+++ b/spec/workers/build_hooks_worker_spec.rb
@@ -23,6 +23,24 @@ RSpec.describe BuildHooksWorker do
end
end
+ describe '.perform_async' do
+ context 'when delayed_perform_for_build_hooks_worker feature flag is disabled' do
+ before do
+ stub_feature_flags(delayed_perform_for_build_hooks_worker: false)
+ end
+
+ it 'does not call perform_in' do
+ expect(described_class).not_to receive(:perform_in)
+ end
+ end
+
+ it 'delays scheduling a job by calling perform_in' do
+ expect(described_class).to receive(:perform_in).with(described_class::DATA_CONSISTENCY_DELAY.second, 123)
+
+ described_class.perform_async(123)
+ end
+ end
+
it_behaves_like 'worker with data consistency',
described_class,
feature_flag: :load_balancing_for_build_hooks_worker,
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index 5964ec45563..9119394f250 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe BulkImportWorker do
end
context 'when there are created entities to process' do
- it 'marks a batch of entities as started, enqueues BulkImports::EntityWorker and reenqueues' do
+    it 'marks a batch of entities as started, enqueues EntityWorker and ExportRequestWorker, and reenqueues' do
stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
bulk_import = create(:bulk_import, :created)
@@ -78,6 +78,7 @@ RSpec.describe BulkImportWorker do
expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
expect(BulkImports::EntityWorker).to receive(:perform_async)
+ expect(BulkImports::ExportRequestWorker).to receive(:perform_async)
subject.perform(bulk_import.id)
diff --git a/spec/workers/bulk_imports/export_request_worker_spec.rb b/spec/workers/bulk_imports/export_request_worker_spec.rb
new file mode 100644
index 00000000000..f7838279212
--- /dev/null
+++ b/spec/workers/bulk_imports/export_request_worker_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::ExportRequestWorker do
+ let_it_be(:bulk_import) { create(:bulk_import) }
+ let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import) }
+ let_it_be(:entity) { create(:bulk_import_entity, source_full_path: 'foo/bar', bulk_import: bulk_import) }
+
+ let(:response_double) { double(code: 200, success?: true, parsed_response: {}) }
+ let(:job_args) { [entity.id] }
+
+ describe '#perform' do
+ before do
+ allow(Gitlab::HTTP).to receive(:post).and_return(response_double)
+ end
+
+ include_examples 'an idempotent worker' do
+ it 'requests relations export' do
+ expected = "/groups/foo%2Fbar/export_relations"
+
+ expect_next_instance_of(BulkImports::Clients::Http) do |client|
+ expect(client).to receive(:post).with(expected).twice
+ end
+
+ perform_multiple(job_args)
+ end
+ end
+ end
+end
diff --git a/spec/workers/bulk_imports/relation_export_worker_spec.rb b/spec/workers/bulk_imports/relation_export_worker_spec.rb
new file mode 100644
index 00000000000..63f1992d186
--- /dev/null
+++ b/spec/workers/bulk_imports/relation_export_worker_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::RelationExportWorker do
+ let_it_be(:jid) { 'jid' }
+ let_it_be(:relation) { 'labels' }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:job_args) { [user.id, group.id, group.class.name, relation] }
+
+ describe '#perform' do
+ include_examples 'an idempotent worker' do
+ context 'when export record does not exist' do
+ let(:another_group) { create(:group) }
+ let(:job_args) { [user.id, another_group.id, another_group.class.name, relation] }
+
+ it 'creates export record' do
+ another_group.add_owner(user)
+
+ expect { perform_multiple(job_args) }
+ .to change { another_group.bulk_import_exports.count }
+ .from(0)
+ .to(1)
+ end
+ end
+
+ it 'executes RelationExportService' do
+ group.add_owner(user)
+
+ service = instance_double(BulkImports::RelationExportService)
+
+ expect(BulkImports::RelationExportService)
+ .to receive(:new)
+ .with(user, group, relation, anything)
+ .twice
+ .and_return(service)
+ expect(service)
+ .to receive(:execute)
+ .twice
+
+ perform_multiple(job_args)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb b/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb
index 116e6878281..372b0de1b54 100644
--- a/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb
+++ b/spec/workers/ci/create_cross_project_pipeline_worker_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Ci::CreateCrossProjectPipelineWorker do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
let(:bridge) { create(:ci_bridge, user: user, pipeline: pipeline) }
let(:service) { double('pipeline creation service') }
diff --git a/spec/workers/ci/delete_unit_tests_worker_spec.rb b/spec/workers/ci/delete_unit_tests_worker_spec.rb
new file mode 100644
index 00000000000..ff2575b19c1
--- /dev/null
+++ b/spec/workers/ci/delete_unit_tests_worker_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DeleteUnitTestsWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'executes a service' do
+ expect_next_instance_of(Ci::DeleteUnitTestsService) do |instance|
+ expect(instance).to receive(:execute)
+ end
+
+ worker.perform
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let!(:unit_test_1) { create(:ci_unit_test) }
+ let!(:unit_test_2) { create(:ci_unit_test) }
+ let!(:unit_test_1_recent_failure) { create(:ci_unit_test_failure, unit_test: unit_test_1) }
+ let!(:unit_test_2_old_failure) { create(:ci_unit_test_failure, unit_test: unit_test_2, failed_at: 15.days.ago) }
+
+ it 'only deletes old unit tests and their failures' do
+ subject
+
+ expect(unit_test_1.reload).to be_persisted
+ expect(unit_test_1_recent_failure.reload).to be_persisted
+ expect(Ci::UnitTest.find_by(id: unit_test_2.id)).to be_nil
+ expect(Ci::UnitTestFailure.find_by(id: unit_test_2_old_failure.id)).to be_nil
+ end
+ end
+end
diff --git a/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb b/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
index 4690c73d121..e5de0ba0143 100644
--- a/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
+++ b/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Ci::MergeRequests::AddTodoWhenBuildFailsWorker do
include_examples 'an idempotent worker' do
it 'executes todo service' do
service = double
- expect(::MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).with(project, nil).and_return(service).twice
+ expect(::MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).with(project: project).and_return(service).twice
expect(service).to receive(:execute).with(job).twice
perform_twice
diff --git a/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
index be351032b58..5096691270a 100644
--- a/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/create_quality_report_worker_spec.rb
@@ -21,8 +21,8 @@ RSpec.describe ::Ci::PipelineArtifacts::CreateQualityReportWorker do
it_behaves_like 'an idempotent worker' do
let(:job_args) { pipeline_id }
- it 'creates a pipeline artifact' do
- expect { subject }.to change { pipeline.pipeline_artifacts.count }.by(1)
+ it 'does not create another pipeline artifact if already has one' do
+ expect { subject }.not_to change { pipeline.pipeline_artifacts.count }
end
end
end
diff --git a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
index ad9c08d02cb..274f848ad88 100644
--- a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
describe '#perform' do
let_it_be(:pipeline_artifact) do
- create(:ci_pipeline_artifact, :with_coverage_report, expire_at: 1.week.ago)
+ create(:ci_pipeline_artifact, :with_coverage_report, :unlocked, expire_at: 1.week.ago)
end
it 'executes a service' do
diff --git a/spec/workers/ci/retry_pipeline_worker_spec.rb b/spec/workers/ci/retry_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..c7600a24280
--- /dev/null
+++ b/spec/workers/ci/retry_pipeline_worker_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RetryPipelineWorker do
+ describe '#perform' do
+ subject(:perform) { described_class.new.perform(pipeline_id, user_id) }
+
+ let(:pipeline) { create(:ci_pipeline) }
+
+ context 'when pipeline exists' do
+ let(:pipeline_id) { pipeline.id }
+
+ context 'when user exists' do
+ let(:user) { create(:user) }
+ let(:user_id) { user.id }
+
+ before do
+ pipeline.project.add_maintainer(user)
+ end
+
+ it 'retries the pipeline' do
+ expect(::Ci::Pipeline).to receive(:find_by_id).with(pipeline.id).and_return(pipeline)
+ expect(pipeline).to receive(:retry_failed).with(having_attributes(id: user_id))
+
+ perform
+ end
+ end
+
+ context 'when user does not exist' do
+ let(:user_id) { 1234 }
+
+ it 'does not retry the pipeline' do
+ expect(::Ci::Pipeline).to receive(:find_by_id).with(pipeline_id).and_return(pipeline)
+ expect(pipeline).not_to receive(:retry_failed).with(having_attributes(id: user_id))
+
+ perform
+ end
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ let(:pipeline_id) { 1234 }
+ let(:user_id) { 1234 }
+
+ it 'returns nil' do
+ expect(perform).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/workers/cluster_update_app_worker_spec.rb b/spec/workers/cluster_update_app_worker_spec.rb
index 8b8c1c82099..8f61ee17162 100644
--- a/spec/workers/cluster_update_app_worker_spec.rb
+++ b/spec/workers/cluster_update_app_worker_spec.rb
@@ -46,8 +46,19 @@ RSpec.describe ClusterUpdateAppWorker do
subject.perform(application.name, application.id, project.id, Time.current)
end
+ context 'application is externally installed' do
+ it 'does not execute PrometheusUpdateService' do
+ application = create(:clusters_applications_prometheus, :externally_installed)
+
+ expect(prometheus_update_service).not_to receive(:execute)
+
+ subject.perform(application.name, application.id, project.id, Time.current)
+ end
+ end
+
context 'with exclusive lease' do
let_it_be(:user) { create(:user) }
+
let(:application) { create(:clusters_applications_prometheus, :installed) }
let(:lease_key) { "#{described_class.name.underscore}-#{application.id}" }
diff --git a/spec/workers/concerns/application_worker_spec.rb b/spec/workers/concerns/application_worker_spec.rb
index 07e11f014c3..5c1a1d3ae8f 100644
--- a/spec/workers/concerns/application_worker_spec.rb
+++ b/spec/workers/concerns/application_worker_spec.rb
@@ -3,7 +3,14 @@
require 'spec_helper'
RSpec.describe ApplicationWorker do
- let_it_be(:worker) do
+  # We depend on the lazy-loading behavior of RSpec. If the worker is loaded
+  # before the stubs are set up, things are likely to go wrong. Consider this gotcha:
+  # before do
+  #   allow(router).to receive(:route).with(worker).and_return('queue_1')
+  # end
+  # As the worker is loaded, it includes ApplicationWorker, and the router is
+  # called before it is stubbed. That makes the stubbing useless.
+ let(:worker) do
Class.new do
def self.name
'Gitlab::Foo::Bar::DummyWorker'
@@ -14,10 +21,77 @@ RSpec.describe ApplicationWorker do
end
let(:instance) { worker.new }
+ let(:router) { double(:router) }
- describe 'Sidekiq options' do
- it 'sets the queue name based on the class name' do
+ before do
+ allow(::Gitlab::SidekiqConfig::WorkerRouter).to receive(:global).and_return(router)
+ allow(router).to receive(:route).and_return('foo_bar_dummy')
+ end
+
+ describe 'Sidekiq attributes' do
+ it 'sets the queue name based on the output of the router' do
expect(worker.sidekiq_options['queue']).to eq('foo_bar_dummy')
+ expect(router).to have_received(:route).with(worker).at_least(:once)
+ end
+
+ context 'when a worker attribute is updated' do
+ before do
+ counter = 0
+ allow(router).to receive(:route) do
+ counter += 1
+ "queue_#{counter}"
+ end
+ end
+
+ it 'updates the queue name afterward' do
+ expect(worker.sidekiq_options['queue']).to eq('queue_1')
+
+ worker.feature_category :pages
+ expect(worker.sidekiq_options['queue']).to eq('queue_2')
+
+ worker.feature_category_not_owned!
+ expect(worker.sidekiq_options['queue']).to eq('queue_3')
+
+ worker.urgency :high
+ expect(worker.sidekiq_options['queue']).to eq('queue_4')
+
+ worker.worker_has_external_dependencies!
+ expect(worker.sidekiq_options['queue']).to eq('queue_5')
+
+ worker.worker_resource_boundary :cpu
+ expect(worker.sidekiq_options['queue']).to eq('queue_6')
+
+ worker.idempotent!
+ expect(worker.sidekiq_options['queue']).to eq('queue_7')
+
+ worker.weight 3
+ expect(worker.sidekiq_options['queue']).to eq('queue_8')
+
+ worker.tags :hello
+ expect(worker.sidekiq_options['queue']).to eq('queue_9')
+
+ worker.big_payload!
+ expect(worker.sidekiq_options['queue']).to eq('queue_10')
+
+ expect(router).to have_received(:route).with(worker).at_least(10).times
+ end
+ end
+
+ context 'when the worker is inherited' do
+ let(:sub_worker) { Class.new(worker) }
+
+ before do
+ allow(router).to receive(:route).and_return('queue_1')
+ worker # Force loading worker 1 to update its queue
+
+ allow(router).to receive(:route).and_return('queue_2')
+ end
+
+ it 'sets the queue name for the inherited worker' do
+ expect(sub_worker.sidekiq_options['queue']).to eq('queue_2')
+
+ expect(router).to have_received(:route).with(sub_worker).at_least(:once)
+ end
end
end
@@ -74,11 +148,24 @@ RSpec.describe ApplicationWorker do
end
describe '.queue_namespace' do
- it 'sets the queue name based on the class name' do
+ before do
+ allow(router).to receive(:route).and_return('foo_bar_dummy', 'some_namespace:foo_bar_dummy')
+ end
+
+ it 'updates the queue name from the router again' do
+ expect(worker.queue).to eq('foo_bar_dummy')
+
worker.queue_namespace :some_namespace
expect(worker.queue).to eq('some_namespace:foo_bar_dummy')
end
+
+ it 'updates the queue_namespace options of the worker' do
+ worker.queue_namespace :some_namespace
+
+ expect(worker.queue_namespace).to eql('some_namespace')
+ expect(worker.sidekiq_options['queue_namespace']).to be(:some_namespace)
+ end
end
describe '.queue' do
diff --git a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
index 75f2c7922de..85e1721461f 100644
--- a/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
+++ b/spec/workers/concerns/gitlab/github_import/object_importer_spec.rb
@@ -18,39 +18,49 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
def counter_description
'This is a counter'
end
+
+ def representation_class
+      MockRepresentation
+ end
end.new
end
+ before do
+    stub_const('MockRepresentation', Class.new do
+ include Gitlab::GithubImport::Representation::ToHash
+ include Gitlab::GithubImport::Representation::ExposeAttribute
+
+ def self.from_json_hash(raw_hash)
+ new(Gitlab::GithubImport::Representation.symbolize_hash(raw_hash))
+ end
+
+ attr_reader :attributes
+
+ def initialize(attributes)
+ @attributes = attributes
+ end
+ end)
+ end
+
describe '#import' do
- let(:representation_class) { double(:representation_class) }
let(:importer_class) { double(:importer_class, name: 'klass_name') }
let(:importer_instance) { double(:importer_instance) }
- let(:representation) { double(:representation) }
let(:project) { double(:project, full_path: 'foo/bar', id: 1) }
let(:client) { double(:client) }
before do
expect(worker)
- .to receive(:representation_class)
- .and_return(representation_class)
-
- expect(worker)
.to receive(:importer_class)
.at_least(:once)
.and_return(importer_class)
+ end
- expect(representation_class)
- .to receive(:from_json_hash)
- .with(an_instance_of(Hash))
- .and_return(representation)
-
+ it 'imports the object' do
expect(importer_class)
.to receive(:new)
- .with(representation, project, client)
+        .with(instance_of(MockRepresentation), project, client)
.and_return(importer_instance)
- end
- it 'imports the object' do
expect(importer_instance)
.to receive(:execute)
@@ -62,6 +72,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(logger)
.to receive(:info)
.with(
+ github_id: 1,
message: 'starting importer',
import_source: :github,
project_id: 1,
@@ -70,6 +81,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(logger)
.to receive(:info)
.with(
+ github_id: 1,
message: 'importer finished',
import_source: :github,
project_id: 1,
@@ -77,10 +89,15 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
)
end
- worker.import(project, client, { 'number' => 10 })
+ worker.import(project, client, { 'number' => 10, 'github_id' => 1 })
end
it 'logs error when the import fails' do
+ expect(importer_class)
+ .to receive(:new)
+        .with(instance_of(MockRepresentation), project, client)
+ .and_return(importer_instance)
+
exception = StandardError.new('some error')
expect(importer_instance)
.to receive(:execute)
@@ -90,6 +107,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(logger)
.to receive(:info)
.with(
+ github_id: 1,
message: 'starting importer',
import_source: :github,
project_id: project.id,
@@ -98,20 +116,64 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter do
expect(logger)
.to receive(:error)
.with(
+ github_id: 1,
message: 'importer failed',
import_source: :github,
project_id: project.id,
importer: 'klass_name',
- 'error.message': 'some error'
+ 'error.message': 'some error',
+ 'github.data': {
+ 'github_id' => 1,
+ 'number' => 10
+ }
)
end
expect(Gitlab::ErrorTracking)
.to receive(:track_and_raise_exception)
- .with(exception, import_source: :github, project_id: 1, importer: 'klass_name')
- .and_call_original
+ .with(
+ exception,
+ import_source: :github,
+ github_id: 1,
+ project_id: 1,
+ importer: 'klass_name'
+ ).and_call_original
+
+ expect { worker.import(project, client, { 'number' => 10, 'github_id' => 1 }) }
+ .to raise_error(exception)
+ end
+
+ it 'logs error when representation does not have a github_id' do
+ expect(importer_class).not_to receive(:new)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ github_id: nil,
+ message: 'importer failed',
+ import_source: :github,
+ project_id: project.id,
+ importer: 'klass_name',
+ 'error.message': 'key not found: :github_id',
+ 'github.data': {
+ 'number' => 10
+ }
+ )
+ end
- expect { worker.import(project, client, { 'number' => 10 }) }.to raise_error(exception)
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_exception)
+ .with(
+ an_instance_of(KeyError),
+ import_source: :github,
+ github_id: nil,
+ project_id: 1,
+ importer: 'klass_name'
+ ).and_call_original
+
+ expect { worker.import(project, client, { 'number' => 10 }) }
+ .to raise_error(KeyError, 'key not found: :github_id')
end
end
diff --git a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
index 2c79f347903..f141a1ad7ad 100644
--- a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
@@ -7,30 +7,30 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
described_class.new('namespace')
end
+ let(:max_jids) { 10 }
+
describe '#register' do
it 'adds jid to the set' do
- job_tracker.register('a-job-id')
-
+      expect(job_tracker.register('a-job-id', max_jids)).to be true
expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end
- it 'updates the counter' do
- expect { job_tracker.register('a-job-id') }
- .to change { job_tracker.count }
- .from(0)
- .to(1)
- end
-
- it 'does it in only one Redis call' do
- expect(job_tracker).to receive(:with_redis).once.and_call_original
+ it 'returns false if the jid was not added' do
+ max_jids = 2
+ %w[jid1 jid2].each do |jid|
+ expect(job_tracker.register(jid, max_jids)).to be true
+ end
- job_tracker.register('a-job-id')
+ expect(job_tracker.register('jid3', max_jids)).to be false
+ expect(job_tracker.running_jids).to contain_exactly(*%w[jid1 jid2])
end
end
describe '#remove' do
before do
- job_tracker.register(%w[a-job-id other-job-id])
+ %w[a-job-id other-job-id].each do |jid|
+ job_tracker.register(jid, max_jids)
+ end
end
it 'removes jid from the set' do
@@ -38,24 +38,11 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end
-
- it 'updates the counter' do
- expect { job_tracker.remove('other-job-id') }
- .to change { job_tracker.count }
- .from(2)
- .to(1)
- end
-
- it 'does it in only one Redis call' do
- expect(job_tracker).to receive(:with_redis).once.and_call_original
-
- job_tracker.remove('other-job-id')
- end
end
describe '#clean_up' do
before do
- job_tracker.register('a-job-id')
+ job_tracker.register('a-job-id', max_jids)
end
context 'with running jobs' do
@@ -83,13 +70,6 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
.to change { job_tracker.running_jids.include?('a-job-id') }
end
- it 'updates the counter' do
- expect { job_tracker.clean_up }
- .to change { job_tracker.count }
- .from(1)
- .to(0)
- end
-
it 'gets the job ids, removes them, and updates the counter with only two Redis calls' do
expect(job_tracker).to receive(:with_redis).twice.and_call_original
diff --git a/spec/workers/concerns/limited_capacity/worker_spec.rb b/spec/workers/concerns/limited_capacity/worker_spec.rb
index 2c33c8666ec..790b5c3544d 100644
--- a/spec/workers/concerns/limited_capacity/worker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/worker_spec.rb
@@ -44,40 +44,22 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
describe '.perform_with_capacity' do
subject(:perform_with_capacity) { worker_class.perform_with_capacity(:arg) }
+ let(:max_running_jobs) { 3 }
+
before do
expect_next_instance_of(worker_class) do |instance|
expect(instance).to receive(:remove_failed_jobs)
- expect(instance).to receive(:report_prometheus_metrics)
-
- allow(instance).to receive(:remaining_work_count).and_return(remaining_work_count)
- allow(instance).to receive(:remaining_capacity).and_return(remaining_capacity)
- end
- end
-
- context 'when capacity is larger than work' do
- let(:remaining_work_count) { 2 }
- let(:remaining_capacity) { 3 }
- it 'enqueues jobs for remaining work' do
- expect(worker_class)
- .to receive(:bulk_perform_async)
- .with([[:arg], [:arg]])
-
- perform_with_capacity
+ allow(instance).to receive(:max_running_jobs).and_return(max_running_jobs)
end
end
- context 'when capacity is lower than work' do
- let(:remaining_work_count) { 5 }
- let(:remaining_capacity) { 3 }
-
- it 'enqueues jobs for remaining work' do
- expect(worker_class)
- .to receive(:bulk_perform_async)
- .with([[:arg], [:arg], [:arg]])
+ it 'enqueues jobs' do
+ expect(worker_class)
+ .to receive(:bulk_perform_async)
+ .with([[:arg], [:arg], [:arg]])
- perform_with_capacity
- end
+ perform_with_capacity
end
end
@@ -104,34 +86,27 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform
end
- it 'registers itself in the running set' do
+ it 'reports prometheus metrics' do
allow(worker).to receive(:perform_work)
- expect(job_tracker).to receive(:register).with('my-jid')
+ expect(worker).to receive(:report_prometheus_metrics).once.and_call_original
+ expect(worker).to receive(:report_running_jobs_metrics).twice.and_call_original
perform
end
- it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove).with('my-jid')
-
+ it 'updates the running set' do
+ expect(job_tracker.running_jids).to be_empty
allow(worker).to receive(:perform_work)
perform
- end
- it 'reports prometheus metrics' do
- allow(worker).to receive(:perform_work)
- expect(worker).to receive(:report_prometheus_metrics).once.and_call_original
- expect(worker).to receive(:report_running_jobs_metrics).twice.and_call_original
-
- perform
+ expect(job_tracker.running_jids).to be_empty
end
end
context 'with capacity and without work' do
before do
allow(worker).to receive(:max_running_jobs).and_return(10)
- allow(worker).to receive(:running_jobs_count).and_return(0)
allow(worker).to receive(:remaining_work_count).and_return(0)
allow(worker).to receive(:perform_work)
end
@@ -146,7 +121,7 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
context 'without capacity' do
before do
allow(worker).to receive(:max_running_jobs).and_return(10)
- allow(worker).to receive(:running_jobs_count).and_return(15)
+ allow(job_tracker).to receive(:register).and_return(false)
allow(worker).to receive(:remaining_work_count).and_return(10)
end
@@ -161,27 +136,14 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform
end
-
- it 'does not register in the running set' do
- expect(job_tracker).not_to receive(:register)
-
- perform
- end
-
- it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove).with('my-jid')
-
- perform
- end
-
- it 'reports prometheus metrics' do
- expect(worker).to receive(:report_prometheus_metrics)
-
- perform
- end
end
context 'when perform_work fails' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(job_tracker).to receive(:register).and_return(true)
+ end
+
it 'does not re-enqueue itself' do
expect(worker).not_to receive(:re_enqueue)
@@ -189,7 +151,7 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
end
it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove)
+ expect(job_tracker).to receive(:remove).with('my-jid')
expect { perform }.to raise_error(NotImplementedError)
end
@@ -202,65 +164,14 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
end
end
- describe '#remaining_capacity' do
- subject(:remaining_capacity) { worker.remaining_capacity }
-
- before do
- expect(worker).to receive(:max_running_jobs).and_return(max_capacity)
- end
-
- context 'when changing the capacity to a lower value' do
- let(:max_capacity) { -1 }
-
- it { expect(remaining_capacity).to eq(0) }
- end
-
- context 'when registering new jobs' do
- let(:max_capacity) { 2 }
-
- before do
- job_tracker.register('a-job-id')
- end
-
- it { expect(remaining_capacity).to eq(1) }
- end
-
- context 'with jobs in the queue' do
- let(:max_capacity) { 2 }
-
- before do
- expect(worker_class).to receive(:queue_size).and_return(1)
- end
-
- it { expect(remaining_capacity).to eq(1) }
- end
-
- context 'with both running jobs and queued jobs' do
- let(:max_capacity) { 10 }
-
- before do
- expect(worker_class).to receive(:queue_size).and_return(5)
- expect(worker).to receive(:running_jobs_count).and_return(3)
- end
-
- it { expect(remaining_capacity).to eq(2) }
- end
- end
-
describe '#remove_failed_jobs' do
subject(:remove_failed_jobs) { worker.remove_failed_jobs }
- before do
- job_tracker.register('a-job-id')
- allow(worker).to receive(:max_running_jobs).and_return(2)
+ it 'removes failed jobs' do
+ job_tracker.register('a-job-id', 10)
expect(job_tracker).to receive(:clean_up).and_call_original
- end
-
- context 'with failed jobs' do
- it 'update the available capacity' do
- expect { remove_failed_jobs }.to change { worker.remaining_capacity }.by(1)
- end
+ expect { remove_failed_jobs }.to change { job_tracker.running_jids.size }.by(-1)
end
end
diff --git a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
index eb4faaed769..04f568515ed 100644
--- a/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/container_expiration_policies/cleanup_container_repository_worker_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
using RSpec::Parameterized::TableSyntax
- let_it_be(:repository, reload: true) { create(:container_repository, :cleanup_scheduled) }
- let_it_be(:project) { repository.project }
- let_it_be(:policy) { project.container_expiration_policy }
- let_it_be(:other_repository) { create(:container_repository) }
+ let_it_be(:repository, refind: true) { create(:container_repository, :cleanup_scheduled, expiration_policy_started_at: 1.month.ago) }
+ let_it_be(:other_repository, refind: true) { create(:container_repository, expiration_policy_started_at: 15.days.ago) }
+ let(:project) { repository.project }
+ let(:policy) { project.container_expiration_policy }
let(:worker) { described_class.new }
describe '#perform_work' do
@@ -19,7 +19,7 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
policy.update_column(:enabled, true)
end
- RSpec.shared_examples 'handling all repository conditions' do
+ shared_examples 'handling all repository conditions' do
it 'sends the repository for cleaning' do
service_response = cleanup_service_response(repository: repository)
expect(ContainerExpirationPolicies::CleanupService)
@@ -72,11 +72,21 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
end
+ context 'with an erroneous cleanup' do
+ it 'logs an error' do
+ service_response = ServiceResponse.error(message: 'cleanup in an error')
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response, cleanup_status: :error)
+
+ subject
+ end
+ end
+
context 'with policy running shortly' do
before do
- repository.project
- .container_expiration_policy
- .update_column(:next_run_at, 1.minute.from_now)
+ repository.cleanup_unfinished! if loopless_enabled?
+ policy.update_column(:next_run_at, 1.minute.from_now)
end
it 'skips the repository' do
@@ -84,118 +94,385 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(worker).to receive(:log_extra_metadata_on_done).with(:container_repository_id, repository.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:project_id, repository.project.id)
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_status, :skipped)
-
expect { subject }.to change { ContainerRepository.waiting_for_cleanup.count }.from(1).to(0)
+
expect(repository.reload.cleanup_unscheduled?).to be_truthy
end
end
context 'with disabled policy' do
before do
- repository.project
- .container_expiration_policy
- .disable!
+ policy.disable!
end
it 'skips the repository' do
expect(ContainerExpirationPolicies::CleanupService).not_to receive(:new)
- expect { subject }.to change { ContainerRepository.waiting_for_cleanup.count }.from(1).to(0)
- expect(repository.reload.cleanup_unscheduled?).to be_truthy
+ if loopless_enabled?
+ expect { subject }
+ .to not_change { ContainerRepository.waiting_for_cleanup.count }
+ .and not_change { repository.reload.expiration_policy_cleanup_status }
+ else
+ expect { subject }.to change { ContainerRepository.waiting_for_cleanup.count }.from(1).to(0)
+ expect(repository.reload.cleanup_unscheduled?).to be_truthy
+ end
end
end
end
- context 'with repository in cleanup scheduled state' do
- it_behaves_like 'handling all repository conditions'
- end
-
- context 'with repository in cleanup unfinished state' do
+ context 'with loopless enabled' do
before do
- repository.cleanup_unfinished!
+ stub_feature_flags(container_registry_expiration_policies_loopless: true)
end
- it_behaves_like 'handling all repository conditions'
- end
+ context 'with repository in cleanup unscheduled state' do
+ before do
+ policy.update_column(:next_run_at, 5.minutes.ago)
+ end
- context 'with another repository in cleanup unfinished state' do
- let_it_be(:another_repository) { create(:container_repository, :cleanup_unfinished) }
+ it_behaves_like 'handling all repository conditions'
+ end
- it 'process the cleanup scheduled repository first' do
- service_response = cleanup_service_response(repository: repository)
- expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: service_response))
- expect_log_extra_metadata(service_response: service_response)
+ context 'with repository in cleanup unfinished state' do
+ before do
+ repository.cleanup_unfinished!
+ end
- subject
+ it_behaves_like 'handling all repository conditions'
end
- end
- context 'with multiple repositories in cleanup unfinished state' do
- let_it_be(:repository2) { create(:container_repository, :cleanup_unfinished, expiration_policy_started_at: 20.minutes.ago) }
- let_it_be(:repository3) { create(:container_repository, :cleanup_unfinished, expiration_policy_started_at: 10.minutes.ago) }
+ context 'container repository selection' do
+ where(:repository_cleanup_status, :repository_policy_status, :other_repository_cleanup_status, :other_repository_policy_status, :expected_selected_repository) do
+ :unscheduled | :disabled | :unscheduled | :disabled | :none
+ :unscheduled | :disabled | :unscheduled | :runnable | :other_repository
+ :unscheduled | :disabled | :unscheduled | :not_runnable | :none
- before do
- repository.update!(expiration_policy_cleanup_status: :cleanup_unfinished, expiration_policy_started_at: 30.minutes.ago)
+ :unscheduled | :disabled | :scheduled | :disabled | :none
+ :unscheduled | :disabled | :scheduled | :runnable | :other_repository
+ :unscheduled | :disabled | :scheduled | :not_runnable | :none
+
+ :unscheduled | :disabled | :unfinished | :disabled | :none
+ :unscheduled | :disabled | :unfinished | :runnable | :other_repository
+ :unscheduled | :disabled | :unfinished | :not_runnable | :other_repository
+
+ :unscheduled | :disabled | :ongoing | :disabled | :none
+ :unscheduled | :disabled | :ongoing | :runnable | :none
+ :unscheduled | :disabled | :ongoing | :not_runnable | :none
+
+ :unscheduled | :runnable | :unscheduled | :disabled | :repository
+ :unscheduled | :runnable | :unscheduled | :runnable | :repository
+ :unscheduled | :runnable | :unscheduled | :not_runnable | :repository
+
+ :unscheduled | :runnable | :scheduled | :disabled | :repository
+ :unscheduled | :runnable | :scheduled | :runnable | :repository
+ :unscheduled | :runnable | :scheduled | :not_runnable | :repository
+
+ :unscheduled | :runnable | :unfinished | :disabled | :repository
+ :unscheduled | :runnable | :unfinished | :runnable | :repository
+ :unscheduled | :runnable | :unfinished | :not_runnable | :repository
+
+ :unscheduled | :runnable | :ongoing | :disabled | :repository
+ :unscheduled | :runnable | :ongoing | :runnable | :repository
+ :unscheduled | :runnable | :ongoing | :not_runnable | :repository
+
+ :scheduled | :disabled | :unscheduled | :disabled | :none
+ :scheduled | :disabled | :unscheduled | :runnable | :other_repository
+ :scheduled | :disabled | :unscheduled | :not_runnable | :none
+
+ :scheduled | :disabled | :scheduled | :disabled | :none
+ :scheduled | :disabled | :scheduled | :runnable | :other_repository
+ :scheduled | :disabled | :scheduled | :not_runnable | :none
+
+ :scheduled | :disabled | :unfinished | :disabled | :none
+ :scheduled | :disabled | :unfinished | :runnable | :other_repository
+ :scheduled | :disabled | :unfinished | :not_runnable | :other_repository
+
+ :scheduled | :disabled | :ongoing | :disabled | :none
+ :scheduled | :disabled | :ongoing | :runnable | :none
+ :scheduled | :disabled | :ongoing | :not_runnable | :none
+
+ :scheduled | :runnable | :unscheduled | :disabled | :repository
+ :scheduled | :runnable | :unscheduled | :runnable | :other_repository
+ :scheduled | :runnable | :unscheduled | :not_runnable | :repository
+
+ :scheduled | :runnable | :scheduled | :disabled | :repository
+ :scheduled | :runnable | :scheduled | :runnable | :repository
+ :scheduled | :runnable | :scheduled | :not_runnable | :repository
+
+ :scheduled | :runnable | :unfinished | :disabled | :repository
+ :scheduled | :runnable | :unfinished | :runnable | :repository
+ :scheduled | :runnable | :unfinished | :not_runnable | :repository
+
+ :scheduled | :runnable | :ongoing | :disabled | :repository
+ :scheduled | :runnable | :ongoing | :runnable | :repository
+ :scheduled | :runnable | :ongoing | :not_runnable | :repository
+
+ :scheduled | :not_runnable | :unscheduled | :disabled | :none
+ :scheduled | :not_runnable | :unscheduled | :runnable | :other_repository
+ :scheduled | :not_runnable | :unscheduled | :not_runnable | :none
+
+ :scheduled | :not_runnable | :scheduled | :disabled | :none
+ :scheduled | :not_runnable | :scheduled | :runnable | :other_repository
+ :scheduled | :not_runnable | :scheduled | :not_runnable | :none
+
+ :scheduled | :not_runnable | :unfinished | :disabled | :none
+ :scheduled | :not_runnable | :unfinished | :runnable | :other_repository
+ :scheduled | :not_runnable | :unfinished | :not_runnable | :other_repository
+
+ :scheduled | :not_runnable | :ongoing | :disabled | :none
+ :scheduled | :not_runnable | :ongoing | :runnable | :none
+ :scheduled | :not_runnable | :ongoing | :not_runnable | :none
+
+ :unfinished | :disabled | :unscheduled | :disabled | :none
+ :unfinished | :disabled | :unscheduled | :runnable | :other_repository
+ :unfinished | :disabled | :unscheduled | :not_runnable | :none
+
+ :unfinished | :disabled | :scheduled | :disabled | :none
+ :unfinished | :disabled | :scheduled | :runnable | :other_repository
+ :unfinished | :disabled | :scheduled | :not_runnable | :none
+
+ :unfinished | :disabled | :unfinished | :disabled | :none
+ :unfinished | :disabled | :unfinished | :runnable | :other_repository
+ :unfinished | :disabled | :unfinished | :not_runnable | :other_repository
+
+ :unfinished | :disabled | :ongoing | :disabled | :none
+ :unfinished | :disabled | :ongoing | :runnable | :none
+ :unfinished | :disabled | :ongoing | :not_runnable | :none
+
+ :unfinished | :runnable | :unscheduled | :disabled | :repository
+ :unfinished | :runnable | :unscheduled | :runnable | :other_repository
+ :unfinished | :runnable | :unscheduled | :not_runnable | :repository
+
+ :unfinished | :runnable | :scheduled | :disabled | :repository
+ :unfinished | :runnable | :scheduled | :runnable | :other_repository
+ :unfinished | :runnable | :scheduled | :not_runnable | :repository
+
+ :unfinished | :runnable | :unfinished | :disabled | :repository
+ :unfinished | :runnable | :unfinished | :runnable | :repository
+ :unfinished | :runnable | :unfinished | :not_runnable | :repository
+
+ :unfinished | :runnable | :ongoing | :disabled | :repository
+ :unfinished | :runnable | :ongoing | :runnable | :repository
+ :unfinished | :runnable | :ongoing | :not_runnable | :repository
+
+ :unfinished | :not_runnable | :unscheduled | :disabled | :repository
+ :unfinished | :not_runnable | :unscheduled | :runnable | :other_repository
+ :unfinished | :not_runnable | :unscheduled | :not_runnable | :repository
+
+ :unfinished | :not_runnable | :scheduled | :disabled | :repository
+ :unfinished | :not_runnable | :scheduled | :runnable | :other_repository
+ :unfinished | :not_runnable | :scheduled | :not_runnable | :repository
+
+ :unfinished | :not_runnable | :unfinished | :disabled | :repository
+ :unfinished | :not_runnable | :unfinished | :runnable | :repository
+ :unfinished | :not_runnable | :unfinished | :not_runnable | :repository
+
+ :unfinished | :not_runnable | :ongoing | :disabled | :repository
+ :unfinished | :not_runnable | :ongoing | :runnable | :repository
+ :unfinished | :not_runnable | :ongoing | :not_runnable | :repository
+
+ :ongoing | :disabled | :unscheduled | :disabled | :none
+ :ongoing | :disabled | :unscheduled | :runnable | :other_repository
+ :ongoing | :disabled | :unscheduled | :not_runnable | :none
+
+ :ongoing | :disabled | :scheduled | :disabled | :none
+ :ongoing | :disabled | :scheduled | :runnable | :other_repository
+ :ongoing | :disabled | :scheduled | :not_runnable | :none
+
+ :ongoing | :disabled | :unfinished | :disabled | :none
+ :ongoing | :disabled | :unfinished | :runnable | :other_repository
+ :ongoing | :disabled | :unfinished | :not_runnable | :other_repository
+
+ :ongoing | :disabled | :ongoing | :disabled | :none
+ :ongoing | :disabled | :ongoing | :runnable | :none
+ :ongoing | :disabled | :ongoing | :not_runnable | :none
+
+ :ongoing | :runnable | :unscheduled | :disabled | :none
+ :ongoing | :runnable | :unscheduled | :runnable | :other_repository
+ :ongoing | :runnable | :unscheduled | :not_runnable | :none
+
+ :ongoing | :runnable | :scheduled | :disabled | :none
+ :ongoing | :runnable | :scheduled | :runnable | :other_repository
+ :ongoing | :runnable | :scheduled | :not_runnable | :none
+
+ :ongoing | :runnable | :unfinished | :disabled | :none
+ :ongoing | :runnable | :unfinished | :runnable | :other_repository
+ :ongoing | :runnable | :unfinished | :not_runnable | :other_repository
+
+ :ongoing | :runnable | :ongoing | :disabled | :none
+ :ongoing | :runnable | :ongoing | :runnable | :none
+ :ongoing | :runnable | :ongoing | :not_runnable | :none
+
+ :ongoing | :not_runnable | :unscheduled | :disabled | :none
+ :ongoing | :not_runnable | :unscheduled | :runnable | :other_repository
+ :ongoing | :not_runnable | :unscheduled | :not_runnable | :none
+
+ :ongoing | :not_runnable | :scheduled | :disabled | :none
+ :ongoing | :not_runnable | :scheduled | :runnable | :other_repository
+ :ongoing | :not_runnable | :scheduled | :not_runnable | :none
+
+ :ongoing | :not_runnable | :unfinished | :disabled | :none
+ :ongoing | :not_runnable | :unfinished | :runnable | :other_repository
+ :ongoing | :not_runnable | :unfinished | :not_runnable | :other_repository
+
+ :ongoing | :not_runnable | :ongoing | :disabled | :none
+ :ongoing | :not_runnable | :ongoing | :runnable | :none
+ :ongoing | :not_runnable | :ongoing | :not_runnable | :none
+ end
+
+ with_them do
+ before do
+ update_container_repository(repository, repository_cleanup_status, repository_policy_status)
+ update_container_repository(other_repository, other_repository_cleanup_status, other_repository_policy_status)
+ end
+
+ subject { worker.send(:container_repository) }
+
+ if params[:expected_selected_repository] == :none
+ it 'does not select any repository' do
+ expect(subject).to eq(nil)
+ end
+ else
+ it 'does select a repository' do
+ selected_repository = expected_selected_repository == :repository ? repository : other_repository
+
+ expect(subject).to eq(selected_repository)
+ end
+ end
+
+ def update_container_repository(container_repository, cleanup_status, policy_status)
+ container_repository.update_column(:expiration_policy_cleanup_status, "cleanup_#{cleanup_status}")
+
+ policy = container_repository.project.container_expiration_policy
+
+ case policy_status
+ when :disabled
+ policy.update!(enabled: false)
+ when :runnable
+ policy.update!(enabled: true)
+ policy.update_column(:next_run_at, 5.minutes.ago)
+ when :not_runnable
+ policy.update!(enabled: true)
+ policy.update_column(:next_run_at, 5.minutes.from_now)
+ end
+ end
+ end
end
- it 'process the repository with the oldest expiration_policy_started_at' do
- service_response = cleanup_service_response(repository: repository)
- expect(ContainerExpirationPolicies::CleanupService)
- .to receive(:new).with(repository).and_return(double(execute: service_response))
- expect_log_extra_metadata(service_response: service_response)
+ context 'with another repository in cleanup unfinished state' do
+ let_it_be(:another_repository) { create(:container_repository, :cleanup_unfinished) }
- subject
+ before do
+ policy.update_column(:next_run_at, 5.minutes.ago)
+ end
+
+      it 'processes the cleanup scheduled repository first' do
+ service_response = cleanup_service_response(repository: repository)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
+
+ subject
+ end
end
end
- context 'with repository in cleanup ongoing state' do
+ context 'with loopless disabled' do
before do
- repository.cleanup_ongoing!
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
end
- it 'does not process it' do
- expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+ context 'with repository in cleanup scheduled state' do
+ it_behaves_like 'handling all repository conditions'
+ end
+
+ context 'with repository in cleanup unfinished state' do
+ before do
+ repository.cleanup_unfinished!
+ end
- expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
- expect(repository.cleanup_ongoing?).to be_truthy
+ it_behaves_like 'handling all repository conditions'
end
- end
- context 'with no repository in any cleanup state' do
- before do
- repository.cleanup_unscheduled!
+ context 'with another repository in cleanup unfinished state' do
+ let_it_be(:another_repository) { create(:container_repository, :cleanup_unfinished) }
+
+      it 'processes the cleanup scheduled repository first' do
+ service_response = cleanup_service_response(repository: repository)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
+
+ subject
+ end
end
- it 'does not process it' do
- expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+ context 'with multiple repositories in cleanup unfinished state' do
+ let_it_be(:repository2) { create(:container_repository, :cleanup_unfinished, expiration_policy_started_at: 20.minutes.ago) }
+ let_it_be(:repository3) { create(:container_repository, :cleanup_unfinished, expiration_policy_started_at: 10.minutes.ago) }
+
+ before do
+ repository.update!(expiration_policy_cleanup_status: :cleanup_unfinished, expiration_policy_started_at: 30.minutes.ago)
+ end
+
+      it 'processes the repository with the oldest expiration_policy_started_at' do
+ service_response = cleanup_service_response(repository: repository)
+ expect(ContainerExpirationPolicies::CleanupService)
+ .to receive(:new).with(repository).and_return(double(execute: service_response))
+ expect_log_extra_metadata(service_response: service_response)
- expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
- expect(repository.cleanup_unscheduled?).to be_truthy
+ subject
+ end
end
- end
- context 'with no container repository waiting' do
- before do
- repository.destroy!
+ context 'with repository in cleanup ongoing state' do
+ before do
+ repository.cleanup_ongoing!
+ end
+
+ it 'does not process it' do
+ expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+
+ expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ expect(repository.cleanup_ongoing?).to be_truthy
+ end
end
- it 'does not execute the cleanup tags service' do
- expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+ context 'with no repository in any cleanup state' do
+ before do
+ repository.cleanup_unscheduled!
+ end
+
+ it 'does not process it' do
+ expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
- expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ expect(repository.cleanup_unscheduled?).to be_truthy
+ end
end
- end
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(container_registry_expiration_policies_throttling: false)
+ context 'with no container repository waiting' do
+ before do
+ repository.destroy!
+ end
+
+ it 'does not execute the cleanup tags service' do
+ expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+
+ expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ end
end
- it 'is a no-op' do
- expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: false)
+ end
- expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ it 'is a no-op' do
+ expect(Projects::ContainerRepository::CleanupTagsService).not_to receive(:new)
+
+ expect { subject }.not_to change { ContainerRepository.waiting_for_cleanup.count }
+ end
end
end
@@ -224,44 +501,77 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_tags_service_truncated, truncated)
expect(worker).to receive(:log_extra_metadata_on_done).with(:running_jobs_count, 0)
+
+ if service_response.error?
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:cleanup_error_message, service_response.message)
+ end
end
end
describe '#remaining_work_count' do
subject { worker.remaining_work_count }
- context 'with container repositoires waiting for cleanup' do
- let_it_be(:unfinished_repositories) { create_list(:container_repository, 2, :cleanup_unfinished) }
+ shared_examples 'handling all conditions' do
+ context 'with container repositories waiting for cleanup' do
+ let_it_be(:unfinished_repositories) { create_list(:container_repository, 2, :cleanup_unfinished) }
- it { is_expected.to eq(3) }
+ it { is_expected.to eq(3) }
- it 'logs the work count' do
- expect_log_info(
- cleanup_scheduled_count: 1,
- cleanup_unfinished_count: 2,
- cleanup_total_count: 3
- )
+ it 'logs the work count' do
+ expect_log_info(
+ cleanup_scheduled_count: 1,
+ cleanup_unfinished_count: 2,
+ cleanup_total_count: 3
+ )
- subject
+ subject
+ end
+ end
+
+ context 'with no container repositories waiting for cleanup' do
+ before do
+ repository.cleanup_ongoing!
+ policy.update_column(:next_run_at, 5.minutes.from_now)
+ end
+
+ it { is_expected.to eq(0) }
+
+ it 'logs 0 work count' do
+ expect_log_info(
+ cleanup_scheduled_count: 0,
+ cleanup_unfinished_count: 0,
+ cleanup_total_count: 0
+ )
+
+ subject
+ end
end
end
- context 'with no container repositories waiting for cleanup' do
+ context 'with loopless enabled' do
+ let_it_be(:disabled_repository) { create(:container_repository, :cleanup_scheduled) }
+
+ let(:capacity) { 10 }
+
before do
- repository.cleanup_ongoing!
- end
+ stub_feature_flags(container_registry_expiration_policies_loopless: true)
+ stub_application_setting(container_registry_expiration_policies_worker_capacity: capacity)
- it { is_expected.to eq(0) }
+ # loopless mode is more accurate than non-loopless mode: policies need to be enabled
+ ContainerExpirationPolicy.update_all(enabled: true)
+ repository.project.container_expiration_policy.update_column(:next_run_at, 5.minutes.ago)
+ disabled_repository.project.container_expiration_policy.update_column(:enabled, false)
+ end
- it 'logs 0 work count' do
- expect_log_info(
- cleanup_scheduled_count: 0,
- cleanup_unfinished_count: 0,
- cleanup_total_count: 0
- )
+ it_behaves_like 'handling all conditions'
+ end
- subject
+ context 'with loopless disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
end
+
+ it_behaves_like 'handling all conditions'
end
end
@@ -289,4 +599,8 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
expect(worker.logger)
.to receive(:info).with(worker.structured_payload(structure))
end
+
+ def loopless_enabled?
+ Feature.enabled?(:container_registry_expiration_policies_loopless)
+ end
end
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index 2d5176e874d..e8f9a972f10 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -35,10 +35,16 @@ RSpec.describe ContainerExpirationPolicyWorker do
end
context 'With no container expiration policies' do
- it 'does not execute any policies' do
- expect(ContainerRepository).not_to receive(:for_project_id)
+ context 'with loopless disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
+ end
- expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ it 'does not execute any policies' do
+ expect(ContainerRepository).not_to receive(:for_project_id)
+
+ expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ end
end
end
diff --git a/spec/workers/deployments/hooks_worker_spec.rb b/spec/workers/deployments/hooks_worker_spec.rb
new file mode 100644
index 00000000000..f1fe7b0fc5d
--- /dev/null
+++ b/spec/workers/deployments/hooks_worker_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deployments::HooksWorker do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ before do
+ allow(ProjectServiceWorker).to receive(:perform_async)
+ end
+
+ it 'executes project services for deployment_hooks' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ service = create(:service, type: 'SlackService', project: project, deployment_events: true, active: true)
+
+ expect(ProjectServiceWorker).to receive(:perform_async).with(service.id, an_instance_of(Hash))
+
+ worker.perform(deployment_id: deployment.id, status_changed_at: Time.current)
+ end
+
+ it 'does not execute an inactive service' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ create(:service, type: 'SlackService', project: project, deployment_events: true, active: false)
+
+ expect(ProjectServiceWorker).not_to receive(:perform_async)
+
+ worker.perform(deployment_id: deployment.id, status_changed_at: Time.current)
+ end
+
+ it 'does not execute if a deployment does not exist' do
+ expect(ProjectServiceWorker).not_to receive(:perform_async)
+
+ worker.perform(deployment_id: non_existing_record_id, status_changed_at: Time.current)
+ end
+
+ it 'executes webhooks' do
+ deployment = create(:deployment, :running)
+ project = deployment.project
+ web_hook = create(:project_hook, deployment_events: true, project: project)
+
+ status_changed_at = Time.current
+
+ expect_next_instance_of(WebHookService, web_hook, hash_including(status_changed_at: status_changed_at), "deployment_hooks") do |service|
+ expect(service).to receive(:async_execute)
+ end
+
+ worker.perform(deployment_id: deployment.id, status_changed_at: status_changed_at)
+ end
+ end
+end
diff --git a/spec/workers/email_receiver_worker_spec.rb b/spec/workers/email_receiver_worker_spec.rb
index 8bf7f3f552d..d26c08fb221 100644
--- a/spec/workers/email_receiver_worker_spec.rb
+++ b/spec/workers/email_receiver_worker_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe EmailReceiverWorker, :mailer do
it "calls the email receiver" do
expect(Gitlab::Email::Receiver).to receive(:new).with(raw_message).and_call_original
expect_any_instance_of(Gitlab::Email::Receiver).to receive(:execute)
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(message: "Successfully processed message")).and_call_original
described_class.new.perform(raw_message)
end
@@ -20,10 +21,11 @@ RSpec.describe EmailReceiverWorker, :mailer do
context "when an error occurs" do
before do
allow_any_instance_of(Gitlab::Email::Receiver).to receive(:execute).and_raise(error)
+ expect(Sidekiq.logger).to receive(:error).with(hash_including('exception.class' => error.class.name)).and_call_original
end
context 'when the error is Gitlab::Email::EmptyEmailError' do
- let(:error) { Gitlab::Email::EmptyEmailError }
+ let(:error) { Gitlab::Email::EmptyEmailError.new }
it 'sends out a rejection email' do
perform_enqueued_jobs do
@@ -38,7 +40,7 @@ RSpec.describe EmailReceiverWorker, :mailer do
end
context 'when the error is Gitlab::Email::AutoGeneratedEmailError' do
- let(:error) { Gitlab::Email::AutoGeneratedEmailError }
+ let(:error) { Gitlab::Email::AutoGeneratedEmailError.new }
it 'does not send out any rejection email' do
perform_enqueued_jobs do
@@ -63,6 +65,21 @@ RSpec.describe EmailReceiverWorker, :mailer do
expect(email.body.parts.last.to_s).to include("Could not deal with that")
end
end
+
+ context 'when the error is ActiveRecord::StatementTimeout' do
+ let(:error) { ActiveRecord::StatementTimeout.new("Statement timeout") }
+
+ it 'does not report the error to the sender' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(error).and_call_original
+
+ perform_enqueued_jobs do
+ described_class.new.perform(raw_message)
+ end
+
+ email = ActionMailer::Base.deliveries.last
+ expect(email).to be_nil
+ end
+ end
end
end
diff --git a/spec/workers/environments/canary_ingress/update_worker_spec.rb b/spec/workers/environments/canary_ingress/update_worker_spec.rb
index 7bc5108719c..e7782c2fba1 100644
--- a/spec/workers/environments/canary_ingress/update_worker_spec.rb
+++ b/spec/workers/environments/canary_ingress/update_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Environments::CanaryIngress::UpdateWorker do
let_it_be(:environment) { create(:environment) }
+
let(:worker) { described_class.new }
describe '#perform' do
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 5a22529b6d6..de848e59d57 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -104,4 +104,374 @@ RSpec.describe 'Every Sidekiq worker' do
end
end
end
+
+ context 'retries' do
+ let(:cronjobs) do
+ workers_without_defaults.select { |worker| worker.klass < CronjobQueue }
+ end
+
+ let(:retry_exception_workers) do
+ workers_without_defaults.select { |worker| retry_exceptions.has_key?(worker.klass.to_s) }
+ end
+
+ let(:retry_exceptions) do
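+ # Maps worker class name to its expected Sidekiq retry count; a value of false means retries are disabled for that worker.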
+ {
+ 'AdjournedProjectDeletionWorker' => 3,
+ 'AdminEmailsWorker' => 3,
+ 'Analytics::CodeReviewMetricsWorker' => 3,
+ 'Analytics::DevopsAdoption::CreateSnapshotWorker' => 3,
+ 'Analytics::InstanceStatistics::CounterJobWorker' => 3,
+ 'Analytics::UsageTrends::CounterJobWorker' => 3,
+ 'ApprovalRules::ExternalApprovalRulePayloadWorker' => 3,
+ 'ApproveBlockedPendingApprovalUsersWorker' => 3,
+ 'ArchiveTraceWorker' => 3,
+ 'AuthorizedKeysWorker' => 3,
+ 'AuthorizedProjectUpdate::ProjectCreateWorker' => 3,
+ 'AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker' => 3,
+ 'AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker' => 3,
+ 'AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker' => 3,
+ 'AuthorizedProjectsWorker' => 3,
+ 'AutoDevops::DisableWorker' => 3,
+ 'AutoMergeProcessWorker' => 3,
+ 'BackgroundMigrationWorker' => 3,
+ 'BuildFinishedWorker' => 3,
+ 'BuildHooksWorker' => 3,
+ 'BuildQueueWorker' => 3,
+ 'BuildSuccessWorker' => 3,
+ 'BulkImportWorker' => false,
+ 'BulkImports::EntityWorker' => false,
+ 'BulkImports::PipelineWorker' => false,
+ 'Chaos::CpuSpinWorker' => 3,
+ 'Chaos::DbSpinWorker' => 3,
+ 'Chaos::KillWorker' => false,
+ 'Chaos::LeakMemWorker' => 3,
+ 'Chaos::SleepWorker' => 3,
+ 'ChatNotificationWorker' => false,
+ 'Ci::BatchResetMinutesWorker' => 10,
+ 'Ci::BuildPrepareWorker' => 3,
+ 'Ci::BuildScheduleWorker' => 3,
+ 'Ci::BuildTraceChunkFlushWorker' => 3,
+ 'Ci::CreateCrossProjectPipelineWorker' => 3,
+ 'Ci::DailyBuildGroupReportResultsWorker' => 3,
+ 'Ci::DeleteObjectsWorker' => 0,
+ 'Ci::DropPipelineWorker' => 3,
+ 'Ci::InitialPipelineProcessWorker' => 3,
+ 'Ci::MergeRequests::AddTodoWhenBuildFailsWorker' => 3,
+ 'Ci::PipelineArtifacts::CoverageReportWorker' => 3,
+ 'Ci::PipelineArtifacts::CreateQualityReportWorker' => 3,
+ 'Ci::PipelineBridgeStatusWorker' => 3,
+ 'Ci::PipelineSuccessUnlockArtifactsWorker' => 3,
+ 'Ci::RefDeleteUnlockArtifactsWorker' => 3,
+ 'Ci::ResourceGroups::AssignResourceFromResourceGroupWorker' => 3,
+ 'Ci::TestFailureHistoryWorker' => 3,
+ 'Ci::TriggerDownstreamSubscriptionsWorker' => 3,
+ 'CleanupContainerRepositoryWorker' => 3,
+ 'ClusterConfigureIstioWorker' => 3,
+ 'ClusterInstallAppWorker' => 3,
+ 'ClusterPatchAppWorker' => 3,
+ 'ClusterProvisionWorker' => 3,
+ 'ClusterUpdateAppWorker' => 3,
+ 'ClusterUpgradeAppWorker' => 3,
+ 'ClusterWaitForAppInstallationWorker' => 3,
+ 'ClusterWaitForAppUpdateWorker' => 3,
+ 'ClusterWaitForIngressIpAddressWorker' => 3,
+ 'Clusters::Applications::ActivateServiceWorker' => 3,
+ 'Clusters::Applications::DeactivateServiceWorker' => 3,
+ 'Clusters::Applications::UninstallWorker' => 3,
+ 'Clusters::Applications::WaitForUninstallAppWorker' => 3,
+ 'Clusters::Cleanup::AppWorker' => 3,
+ 'Clusters::Cleanup::ProjectNamespaceWorker' => 3,
+ 'Clusters::Cleanup::ServiceAccountWorker' => 3,
+ 'ContainerExpirationPolicies::CleanupContainerRepositoryWorker' => 0,
+ 'CreateCommitSignatureWorker' => 3,
+ 'CreateGithubWebhookWorker' => 3,
+ 'CreateNoteDiffFileWorker' => 3,
+ 'CreatePipelineWorker' => 3,
+ 'DastSiteValidationWorker' => 3,
+ 'DeleteContainerRepositoryWorker' => 3,
+ 'DeleteDiffFilesWorker' => 3,
+ 'DeleteMergedBranchesWorker' => 3,
+ 'DeleteStoredFilesWorker' => 3,
+ 'DeleteUserWorker' => 3,
+ 'Deployments::AutoRollbackWorker' => 3,
+ 'Deployments::DropOlderDeploymentsWorker' => 3,
+ 'Deployments::ExecuteHooksWorker' => 3,
+ 'Deployments::FinishedWorker' => 3,
+ 'Deployments::ForwardDeploymentWorker' => 3,
+ 'Deployments::LinkMergeRequestWorker' => 3,
+ 'Deployments::SuccessWorker' => 3,
+ 'Deployments::UpdateEnvironmentWorker' => 3,
+ 'DesignManagement::CopyDesignCollectionWorker' => 3,
+ 'DesignManagement::NewVersionWorker' => 3,
+ 'DestroyPagesDeploymentsWorker' => 3,
+ 'DetectRepositoryLanguagesWorker' => 1,
+ 'DisallowTwoFactorForGroupWorker' => 3,
+ 'DisallowTwoFactorForSubgroupsWorker' => 3,
+ 'Dora::DailyMetrics::RefreshWorker' => 3,
+ 'ElasticAssociationIndexerWorker' => 3,
+ 'ElasticCommitIndexerWorker' => 2,
+ 'ElasticDeleteProjectWorker' => 2,
+ 'ElasticFullIndexWorker' => 2,
+ 'ElasticIndexerWorker' => 2,
+ 'ElasticIndexingControlWorker' => 3,
+ 'ElasticNamespaceIndexerWorker' => 2,
+ 'ElasticNamespaceRolloutWorker' => 2,
+ 'EmailReceiverWorker' => 3,
+ 'EmailsOnPushWorker' => 3,
+ 'Environments::CanaryIngress::UpdateWorker' => false,
+ 'Epics::UpdateEpicsDatesWorker' => 3,
+ 'ErrorTrackingIssueLinkWorker' => 3,
+ 'Experiments::RecordConversionEventWorker' => 3,
+ 'ExpireBuildInstanceArtifactsWorker' => 3,
+ 'ExpireJobCacheWorker' => 3,
+ 'ExpirePipelineCacheWorker' => 3,
+ 'ExportCsvWorker' => 3,
+ 'ExternalServiceReactiveCachingWorker' => 3,
+ 'FileHookWorker' => false,
+ 'FlushCounterIncrementsWorker' => 3,
+ 'Geo::Batch::ProjectRegistrySchedulerWorker' => 3,
+ 'Geo::Batch::ProjectRegistryWorker' => 3,
+ 'Geo::ContainerRepositorySyncWorker' => 3,
+ 'Geo::DesignRepositoryShardSyncWorker' => false,
+ 'Geo::DesignRepositorySyncWorker' => 3,
+ 'Geo::DestroyWorker' => 3,
+ 'Geo::EventWorker' => 3,
+ 'Geo::FileDownloadWorker' => 3,
+ 'Geo::FileRegistryRemovalWorker' => 3,
+ 'Geo::FileRemovalWorker' => 3,
+ 'Geo::HashedStorageAttachmentsMigrationWorker' => 3,
+ 'Geo::HashedStorageMigrationWorker' => 3,
+ 'Geo::ProjectSyncWorker' => 3,
+ 'Geo::RenameRepositoryWorker' => 3,
+ 'Geo::RepositoriesCleanUpWorker' => 3,
+ 'Geo::RepositoryCleanupWorker' => 3,
+ 'Geo::RepositoryShardSyncWorker' => false,
+ 'Geo::RepositoryVerification::Primary::ShardWorker' => false,
+ 'Geo::RepositoryVerification::Primary::SingleWorker' => false,
+ 'Geo::RepositoryVerification::Secondary::SingleWorker' => false,
+ 'Geo::ReverificationBatchWorker' => 0,
+ 'Geo::Scheduler::Primary::SchedulerWorker' => 3,
+ 'Geo::Scheduler::SchedulerWorker' => 3,
+ 'Geo::Scheduler::Secondary::SchedulerWorker' => 3,
+ 'Geo::VerificationBatchWorker' => 0,
+ 'Geo::VerificationTimeoutWorker' => false,
+ 'Geo::VerificationWorker' => 3,
+ 'GeoRepositoryDestroyWorker' => 3,
+ 'GitGarbageCollectWorker' => false,
+ 'Gitlab::GithubImport::AdvanceStageWorker' => 3,
+ 'Gitlab::GithubImport::ImportDiffNoteWorker' => 5,
+ 'Gitlab::GithubImport::ImportIssueWorker' => 5,
+ 'Gitlab::GithubImport::ImportLfsObjectWorker' => 5,
+ 'Gitlab::GithubImport::ImportNoteWorker' => 5,
+ 'Gitlab::GithubImport::ImportPullRequestMergedByWorker' => 5,
+ 'Gitlab::GithubImport::ImportPullRequestReviewWorker' => 5,
+ 'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
+ 'Gitlab::GithubImport::RefreshImportJidWorker' => 5,
+ 'Gitlab::GithubImport::Stage::FinishImportWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportLfsObjectsWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportNotesWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsMergedByWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsReviewsWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportPullRequestsWorker' => 5,
+ 'Gitlab::GithubImport::Stage::ImportRepositoryWorker' => 5,
+ 'Gitlab::JiraImport::AdvanceStageWorker' => 5,
+ 'Gitlab::JiraImport::ImportIssueWorker' => 5,
+ 'Gitlab::JiraImport::Stage::FinishImportWorker' => 5,
+ 'Gitlab::JiraImport::Stage::ImportAttachmentsWorker' => 5,
+ 'Gitlab::JiraImport::Stage::ImportIssuesWorker' => 5,
+ 'Gitlab::JiraImport::Stage::ImportLabelsWorker' => 5,
+ 'Gitlab::JiraImport::Stage::ImportNotesWorker' => 5,
+ 'Gitlab::JiraImport::Stage::StartImportWorker' => 5,
+ 'Gitlab::PhabricatorImport::ImportTasksWorker' => 5,
+ 'GitlabPerformanceBarStatsWorker' => 3,
+ 'GitlabShellWorker' => 3,
+ 'GitlabUsagePingWorker' => 3,
+ 'GroupDestroyWorker' => 3,
+ 'GroupExportWorker' => false,
+ 'GroupImportWorker' => false,
+ 'GroupSamlGroupSyncWorker' => 3,
+ 'GroupWikis::GitGarbageCollectWorker' => false,
+ 'Groups::ScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'Groups::UpdateRepositoryStorageWorker' => 3,
+ 'Groups::UpdateStatisticsWorker' => 3,
+ 'HashedStorage::MigratorWorker' => 3,
+ 'HashedStorage::ProjectMigrateWorker' => 3,
+ 'HashedStorage::ProjectRollbackWorker' => 3,
+ 'HashedStorage::RollbackerWorker' => 3,
+ 'ImportIssuesCsvWorker' => 3,
+ 'ImportSoftwareLicensesWorker' => 3,
+ 'IncidentManagement::AddSeveritySystemNoteWorker' => 3,
+ 'IncidentManagement::ApplyIncidentSlaExceededLabelWorker' => 3,
+ 'IncidentManagement::OncallRotations::PersistAllRotationsShiftsJob' => 3,
+ 'IncidentManagement::OncallRotations::PersistShiftsJob' => 3,
+ 'IncidentManagement::PagerDuty::ProcessIncidentWorker' => 3,
+ 'IncidentManagement::ProcessAlertWorker' => 3,
+ 'IncidentManagement::ProcessPrometheusAlertWorker' => 3,
+ 'InvalidGpgSignatureUpdateWorker' => 3,
+ 'IrkerWorker' => 3,
+ 'IssuableExportCsvWorker' => 3,
+ 'IssuePlacementWorker' => 3,
+ 'IssueRebalancingWorker' => 3,
+ 'IterationsUpdateStatusWorker' => 3,
+ 'JiraConnect::SyncBranchWorker' => 3,
+ 'JiraConnect::SyncBuildsWorker' => 3,
+ 'JiraConnect::SyncDeploymentsWorker' => 3,
+ 'JiraConnect::SyncFeatureFlagsWorker' => 3,
+ 'JiraConnect::SyncMergeRequestWorker' => 3,
+ 'JiraConnect::SyncProjectWorker' => 3,
+ 'LdapGroupSyncWorker' => 3,
+ 'MailScheduler::IssueDueWorker' => 3,
+ 'MailScheduler::NotificationServiceWorker' => 3,
+ 'MembersDestroyer::UnassignIssuablesWorker' => 3,
+ 'MergeRequestCleanupRefsWorker' => 3,
+ 'MergeRequestMergeabilityCheckWorker' => 3,
+ 'MergeRequestResetApprovalsWorker' => 3,
+ 'MergeRequests::AssigneesChangeWorker' => 3,
+ 'MergeRequests::CreatePipelineWorker' => 3,
+ 'MergeRequests::DeleteSourceBranchWorker' => 3,
+ 'MergeRequests::HandleAssigneesChangeWorker' => 3,
+ 'MergeRequests::ResolveTodosWorker' => 3,
+ 'MergeRequests::SyncCodeOwnerApprovalRulesWorker' => 3,
+ 'MergeTrains::RefreshWorker' => 3,
+ 'MergeWorker' => 3,
+ 'Metrics::Dashboard::PruneOldAnnotationsWorker' => 3,
+ 'Metrics::Dashboard::SyncDashboardsWorker' => 3,
+ 'MigrateExternalDiffsWorker' => 3,
+ 'NamespacelessProjectDestroyWorker' => 3,
+ 'Namespaces::OnboardingIssueCreatedWorker' => 3,
+ 'Namespaces::OnboardingPipelineCreatedWorker' => 3,
+ 'Namespaces::OnboardingProgressWorker' => 3,
+ 'Namespaces::OnboardingUserAddedWorker' => 3,
+ 'Namespaces::RootStatisticsWorker' => 3,
+ 'Namespaces::ScheduleAggregationWorker' => 3,
+ 'NetworkPolicyMetricsWorker' => 3,
+ 'NewEpicWorker' => 3,
+ 'NewIssueWorker' => 3,
+ 'NewMergeRequestWorker' => 3,
+ 'NewNoteWorker' => 3,
+ 'ObjectPool::CreateWorker' => 3,
+ 'ObjectPool::DestroyWorker' => 3,
+ 'ObjectPool::JoinWorker' => 3,
+ 'ObjectPool::ScheduleJoinWorker' => 3,
+ 'ObjectStorage::BackgroundMoveWorker' => 5,
+ 'ObjectStorage::MigrateUploadsWorker' => 3,
+ 'Packages::Composer::CacheUpdateWorker' => 3,
+ 'Packages::Go::SyncPackagesWorker' => 3,
+ 'Packages::Maven::Metadata::SyncWorker' => 3,
+ 'Packages::Nuget::ExtractionWorker' => 3,
+ 'Packages::Rubygems::ExtractionWorker' => 3,
+ 'PagesDomainSslRenewalWorker' => 3,
+ 'PagesDomainVerificationWorker' => 3,
+ 'PagesRemoveWorker' => 3,
+ 'PagesTransferWorker' => 3,
+ 'PagesUpdateConfigurationWorker' => 3,
+ 'PagesWorker' => 3,
+ 'PersonalAccessTokens::Groups::PolicyWorker' => 3,
+ 'PersonalAccessTokens::Instance::PolicyWorker' => 3,
+ 'PipelineHooksWorker' => 3,
+ 'PipelineMetricsWorker' => 3,
+ 'PipelineNotificationWorker' => 3,
+ 'PipelineProcessWorker' => 3,
+ 'PipelineUpdateWorker' => 3,
+ 'PostReceive' => 3,
+ 'ProcessCommitWorker' => 3,
+ 'ProjectCacheWorker' => 3,
+ 'ProjectDailyStatisticsWorker' => 3,
+ 'ProjectDestroyWorker' => 3,
+ 'ProjectExportWorker' => false,
+ 'ProjectImportScheduleWorker' => false,
+ 'ProjectScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'ProjectServiceWorker' => 3,
+ 'ProjectTemplateExportWorker' => false,
+ 'ProjectUpdateRepositoryStorageWorker' => 3,
+ 'Projects::GitGarbageCollectWorker' => false,
+ 'Projects::PostCreationWorker' => 3,
+ 'Projects::ScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'Projects::UpdateRepositoryStorageWorker' => 3,
+ 'Prometheus::CreateDefaultAlertsWorker' => 3,
+ 'PropagateIntegrationGroupWorker' => 3,
+ 'PropagateIntegrationInheritDescendantWorker' => 3,
+ 'PropagateIntegrationInheritWorker' => 3,
+ 'PropagateIntegrationProjectWorker' => 3,
+ 'PropagateIntegrationWorker' => 3,
+ 'PropagateServiceTemplateWorker' => 3,
+ 'PurgeDependencyProxyCacheWorker' => 3,
+ 'ReactiveCachingWorker' => 3,
+ 'RebaseWorker' => 3,
+ 'RefreshLicenseComplianceChecksWorker' => 3,
+ 'Releases::CreateEvidenceWorker' => 3,
+ 'RemoteMirrorNotificationWorker' => 3,
+ 'RepositoryCheck::BatchWorker' => false,
+ 'RepositoryCheck::ClearWorker' => false,
+ 'RepositoryCheck::SingleRepositoryWorker' => false,
+ 'RepositoryCleanupWorker' => 3,
+ 'RepositoryForkWorker' => 5,
+ 'RepositoryImportWorker' => false,
+ 'RepositoryPushAuditEventWorker' => 3,
+ 'RepositoryRemoveRemoteWorker' => 3,
+ 'RepositoryUpdateMirrorWorker' => false,
+ 'RepositoryUpdateRemoteMirrorWorker' => 3,
+ 'RequirementsManagement::ImportRequirementsCsvWorker' => 3,
+ 'RequirementsManagement::ProcessRequirementsReportsWorker' => 3,
+ 'RunPipelineScheduleWorker' => 3,
+ 'ScanSecurityReportSecretsWorker' => 17,
+ 'Security::AutoFixWorker' => 3,
+ 'Security::StoreScansWorker' => 3,
+ 'SelfMonitoringProjectCreateWorker' => 3,
+ 'SelfMonitoringProjectDeleteWorker' => 3,
+ 'ServiceDeskEmailReceiverWorker' => 3,
+ 'SetUserStatusBasedOnUserCapSettingWorker' => 3,
+ 'SnippetScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'SnippetUpdateRepositoryStorageWorker' => 3,
+ 'Snippets::ScheduleBulkRepositoryShardMovesWorker' => 3,
+ 'Snippets::UpdateRepositoryStorageWorker' => 3,
+ 'StageUpdateWorker' => 3,
+ 'StatusPage::PublishWorker' => 5,
+ 'StoreSecurityReportsWorker' => 3,
+ 'StoreSecurityScansWorker' => 3,
+ 'SyncSeatLinkRequestWorker' => 20,
+ 'SyncSeatLinkWorker' => 12,
+ 'SyncSecurityReportsToReportApprovalRulesWorker' => 3,
+ 'SystemHookPushWorker' => 3,
+ 'TodosDestroyer::ConfidentialEpicWorker' => 3,
+ 'TodosDestroyer::ConfidentialIssueWorker' => 3,
+ 'TodosDestroyer::DestroyedIssuableWorker' => 3,
+ 'TodosDestroyer::EntityLeaveWorker' => 3,
+ 'TodosDestroyer::GroupPrivateWorker' => 3,
+ 'TodosDestroyer::PrivateFeaturesWorker' => 3,
+ 'TodosDestroyer::ProjectPrivateWorker' => 3,
+ 'UpdateExternalPullRequestsWorker' => 3,
+ 'UpdateHeadPipelineForMergeRequestWorker' => 3,
+ 'UpdateHighestRoleWorker' => 3,
+ 'UpdateMergeRequestsWorker' => 3,
+ 'UpdateProjectStatisticsWorker' => 3,
+ 'UploadChecksumWorker' => 3,
+ 'Vulnerabilities::Statistics::AdjustmentWorker' => 3,
+ 'VulnerabilityExports::ExportDeletionWorker' => 3,
+ 'VulnerabilityExports::ExportWorker' => 3,
+ 'WaitForClusterCreationWorker' => 3,
+ 'WebHookWorker' => 4,
+ 'WebHooks::DestroyWorker' => 3,
+ 'Wikis::GitGarbageCollectWorker' => false,
+ 'X509CertificateRevokeWorker' => 3
+ }
+ end
+
+ it 'uses the default number of retries for new jobs' do
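+ # retries: true is taken here to mean the worker relies on the default retry policy rather than an explicit override.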
+ expect(workers_without_defaults - cronjobs - retry_exception_workers).to all(have_attributes(retries: true))
+ end
+
+ it 'uses zero retries for cronjobs' do
+ expect(cronjobs - retry_exception_workers).to all(have_attributes(retries: false))
+ end
+
+ it 'uses specified numbers of retries for workers with exceptions encoded here', :aggregate_failures do
+ retry_exception_workers.each do |worker|
+ expect(worker.retries).to eq(retry_exceptions[worker.klass.to_s]),
+ "#{worker.klass} has #{worker.retries} retries, expected #{retry_exceptions[worker.klass]}"
+ end
+ end
+ end
end
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
index 8efead31a42..cbd9dd39336 100644
--- a/spec/workers/expire_job_cache_worker_spec.rb
+++ b/spec/workers/expire_job_cache_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ExpireJobCacheWorker do
let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+
let(:project) { pipeline.project }
describe '#perform' do
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
deleted file mode 100644
index 3df64c35166..00000000000
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ /dev/null
@@ -1,26 +0,0 @@
-# frozen_string_literal: true
-
-require 'fileutils'
-
-require 'spec_helper'
-
-RSpec.describe GitGarbageCollectWorker do
- let_it_be(:project) { create(:project, :repository) }
-
- let(:lease_uuid) { SecureRandom.uuid }
- let(:lease_key) { "project_housekeeping:#{project.id}" }
- let(:task) { :full_repack }
- let(:params) { [project.id, task, lease_key, lease_uuid] }
-
- subject { described_class.new }
-
- describe "#perform" do
- it 'calls the Projects::GitGarbageGitGarbageCollectWorker with the same params' do
- expect_next_instance_of(Projects::GitGarbageCollectWorker) do |instance|
- expect(instance).to receive(:perform).with(*params)
- end
-
- subject.perform(*params)
- end
- end
-end
diff --git a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
index 4039cdac721..6476d82eb85 100644
--- a/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::ImportDiffNoteWorker do
importer = double(:importer)
hash = {
'noteable_id' => 42,
+ 'github_id' => 42,
'path' => 'README.md',
'commit_id' => '123abc',
'diff_hunk' => "@@ -1 +1 @@\n-Hello\n+Hello world",
diff --git a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
index c25e89f6928..9f5bd1d9e5e 100644
--- a/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_issue_worker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::ImportIssueWorker do
importer = double(:importer)
hash = {
'iid' => 42,
+ 'github_id' => 42,
'title' => 'My Issue',
'description' => 'This is my issue',
'milestone_number' => 4,
diff --git a/spec/workers/gitlab/github_import/import_note_worker_spec.rb b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
index bfb40d7c3d3..94bc8e26e4a 100644
--- a/spec/workers/gitlab/github_import/import_note_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_note_worker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::ImportNoteWorker do
importer = double(:importer)
hash = {
'noteable_id' => 42,
+ 'github_id' => 42,
'noteable_type' => 'issues',
'user' => { 'id' => 4, 'login' => 'alice' },
'note' => 'Hello world',
diff --git a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
index 12b21abf910..1238929fbcb 100644
--- a/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_pull_request_worker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GithubImport::ImportPullRequestWorker do
importer = double(:importer)
hash = {
'iid' => 42,
+ 'github_id' => 42,
'title' => 'My Pull Request',
'description' => 'This is my pull request',
'source_branch' => 'my-feature',
diff --git a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
index 324e8010887..695e21f4733 100644
--- a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::JiraImport::ImportIssueWorker do
let_it_be(:project) { create(:project) }
let_it_be(:jira_issue_label_1) { create(:label, project: project) }
let_it_be(:jira_issue_label_2) { create(:label, project: project) }
+
let(:some_key) { 'some-key' }
describe 'modules' do
diff --git a/spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb b/spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb
index 7066e6e912f..e440884553f 100644
--- a/spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stage/start_import_worker_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::JiraImport::Stage::StartImportWorker do
let_it_be(:project) { create(:project, import_type: 'jira') }
let_it_be(:jid) { '12345678' }
+
let(:worker) { described_class.new }
describe 'modules' do
diff --git a/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb b/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
index 7f1cb8a2076..92754513988 100644
--- a/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ::Gitlab::JiraImport::StuckJiraImportJobsWorker do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:worker) { described_class.new }
describe 'with scheduled Jira import' do
diff --git a/spec/workers/import_issues_csv_worker_spec.rb b/spec/workers/import_issues_csv_worker_spec.rb
index 6a698af49c0..919ab2b1adf 100644
--- a/spec/workers/import_issues_csv_worker_spec.rb
+++ b/spec/workers/import_issues_csv_worker_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ImportIssuesCsvWorker do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:upload) { create(:upload, :with_file) }
let(:worker) { described_class.new }
diff --git a/spec/workers/incident_management/add_severity_system_note_worker_spec.rb b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
index 203c62ffe6f..bda6f729759 100644
--- a/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
+++ b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe IncidentManagement::AddSeveritySystemNoteWorker do
context 'when issue is not an incident' do
let_it_be(:issue) { create(:issue, project: project) }
+
let(:incident_id) { issue.id }
it_behaves_like 'does not add a system note'
diff --git a/spec/workers/incident_management/process_alert_worker_spec.rb b/spec/workers/incident_management/process_alert_worker_spec.rb
index 41d4f31da24..7db9b191677 100644
--- a/spec/workers/incident_management/process_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_alert_worker_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
let_it_be(:started_at) { Time.now.rfc3339 }
let_it_be(:payload) { { 'title' => 'title', 'start_time' => started_at } }
let_it_be(:alert) { create(:alert_management_alert, project: project, payload: payload, started_at: started_at) }
+
let(:created_issue) { Issue.last! }
subject { described_class.new.perform(nil, nil, alert.id) }
diff --git a/spec/workers/incident_management/process_alert_worker_v2_spec.rb b/spec/workers/incident_management/process_alert_worker_v2_spec.rb
new file mode 100644
index 00000000000..6cde8b758fa
--- /dev/null
+++ b/spec/workers/incident_management/process_alert_worker_v2_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::ProcessAlertWorkerV2 do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:settings) { create(:project_incident_management_setting, project: project, create_issue: true) }
+
+ describe '#perform' do
+ let_it_be(:started_at) { Time.now.rfc3339 }
+ let_it_be(:payload) { { 'title' => 'title', 'start_time' => started_at } }
+ let_it_be(:alert) { create(:alert_management_alert, project: project, payload: payload, started_at: started_at) }
+
+ let(:created_issue) { Issue.last! }
+
+ subject(:perform_worker) { described_class.new.perform(alert.id) }
+
+ before do
+ allow(Gitlab::AppLogger).to receive(:warn).and_call_original
+
+ allow(AlertManagement::CreateAlertIssueService)
+ .to receive(:new).with(alert, User.alert_bot)
+ .and_call_original
+ end
+
+ shared_examples 'creates issue successfully' do
+ it 'creates an issue' do
+ expect(AlertManagement::CreateAlertIssueService)
+ .to receive(:new).with(alert, User.alert_bot)
+
+ expect { perform_worker }.to change { Issue.count }.by(1)
+ end
+
+ it 'updates AlertManagement::Alert#issue_id' do
+ perform_worker
+
+ expect(alert.reload.issue_id).to eq(created_issue.id)
+ end
+
+ it 'does not write a warning to the log' do
+ perform_worker
+
+ expect(Gitlab::AppLogger).not_to have_received(:warn)
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [alert.id] }
+
+ it 'does not create a second issue' do
+ expect { perform_worker }.to change { Issue.count }.by(1)
+ end
+ end
+ end
+
+ context 'with valid alert' do
+ it_behaves_like 'creates issue successfully'
+
+ context 'when alert cannot be updated' do
+ let_it_be(:alert) { create(:alert_management_alert, :with_validation_errors, project: project, payload: payload) }
+
+ it 'does not update AlertManagement::Alert#issue_id' do
+ expect { perform_worker }.not_to change { alert.reload.issue_id }
+ end
+
+ it 'logs a warning' do
+ perform_worker
+
+ expect(Gitlab::AppLogger).to have_received(:warn).with(
+ message: 'Cannot process an Incident',
+ issue_id: created_issue.id,
+ alert_id: alert.id,
+ errors: 'Hosts hosts array is over 255 chars'
+ )
+ end
+ end
+
+ context 'prometheus alert' do
+ let_it_be(:alert) { create(:alert_management_alert, :prometheus, project: project, started_at: started_at) }
+
+ it_behaves_like 'creates issue successfully'
+ end
+ end
+
+ context 'with invalid alert' do
+ let(:invalid_alert_id) { non_existing_record_id }
+
+ subject(:perform_worker) { described_class.new.perform(invalid_alert_id) }
+
+ it 'does not create issues' do
+ expect(AlertManagement::CreateAlertIssueService).not_to receive(:new)
+
+ expect { perform_worker }.not_to change { Issue.count }
+ end
+ end
+ end
+end
diff --git a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
index 2ca4193aa72..56f07459a15 100644
--- a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe IncidentManagement::ProcessPrometheusAlertWorker do
describe '#perform' do
let_it_be(:project) { create(:project) }
let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
+
let(:payload_key) { Gitlab::AlertManagement::Payload::Prometheus.new(project: project, payload: alert_params).gitlab_fingerprint }
let!(:prometheus_alert_event) { create(:prometheus_alert_event, prometheus_alert: prometheus_alert, payload_key: payload_key) }
let!(:settings) { create(:project_incident_management_setting, project: project, create_issue: true) }
diff --git a/spec/workers/issuable/label_links_destroy_worker_spec.rb b/spec/workers/issuable/label_links_destroy_worker_spec.rb
new file mode 100644
index 00000000000..a838f1c8017
--- /dev/null
+++ b/spec/workers/issuable/label_links_destroy_worker_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issuable::LabelLinksDestroyWorker do
+ let(:job_args) { [1, 'MergeRequest'] }
+ let(:service) { double }
+
+ include_examples 'an idempotent worker' do
+ it 'calls the Issuable::DestroyLabelLinksService' do
+ expect(::Issuable::DestroyLabelLinksService).to receive(:new).twice.and_return(service)
+ expect(service).to receive(:execute).twice
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/issuables/clear_groups_issue_counter_worker_spec.rb b/spec/workers/issuables/clear_groups_issue_counter_worker_spec.rb
new file mode 100644
index 00000000000..ac430f42e7a
--- /dev/null
+++ b/spec/workers/issuables/clear_groups_issue_counter_worker_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issuables::ClearGroupsIssueCounterWorker do
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:root_group) { create(:group, parent: parent_group) }
+ let_it_be(:subgroup) { create(:group, parent: root_group) }
+
+ let(:count_service) { Groups::OpenIssuesCountService }
+ let(:instance1) { instance_double(count_service) }
+ let(:instance2) { instance_double(count_service) }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [[root_group.id]] }
+ let(:exec_times) { IdempotentWorkerHelper::WORKER_EXEC_TIMES }
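+ # The 'an idempotent worker' shared example performs the job WORKER_EXEC_TIMES times, so each expectation below is repeated that many times.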
+
+ it 'clears the cached issue count in given groups and ancestors' do
+ expect(count_service).to receive(:new)
+ .exactly(exec_times).times.with(root_group).and_return(instance1)
+ expect(count_service).to receive(:new)
+ .exactly(exec_times).times.with(parent_group).and_return(instance2)
+ expect(count_service).not_to receive(:new).with(subgroup)
+
+ [instance1, instance2].each do |instance|
+ expect(instance).to receive(:clear_all_cache_keys).exactly(exec_times).times
+ end
+
+ subject
+ end
+ end
+
+ it 'does not call count service or raise an error when group_ids is empty' do
+ expect(count_service).not_to receive(:new)
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ described_class.new.perform([])
+ end
+ end
+end
diff --git a/spec/workers/issue_placement_worker_spec.rb b/spec/workers/issue_placement_worker_spec.rb
index 5d4d41b90d0..e0c17bfadee 100644
--- a/spec/workers/issue_placement_worker_spec.rb
+++ b/spec/workers/issue_placement_worker_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe IssuePlacementWorker do
describe '#perform' do
let_it_be(:time) { Time.now.utc }
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
let_it_be(:author) { create(:user) }
let_it_be(:common_attrs) { { author: author, project: project } }
let_it_be(:unplaced) { common_attrs.merge(relative_position: nil) }
@@ -117,6 +118,19 @@ RSpec.describe IssuePlacementWorker do
let(:worker_arguments) { { issue_id: issue_id, project_id: nil } }
it_behaves_like 'running the issue placement worker'
+
+ context 'when block_issue_repositioning is enabled' do
+ let(:issue_id) { issue.id }
+ let(:project_id) { project.id }
+
+ before do
+ stub_feature_flags(block_issue_repositioning: group)
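+ # Passing the group as the flag value enables the feature flag for that group only (actor-based gating).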
+ end
+
+ it 'does not run repositioning tasks' do
+ expect { run_worker }.not_to change { issue.reset.relative_position }
+ end
+ end
end
context 'passing a project ID' do
@@ -129,4 +143,9 @@ RSpec.describe IssuePlacementWorker do
it_behaves_like 'running the issue placement worker'
end
end
+
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ expect(described_class.get_deduplication_options).to include({ including_scheduled: true })
+ end
end
diff --git a/spec/workers/issue_rebalancing_worker_spec.rb b/spec/workers/issue_rebalancing_worker_spec.rb
index 8b0fcd4bc5a..e5c6ac3f854 100644
--- a/spec/workers/issue_rebalancing_worker_spec.rb
+++ b/spec/workers/issue_rebalancing_worker_spec.rb
@@ -4,7 +4,21 @@ require 'spec_helper'
RSpec.describe IssueRebalancingWorker do
describe '#perform' do
- let_it_be(:issue) { create(:issue) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ context 'when block_issue_repositioning is enabled' do
+ before do
+ stub_feature_flags(block_issue_repositioning: group)
+ end
+
+ it 'does not run an instance of IssueRebalancingService' do
+ expect(IssueRebalancingService).not_to receive(:new)
+
+ described_class.new.perform(nil, issue.project_id)
+ end
+ end
it 'runs an instance of IssueRebalancingService' do
service = double(execute: nil)
diff --git a/spec/workers/jira_connect/sync_project_worker_spec.rb b/spec/workers/jira_connect/sync_project_worker_spec.rb
index 04cc3bec3af..5c0e7e7609c 100644
--- a/spec/workers/jira_connect/sync_project_worker_spec.rb
+++ b/spec/workers/jira_connect/sync_project_worker_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep do
describe '#perform' do
let_it_be(:project) { create_default(:project).freeze }
+
let!(:mr_with_jira_title) { create(:merge_request, :unique_branches, title: 'TEST-123') }
let!(:mr_with_jira_description) { create(:merge_request, :unique_branches, description: 'TEST-323') }
let!(:mr_with_other_title) { create(:merge_request, :unique_branches) }
diff --git a/spec/workers/merge_requests/create_pipeline_worker_spec.rb b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
index 8efce5220be..06d44c45706 100644
--- a/spec/workers/merge_requests/create_pipeline_worker_spec.rb
+++ b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe MergeRequests::CreatePipelineWorker do
context 'when the objects exist' do
it 'calls the merge request create pipeline service and calls update head pipeline' do
aggregate_failures do
- expect_next_instance_of(MergeRequests::CreatePipelineService, project, user) do |service|
+ expect_next_instance_of(MergeRequests::CreatePipelineService, project: project, current_user: user) do |service|
expect(service).to receive(:execute).with(merge_request)
end
diff --git a/spec/workers/merge_worker_spec.rb b/spec/workers/merge_worker_spec.rb
index 417e6edce96..0268bc2388f 100644
--- a/spec/workers/merge_worker_spec.rb
+++ b/spec/workers/merge_worker_spec.rb
@@ -29,5 +29,23 @@ RSpec.describe MergeWorker do
source_project.repository.expire_branches_cache
expect(source_project.repository.branch_names).not_to include('markdown')
end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) do
+ [
+ merge_request.id,
+ merge_request.author_id,
+ commit_message: 'wow such merge',
+ sha: merge_request.diff_head_sha
+ ]
+ end
+
+ it 'still shows the merge request as merged' do
+ subject
+
+ merge_request.reload
+ expect(merge_request).to be_merged
+ end
+ end
end
end
diff --git a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb b/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
index 459e4f953d0..32e7bdd563d 100644
--- a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
+++ b/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Namespaces::OnboardingIssueCreatedWorker, '#perform' do
let_it_be(:issue) { create(:issue) }
+
let(:namespace) { issue.namespace }
it_behaves_like 'records an onboarding progress action', :issue_created do
diff --git a/spec/workers/packages/composer/cache_update_worker_spec.rb b/spec/workers/packages/composer/cache_update_worker_spec.rb
index cc6b48c80eb..a0d8aa5d375 100644
--- a/spec/workers/packages/composer/cache_update_worker_spec.rb
+++ b/spec/workers/packages/composer/cache_update_worker_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Packages::Composer::CacheUpdateWorker, type: :worker do
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
+
let(:last_sha) { nil }
let!(:package) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
let(:job_args) { [project.id, package_name, last_sha] }
diff --git a/spec/workers/packages/debian/process_changes_worker_spec.rb b/spec/workers/packages/debian/process_changes_worker_spec.rb
new file mode 100644
index 00000000000..4a8eb855398
--- /dev/null
+++ b/spec/workers/packages/debian/process_changes_worker_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ProcessChangesWorker, type: :worker do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file, codename: 'unstable') }
+
+ let(:incoming) { create(:debian_incoming, project: distribution.project) }
+ let(:package_file) { incoming.package_files.last }
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:package_file_id) { package_file.id }
+ let(:user_id) { user.id }
+
+ subject { worker.perform(package_file_id, user_id) }
+
+ context 'with mocked service' do
+ it 'calls ProcessChangesService' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect_next_instance_of(::Packages::Debian::ProcessChangesService) do |service|
+ expect(service).to receive(:execute)
+ .with(no_args)
+ end
+
+ subject
+ end
+ end
+
+ context 'with non existing package file' do
+ let(:package_file_id) { non_existing_record_id }
+
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessChangesService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'with nil package file id' do
+ let(:package_file_id) { nil }
+
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessChangesService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'with non existing user' do
+ let(:user_id) { non_existing_record_id }
+
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessChangesService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'with nil user id' do
+ let(:user_id) { nil }
+
+ it 'returns early without error' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ expect(::Packages::Debian::ProcessChangesService).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'when the service raises an error' do
+ let(:package_file) { incoming.package_files.first }
+
+ it 'removes the package file', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(Packages::Debian::ExtractChangesMetadataService::ExtractionError),
+ package_file_id: package_file_id,
+ user_id: user_id
+ )
+ expect { subject }
+ .to not_change { Packages::Package.count }
+ .and change { Packages::PackageFile.count }.by(-1)
+ .and change { incoming.package_files.count }.from(7).to(6)
+
+ expect { package_file.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [package_file.id, user.id] }
+
+ it 'sets the Debian file type as changes', :aggregate_failures do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ # Using subject inside this block will process the job multiple times
+ expect { subject }
+ .to change { Packages::Package.count }.from(1).to(2)
+ .and not_change { Packages::PackageFile.count }
+ .and change { incoming.package_files.count }.from(7).to(0)
+ .and change { package_file&.debian_file_metadatum&.reload&.file_type }.from('unknown').to('changes')
+
+ created_package = Packages::Package.last
+ expect(created_package.name).to eq 'sample'
+ expect(created_package.version).to eq '1.2.3~alpha2'
+ expect(created_package.creator).to eq user
+ end
+ end
+ end
+end
diff --git a/spec/workers/packages/nuget/extraction_worker_spec.rb b/spec/workers/packages/nuget/extraction_worker_spec.rb
index 4703afc9413..5186c037dc5 100644
--- a/spec/workers/packages/nuget/extraction_worker_spec.rb
+++ b/spec/workers/packages/nuget/extraction_worker_spec.rb
@@ -14,14 +14,15 @@ RSpec.describe Packages::Nuget::ExtractionWorker, type: :worker do
subject { described_class.new.perform(package_file_id) }
shared_examples 'handling the metadata error' do |exception_class: ::Packages::Nuget::UpdatePackageFromMetadataService::InvalidMetadataError|
- it 'removes the package and the package file' do
+ it 'updates package status to error', :aggregate_failures do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
instance_of(exception_class),
project_id: package.project_id
)
- expect { subject }
- .to change { Packages::Package.count }.by(-1)
- .and change { Packages::PackageFile.count }.by(-1)
+
+ subject
+
+ expect(package.reload).to be_error
end
end
@@ -102,5 +103,14 @@ RSpec.describe Packages::Nuget::ExtractionWorker, type: :worker do
it_behaves_like 'handling the metadata error'
end
end
+
+ context 'handles processing an unaccounted for error' do
+ before do
+ expect(::Packages::Nuget::UpdatePackageFromMetadataService).to receive(:new)
+ .and_raise(Zip::Error)
+ end
+
+ it_behaves_like 'handling the metadata error', exception_class: Zip::Error
+ end
end
end
diff --git a/spec/workers/packages/rubygems/extraction_worker_spec.rb b/spec/workers/packages/rubygems/extraction_worker_spec.rb
index 15c0a3be90c..0e67f3ac62e 100644
--- a/spec/workers/packages/rubygems/extraction_worker_spec.rb
+++ b/spec/workers/packages/rubygems/extraction_worker_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker do
describe '#perform' do
- let_it_be(:package) { create(:rubygems_package) }
+ let_it_be(:package) { create(:rubygems_package, :processing) }
let(:package_file) { package.package_files.first }
let(:package_file_id) { package_file.id }
@@ -14,15 +14,13 @@ RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker do
subject { described_class.new.perform(*job_args) }
- include_examples 'an idempotent worker' do
- it 'processes the gem', :aggregate_failures do
- expect { subject }
- .to change { Packages::Package.count }.by(0)
- .and change { Packages::PackageFile.count }.by(2)
+ it 'processes the gem', :aggregate_failures do
+ expect { subject }
+ .to change { Packages::Package.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(1)
- expect(Packages::Package.last.id).to be(package.id)
- expect(package.name).not_to be(package_name)
- end
+ expect(Packages::Package.last.id).to be(package.id)
+ expect(package.name).not_to be(package_name)
end
it 'handles a processing failure', :aggregate_failures do
@@ -34,9 +32,23 @@ RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker do
project_id: package.project_id
)
- expect { subject }
- .to change { Packages::Package.count }.by(-1)
- .and change { Packages::PackageFile.count }.by(-2)
+ subject
+
+ expect(package.reload).to be_error
+ end
+
+ it 'handles processing an unaccounted for error', :aggregate_failures do
+ expect(::Packages::Rubygems::ProcessGemService).to receive(:new)
+ .and_raise(Zip::Error)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(Zip::Error),
+ project_id: package.project_id
+ )
+
+ subject
+
+ expect(package.reload).to be_error
end
context 'returns when there is no package file' do
diff --git a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
index dac8c529984..563bbdef1be 100644
--- a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
+++ b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe PagesDomainSslRenewalCronWorker do
describe '#perform' do
let_it_be(:project) { create :project }
+
let!(:domain) { create(:pages_domain, project: project, auto_ssl_enabled: false) }
let!(:domain_with_enabled_auto_ssl) { create(:pages_domain, project: project, auto_ssl_enabled: true) }
let!(:domain_with_obtained_letsencrypt) do
diff --git a/spec/workers/pipeline_process_worker_spec.rb b/spec/workers/pipeline_process_worker_spec.rb
index 5d45a131095..0c1db3ccc5a 100644
--- a/spec/workers/pipeline_process_worker_spec.rb
+++ b/spec/workers/pipeline_process_worker_spec.rb
@@ -20,5 +20,10 @@ RSpec.describe PipelineProcessWorker do
.not_to raise_error
end
end
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ feature_flag: :load_balancing_for_pipeline_process_worker,
+ data_consistency: :delayed
end
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index f7fd1b1a0a7..a468c8c3482 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -94,30 +94,12 @@ RSpec.describe PostReceive do
perform
end
- it 'tracks an event for the empty_repo_upload experiment', :snowplow do
- allow_next_instance_of(ApplicationExperiment) do |e|
- allow(e).to receive(:should_track?).and_return(true)
- allow(e).to receive(:track_initial_writes)
+ it 'tracks an event for the empty_repo_upload experiment', :experiment do
+ expect_next_instance_of(EmptyRepoUploadExperiment) do |e|
+ expect(e).to receive(:track_initial_write)
end
perform
-
- expect_snowplow_event(category: 'empty_repo_upload', action: 'initial_write', context: [{
- schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
- data: anything
- }])
- end
-
- it 'does not track an event for the empty_repo_upload experiment when project is not empty', :snowplow do
- allow(empty_project).to receive(:empty_repo?).and_return(false)
- allow_next_instance_of(ApplicationExperiment) do |e|
- allow(e).to receive(:should_track?).and_return(true)
- allow(e).to receive(:track_initial_writes)
- end
-
- perform
-
- expect_no_snowplow_event
end
end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 7a168bf054e..294a05c652b 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe ProcessCommitWorker do
project.repository.after_create_branch
MergeRequests::MergeService
- .new(project, merge_request.author, { sha: merge_request.diff_head_sha })
+ .new(project: project, current_user: merge_request.author, params: { sha: merge_request.diff_head_sha })
.execute(merge_request)
merge_request.reload.merge_commit
diff --git a/spec/workers/project_service_worker_spec.rb b/spec/workers/project_service_worker_spec.rb
index c638b7472ff..237f501e0ec 100644
--- a/spec/workers/project_service_worker_spec.rb
+++ b/spec/workers/project_service_worker_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe ProjectServiceWorker, '#perform' do
let(:service) { JiraService.new }
before do
- allow(Service).to receive(:find).and_return(service)
+ allow(Integration).to receive(:find).and_return(service)
end
it 'executes service with given data' do
diff --git a/spec/workers/projects/git_garbage_collect_worker_spec.rb b/spec/workers/projects/git_garbage_collect_worker_spec.rb
index 8c44643ae51..7b54d7df4b2 100644
--- a/spec/workers/projects/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/projects/git_garbage_collect_worker_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Projects::GitGarbageCollectWorker do
context 'LFS object garbage collection' do
let_it_be(:lfs_reference) { create(:lfs_objects_project, project: project) }
+
let(:lfs_object) { lfs_reference.lfs_object }
before do
diff --git a/spec/workers/projects/post_creation_worker_spec.rb b/spec/workers/projects/post_creation_worker_spec.rb
index b15b7b76b56..c2f42f03299 100644
--- a/spec/workers/projects/post_creation_worker_spec.rb
+++ b/spec/workers/projects/post_creation_worker_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Projects::PostCreationWorker do
let(:job_args) { [nil] }
it 'does not create prometheus service' do
- expect { subject }.not_to change { Service.count }
+ expect { subject }.not_to change { Integration.count }
end
end
diff --git a/spec/workers/prometheus/create_default_alerts_worker_spec.rb b/spec/workers/prometheus/create_default_alerts_worker_spec.rb
index 105fa0415d9..887d677c95f 100644
--- a/spec/workers/prometheus/create_default_alerts_worker_spec.rb
+++ b/spec/workers/prometheus/create_default_alerts_worker_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Prometheus::CreateDefaultAlertsWorker do
let_it_be(:project) { create(:project) }
+
let(:worker) { described_class.new }
let(:logger) { worker.send(:logger) }
let(:service) { instance_double(Prometheus::CreateDefaultAlertsService) }
diff --git a/spec/workers/propagate_integration_group_worker_spec.rb b/spec/workers/propagate_integration_group_worker_spec.rb
index fbf1fbf1fea..1c72bed323a 100644
--- a/spec/workers/propagate_integration_group_worker_spec.rb
+++ b/spec/workers/propagate_integration_group_worker_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe PropagateIntegrationGroupWorker do
let_it_be(:subgroup1) { create(:group, parent: group) }
let_it_be(:subgroup2) { create(:group, parent: group) }
let_it_be(:integration) { create(:redmine_service, :instance) }
+
let(:job_args) { [integration.id, group.id, subgroup2.id] }
it_behaves_like 'an idempotent worker' do
diff --git a/spec/workers/propagate_integration_project_worker_spec.rb b/spec/workers/propagate_integration_project_worker_spec.rb
index 0302af2acc9..c8293744bec 100644
--- a/spec/workers/propagate_integration_project_worker_spec.rb
+++ b/spec/workers/propagate_integration_project_worker_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe PropagateIntegrationProjectWorker do
let_it_be(:project2) { create(:project, group: group) }
let_it_be(:project3) { create(:project, group: group) }
let_it_be(:integration) { create(:redmine_service, :instance) }
+
let(:job_args) { [integration.id, project1.id, project3.id] }
it_behaves_like 'an idempotent worker' do
diff --git a/spec/workers/rebase_worker_spec.rb b/spec/workers/rebase_worker_spec.rb
index 9246b283be5..4bdfd7219f2 100644
--- a/spec/workers/rebase_worker_spec.rb
+++ b/spec/workers/rebase_worker_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe RebaseWorker, '#perform' do
it 'sets the correct project for running hooks' do
expect(MergeRequests::RebaseService)
- .to receive(:new).with(forked_project, merge_request.author).and_call_original
+ .to receive(:new).with(project: forked_project, current_user: merge_request.author).and_call_original
subject.perform(merge_request.id, merge_request.author.id)
end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 0b9f95e09fe..fc572c0d9c3 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe RunPipelineScheduleWorker do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly, project: project ) }
+
let(:worker) { described_class.new }
context 'when a project not found' do
diff --git a/spec/workers/service_desk_email_receiver_worker_spec.rb b/spec/workers/service_desk_email_receiver_worker_spec.rb
index d3bfa51348e..60fc951f627 100644
--- a/spec/workers/service_desk_email_receiver_worker_spec.rb
+++ b/spec/workers/service_desk_email_receiver_worker_spec.rb
@@ -9,11 +9,12 @@ RSpec.describe ServiceDeskEmailReceiverWorker, :mailer do
context 'when service_desk_email config is enabled' do
before do
- stub_service_desk_email_setting(enabled: true, address: 'foo')
+ stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com')
end
it 'does not ignore the email' do
- expect(Gitlab::Email::ServiceDeskReceiver).to receive(:new)
+ expect(Gitlab::Email::ServiceDeskReceiver).to receive(:new).and_call_original
+ expect(Sidekiq.logger).to receive(:error).with(hash_including('exception.class' => Gitlab::Email::ProjectNotFound.to_s)).and_call_original
worker.perform(email)
end
@@ -23,6 +24,7 @@ RSpec.describe ServiceDeskEmailReceiverWorker, :mailer do
allow_next_instance_of(Gitlab::Email::ServiceDeskReceiver) do |receiver|
allow(receiver).to receive(:find_handler).and_return(nil)
end
+ expect(Sidekiq.logger).to receive(:error).with(hash_including('exception.class' => Gitlab::Email::UnknownIncomingEmail.to_s)).and_call_original
end
it 'sends a rejection email' do
diff --git a/spec/workers/update_external_pull_requests_worker_spec.rb b/spec/workers/update_external_pull_requests_worker_spec.rb
index 80f22470977..cb6a4e2ebf8 100644
--- a/spec/workers/update_external_pull_requests_worker_spec.rb
+++ b/spec/workers/update_external_pull_requests_worker_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe UpdateExternalPullRequestsWorker do
describe '#perform' do
let_it_be(:project) { create(:project, import_source: 'tanuki/repository') }
let_it_be(:user) { create(:user) }
+
let(:worker) { described_class.new }
before do
diff --git a/spec/workers/update_merge_requests_worker_spec.rb b/spec/workers/update_merge_requests_worker_spec.rb
index fb12086c2f4..bd0dc2f9ef4 100644
--- a/spec/workers/update_merge_requests_worker_spec.rb
+++ b/spec/workers/update_merge_requests_worker_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe UpdateMergeRequestsWorker do
end
it 'executes MergeRequests::RefreshService with expected values' do
- expect_next_instance_of(MergeRequests::RefreshService, project, user) do |refresh_service|
+ expect_next_instance_of(MergeRequests::RefreshService, project: project, current_user: user) do |refresh_service|
expect(refresh_service).to receive(:execute).with(oldrev, newrev, ref)
end
diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
new file mode 100644
index 00000000000..32291a143ee
--- /dev/null
+++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::DeactivateDormantUsersWorker do
+ describe '#perform' do
+ subject(:worker) { described_class.new }
+
+ it 'does not run for GitLab.com' do
+ create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
+ create(:user, last_activity_on: nil)
+
+ expect(Gitlab).to receive(:com?).and_return(true)
+ expect(Gitlab::CurrentSettings).not_to receive(:current_application_settings)
+
+ worker.perform
+
+ expect(User.dormant.count).to eq(1)
+ expect(User.with_no_activity.count).to eq(1)
+ end
+
+ context 'when automatic deactivation of dormant users is enabled' do
+ before do
+ stub_application_setting(deactivate_dormant_users: true)
+ end
+
+ it 'deactivates dormant users' do
+ freeze_time do
+ stub_const("#{described_class.name}::BATCH_SIZE", 1)
+ stub_const("#{described_class.name}::PAUSE_SECONDS", 0)
+
+ create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
+ create(:user, last_activity_on: nil)
+
+ expect(worker).to receive(:sleep).twice
+
+ worker.perform
+
+ expect(User.dormant.count).to eq(0)
+ expect(User.with_no_activity.count).to eq(0)
+ end
+ end
+ end
+
+ context 'when automatic deactivation of dormant users is disabled' do
+ before do
+ stub_application_setting(deactivate_dormant_users: false)
+ end
+
+ it 'does nothing' do
+ create(:user, last_activity_on: User::MINIMUM_INACTIVE_DAYS.days.ago.to_date)
+ create(:user, last_activity_on: nil)
+
+ worker.perform
+
+ expect(User.dormant.count).to eq(1)
+ expect(User.with_no_activity.count).to eq(1)
+ end
+ end
+ end
+end
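
The new spec above pins down the behaviour of Users::DeactivateDormantUsersWorker: it must be a no-op on GitLab.com, respect the deactivate_dormant_users application setting, and process dormant and never-active users in batches with a pause between batches. A hypothetical reconstruction of a worker satisfying those expectations follows; the real implementation lives outside this diff and may be structured differently, and the constant values are placeholders.

# Hypothetical reconstruction inferred from the spec above.
module Users
  class DeactivateDormantUsersWorker
    include ApplicationWorker

    BATCH_SIZE = 100      # stubbed to 1 in the spec
    PAUSE_SECONDS = 0.25  # stubbed to 0 in the spec

    def perform
      # The spec asserts the setting is never read on GitLab.com.
      return if Gitlab.com?
      return unless Gitlab::CurrentSettings.current_application_settings.deactivate_dormant_users

      deactivate(User.dormant)
      deactivate(User.with_no_activity)
    end

    private

    def deactivate(scope)
      scope.each_batch(of: BATCH_SIZE) do |batch|
        batch.each(&:deactivate)
        sleep(PAUSE_SECONDS) # two batches of one user each => two sleeps in the spec
      end
    end
  end
end
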
diff --git a/spec/workers/users/update_open_issue_count_worker_spec.rb b/spec/workers/users/update_open_issue_count_worker_spec.rb
new file mode 100644
index 00000000000..700055980d8
--- /dev/null
+++ b/spec/workers/users/update_open_issue_count_worker_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UpdateOpenIssueCountWorker do
+ let_it_be(:first_user) { create(:user) }
+ let_it_be(:second_user) { create(:user) }
+
+ describe '#perform' do
+ let(:target_user_ids) { [first_user.id, second_user.id] }
+
+ subject { described_class.new.perform(target_user_ids) }
+
+ context 'when arguments are missing' do
+ context 'when target_user_ids are missing' do
+ context 'when nil' do
+ let(:target_user_ids) { nil }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /No target user ID provided/)
+ end
+ end
+
+ context 'when empty array' do
+ let(:target_user_ids) { [] }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /No target user ID provided/)
+ end
+ end
+
+ context 'when not an ID' do
+ let(:target_user_ids) { "nonsense" }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, /No valid target user ID provided/)
+ end
+ end
+ end
+ end
+
+ context 'when successful' do
+ let(:job_args) { [target_user_ids] }
+ let(:fake_service1) { double }
+ let(:fake_service2) { double }
+
+ it 'calls the user update service' do
+ expect(Users::UpdateAssignedOpenIssueCountService).to receive(:new).with(target_user: first_user).and_return(fake_service1)
+ expect(Users::UpdateAssignedOpenIssueCountService).to receive(:new).with(target_user: second_user).and_return(fake_service2)
+ expect(fake_service1).to receive(:execute)
+ expect(fake_service2).to receive(:execute)
+
+ subject
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ it 'recalculates' do
+ subject
+
+ expect(first_user.assigned_open_issues_count).to eq(0)
+ end
+ end
+ end
+ end
+end
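
The spec above describes Users::UpdateOpenIssueCountWorker as a thin wrapper that validates its input and delegates to Users::UpdateAssignedOpenIssueCountService per user. A hypothetical sketch consistent with the asserted error messages and service calls (the shipped worker may differ):

# Hypothetical sketch based on the expectations above; error messages mirror
# the regular expressions the spec asserts.
module Users
  class UpdateOpenIssueCountWorker
    include ApplicationWorker

    def perform(target_user_ids)
      target_user_ids = Array.wrap(target_user_ids)
      raise ArgumentError, 'No target user ID provided' if target_user_ids.empty?

      users = User.id_in(target_user_ids)
      raise ArgumentError, 'No valid target user ID provided' if users.empty?

      users.each do |user|
        Users::UpdateAssignedOpenIssueCountService.new(target_user: user).execute
      end
    end
  end
end
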
diff --git a/spec/workers/web_hook_worker_spec.rb b/spec/workers/web_hook_worker_spec.rb
new file mode 100644
index 00000000000..becc7461f2a
--- /dev/null
+++ b/spec/workers/web_hook_worker_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe WebHookWorker do
+ include AfterNextHelpers
+
+ let_it_be(:project_hook) { create(:project_hook) }
+ let_it_be(:data) { { foo: 'bar' } }
+ let_it_be(:hook_name) { 'push_hooks' }
+
+ describe '#perform' do
+ it 'delegates to WebHookService' do
+ expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name).to receive(:execute)
+
+ subject.perform(project_hook.id, data, hook_name)
+ end
+ end
+end
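
The new WebHookWorker spec above only asserts delegation: the worker loads the hook, normalises the payload, and hands everything to WebHookService. A sketch of a worker matching that expectation, with the lookup and guard clause as assumptions since the implementation is not part of this diff:

# Sketch only: a perform method consistent with
# expect_next(WebHookService, project_hook, data.with_indifferent_access, hook_name).
class WebHookWorker
  include ApplicationWorker

  def perform(hook_id, data, hook_name)
    hook = WebHook.find_by_id(hook_id)
    return unless hook

    WebHookService.new(hook, data.with_indifferent_access, hook_name).execute
  end
end
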